diff --git a/EXTERNAL_MODEL_RESULTS.json b/EXTERNAL_MODEL_RESULTS.json
index afb47b93076f7ca46d2a729621151ec07b019dfb..1c1b650e1c9d4dd09058a43c1a6d85a81b3b0168 100644
--- a/EXTERNAL_MODEL_RESULTS.json
+++ b/EXTERNAL_MODEL_RESULTS.json
@@ -1,3018 +1,1725 @@
 {
-    "distiluse-base-multilingual-cased-v2": {
+    "instructor-large": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "distiluse-base-multilingual-cased-v2"
+                    "Model": "instructor-large"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "distiluse-base-multilingual-cased-v2",
-                    "AllegroReviews": 28.03,
-                    "AmazonCounterfactualClassification (de)": 68.14,
-                    "AmazonCounterfactualClassification (en)": 71.81,
-                    "AmazonCounterfactualClassification (en-ext)": 72.96,
-                    "AmazonCounterfactualClassification (ja)": 65.39,
-                    "AmazonPolarityClassification": 68.0,
-                    "AmazonReviewsClassification (de)": 35.03,
-                    "AmazonReviewsClassification (en)": 35.45,
-                    "AmazonReviewsClassification (es)": 36.24,
-                    "AmazonReviewsClassification (fr)": 35.7,
-                    "AmazonReviewsClassification (ja)": 31.08,
-                    "AmazonReviewsClassification (zh)": 33.89,
-                    "Banking77Classification": 71.48,
-                    "CBD": 60.0,
-                    "EmotionClassification": 40.04,
-                    "ImdbClassification": 61.52,
-                    "MTOPDomainClassification (de)": 86.19,
-                    "MTOPDomainClassification (en)": 91.59,
-                    "MTOPDomainClassification (es)": 87.75,
-                    "MTOPDomainClassification (fr)": 84.61,
-                    "MTOPDomainClassification (hi)": 76.41,
-                    "MTOPDomainClassification (th)": 73.62,
-                    "MTOPIntentClassification (de)": 59.21,
-                    "MTOPIntentClassification (en)": 66.4,
-                    "MTOPIntentClassification (es)": 57.21,
-                    "MTOPIntentClassification (fr)": 53.41,
-                    "MTOPIntentClassification (hi)": 45.54,
-                    "MTOPIntentClassification (th)": 47.73,
-                    "MasakhaNEWSClassification (fra)": 76.87,
-                    "MassiveIntentClassification (af)": 40.02,
-                    "MassiveIntentClassification (am)": 2.35,
-                    "MassiveIntentClassification (ar)": 43.14,
-                    "MassiveIntentClassification (az)": 25.6,
-                    "MassiveIntentClassification (bn)": 4.84,
-                    "MassiveIntentClassification (cy)": 15.43,
-                    "MassiveIntentClassification (da)": 52.33,
-                    "MassiveIntentClassification (de)": 51.57,
-                    "MassiveIntentClassification (el)": 49.65,
-                    "MassiveIntentClassification (en)": 66.71,
-                    "MassiveIntentClassification (es)": 56.57,
-                    "MassiveIntentClassification (fa)": 55.36,
-                    "MassiveIntentClassification (fi)": 45.72,
-                    "MassiveIntentClassification (fr)": 57.02,
-                    "MassiveIntentClassification (he)": 46.74,
-                    "MassiveIntentClassification (hi)": 48.55,
-                    "MassiveIntentClassification (hu)": 50.65,
-                    "MassiveIntentClassification (hy)": 40.79,
-                    "MassiveIntentClassification (id)": 56.0,
-                    "MassiveIntentClassification (is)": 16.08,
-                    "MassiveIntentClassification (it)": 57.65,
-                    "MassiveIntentClassification (ja)": 55.33,
-                    "MassiveIntentClassification (jv)": 28.16,
-                    "MassiveIntentClassification (ka)": 29.41,
-                    "MassiveIntentClassification (km)": 4.79,
-                    "MassiveIntentClassification (kn)": 3.37,
-                    "MassiveIntentClassification (ko)": 49.97,
-                    "MassiveIntentClassification (lv)": 44.31,
-                    "MassiveIntentClassification (ml)": 3.24,
-                    "MassiveIntentClassification (mn)": 40.37,
-                    "MassiveIntentClassification (ms)": 47.97,
-                    "MassiveIntentClassification (my)": 38.48,
-                    "MassiveIntentClassification (nb)": 46.01,
-                    "MassiveIntentClassification (nl)": 58.29,
-                    "MassiveIntentClassification (pl)": 53.1,
-                    "MassiveIntentClassification (pt)": 58.63,
-                    "MassiveIntentClassification (ro)": 50.63,
-                    "MassiveIntentClassification (ru)": 57.96,
-                    "MassiveIntentClassification (sl)": 50.66,
-                    "MassiveIntentClassification (sq)": 50.25,
-                    "MassiveIntentClassification (sv)": 52.41,
-                    "MassiveIntentClassification (sw)": 19.29,
-                    "MassiveIntentClassification (ta)": 3.79,
-                    "MassiveIntentClassification (te)": 3.36,
-                    "MassiveIntentClassification (th)": 45.28,
-                    "MassiveIntentClassification (tl)": 28.44,
-                    "MassiveIntentClassification (tr)": 50.47,
-                    "MassiveIntentClassification (ur)": 46.03,
-                    "MassiveIntentClassification (vi)": 45.25,
-                    "MassiveIntentClassification (zh-CN)": 59.22,
-                    "MassiveIntentClassification (zh-TW)": 54.96,
-                    "MassiveScenarioClassification (af)": 53.67,
-                    "MassiveScenarioClassification (am)": 7.72,
-                    "MassiveScenarioClassification (ar)": 52.19,
-                    "MassiveScenarioClassification (az)": 34.75,
-                    "MassiveScenarioClassification (bn)": 10.65,
-                    "MassiveScenarioClassification (cy)": 21.24,
-                    "MassiveScenarioClassification (da)": 62.55,
-                    "MassiveScenarioClassification (de)": 61.4,
-                    "MassiveScenarioClassification (el)": 60.68,
-                    "MassiveScenarioClassification (en)": 74.0,
-                    "MassiveScenarioClassification (es)": 64.61,
-                    "MassiveScenarioClassification (fa)": 59.24,
-                    "MassiveScenarioClassification (fi)": 54.66,
-                    "MassiveScenarioClassification (fr)": 65.2,
-                    "MassiveScenarioClassification (he)": 54.74,
-                    "MassiveScenarioClassification (hi)": 55.99,
-                    "MassiveScenarioClassification (hu)": 61.2,
-                    "MassiveScenarioClassification (hy)": 49.63,
-                    "MassiveScenarioClassification (id)": 65.25,
-                    "MassiveScenarioClassification (is)": 22.6,
-                    "MassiveScenarioClassification (it)": 64.63,
-                    "MassiveScenarioClassification (ja)": 62.32,
-                    "MassiveScenarioClassification (jv)": 35.77,
-                    "MassiveScenarioClassification (ka)": 39.08,
-                    "MassiveScenarioClassification (km)": 9.24,
-                    "MassiveScenarioClassification (kn)": 8.28,
-                    "MassiveScenarioClassification (ko)": 57.6,
-                    "MassiveScenarioClassification (lv)": 51.72,
-                    "MassiveScenarioClassification (ml)": 8.25,
-                    "MassiveScenarioClassification (mn)": 47.21,
-                    "MassiveScenarioClassification (ms)": 55.65,
-                    "MassiveScenarioClassification (my)": 43.31,
-                    "MassiveScenarioClassification (nb)": 54.98,
-                    "MassiveScenarioClassification (nl)": 67.49,
-                    "MassiveScenarioClassification (pl)": 61.29,
-                    "MassiveScenarioClassification (pt)": 64.26,
-                    "MassiveScenarioClassification (ro)": 58.03,
-                    "MassiveScenarioClassification (ru)": 65.41,
-                    "MassiveScenarioClassification (sl)": 59.36,
-                    "MassiveScenarioClassification (sq)": 62.69,
-                    "MassiveScenarioClassification (sv)": 64.35,
-                    "MassiveScenarioClassification (sw)": 25.12,
-                    "MassiveScenarioClassification (ta)": 8.67,
-                    "MassiveScenarioClassification (te)": 7.82,
-                    "MassiveScenarioClassification (th)": 54.65,
-                    "MassiveScenarioClassification (tl)": 36.09,
-                    "MassiveScenarioClassification (tr)": 60.89,
-                    "MassiveScenarioClassification (ur)": 54.71,
-                    "MassiveScenarioClassification (vi)": 55.15,
-                    "MassiveScenarioClassification (zh-CN)": 66.44,
-                    "MassiveScenarioClassification (zh-TW)": 62.89,
-                    "PAC": 68.17,
-                    "PolEmo2.0-IN": 48.84,
-                    "PolEmo2.0-OUT": 30.0,
-                    "ToxicConversationsClassification": 69.09,
-                    "TweetSentimentExtractionClassification": 59.97
+                    "Model": "instructor-large"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "distiluse-base-multilingual-cased-v2",
-                    "8TagsClustering": 12.51,
-                    "AlloProfClusteringP2P": 55.95,
-                    "AlloProfClusteringS2S": 35.39,
-                    "ArxivClusteringP2P": 33.59,
-                    "HALClusteringS2S": 18.2,
-                    "MLSUMClusteringP2P": 40.17,
-                    "MLSUMClusteringS2S": 34.65,
-                    "MasakhaNEWSClusteringP2P (fra)": 53.76,
-                    "MasakhaNEWSClusteringS2S (fra)": 32.76
+                    "Model": "instructor-large"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "distiluse-base-multilingual-cased-v2",
-                    "CDSC-E": 71.83,
-                    "OpusparcusPC (fr)": 92.07,
-                    "PPC": 86.83,
-                    "PSC": 96.35,
-                    "PawsXPairClassification (fr)": 51.08,
-                    "SICK-E-PL": 62.05,
-                    "SprintDuplicateQuestions": 87.15,
-                    "TwitterSemEval2015": 61.67,
-                    "TwitterURLCorpus": 84.02
+                    "Model": "instructor-large"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "distiluse-base-multilingual-cased-v2",
-                    "AlloprofReranking": 51.77,
-                    "AskUbuntuDupQuestions": 53.75,
-                    "MindSmallReranking": 30.39,
-                    "SciDocsRR": 69.22,
-                    "StackOverflowDupQuestions": 41.92,
-                    "SyntecReranking": 74.78
+                    "Model": "instructor-large"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "distiluse-base-multilingual-cased-v2",
-                    "AlloprofRetrieval": 26.99,
-                    "ArguAna-PL": 36.7,
-                    "BSARDRetrieval": 0.0,
-                    "DBPedia-PL": 12.36,
-                    "FiQA-PL": 8.02,
-                    "HotpotQA-PL": 20.83,
-                    "MSMARCO-PL": 4.57,
-                    "MintakaRetrieval (fr)": 22.55,
-                    "NFCorpus-PL": 16.28,
-                    "NQ-PL": 5.85,
-                    "Quora-PL": 71.95,
-                    "SCIDOCS-PL": 6.5,
-                    "SciFact-PL": 33.03,
-                    "SyntecRetrieval": 65.34,
-                    "TRECCOVID-PL": 16.91,
-                    "XPQARetrieval (fr)": 51.2
+                    "Model": "instructor-large",
+                    "BrightRetrieval (pony)": 1.32,
+                    "BrightRetrieval (sustainable_living)": 13.16,
+                    "BrightRetrieval (aops)": 7.94,
+                    "BrightRetrieval (biology)": 15.61,
+                    "BrightRetrieval (stackoverflow)": 11.21,
+                    "BrightRetrieval (theoremqa_theorems)": 9.29,
+                    "BrightRetrieval (psychology)": 21.94,
+                    "BrightRetrieval (economics)": 15.99,
+                    "BrightRetrieval (robotics)": 11.45,
+                    "BrightRetrieval (leetcode)": 20.0,
+                    "BrightRetrieval (earth_science)": 21.52,
+                    "BrightRetrieval (theoremqa_questions)": 20.07
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "distiluse-base-multilingual-cased-v2",
-                    "BIOSSES": 78.34,
-                    "CDSC-R": 87.67,
-                    "SICK-R": 75.25,
-                    "SICK-R-PL": 65.53,
-                    "SICKFr": 72.49,
-                    "STS12": 72.96,
-                    "STS13": 70.58,
-                    "STS14": 70.29,
-                    "STS15": 81.94,
-                    "STS16": 76.8,
-                    "STS17 (ar-ar)": 77.34,
-                    "STS17 (en-ar)": 77.46,
-                    "STS17 (en-de)": 80.24,
-                    "STS17 (en-en)": 86.19,
-                    "STS17 (en-tr)": 74.34,
-                    "STS17 (es-en)": 77.4,
-                    "STS17 (es-es)": 83.71,
-                    "STS17 (fr-en)": 79.28,
-                    "STS17 (it-en)": 80.82,
-                    "STS17 (ko-ko)": 76.4,
-                    "STS17 (nl-en)": 80.51,
-                    "STS22 (ar)": 49.04,
-                    "STS22 (de)": 35.73,
-                    "STS22 (de-en)": 47.51,
-                    "STS22 (de-fr)": 60.76,
-                    "STS22 (de-pl)": 36.09,
-                    "STS22 (en)": 62.88,
-                    "STS22 (es)": 59.34,
-                    "STS22 (es-en)": 68.96,
-                    "STS22 (es-it)": 63.28,
-                    "STS22 (fr)": 76.41,
-                    "STS22 (fr-pl)": 61.98,
-                    "STS22 (it)": 65.1,
-                    "STS22 (pl)": 34.58,
-                    "STS22 (pl-en)": 71.33,
-                    "STS22 (ru)": 52.4,
-                    "STS22 (tr)": 54.07,
-                    "STS22 (zh)": 54.32,
-                    "STS22 (zh-en)": 61.75,
-                    "STSBenchmark": 80.75,
-                    "STSBenchmarkMultilingualSTS (fr)": 77.49
+                    "Model": "instructor-large"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "distiluse-base-multilingual-cased-v2",
-                    "SummEvalFr": 28.12
+                    "Model": "instructor-large"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "instructor-large"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "distiluse-base-multilingual-cased-v2"
+                    "Model": "instructor-large"
                 }
             ]
         }
     },
-    "USER-base": {
+    "multi-qa-MiniLM-L6-cos-v1": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "USER-base",
-                    "Tatoeba (rus-Cyrl_eng-Latn)": 90.2
+                    "Model": "multi-qa-MiniLM-L6-cos-v1"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "USER-base",
-                    "GeoreviewClassification (rus-Cyrl)": 47.23,
-                    "HeadlineClassification (rus-Cyrl)": 74.88,
-                    "InappropriatenessClassification (rus-Cyrl)": 61.94,
-                    "KinopoiskClassification (rus-Cyrl)": 55.69,
-                    "MassiveIntentClassification (rus-Cyrl)": 65.57,
-                    "MassiveScenarioClassification (rus-Cyrl)": 68.33,
-                    "RuReviewsClassification (rus-Cyrl)": 66.44,
-                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 55.55,
-                    "RuSciBenchOECDClassification (rus-Cyrl)": 43.28
+                    "Model": "multi-qa-MiniLM-L6-cos-v1",
+                    "AmazonReviewsClassification (fr)": 27.05,
+                    "MTOPDomainClassification (fr)": 72.97,
+                    "MTOPIntentClassification (fr)": 37.18,
+                    "MasakhaNEWSClassification (fra)": 75.62,
+                    "MassiveIntentClassification (fr)": 42.64,
+                    "MassiveScenarioClassification (fr)": 49.92
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "USER-base",
-                    "GeoreviewClusteringP2P (rus-Cyrl)": 64.16,
-                    "MLSUMClusteringP2P (rus-Cyrl)": 48.09,
-                    "MLSUMClusteringS2S (rus-Cyrl)": 45.73,
-                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 51.38,
-                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 44.73
+                    "Model": "multi-qa-MiniLM-L6-cos-v1",
+                    "AlloProfClusteringP2P": 49.13,
+                    "AlloProfClusteringS2S": 26.16,
+                    "HALClusteringS2S": 12.49,
+                    "MLSUMClusteringP2P": 35.15,
+                    "MLSUMClusteringS2S": 25.95,
+                    "MasakhaNEWSClusteringP2P (fra)": 53.73,
+                    "MasakhaNEWSClusteringS2S (fra)": 27.27
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "USER-base",
-                    "OpusparcusPC (rus-Cyrl)": 91.65,
-                    "TERRa (rus-Cyrl)": 60.02
+                    "Model": "multi-qa-MiniLM-L6-cos-v1",
+                    "OpusparcusPC (fr)": 88.07,
+                    "PawsXPairClassification (fr)": 57.36
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "USER-base",
-                    "RuBQReranking (rus-Cyrl)": 64.42
+                    "Model": "multi-qa-MiniLM-L6-cos-v1",
+                    "AlloprofReranking": 40.28,
+                    "SyntecReranking": 65.08
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "USER-base",
-                    "RiaNewsRetrieval (rus-Cyrl)": 77.83,
-                    "RuBQRetrieval (rus-Cyrl)": 56.86
+                    "Model": "multi-qa-MiniLM-L6-cos-v1",
+                    "AlloprofRetrieval": 30.23,
+                    "BSARDRetrieval": 0.0,
+                    "MintakaRetrieval (fr)": 16.31,
+                    "SyntecRetrieval": 58.07,
+                    "XPQARetrieval (fr)": 48.83
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "USER-base",
-                    "RUParaPhraserSTS (rus-Cyrl)": 73.56,
-                    "RuSTSBenchmarkSTS (rus-Cyrl)": 82.26,
-                    "STS22 (rus-Cyrl)": 63.39,
-                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 81.81
+                    "Model": "multi-qa-MiniLM-L6-cos-v1",
+                    "SICKFr": 62.11,
+                    "STS22 (fr)": 74.62,
+                    "STSBenchmarkMultilingualSTS (fr)": 63.85
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "USER-base"
+                    "Model": "multi-qa-MiniLM-L6-cos-v1",
+                    "SummEvalFr": 27.59
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "multi-qa-MiniLM-L6-cos-v1"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "USER-base"
+                    "Model": "multi-qa-MiniLM-L6-cos-v1"
                 }
             ]
         }
     },
-    "bert-base-swedish-cased": {
+    "bert-base-25lang-cased": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "bert-base-swedish-cased",
-                    "BornholmBitextMining": 6.6
+                    "Model": "bert-base-25lang-cased"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "bert-base-swedish-cased",
-                    "AngryTweetsClassification": 44.58,
-                    "DKHateClassification": 55.53,
-                    "DanishPoliticalCommentsClassification": 28.97,
-                    "LccSentimentClassification": 41.2,
-                    "MassiveIntentClassification (da)": 37.98,
-                    "MassiveIntentClassification (nb)": 35.75,
-                    "MassiveIntentClassification (sv)": 52.75,
-                    "MassiveScenarioClassification (da)": 40.44,
-                    "MassiveScenarioClassification (nb)": 35.76,
-                    "MassiveScenarioClassification (sv)": 56.09,
-                    "NoRecClassification": 43.91,
-                    "NordicLangClassification": 62.45,
-                    "NorwegianParliament": 57.56,
-                    "ScalaDaClassification": 53.53,
-                    "ScalaNbClassification": 53.63
+                    "Model": "bert-base-25lang-cased",
+                    "AmazonReviewsClassification (fr)": 29.39,
+                    "MTOPDomainClassification (fr)": 63.63,
+                    "MTOPIntentClassification (fr)": 37.86,
+                    "MasakhaNEWSClassification (fra)": 63.91,
+                    "MassiveIntentClassification (fr)": 37.3,
+                    "MassiveScenarioClassification (fr)": 44.47
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "bert-base-swedish-cased"
+                    "Model": "bert-base-25lang-cased",
+                    "AlloProfClusteringP2P": 53.49,
+                    "AlloProfClusteringS2S": 43.1,
+                    "HALClusteringS2S": 19.78,
+                    "MLSUMClusteringP2P": 40.73,
+                    "MLSUMClusteringS2S": 31.94,
+                    "MasakhaNEWSClusteringP2P (fra)": 24.23,
+                    "MasakhaNEWSClusteringS2S (fra)": 24.46
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "bert-base-swedish-cased"
+                    "Model": "bert-base-25lang-cased",
+                    "OpusparcusPC (fr)": 86.79,
+                    "PawsXPairClassification (fr)": 53.39
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "bert-base-swedish-cased"
+                    "Model": "bert-base-25lang-cased",
+                    "AlloprofReranking": 36.25,
+                    "SyntecReranking": 53.25
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "bert-base-swedish-cased"
+                    "Model": "bert-base-25lang-cased",
+                    "AlloprofRetrieval": 1.6,
+                    "BSARDRetrieval": 0.0,
+                    "MintakaRetrieval (fr)": 3.55,
+                    "SyntecRetrieval": 18.95,
+                    "XPQARetrieval (fr)": 18.46
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "bert-base-swedish-cased"
+                    "Model": "bert-base-25lang-cased",
+                    "SICKFr": 58.76,
+                    "STS22 (fr)": 38.77,
+                    "STSBenchmarkMultilingualSTS (fr)": 52.25
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "bert-base-swedish-cased"
+                    "Model": "bert-base-25lang-cased",
+                    "SummEvalFr": 28.84
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "bert-base-25lang-cased"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "bert-base-swedish-cased"
+                    "Model": "bert-base-25lang-cased"
                 }
             ]
         }
     },
-    "m3e-large": {
+    "voyage-large-2-instruct": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "m3e-large"
+                    "Model": "voyage-large-2-instruct"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "m3e-large",
-                    "AmazonReviewsClassification (zh)": 44.44,
-                    "IFlyTek": 43.96,
-                    "JDReview": 86.92,
-                    "MassiveIntentClassification (zh-CN)": 67.23,
-                    "MassiveScenarioClassification (zh-CN)": 74.88,
-                    "MultilingualSentiment": 72.47,
-                    "OnlineShopping": 89.59,
-                    "TNews": 48.26,
-                    "Waimai": 86.08
+                    "Model": "voyage-large-2-instruct",
+                    "AmazonCounterfactualClassification (en)": 77.6,
+                    "AmazonPolarityClassification": 96.58,
+                    "AmazonReviewsClassification (en)": 50.77,
+                    "Banking77Classification": 86.96,
+                    "EmotionClassification": 59.81,
+                    "ImdbClassification": 96.13,
+                    "MTOPDomainClassification (en)": 98.86,
+                    "MTOPIntentClassification (en)": 86.97,
+                    "MassiveIntentClassification (en)": 81.08,
+                    "MassiveScenarioClassification (en)": 87.95,
+                    "ToxicConversationsClassification": 83.58,
+                    "TweetSentimentExtractionClassification": 71.55
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "m3e-large",
-                    "CLSClusteringP2P": 38.6,
-                    "CLSClusteringS2S": 38.02,
-                    "ThuNewsClusteringP2P": 60.39,
-                    "ThuNewsClusteringS2S": 58.51
+                    "Model": "voyage-large-2-instruct",
+                    "ArxivClusteringP2P": 51.81,
+                    "ArxivClusteringS2S": 44.73,
+                    "BiorxivClusteringP2P": 46.07,
+                    "BiorxivClusteringS2S": 40.64,
+                    "MedrxivClusteringP2P": 42.94,
+                    "MedrxivClusteringS2S": 41.44,
+                    "RedditClustering": 68.5,
+                    "RedditClusteringP2P": 64.86,
+                    "StackExchangeClustering": 74.16,
+                    "StackExchangeClusteringP2P": 45.1,
+                    "TwentyNewsgroupsClustering": 66.62
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "m3e-large",
-                    "Cmnli": 69.27,
-                    "Ocnli": 59.33
+                    "Model": "voyage-large-2-instruct",
+                    "SprintDuplicateQuestions": 94.5,
+                    "TwitterSemEval2015": 86.32,
+                    "TwitterURLCorpus": 86.9
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "m3e-large",
-                    "CMedQAv1": 77.76,
-                    "CMedQAv2": 78.27,
-                    "MMarcoReranking": 16.46,
-                    "T2Reranking": 66.13
+                    "Model": "voyage-large-2-instruct",
+                    "AskUbuntuDupQuestions": 64.92,
+                    "MindSmallReranking": 30.97,
+                    "SciDocsRR": 89.34,
+                    "StackOverflowDupQuestions": 55.11
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "m3e-large",
-                    "CmedqaRetrieval": 30.73,
-                    "CovidRetrieval": 61.33,
-                    "DuRetrieval": 74.69,
-                    "EcomRetrieval": 45.18,
-                    "MMarcoRetrieval": 61.06,
-                    "MedicalRetrieval": 48.66,
-                    "T2Retrieval": 72.36,
-                    "VideoRetrieval": 44.02
+                    "Model": "voyage-large-2-instruct",
+                    "ArguAna": 64.06,
+                    "BrightRetrieval (theoremqa_questions)": 26.06,
+                    "BrightRetrieval (earth_science)": 25.09,
+                    "BrightRetrieval (leetcode)": 30.6,
+                    "BrightRetrieval (economics)": 19.85,
+                    "BrightRetrieval (robotics)": 11.21,
+                    "BrightRetrieval (psychology)": 24.79,
+                    "BrightRetrieval (aops)": 7.45,
+                    "BrightRetrieval (sustainable_living)": 15.58,
+                    "BrightRetrieval (pony)": 1.48,
+                    "BrightRetrieval (theoremqa_theorems)": 10.13,
+                    "BrightRetrieval (biology)": 23.55,
+                    "BrightRetrieval (stackoverflow)": 15.03,
+                    "CQADupstackRetrieval": 46.6,
+                    "ClimateFEVER": 32.65,
+                    "DBPedia": 46.03,
+                    "FEVER": 91.47,
+                    "FiQA2018": 59.76,
+                    "HotpotQA": 70.86,
+                    "MSMARCO": 40.6,
+                    "NFCorpus": 40.32,
+                    "NQ": 65.92,
+                    "QuoraRetrieval": 87.4,
+                    "SCIDOCS": 24.32,
+                    "SciFact": 79.99,
+                    "TRECCOVID": 85.07,
+                    "Touche2020": 39.16
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "m3e-large",
-                    "AFQMC": 36.53,
-                    "ATEC": 41.8,
-                    "BQ": 65.2,
-                    "LCQMC": 74.2,
-                    "PAWSX": 15.95,
-                    "QBQTC": 32.65,
-                    "STS22 (zh)": 62.91,
-                    "STSB": 74.16
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
+                    "Model": "voyage-large-2-instruct",
+                    "BIOSSES": 89.24,
+                    "SICK-R": 83.16,
+                    "STS12": 73.34,
+                    "STS13": 88.49,
+                    "STS14": 86.49,
+                    "STS15": 91.13,
+                    "STS16": 85.68,
+                    "STS17 (en-en)": 90.06,
+                    "STS22 (en)": 66.32,
+                    "STSBenchmark": 89.22
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
                 {
-                    "Model": "m3e-large"
+                    "Model": "voyage-large-2-instruct",
+                    "SummEval": 30.84
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "voyage-large-2-instruct"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "m3e-large"
+                    "Model": "voyage-large-2-instruct"
                 }
             ]
         }
     },
-    "contriever-base-msmarco": {
+    "voyage-code-2": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "contriever-base-msmarco"
+                    "Model": "voyage-code-2"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "contriever-base-msmarco",
-                    "AmazonCounterfactualClassification (en)": 72.19,
-                    "AmazonPolarityClassification": 68.63,
-                    "AmazonReviewsClassification (en)": 37.42,
-                    "Banking77Classification": 80.02,
-                    "EmotionClassification": 44.77,
-                    "ImdbClassification": 67.04,
-                    "MTOPDomainClassification (en)": 93.18,
-                    "MTOPIntentClassification (en)": 69.31,
-                    "MassiveIntentClassification (en)": 67.78,
-                    "MassiveScenarioClassification (en)": 76.0,
-                    "ToxicConversationsClassification": 67.77,
-                    "TweetSentimentExtractionClassification": 56.1
+                    "Model": "voyage-code-2",
+                    "AmazonReviewsClassification (fr)": 42.15,
+                    "MTOPDomainClassification (fr)": 87.68,
+                    "MTOPIntentClassification (fr)": 59.44,
+                    "MasakhaNEWSClassification (fra)": 82.13,
+                    "MassiveIntentClassification (fr)": 63.08,
+                    "MassiveScenarioClassification (fr)": 70.15
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "contriever-base-msmarco",
-                    "ArxivClusteringP2P": 42.61,
-                    "ArxivClusteringS2S": 32.32,
-                    "BiorxivClusteringP2P": 34.97,
-                    "BiorxivClusteringS2S": 29.08,
-                    "MedrxivClusteringP2P": 31.19,
-                    "MedrxivClusteringS2S": 27.27,
-                    "RedditClustering": 54.89,
-                    "RedditClusteringP2P": 57.58,
-                    "StackExchangeClustering": 63.15,
-                    "StackExchangeClusteringP2P": 32.25,
-                    "TwentyNewsgroupsClustering": 46.82
+                    "Model": "voyage-code-2",
+                    "AlloProfClusteringP2P": 61.63,
+                    "AlloProfClusteringS2S": 50.67,
+                    "HALClusteringS2S": 27.44,
+                    "MLSUMClusteringP2P": 45.23,
+                    "MLSUMClusteringS2S": 41.48,
+                    "MasakhaNEWSClusteringP2P (fra)": 56.59,
+                    "MasakhaNEWSClusteringS2S (fra)": 35.18
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "contriever-base-msmarco",
-                    "SprintDuplicateQuestions": 95.55,
-                    "TwitterSemEval2015": 66.85,
-                    "TwitterURLCorpus": 85.21
+                    "Model": "voyage-code-2",
+                    "OpusparcusPC (fr)": 92.87,
+                    "PawsXPairClassification (fr)": 60.83
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "contriever-base-msmarco",
-                    "AskUbuntuDupQuestions": 56.69,
-                    "MindSmallReranking": 31.58,
-                    "SciDocsRR": 76.51,
-                    "StackOverflowDupQuestions": 47.78
+                    "Model": "voyage-code-2",
+                    "AlloprofReranking": 70.79,
+                    "SyntecReranking": 86.77
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "contriever-base-msmarco",
-                    "ArguAna": 48.32,
-                    "CQADupstackRetrieval": 33.67,
-                    "ClimateFEVER": 24.79,
-                    "DBPedia": 38.1,
-                    "FEVER": 59.29,
-                    "FiQA2018": 27.42,
-                    "HotpotQA": 56.81,
-                    "MSMARCO": 36.77,
-                    "NFCorpus": 31.32,
-                    "NQ": 41.83,
-                    "QuoraRetrieval": 86.72,
-                    "SCIDOCS": 17.12,
-                    "SciFact": 65.51,
-                    "TRECCOVID": 44.77,
-                    "Touche2020": 15.79
+                    "Model": "voyage-code-2",
+                    "AlloprofRetrieval": 52.61,
+                    "BSARDRetrieval": 0.29,
+                    "MintakaRetrieval (fr)": 19.05,
+                    "SyntecRetrieval": 82.77,
+                    "XPQARetrieval (fr)": 71.95
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "contriever-base-msmarco",
-                    "BIOSSES": 83.32,
-                    "SICK-R": 70.2,
-                    "STS12": 64.34,
-                    "STS13": 80.03,
-                    "STS14": 74.51,
-                    "STS15": 83.3,
-                    "STS16": 79.67,
-                    "STS17 (en-en)": 86.32,
-                    "STS22 (en)": 64.64,
-                    "STSBenchmark": 78.81
+                    "Model": "voyage-code-2",
+                    "SICKFr": 73.56,
+                    "STS22 (fr)": 79.99,
+                    "STSBenchmarkMultilingualSTS (fr)": 79.02
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "contriever-base-msmarco",
-                    "SummEval": 30.36
+                    "Model": "voyage-code-2",
+                    "SummEvalFr": 28.34
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "voyage-code-2"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "contriever-base-msmarco",
-                    "Core17InstructionRetrieval": -2.48,
-                    "News21InstructionRetrieval": -2.83,
-                    "Robust04InstructionRetrieval": -6.12
+                    "Model": "voyage-code-2"
                 }
             ]
         }
     },
-    "msmarco-bert-co-condensor": {
+    "e5-base": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "msmarco-bert-co-condensor"
+                    "Model": "e5-base",
+                    "BornholmBitextMining": 40.09
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "msmarco-bert-co-condensor",
-                    "AmazonCounterfactualClassification (en)": 64.06,
-                    "AmazonPolarityClassification": 66.88,
-                    "AmazonReviewsClassification (en)": 34.85,
-                    "Banking77Classification": 82.35,
-                    "EmotionClassification": 41.91,
-                    "ImdbClassification": 60.17,
-                    "MTOPDomainClassification (en)": 91.34,
-                    "MTOPIntentClassification (en)": 71.07,
-                    "MassiveIntentClassification (en)": 70.4,
-                    "MassiveScenarioClassification (en)": 73.73,
-                    "ToxicConversationsClassification": 64.01,
-                    "TweetSentimentExtractionClassification": 55.74
+                    "Model": "e5-base",
+                    "AngryTweetsClassification": 45.06,
+                    "DKHateClassification": 58.51,
+                    "DanishPoliticalCommentsClassification": 28.43,
+                    "LccSentimentClassification": 37.47,
+                    "MassiveIntentClassification (da)": 44.25,
+                    "MassiveIntentClassification (nb)": 41.57,
+                    "MassiveIntentClassification (sv)": 41.34,
+                    "MassiveScenarioClassification (da)": 52.99,
+                    "MassiveScenarioClassification (nb)": 50.33,
+                    "MassiveScenarioClassification (sv)": 50.0,
+                    "NoRecClassification": 42.0,
+                    "NordicLangClassification": 59.34,
+                    "NorwegianParliament": 57.42,
+                    "ScalaDaClassification": 50.08,
+                    "ScalaNbClassification": 50.18
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "msmarco-bert-co-condensor",
-                    "ArxivClusteringP2P": 36.94,
-                    "ArxivClusteringS2S": 29.03,
-                    "BiorxivClusteringP2P": 32.35,
-                    "BiorxivClusteringS2S": 28.16,
-                    "MedrxivClusteringP2P": 30.23,
-                    "MedrxivClusteringS2S": 27.01,
-                    "RedditClustering": 48.04,
-                    "RedditClusteringP2P": 53.53,
-                    "StackExchangeClustering": 59.54,
-                    "StackExchangeClusteringP2P": 30.48,
-                    "TwentyNewsgroupsClustering": 38.68
+                    "Model": "e5-base"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "msmarco-bert-co-condensor",
-                    "SprintDuplicateQuestions": 96.09,
-                    "TwitterSemEval2015": 65.95,
-                    "TwitterURLCorpus": 83.17
+                    "Model": "e5-base"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "msmarco-bert-co-condensor",
-                    "AskUbuntuDupQuestions": 58.99,
-                    "MindSmallReranking": 27.13,
-                    "SciDocsRR": 72.78,
-                    "StackOverflowDupQuestions": 48.48
+                    "Model": "e5-base"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "msmarco-bert-co-condensor",
-                    "ArguAna": 45.15,
-                    "CQADupstackRetrieval": 27.72,
-                    "ClimateFEVER": 16.96,
-                    "DBPedia": 27.86,
-                    "FEVER": 45.68,
-                    "FiQA2018": 15.62,
-                    "HotpotQA": 35.61,
-                    "MSMARCO": 29.57,
-                    "NFCorpus": 22.29,
-                    "NQ": 29.85,
-                    "QuoraRetrieval": 86.51,
-                    "SCIDOCS": 10.13,
-                    "SciFact": 52.31,
-                    "TRECCOVID": 40.54,
-                    "Touche2020": 8.57
+                    "Model": "e5-base",
+                    "LEMBNarrativeQARetrieval": 25.31,
+                    "LEMBNeedleRetrieval": 28.5,
+                    "LEMBPasskeyRetrieval": 33.25,
+                    "LEMBQMSumRetrieval": 23.83,
+                    "LEMBSummScreenFDRetrieval": 74.67,
+                    "LEMBWikimQARetrieval": 55.85
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "msmarco-bert-co-condensor",
-                    "BIOSSES": 77.32,
-                    "SICK-R": 72.0,
-                    "STS12": 68.19,
-                    "STS13": 80.4,
-                    "STS14": 74.02,
-                    "STS15": 82.57,
-                    "STS16": 79.78,
-                    "STS17 (en-en)": 85.94,
-                    "STS22 (en)": 67.54,
-                    "STSBenchmark": 76.97
+                    "Model": "e5-base"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "msmarco-bert-co-condensor",
-                    "SummEval": 29.5
+                    "Model": "e5-base"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "e5-base"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "msmarco-bert-co-condensor"
+                    "Model": "e5-base"
                 }
             ]
         }
     },
-    "bge-large-zh-v1.5": {
+    "udever-bloom-1b1": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "bge-large-zh-v1.5"
+                    "Model": "udever-bloom-1b1"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "bge-large-zh-v1.5",
-                    "AmazonReviewsClassification (zh)": 41.38,
-                    "IFlyTek": 48.74,
-                    "JDReview": 85.14,
-                    "MassiveIntentClassification (zh-CN)": 68.84,
-                    "MassiveScenarioClassification (zh-CN)": 74.7,
-                    "MultilingualSentiment": 72.97,
-                    "OnlineShopping": 91.43,
-                    "TNews": 52.1,
-                    "Waimai": 86.9
+                    "Model": "udever-bloom-1b1",
+                    "AmazonReviewsClassification (fr)": 35.12,
+                    "MTOPDomainClassification (fr)": 69.24,
+                    "MTOPIntentClassification (fr)": 51.25,
+                    "MasakhaNEWSClassification (fra)": 80.83,
+                    "MassiveIntentClassification (fr)": 43.21,
+                    "MassiveScenarioClassification (fr)": 49.78
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "bge-large-zh-v1.5",
-                    "CLSClusteringP2P": 41.44,
-                    "CLSClusteringS2S": 38.33,
-                    "ThuNewsClusteringP2P": 59.61,
-                    "ThuNewsClusteringS2S": 56.58
+                    "Model": "udever-bloom-1b1",
+                    "AlloProfClusteringP2P": 62.22,
+                    "AlloProfClusteringS2S": 27.06,
+                    "HALClusteringS2S": 13.86,
+                    "MLSUMClusteringP2P": 44.11,
+                    "MLSUMClusteringS2S": 30.47,
+                    "MasakhaNEWSClusteringP2P (fra)": 40.2,
+                    "MasakhaNEWSClusteringS2S (fra)": 27.35
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "bge-large-zh-v1.5",
-                    "Cmnli": 85.27,
-                    "Ocnli": 77.94
+                    "Model": "udever-bloom-1b1",
+                    "OpusparcusPC (fr)": 85.54,
+                    "PawsXPairClassification (fr)": 61.99
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "bge-large-zh-v1.5",
-                    "CMedQAv1": 83.45,
-                    "CMedQAv2": 85.44,
-                    "MMarcoReranking": 28.74,
-                    "T2Reranking": 65.74
+                    "Model": "udever-bloom-1b1",
+                    "AlloprofReranking": 39.13,
+                    "SyntecReranking": 62.58
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "bge-large-zh-v1.5",
-                    "CmedqaRetrieval": 42.57,
-                    "CovidRetrieval": 73.35,
-                    "DuRetrieval": 86.32,
-                    "EcomRetrieval": 65.33,
-                    "MMarcoRetrieval": 79.23,
-                    "MedicalRetrieval": 59.59,
-                    "T2Retrieval": 83.99,
-                    "VideoRetrieval": 73.32
+                    "Model": "udever-bloom-1b1",
+                    "AlloprofRetrieval": 12.37,
+                    "BSARDRetrieval": 0.0,
+                    "MintakaRetrieval (fr)": 2.78,
+                    "SyntecRetrieval": 40.57,
+                    "XPQARetrieval (fr)": 33.82
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "bge-large-zh-v1.5",
-                    "AFQMC": 44.36,
-                    "ATEC": 49.54,
-                    "BQ": 62.94,
-                    "LCQMC": 74.33,
-                    "PAWSX": 33.92,
-                    "QBQTC": 37.29,
-                    "STS22 (zh)": 68.94,
-                    "STSB": 78.7
+                    "Model": "udever-bloom-1b1",
+                    "SICKFr": 59.94,
+                    "STS22 (fr)": 77.1,
+                    "STSBenchmarkMultilingualSTS (fr)": 49.97
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "bge-large-zh-v1.5"
+                    "Model": "udever-bloom-1b1",
+                    "SummEvalFr": 29.48
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "udever-bloom-1b1"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "bge-large-zh-v1.5"
+                    "Model": "udever-bloom-1b1"
                 }
             ]
         }
     },
-    "universal-sentence-encoder-multilingual-large-3": {
+    "text2vec-base-multilingual": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "universal-sentence-encoder-multilingual-large-3"
+                    "Model": "text2vec-base-multilingual"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "universal-sentence-encoder-multilingual-large-3",
-                    "AmazonReviewsClassification (fr)": 35.09,
-                    "MTOPDomainClassification (fr)": 88.19,
-                    "MTOPIntentClassification (fr)": 63.64,
-                    "MasakhaNEWSClassification (fra)": 72.04,
-                    "MassiveIntentClassification (fr)": 65.8,
-                    "MassiveScenarioClassification (fr)": 73.47
+                    "Model": "text2vec-base-multilingual",
+                    "AmazonReviewsClassification (fr)": 34.25,
+                    "MTOPDomainClassification (fr)": 71.83,
+                    "MTOPIntentClassification (fr)": 44.53,
+                    "MasakhaNEWSClassification (fra)": 73.84,
+                    "MassiveIntentClassification (fr)": 51.93,
+                    "MassiveScenarioClassification (fr)": 58.31
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "universal-sentence-encoder-multilingual-large-3",
-                    "AlloProfClusteringP2P": 54.21,
-                    "AlloProfClusteringS2S": 37.95,
-                    "HALClusteringS2S": 18.94,
-                    "MLSUMClusteringP2P": 41.02,
-                    "MLSUMClusteringS2S": 37.97,
-                    "MasakhaNEWSClusteringP2P (fra)": 24.09,
-                    "MasakhaNEWSClusteringS2S (fra)": 40.24
+                    "Model": "text2vec-base-multilingual",
+                    "AlloProfClusteringP2P": 49.11,
+                    "AlloProfClusteringS2S": 32.72,
+                    "HALClusteringS2S": 16.19,
+                    "MLSUMClusteringP2P": 36.19,
+                    "MLSUMClusteringS2S": 30.39,
+                    "MasakhaNEWSClusteringP2P (fra)": 38.51,
+                    "MasakhaNEWSClusteringS2S (fra)": 32.51
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "universal-sentence-encoder-multilingual-large-3",
-                    "OpusparcusPC (fr)": 93.38,
-                    "PawsXPairClassification (fr)": 53.62
+                    "Model": "text2vec-base-multilingual",
+                    "OpusparcusPC (fr)": 92.04,
+                    "PawsXPairClassification (fr)": 65.57
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "universal-sentence-encoder-multilingual-large-3",
-                    "AlloprofReranking": 55.39,
-                    "SyntecReranking": 77.13
+                    "Model": "text2vec-base-multilingual",
+                    "AlloprofReranking": 51.48,
+                    "SyntecReranking": 70.28
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "universal-sentence-encoder-multilingual-large-3",
-                    "AlloprofRetrieval": 33.78,
+                    "Model": "text2vec-base-multilingual",
+                    "AlloprofRetrieval": 18.9,
                     "BSARDRetrieval": 0.0,
-                    "MintakaRetrieval (fr)": 26.21,
-                    "SyntecRetrieval": 63.69,
-                    "XPQARetrieval (fr)": 65.21
+                    "MintakaRetrieval (fr)": 14.81,
+                    "SyntecRetrieval": 49.69,
+                    "XPQARetrieval (fr)": 40.4
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "universal-sentence-encoder-multilingual-large-3",
-                    "SICKFr": 74.39,
-                    "STS22 (fr)": 71.11,
-                    "STSBenchmarkMultilingualSTS (fr)": 78.16
+                    "Model": "text2vec-base-multilingual",
+                    "SICKFr": 77.25,
+                    "STS22 (fr)": 74.1,
+                    "STSBenchmarkMultilingualSTS (fr)": 83.48
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "universal-sentence-encoder-multilingual-large-3",
-                    "SummEvalFr": 28.56
+                    "Model": "text2vec-base-multilingual",
+                    "SummEvalFr": 29.33
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "text2vec-base-multilingual"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "universal-sentence-encoder-multilingual-large-3"
+                    "Model": "text2vec-base-multilingual"
                 }
             ]
         }
     },
-    "FollowIR-7B": {
+    "text-embedding-ada-002-instruct": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "FollowIR-7B"
+                    "Model": "text-embedding-ada-002-instruct"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "FollowIR-7B"
+                    "Model": "text-embedding-ada-002-instruct"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "FollowIR-7B"
+                    "Model": "text-embedding-ada-002-instruct"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "FollowIR-7B"
+                    "Model": "text-embedding-ada-002-instruct"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "FollowIR-7B"
+                    "Model": "text-embedding-ada-002-instruct"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "FollowIR-7B"
+                    "Model": "text-embedding-ada-002-instruct",
+                    "ARCChallenge": 11.85,
+                    "AlphaNLI": 10.62,
+                    "HellaSwag": 24.8,
+                    "PIQA": 23.87,
+                    "Quail": 5.79,
+                    "RARbCode": 82.36,
+                    "RARbMath": 67.26,
+                    "SIQA": 2.64,
+                    "SpartQA": 4.75,
+                    "TempReasonL1": 1.44,
+                    "TempReasonL2Fact": 19.38,
+                    "TempReasonL2Pure": 2.43,
+                    "TempReasonL3Fact": 17.58,
+                    "TempReasonL3Pure": 7.31,
+                    "WinoGrande": 11.36
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "FollowIR-7B"
+                    "Model": "text-embedding-ada-002-instruct"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "FollowIR-7B"
+                    "Model": "text-embedding-ada-002-instruct"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "text-embedding-ada-002-instruct"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "FollowIR-7B",
-                    "Core17InstructionRetrieval": 16.48,
-                    "News21InstructionRetrieval": 6.26,
-                    "Robust04InstructionRetrieval": 13.72
+                    "Model": "text-embedding-ada-002-instruct"
                 }
             ]
         }
     },
-    "contriever-instruct": {
+    "jina-embeddings-v2-base-en": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "contriever-instruct"
+                    "Model": "jina-embeddings-v2-base-en"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "contriever-instruct"
+                    "Model": "jina-embeddings-v2-base-en"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "contriever-instruct"
+                    "Model": "jina-embeddings-v2-base-en"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "contriever-instruct"
+                    "Model": "jina-embeddings-v2-base-en"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "contriever-instruct"
+                    "Model": "jina-embeddings-v2-base-en"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "contriever-instruct",
-                    "ARCChallenge": 7.63,
-                    "AlphaNLI": 27.09,
-                    "PIQA": 21.73,
-                    "Quail": 4.92,
-                    "RARbCode": 7.12,
-                    "RARbMath": 21.83,
-                    "SIQA": 0.88,
-                    "SpartQA": 10.56,
-                    "TempReasonL1": 1.8,
-                    "TempReasonL2Fact": 22.03,
-                    "TempReasonL2Pure": 0.94,
-                    "TempReasonL3Fact": 20.82,
-                    "TempReasonL3Pure": 7.15,
-                    "WinoGrande": 26.3
+                    "Model": "jina-embeddings-v2-base-en",
+                    "LEMBNarrativeQARetrieval": 37.89,
+                    "LEMBNeedleRetrieval": 54.25,
+                    "LEMBPasskeyRetrieval": 50.25,
+                    "LEMBQMSumRetrieval": 38.87,
+                    "LEMBSummScreenFDRetrieval": 93.48,
+                    "LEMBWikimQARetrieval": 73.99
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "contriever-instruct"
+                    "Model": "jina-embeddings-v2-base-en"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "contriever-instruct"
+                    "Model": "jina-embeddings-v2-base-en"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "jina-embeddings-v2-base-en"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "contriever-instruct"
+                    "Model": "jina-embeddings-v2-base-en"
                 }
             ]
         }
     },
-    "sentence-t5-xxl": {
+    "titan-embed-text-v1": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "sentence-t5-xxl"
+                    "Model": "titan-embed-text-v1"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "sentence-t5-xxl",
-                    "AmazonCounterfactualClassification (en)": 77.07,
-                    "AmazonPolarityClassification": 92.79,
-                    "AmazonReviewsClassification (en)": 48.93,
-                    "AmazonReviewsClassification (fr)": 46.09,
-                    "Banking77Classification": 82.31,
-                    "EmotionClassification": 48.57,
-                    "ImdbClassification": 90.23,
-                    "MTOPDomainClassification (en)": 92.49,
-                    "MTOPDomainClassification (fr)": 86.2,
-                    "MTOPIntentClassification (en)": 68.33,
-                    "MTOPIntentClassification (fr)": 58.33,
-                    "MasakhaNEWSClassification (fra)": 79.1,
-                    "MassiveIntentClassification (en)": 73.44,
-                    "MassiveIntentClassification (fr)": 65.91,
-                    "MassiveScenarioClassification (en)": 74.82,
-                    "MassiveScenarioClassification (fr)": 68.53,
-                    "ToxicConversationsClassification": 70.04,
-                    "TweetSentimentExtractionClassification": 62.01
+                    "Model": "titan-embed-text-v1",
+                    "AmazonCounterfactualClassification (en)": 61.85,
+                    "Banking77Classification": 83.21
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "sentence-t5-xxl",
-                    "AlloProfClusteringP2P": 60.98,
-                    "AlloProfClusteringS2S": 43.5,
-                    "ArxivClusteringP2P": 42.89,
-                    "ArxivClusteringS2S": 33.47,
-                    "BiorxivClusteringP2P": 36.53,
-                    "BiorxivClusteringS2S": 28.66,
-                    "BlurbsClusteringP2P": 39.91,
-                    "BlurbsClusteringS2S": 15.94,
-                    "HALClusteringS2S": 21.4,
-                    "MLSUMClusteringP2P": 42.24,
-                    "MLSUMClusteringS2S": 35.25,
-                    "MasakhaNEWSClusteringP2P (fra)": 61.15,
-                    "MasakhaNEWSClusteringS2S (fra)": 38.24,
-                    "MedrxivClusteringP2P": 32.09,
-                    "MedrxivClusteringS2S": 26.82,
-                    "RedditClustering": 58.99,
-                    "RedditClusteringP2P": 64.46,
-                    "StackExchangeClustering": 70.78,
-                    "StackExchangeClusteringP2P": 35.25,
-                    "TenKGnadClusteringP2P": 43.43,
-                    "TenKGnadClusteringS2S": 19.69,
-                    "TwentyNewsgroupsClustering": 50.93
+                    "Model": "titan-embed-text-v1"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "sentence-t5-xxl",
-                    "OpusparcusPC (fr)": 93.94,
-                    "PawsXPairClassification (fr)": 63.98,
-                    "SprintDuplicateQuestions": 88.89,
-                    "TwitterSemEval2015": 80.28,
-                    "TwitterURLCorpus": 86.01
+                    "Model": "titan-embed-text-v1"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "sentence-t5-xxl",
-                    "AlloprofReranking": 68.36,
-                    "AskUbuntuDupQuestions": 66.16,
-                    "MindSmallReranking": 30.6,
-                    "SciDocsRR": 76.09,
-                    "StackOverflowDupQuestions": 52.85,
-                    "SyntecReranking": 85.15
+                    "Model": "titan-embed-text-v1",
+                    "SciDocsRR": 88.87
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "sentence-t5-xxl",
-                    "AlloprofRetrieval": 45.75,
-                    "ArguAna": 39.85,
-                    "BSARDRetrieval": 3.33,
-                    "CQADupstackRetrieval": 44.65,
-                    "ClimateFEVER": 14.63,
-                    "DBPedia": 39.19,
-                    "FEVER": 51.2,
-                    "FiQA2018": 46.68,
-                    "HotpotQA": 42.14,
-                    "MSMARCO": 27.67,
-                    "MintakaRetrieval (fr)": 34.93,
-                    "NFCorpus": 35.08,
-                    "NQ": 52.87,
-                    "QuoraRetrieval": 85.96,
-                    "SCIDOCS": 17.17,
-                    "SciFact": 55.38,
-                    "SyntecRetrieval": 78.97,
-                    "TRECCOVID": 59.48,
-                    "Touche2020": 21.65,
-                    "XPQARetrieval (fr)": 56.2
+                    "Model": "titan-embed-text-v1",
+                    "ArguAna": 48.83,
+                    "FiQA2018": 40.38,
+                    "MSMARCO": 35.19,
+                    "NQ": 51.08,
+                    "SciFact": 73.5,
+                    "TRECCOVID": 54.74
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "sentence-t5-xxl",
-                    "BIOSSES": 80.43,
-                    "SICK-R": 80.47,
-                    "SICKFr": 77.07,
-                    "STS12": 78.85,
-                    "STS13": 88.94,
-                    "STS14": 84.86,
-                    "STS15": 89.32,
-                    "STS16": 84.67,
-                    "STS17 (en-en)": 89.46,
-                    "STS22 (en)": 65.33,
-                    "STS22 (fr)": 76.8,
-                    "STSBenchmark": 84.01,
-                    "STSBenchmarkMultilingualSTS (fr)": 81.24
+                    "Model": "titan-embed-text-v1",
+                    "BIOSSES": 84.17,
+                    "SICK-R": 73.05,
+                    "STS12": 66.59,
+                    "STS13": 83.24,
+                    "STS14": 73.71,
+                    "STS15": 82.4,
+                    "STS16": null,
+                    "STS17 (en-en)": 80.9,
+                    "STSBenchmark": 74.85
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "sentence-t5-xxl",
-                    "SummEval": 30.08,
-                    "SummEvalFr": 30.39
+                    "Model": "titan-embed-text-v1"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "titan-embed-text-v1"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "sentence-t5-xxl"
+                    "Model": "titan-embed-text-v1"
                 }
             ]
         }
     },
-    "bge-large-en-v1.5-instruct": {
+    "nomic-embed-text-v1.5-64": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "bge-large-en-v1.5-instruct"
+                    "Model": "nomic-embed-text-v1.5-64"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "bge-large-en-v1.5-instruct"
+                    "Model": "nomic-embed-text-v1.5-64",
+                    "AmazonCounterfactualClassification (en)": 66.85,
+                    "AmazonPolarityClassification": 85.92,
+                    "AmazonReviewsClassification (en)": 41.02,
+                    "Banking77Classification": 80.63,
+                    "EmotionClassification": 40.55,
+                    "ImdbClassification": 76.6,
+                    "MTOPDomainClassification (en)": 86.31,
+                    "MTOPIntentClassification (en)": 62.77,
+                    "MassiveIntentClassification (en)": 64.95,
+                    "MassiveScenarioClassification (en)": 70.38,
+                    "ToxicConversationsClassification": 66.53,
+                    "TweetSentimentExtractionClassification": 55.23
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "bge-large-en-v1.5-instruct"
+                    "Model": "nomic-embed-text-v1.5-64",
+                    "ArxivClusteringP2P": 41.8,
+                    "ArxivClusteringS2S": 32.41,
+                    "BiorxivClusteringP2P": 34.81,
+                    "BiorxivClusteringS2S": 28.59,
+                    "MedrxivClusteringP2P": 32.73,
+                    "MedrxivClusteringS2S": 29.91,
+                    "RedditClustering": 50.31,
+                    "RedditClusteringP2P": 56.57,
+                    "StackExchangeClustering": 57.99,
+                    "StackExchangeClusteringP2P": 33.64,
+                    "TwentyNewsgroupsClustering": 44.61
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "bge-large-en-v1.5-instruct"
+                    "Model": "nomic-embed-text-v1.5-64",
+                    "SprintDuplicateQuestions": 90.06,
+                    "TwitterSemEval2015": 71.68,
+                    "TwitterURLCorpus": 85.03
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "bge-large-en-v1.5-instruct"
+                    "Model": "nomic-embed-text-v1.5-64",
+                    "AskUbuntuDupQuestions": 60.79,
+                    "MindSmallReranking": 29.7,
+                    "SciDocsRR": 75.79,
+                    "StackOverflowDupQuestions": 47.42
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "bge-large-en-v1.5-instruct",
-                    "ARCChallenge": 8.86,
-                    "AlphaNLI": 0.86,
-                    "HellaSwag": 26.24,
-                    "PIQA": 23.26,
-                    "Quail": 2.72,
-                    "RARbCode": 45.25,
-                    "RARbMath": 49.82,
-                    "SIQA": 0.59,
-                    "SpartQA": 2.34,
-                    "TempReasonL1": 1.17,
-                    "TempReasonL2Fact": 21.19,
-                    "TempReasonL2Pure": 2.1,
-                    "TempReasonL3Fact": 17.59,
-                    "TempReasonL3Pure": 5.99,
-                    "WinoGrande": 10.31
+                    "Model": "nomic-embed-text-v1.5-64",
+                    "ArguAna": 37.16,
+                    "CQADupstackRetrieval": 28.72,
+                    "ClimateFEVER": 31.48,
+                    "DBPedia": 28.19,
+                    "FEVER": 70.24,
+                    "FiQA2018": 25.78,
+                    "HotpotQA": 43.07,
+                    "MSMARCO": 35.95,
+                    "NFCorpus": 26.03,
+                    "NQ": 45.54,
+                    "QuoraRetrieval": 85.83,
+                    "SCIDOCS": 12.09,
+                    "SciFact": 52.71,
+                    "TRECCOVID": 67.83,
+                    "Touche2020": 23.13
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "bge-large-en-v1.5-instruct"
+                    "Model": "nomic-embed-text-v1.5-64",
+                    "BIOSSES": 77.18,
+                    "SICK-R": 78.76,
+                    "STS12": 77.3,
+                    "STS13": 84.18,
+                    "STS14": 79.37,
+                    "STS15": 84.69,
+                    "STS16": 83.36,
+                    "STS17 (en-en)": 85.73,
+                    "STS22 (en)": 63.83,
+                    "STSBenchmark": 83.46
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "bge-large-en-v1.5-instruct"
+                    "Model": "nomic-embed-text-v1.5-64",
+                    "SummEval": 28.41
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "nomic-embed-text-v1.5-64"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "bge-large-en-v1.5-instruct"
+                    "Model": "nomic-embed-text-v1.5-64"
                 }
             ]
         }
     },
-    "voyage-code-2": {
+    "instructor-base": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "voyage-code-2"
+                    "Model": "instructor-base"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "voyage-code-2",
-                    "AmazonReviewsClassification (fr)": 42.15,
-                    "MTOPDomainClassification (fr)": 87.68,
-                    "MTOPIntentClassification (fr)": 59.44,
-                    "MasakhaNEWSClassification (fra)": 82.13,
-                    "MassiveIntentClassification (fr)": 63.08,
-                    "MassiveScenarioClassification (fr)": 70.15
+                    "Model": "instructor-base"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "voyage-code-2",
-                    "AlloProfClusteringP2P": 61.63,
-                    "AlloProfClusteringS2S": 50.67,
-                    "HALClusteringS2S": 27.44,
-                    "MLSUMClusteringP2P": 45.23,
-                    "MLSUMClusteringS2S": 41.48,
-                    "MasakhaNEWSClusteringP2P (fra)": 56.59,
-                    "MasakhaNEWSClusteringS2S (fra)": 35.18
+                    "Model": "instructor-base"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "voyage-code-2",
-                    "OpusparcusPC (fr)": 92.87,
-                    "PawsXPairClassification (fr)": 60.83
+                    "Model": "instructor-base"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "voyage-code-2",
-                    "AlloprofReranking": 70.79,
-                    "SyntecReranking": 86.77
+                    "Model": "instructor-base"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "voyage-code-2",
-                    "AlloprofRetrieval": 52.61,
-                    "BSARDRetrieval": 0.29,
-                    "MintakaRetrieval (fr)": 19.05,
-                    "SyntecRetrieval": 82.77,
-                    "XPQARetrieval (fr)": 71.95
+                    "Model": "instructor-base"
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "voyage-code-2",
-                    "SICKFr": 73.56,
-                    "STS22 (fr)": 79.99,
-                    "STSBenchmarkMultilingualSTS (fr)": 79.02
+                    "Model": "instructor-base"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "voyage-code-2",
-                    "SummEvalFr": 28.34
+                    "Model": "instructor-base"
                 }
             ]
         },
-        "InstructionRetrieval": {
-            "p-MRR": [
+        "MultilabelClassification": {
+            "accuracy": [
                 {
-                    "Model": "voyage-code-2"
+                    "Model": "instructor-base"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "instructor-base",
+                    "Core17InstructionRetrieval": -1.09,
+                    "News21InstructionRetrieval": -1.78,
+                    "Robust04InstructionRetrieval": -10.42
                 }
             ]
         }
     },
-    "gte-Qwen1.5-7B-instruct": {
+    "bge-small-en-v1.5-instruct": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "gte-Qwen1.5-7B-instruct"
+                    "Model": "bge-small-en-v1.5-instruct"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "gte-Qwen1.5-7B-instruct",
-                    "AmazonCounterfactualClassification (en)": 83.16,
-                    "AmazonPolarityClassification": 96.7,
-                    "AmazonReviewsClassification (en)": 62.17,
-                    "AmazonReviewsClassification (zh)": 52.95,
-                    "Banking77Classification": 81.68,
-                    "EmotionClassification": 54.53,
-                    "IFlyTek": 53.77,
-                    "ImdbClassification": 95.58,
-                    "JDReview": 88.2,
-                    "MTOPDomainClassification (en)": 95.75,
-                    "MTOPIntentClassification (en)": 84.26,
-                    "MassiveIntentClassification (zh-CN)": 76.25,
-                    "MassiveIntentClassification (en)": 78.47,
-                    "MassiveScenarioClassification (en)": 78.19,
-                    "MassiveScenarioClassification (zh-CN)": 77.26,
-                    "MultilingualSentiment": 77.42,
-                    "OnlineShopping": 94.48,
-                    "TNews": 51.24,
-                    "ToxicConversationsClassification": 78.75,
-                    "TweetSentimentExtractionClassification": 66.0,
-                    "Waimai": 88.63
+                    "Model": "bge-small-en-v1.5-instruct"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "gte-Qwen1.5-7B-instruct",
-                    "ArxivClusteringP2P": 56.4,
-                    "ArxivClusteringS2S": 51.45,
-                    "BiorxivClusteringP2P": 49.01,
-                    "BiorxivClusteringS2S": 45.06,
-                    "CLSClusteringP2P": 47.21,
-                    "CLSClusteringS2S": 45.79,
-                    "MedrxivClusteringP2P": 44.37,
-                    "MedrxivClusteringS2S": 42.0,
-                    "RedditClustering": 73.37,
-                    "RedditClusteringP2P": 72.51,
-                    "StackExchangeClustering": 79.07,
-                    "StackExchangeClusteringP2P": 49.57,
-                    "ThuNewsClusteringP2P": 87.43,
-                    "ThuNewsClusteringS2S": 87.9,
-                    "TwentyNewsgroupsClustering": 51.31
+                    "Model": "bge-small-en-v1.5-instruct"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "gte-Qwen1.5-7B-instruct",
-                    "Cmnli": 91.81,
-                    "Ocnli": 85.22,
-                    "SprintDuplicateQuestions": 95.99,
-                    "TwitterSemEval2015": 79.36,
-                    "TwitterURLCorpus": 86.79
+                    "Model": "bge-small-en-v1.5-instruct"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "gte-Qwen1.5-7B-instruct",
-                    "AskUbuntuDupQuestions": 66.0,
-                    "CMedQAv1": 86.37,
-                    "CMedQAv2": 87.41,
-                    "MindSmallReranking": 32.71,
-                    "SciDocsRR": 87.89,
-                    "StackOverflowDupQuestions": 53.93,
-                    "T2Reranking": 68.11
+                    "Model": "bge-small-en-v1.5-instruct"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "gte-Qwen1.5-7B-instruct",
-                    "ArguAna": 62.65,
-                    "BrightRetrieval (stackoverflow)": 19.85,
-                    "BrightRetrieval (earth_science)": 36.22,
-                    "BrightRetrieval (leetcode)": 25.46,
-                    "BrightRetrieval (theoremqa_questions)": 26.97,
-                    "BrightRetrieval (economics)": 17.72,
-                    "BrightRetrieval (robotics)": 13.47,
-                    "BrightRetrieval (pony)": 9.79,
-                    "BrightRetrieval (aops)": 14.36,
-                    "BrightRetrieval (psychology)": 24.61,
-                    "BrightRetrieval (theoremqa_theorems)": 26.66,
-                    "BrightRetrieval (biology)": 30.92,
-                    "BrightRetrieval (sustainable_living)": 14.93,
-                    "CQADupstackRetrieval": 40.64,
-                    "ClimateFEVER": 44.0,
-                    "CmedqaRetrieval": 43.47,
-                    "CovidRetrieval": 80.87,
-                    "DBPedia": 48.04,
-                    "DuRetrieval": 86.01,
-                    "EcomRetrieval": 66.46,
-                    "FEVER": 93.35,
-                    "FiQA2018": 55.31,
-                    "HotpotQA": 72.25,
-                    "MMarcoRetrieval": 73.83,
-                    "MSMARCO": 41.68,
-                    "MedicalRetrieval": 61.33,
-                    "NFCorpus": 38.25,
-                    "NQ": 61.79,
-                    "QuoraRetrieval": 89.61,
-                    "SCIDOCS": 27.69,
-                    "SciFact": 75.31,
-                    "T2Retrieval": 83.58,
-                    "TRECCOVID": 72.72,
-                    "Touche2020": 20.3,
-                    "VideoRetrieval": 69.41
+                    "Model": "bge-small-en-v1.5-instruct",
+                    "ARCChallenge": 7.72,
+                    "AlphaNLI": 1.26,
+                    "HellaSwag": 23.41,
+                    "PIQA": 20.79,
+                    "Quail": 2.01,
+                    "RARbCode": 41.52,
+                    "RARbMath": 46.5,
+                    "SIQA": 0.98,
+                    "SpartQA": 2.86,
+                    "TempReasonL1": 1.27,
+                    "TempReasonL2Fact": 16.72,
+                    "TempReasonL2Pure": 1.1,
+                    "TempReasonL3Fact": 12.81,
+                    "TempReasonL3Pure": 4.63,
+                    "WinoGrande": 5.35
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "gte-Qwen1.5-7B-instruct",
-                    "AFQMC": 58.47,
-                    "ATEC": 55.46,
-                    "BIOSSES": 81.12,
-                    "BQ": 77.59,
-                    "LCQMC": 76.29,
-                    "PAWSX": 50.22,
-                    "QBQTC": 31.82,
-                    "SICK-R": 79.15,
-                    "STS12": 76.52,
-                    "STS13": 88.63,
-                    "STS14": 83.32,
-                    "STS15": 87.5,
-                    "STS16": 86.39,
-                    "STS17 (en-en)": 87.79,
-                    "STS22 (en)": 66.4,
-                    "STS22 (zh)": 67.36,
-                    "STSB": 81.37,
-                    "STSBenchmark": 87.35
+                    "Model": "bge-small-en-v1.5-instruct"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "gte-Qwen1.5-7B-instruct",
-                    "SummEval": 31.46
+                    "Model": "bge-small-en-v1.5-instruct"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "bge-small-en-v1.5-instruct"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "gte-Qwen1.5-7B-instruct"
+                    "Model": "bge-small-en-v1.5-instruct"
                 }
             ]
         }
     },
-    "text-similarity-curie-001": {
+    "sentence-camembert-large": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "text-similarity-curie-001"
+                    "Model": "sentence-camembert-large"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "text-similarity-curie-001"
+                    "Model": "sentence-camembert-large",
+                    "AmazonReviewsClassification (fr)": 37.97,
+                    "MTOPDomainClassification (fr)": 85.74,
+                    "MTOPIntentClassification (fr)": 58.62,
+                    "MasakhaNEWSClassification (fra)": 80.62,
+                    "MassiveIntentClassification (fr)": 62.65,
+                    "MassiveScenarioClassification (fr)": 69.29
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "text-similarity-curie-001",
-                    "RedditClustering": 40.79,
-                    "StackExchangeClustering": 55.14,
-                    "TwentyNewsgroupsClustering": 37.64
+                    "Model": "sentence-camembert-large",
+                    "AlloProfClusteringP2P": 62.69,
+                    "AlloProfClusteringS2S": 42.06,
+                    "HALClusteringS2S": 23.9,
+                    "MLSUMClusteringP2P": 42.04,
+                    "MLSUMClusteringS2S": 32.29,
+                    "MasakhaNEWSClusteringP2P (fra)": 54.51,
+                    "MasakhaNEWSClusteringS2S (fra)": 44.73
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "text-similarity-curie-001",
-                    "SprintDuplicateQuestions": 79.85,
-                    "TwitterSemEval2015": 69.45,
-                    "TwitterURLCorpus": 84.06
+                    "Model": "sentence-camembert-large",
+                    "OpusparcusPC (fr)": 94.63,
+                    "PawsXPairClassification (fr)": 59.59
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "text-similarity-curie-001",
-                    "AskUbuntuDupQuestions": 55.09,
-                    "SciDocsRR": 70.93,
-                    "StackOverflowDupQuestions": 42.42
+                    "Model": "sentence-camembert-large",
+                    "AlloprofReranking": 57.62,
+                    "SyntecReranking": 88.15
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "text-similarity-curie-001",
-                    "FiQA2018": 5.14,
-                    "NFCorpus": 19.96,
-                    "QuoraRetrieval": 83.11,
-                    "SciFact": 46.68,
-                    "TRECCOVID": 7.61
+                    "Model": "sentence-camembert-large",
+                    "AlloprofRetrieval": 31.62,
+                    "BSARDRetrieval": 0.0,
+                    "MintakaRetrieval (fr)": 21.87,
+                    "SyntecRetrieval": 81.11,
+                    "XPQARetrieval (fr)": 65.62
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "text-similarity-curie-001",
-                    "BIOSSES": 77.46,
-                    "SICK-R": 77.26,
-                    "STSBenchmark": 83.02
+                    "Model": "sentence-camembert-large",
+                    "SICKFr": 77.7,
+                    "STS22 (fr)": 81.73,
+                    "STSBenchmarkMultilingualSTS (fr)": 85.79
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "text-similarity-curie-001"
+                    "Model": "sentence-camembert-large",
+                    "SummEvalFr": 30.88
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "sentence-camembert-large"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "text-similarity-curie-001"
+                    "Model": "sentence-camembert-large"
                 }
             ]
         }
     },
-    "elser-v2": {
+    "dragon-plus": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "elser-v2"
+                    "Model": "dragon-plus"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "elser-v2",
-                    "AmazonCounterfactualClassification (en)": 74.16,
-                    "AmazonPolarityClassification": 61.91,
-                    "AmazonReviewsClassification (en)": 32.06,
-                    "Banking77Classification": 82.05,
-                    "EmotionClassification": 46.65,
-                    "ImdbClassification": 65.02,
-                    "MTOPDomainClassification (en)": 93.17,
-                    "MTOPIntentClassification (en)": 71.1,
-                    "MassiveIntentClassification (en)": 68.48,
-                    "MassiveScenarioClassification (en)": 74.98,
-                    "ToxicConversationsClassification": 68.15,
-                    "TweetSentimentExtractionClassification": 53.57
+                    "Model": "dragon-plus"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "elser-v2",
-                    "ArxivClusteringP2P": 35.27,
-                    "ArxivClusteringS2S": 23.18,
-                    "BiorxivClusteringP2P": 31.13,
-                    "BiorxivClusteringS2S": 26.78,
-                    "MedrxivClusteringP2P": 24.65,
-                    "MedrxivClusteringS2S": 24.21,
-                    "RedditClustering": 38.74,
-                    "RedditClusteringP2P": 51.92,
-                    "StackExchangeClustering": 42.7,
-                    "StackExchangeClusteringP2P": 28.7,
-                    "TwentyNewsgroupsClustering": 27.82
+                    "Model": "dragon-plus"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "elser-v2",
-                    "SprintDuplicateQuestions": 94.53,
-                    "TwitterSemEval2015": 64.41,
-                    "TwitterURLCorpus": 85.01
+                    "Model": "dragon-plus"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "elser-v2",
-                    "AskUbuntuDupQuestions": 58.31,
-                    "MindSmallReranking": 30.75,
-                    "SciDocsRR": 75.62,
-                    "StackOverflowDupQuestions": 48.4
+                    "Model": "dragon-plus"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "elser-v2",
-                    "ArguAna": 55.98,
-                    "CQADupstackRetrieval": 34.27,
-                    "ClimateFEVER": 27.08,
-                    "DBPedia": 42.7,
-                    "FEVER": 78.55,
-                    "FiQA2018": 41.57,
-                    "HotpotQA": 67.01,
-                    "MSMARCO": 38.9,
-                    "NFCorpus": 36.66,
-                    "NQ": 55.84,
-                    "QuoraRetrieval": 84.69,
-                    "SCIDOCS": 16.24,
-                    "SciFact": 71.8,
-                    "TRECCOVID": 72.72,
-                    "Touche2020": 26.27
+                    "Model": "dragon-plus",
+                    "ARCChallenge": 8.91,
+                    "AlphaNLI": 32.1,
+                    "HellaSwag": 27.69,
+                    "PIQA": 28.01,
+                    "Quail": 4.09,
+                    "RARbCode": 17.58,
+                    "RARbMath": 45.09,
+                    "SIQA": 2.0,
+                    "SpartQA": 10.34,
+                    "TempReasonL1": 1.82,
+                    "TempReasonL2Fact": 17.45,
+                    "TempReasonL2Pure": 0.55,
+                    "TempReasonL3Fact": 15.71,
+                    "TempReasonL3Pure": 7.97,
+                    "WinoGrande": 67.18
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "elser-v2",
-                    "BIOSSES": 83.79,
-                    "SICK-R": 68.78,
-                    "STS12": 64.81,
-                    "STS13": 80.1,
-                    "STS14": 74.96,
-                    "STS15": 83.7,
-                    "STS16": 80.55,
-                    "STS17 (en-en)": 85.74,
-                    "STS22 (en)": 67.5,
-                    "STSBenchmark": 79.54
+                    "Model": "dragon-plus"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "elser-v2",
-                    "SummEval": 31.03
+                    "Model": "dragon-plus"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "dragon-plus"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "elser-v2"
+                    "Model": "dragon-plus"
                 }
             ]
         }
     },
-    "flaubert_base_uncased": {
+    "flaubert_large_cased": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "flaubert_base_uncased"
+                    "Model": "flaubert_large_cased"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "flaubert_base_uncased",
-                    "AmazonReviewsClassification (fr)": 23.52,
-                    "MTOPDomainClassification (fr)": 27.74,
-                    "MTOPIntentClassification (fr)": 8.61,
-                    "MasakhaNEWSClassification (fra)": 62.61,
-                    "MassiveIntentClassification (fr)": 6.24,
-                    "MassiveScenarioClassification (fr)": 10.98
+                    "Model": "flaubert_large_cased",
+                    "AmazonReviewsClassification (fr)": 22.45,
+                    "MTOPDomainClassification (fr)": 24.27,
+                    "MTOPIntentClassification (fr)": 9.79,
+                    "MasakhaNEWSClassification (fra)": 55.64,
+                    "MassiveIntentClassification (fr)": 16.41,
+                    "MassiveScenarioClassification (fr)": 22.72
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "flaubert_base_uncased",
-                    "AlloProfClusteringP2P": 43.2,
-                    "AlloProfClusteringS2S": 12.94,
-                    "HALClusteringS2S": 1.8,
-                    "MLSUMClusteringP2P": 33.22,
-                    "MLSUMClusteringS2S": 14.9,
-                    "MasakhaNEWSClusteringP2P (fra)": 28.49,
-                    "MasakhaNEWSClusteringS2S (fra)": 22.58
+                    "Model": "flaubert_large_cased",
+                    "AlloProfClusteringP2P": 40.85,
+                    "AlloProfClusteringS2S": 21.76,
+                    "HALClusteringS2S": 5.26,
+                    "MLSUMClusteringP2P": 38.09,
+                    "MLSUMClusteringS2S": 18.71,
+                    "MasakhaNEWSClusteringP2P (fra)": 26.43,
+                    "MasakhaNEWSClusteringS2S (fra)": 24.68
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "flaubert_base_uncased",
-                    "OpusparcusPC (fr)": 82.0,
-                    "PawsXPairClassification (fr)": 52.78
+                    "Model": "flaubert_large_cased",
+                    "OpusparcusPC (fr)": 74.78,
+                    "PawsXPairClassification (fr)": 54.14
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "flaubert_base_uncased",
-                    "AlloprofReranking": 34.55,
-                    "SyntecReranking": 57.18
+                    "Model": "flaubert_large_cased",
+                    "AlloprofReranking": 26.29,
+                    "SyntecReranking": 42.8
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "flaubert_base_uncased",
-                    "AlloprofRetrieval": 1.72,
+                    "Model": "flaubert_large_cased",
+                    "AlloprofRetrieval": 0.58,
                     "BSARDRetrieval": 0.0,
-                    "MintakaRetrieval (fr)": 0.51,
-                    "SyntecRetrieval": 22.33,
-                    "XPQARetrieval (fr)": 9.09
+                    "MintakaRetrieval (fr)": 0.26,
+                    "SyntecRetrieval": 1.58,
+                    "XPQARetrieval (fr)": 3.69
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "flaubert_base_uncased",
-                    "SICKFr": 41.9,
-                    "STS22 (fr)": 55.15,
-                    "STSBenchmarkMultilingualSTS (fr)": 33.41
+                    "Model": "flaubert_large_cased",
+                    "SICKFr": 34.6,
+                    "STS22 (fr)": 48.52,
+                    "STSBenchmarkMultilingualSTS (fr)": 15.66
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "flaubert_base_uncased",
-                    "SummEvalFr": 29.43
+                    "Model": "flaubert_large_cased",
+                    "SummEvalFr": 29.25
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "flaubert_large_cased"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "flaubert_base_uncased"
+                    "Model": "flaubert_large_cased"
                 }
             ]
         }
     },
-    "multilingual-e5-base": {
+    "gte-Qwen2-7B-instruct": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "multilingual-e5-base",
-                    "BornholmBitextMining (dan-Latn)": 33.22,
-                    "BornholmBitextMining": 46.4,
-                    "Tatoeba (kzj-Latn_eng-Latn)": 6.26,
-                    "Tatoeba (ina-Latn_eng-Latn)": 86.11,
-                    "Tatoeba (bre-Latn_eng-Latn)": 5.44,
-                    "Tatoeba (kab-Latn_eng-Latn)": 21.77,
-                    "Tatoeba (ind-Latn_eng-Latn)": 90.26,
-                    "Tatoeba (mkd-Cyrl_eng-Latn)": 73.76,
-                    "Tatoeba (yue-Hant_eng-Latn)": 80.66,
-                    "Tatoeba (amh-Ethi_eng-Latn)": 74.93,
-                    "Tatoeba (ceb-Latn_eng-Latn)": 45.46,
-                    "Tatoeba (lit-Latn_eng-Latn)": 75.53,
-                    "Tatoeba (nds-Latn_eng-Latn)": 53.86,
-                    "Tatoeba (kur-Latn_eng-Latn)": 52.96,
-                    "Tatoeba (bel-Cyrl_eng-Latn)": 86.7,
-                    "Tatoeba (ile-Latn_eng-Latn)": 72.56,
-                    "Tatoeba (oci-Latn_eng-Latn)": 35.79,
-                    "Tatoeba (heb-Hebr_eng-Latn)": 74.26,
-                    "Tatoeba (mhr-Cyrl_eng-Latn)": 5.52,
-                    "Tatoeba (afr-Latn_eng-Latn)": 87.04,
-                    "Tatoeba (uig-Arab_eng-Latn)": 62.97,
-                    "Tatoeba (mar-Deva_eng-Latn)": 86.62,
-                    "Tatoeba (fry-Latn_eng-Latn)": 50.82,
-                    "Tatoeba (tat-Cyrl_eng-Latn)": 66.92,
-                    "Tatoeba (khm-Khmr_eng-Latn)": 47.27,
-                    "Tatoeba (dtp-Latn_eng-Latn)": 5.13,
-                    "Tatoeba (ben-Beng_eng-Latn)": 81.05,
-                    "Tatoeba (ido-Latn_eng-Latn)": 74.41,
-                    "Tatoeba (cha-Latn_eng-Latn)": 16.95,
-                    "Tatoeba (zsm-Latn_eng-Latn)": 92.45,
-                    "Tatoeba (pes-Arab_eng-Latn)": 87.18,
-                    "Tatoeba (hye-Armn_eng-Latn)": 85.85,
-                    "Tatoeba (cat-Latn_eng-Latn)": 84.09,
-                    "Tatoeba (cym-Latn_eng-Latn)": 65.69,
-                    "Tatoeba (aze-Latn_eng-Latn)": 84.71,
-                    "Tatoeba (yid-Hebr_eng-Latn)": 63.2,
-                    "Tatoeba (swg-Latn_eng-Latn)": 42.33,
-                    "Tatoeba (war-Latn_eng-Latn)": 47.18,
-                    "Tatoeba (swe-Latn_eng-Latn)": 91.33,
-                    "Tatoeba (slk-Latn_eng-Latn)": 86.42,
-                    "Tatoeba (gla-Latn_eng-Latn)": 43.08,
-                    "Tatoeba (xho-Latn_eng-Latn)": 73.24,
-                    "Tatoeba (dan-Latn_eng-Latn)": 91.23,
-                    "Tatoeba (ara-Arab_eng-Latn)": 82.86,
-                    "Tatoeba (ast-Latn_eng-Latn)": 74.36,
-                    "Tatoeba (hrv-Latn_eng-Latn)": 92.5,
-                    "Tatoeba (nob-Latn_eng-Latn)": 95.9,
-                    "Tatoeba (eus-Latn_eng-Latn)": 56.26,
-                    "Tatoeba (kaz-Cyrl_eng-Latn)": 75.56,
-                    "Tatoeba (tuk-Latn_eng-Latn)": 19.67,
-                    "Tatoeba (pam-Latn_eng-Latn)": 6.92,
-                    "Tatoeba (gsw-Latn_eng-Latn)": 43.53,
-                    "Tatoeba (slv-Latn_eng-Latn)": 81.93,
-                    "Tatoeba (dsb-Latn_eng-Latn)": 34.36,
-                    "Tatoeba (cor-Latn_eng-Latn)": 4.38,
-                    "Tatoeba (ces-Latn_eng-Latn)": 88.75,
-                    "Tatoeba (tam-Taml_eng-Latn)": 85.12,
-                    "Tatoeba (glg-Latn_eng-Latn)": 82.69,
-                    "Tatoeba (bul-Cyrl_eng-Latn)": 88.95,
-                    "Tatoeba (deu-Latn_eng-Latn)": 97.07,
-                    "Tatoeba (fin-Latn_eng-Latn)": 86.15,
-                    "Tatoeba (csb-Latn_eng-Latn)": 24.29,
-                    "Tatoeba (urd-Arab_eng-Latn)": 86.2,
-                    "Tatoeba (est-Latn_eng-Latn)": 70.64,
-                    "Tatoeba (wuu-Hans_eng-Latn)": 78.65,
-                    "Tatoeba (tha-Thai_eng-Latn)": 94.22,
-                    "Tatoeba (spa-Latn_eng-Latn)": 96.97,
-                    "Tatoeba (ukr-Cyrl_eng-Latn)": 88.29,
-                    "Tatoeba (awa-Deva_eng-Latn)": 68.39,
-                    "Tatoeba (mal-Mlym_eng-Latn)": 96.72,
-                    "Tatoeba (cbk-Latn_eng-Latn)": 60.66,
-                    "Tatoeba (hsb-Latn_eng-Latn)": 40.36,
-                    "Tatoeba (tzl-Latn_eng-Latn)": 34.44,
-                    "Tatoeba (gle-Latn_eng-Latn)": 58.62,
-                    "Tatoeba (orv-Cyrl_eng-Latn)": 16.0,
-                    "Tatoeba (isl-Latn_eng-Latn)": 76.9,
-                    "Tatoeba (jav-Latn_eng-Latn)": 61.25,
-                    "Tatoeba (fao-Latn_eng-Latn)": 64.72,
-                    "Tatoeba (pol-Latn_eng-Latn)": 94.57,
-                    "Tatoeba (max-Deva_eng-Latn)": 52.4,
-                    "Tatoeba (bos-Latn_eng-Latn)": 88.86,
-                    "Tatoeba (hun-Latn_eng-Latn)": 84.41,
-                    "Tatoeba (rus-Cyrl_eng-Latn)": 91.78,
-                    "Tatoeba (arq-Arab_eng-Latn)": 26.61,
-                    "Tatoeba (kor-Hang_eng-Latn)": 83.37,
-                    "Tatoeba (uzb-Latn_eng-Latn)": 62.63,
-                    "Tatoeba (pms-Latn_eng-Latn)": 44.61,
-                    "Tatoeba (ell-Grek_eng-Latn)": 89.96,
-                    "Tatoeba (swh-Latn_eng-Latn)": 66.81,
-                    "Tatoeba (epo-Latn_eng-Latn)": 92.07,
-                    "Tatoeba (jpn-Jpan_eng-Latn)": 90.3,
-                    "Tatoeba (tel-Telu_eng-Latn)": 88.49,
-                    "Tatoeba (srp-Cyrl_eng-Latn)": 89.08,
-                    "Tatoeba (nov-Latn_eng-Latn)": 66.96,
-                    "Tatoeba (cmn-Hans_eng-Latn)": 93.35,
-                    "Tatoeba (tgl-Latn_eng-Latn)": 83.78,
-                    "Tatoeba (ber-Tfng_eng-Latn)": 23.59,
-                    "Tatoeba (sqi-Latn_eng-Latn)": 90.06,
-                    "Tatoeba (ang-Latn_eng-Latn)": 29.87,
-                    "Tatoeba (ita-Latn_eng-Latn)": 90.61,
-                    "Tatoeba (por-Latn_eng-Latn)": 92.74,
-                    "Tatoeba (mon-Cyrl_eng-Latn)": 78.37,
-                    "Tatoeba (fra-Latn_eng-Latn)": 92.76,
-                    "Tatoeba (lat-Latn_eng-Latn)": 39.62,
-                    "Tatoeba (nno-Latn_eng-Latn)": 82.67,
-                    "Tatoeba (arz-Arab_eng-Latn)": 66.79,
-                    "Tatoeba (hin-Deva_eng-Latn)": 93.13,
-                    "Tatoeba (nld-Latn_eng-Latn)": 93.2,
-                    "Tatoeba (kat-Geor_eng-Latn)": 77.83,
-                    "Tatoeba (lfn-Latn_eng-Latn)": 52.85,
-                    "Tatoeba (lvs-Latn_eng-Latn)": 76.76,
-                    "Tatoeba (tur-Latn_eng-Latn)": 92.54,
-                    "Tatoeba (ron-Latn_eng-Latn)": 91.27,
-                    "Tatoeba (vie-Latn_eng-Latn)": 94.55
+                    "Model": "gte-Qwen2-7B-instruct"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "multilingual-e5-base",
-                    "AllegroReviews (pol-Latn)": 40.78,
-                    "AllegroReviews": 40.85,
-                    "AmazonCounterfactualClassification (en-ext)": 76.91,
-                    "AmazonCounterfactualClassification (en)": 77.36,
-                    "AmazonCounterfactualClassification (deu-Latn)": 70.81,
-                    "AmazonCounterfactualClassification (jpn-Jpan)": 72.02,
-                    "AmazonPolarityClassification": 91.76,
-                    "AmazonReviewsClassification (en)": 47.54,
-                    "AmazonReviewsClassification (deu-Latn)": 44.37,
-                    "AmazonReviewsClassification (spa-Latn)": 43.38,
-                    "AmazonReviewsClassification (fra-Latn)": 41.55,
-                    "AmazonReviewsClassification (jpn-Jpan)": 39.57,
-                    "AmazonReviewsClassification (cmn-Hans)": 38.34,
-                    "AmazonReviewsClassification (fr)": 40.94,
-                    "AngryTweetsClassification (dan-Latn)": 56.28,
-                    "AngryTweetsClassification": 54.65,
-                    "Banking77Classification": 73.53,
-                    "CBD (pol-Latn)": 62.6,
-                    "CBD": 62.66,
-                    "DKHateClassification": 63.53,
-                    "DanishPoliticalCommentsClassification (dan-Latn)": 36.41,
-                    "DanishPoliticalCommentsClassification": 36.69,
-                    "EmotionClassification": 45.68,
-                    "GeoreviewClassification (rus-Cyrl)": 46.05,
-                    "HeadlineClassification (rus-Cyrl)": 75.64,
-                    "IFlyTek (cmn-Hans)": 40.81,
-                    "IFlyTek": 44.93,
-                    "ImdbClassification": 84.29,
-                    "InappropriatenessClassification (rus-Cyrl)": 58.78,
-                    "JDReview (cmn-Hans)": 75.72,
-                    "JDReview": 76.21,
-                    "KinopoiskClassification (rus-Cyrl)": 50.89,
-                    "LccSentimentClassification (dan-Latn)": 60.13,
-                    "LccSentimentClassification": 59.67,
-                    "MTOPDomainClassification (en)": 90.9,
-                    "MTOPDomainClassification (deu-Latn)": 87.94,
-                    "MTOPDomainClassification (spa-Latn)": 85.96,
-                    "MTOPDomainClassification (fra-Latn)": 82.88,
-                    "MTOPDomainClassification (hin-Deva)": 83.92,
-                    "MTOPDomainClassification (tha-Thai)": 83.94,
-                    "MTOPDomainClassification (fr)": 84.79,
-                    "MTOPIntentClassification (en)": 61.6,
-                    "MTOPIntentClassification (deu-Latn)": 61.05,
-                    "MTOPIntentClassification (spa-Latn)": 55.36,
-                    "MTOPIntentClassification (fra-Latn)": 52.23,
-                    "MTOPIntentClassification (hin-Deva)": 53.93,
-                    "MTOPIntentClassification (tha-Thai)": 58.69,
-                    "MTOPIntentClassification (fr)": 55.51,
-                    "MasakhaNEWSClassification (amh-Ethi)": 83.8,
-                    "MasakhaNEWSClassification (eng)": 76.49,
-                    "MasakhaNEWSClassification (fra-Latn)": 76.35,
-                    "MasakhaNEWSClassification (hau-Latn)": 74.63,
-                    "MasakhaNEWSClassification (ibo-Latn)": 64.59,
-                    "MasakhaNEWSClassification (lin-Latn)": 70.57,
-                    "MasakhaNEWSClassification (lug-Latn)": 68.12,
-                    "MasakhaNEWSClassification (orm-Ethi)": 71.75,
-                    "MasakhaNEWSClassification (pcm-Latn)": 91.05,
-                    "MasakhaNEWSClassification (run-Latn)": 73.35,
-                    "MasakhaNEWSClassification (sna-Latn)": 84.17,
-                    "MasakhaNEWSClassification (som-Latn)": 60.1,
-                    "MasakhaNEWSClassification (swa-Latn)": 70.74,
-                    "MasakhaNEWSClassification (tir-Ethi)": 67.1,
-                    "MasakhaNEWSClassification (xho-Latn)": 76.03,
-                    "MasakhaNEWSClassification (yor-Latn)": 72.75,
-                    "MasakhaNEWSClassification (fra)": 79.69,
-                    "MassiveIntentClassification (tha-Thai)": 59.63,
-                    "MassiveIntentClassification (tam-Taml)": 48.93,
-                    "MassiveIntentClassification (fin-Latn)": 58.91,
-                    "MassiveIntentClassification (rus-Cyrl)": 62.78,
-                    "MassiveIntentClassification (afr-Latn)": 49.82,
-                    "MassiveIntentClassification (heb-Hebr)": 55.3,
-                    "MassiveIntentClassification (sqi-Latn)": 51.07,
-                    "MassiveIntentClassification (por-Latn)": 62.12,
-                    "MassiveIntentClassification (hye-Armn)": 48.77,
-                    "MassiveIntentClassification (cym-Latn)": 37.05,
-                    "MassiveIntentClassification (deu-Latn)": 59.82,
-                    "MassiveIntentClassification (fas-Arab)": 59.51,
-                    "MassiveIntentClassification (hun-Latn)": 57.69,
-                    "MassiveIntentClassification (urd-Arab)": 51.3,
-                    "MassiveIntentClassification (cmo-Hant)": 56.4,
-                    "MassiveIntentClassification (khm-Khmr)": 32.14,
-                    "MassiveIntentClassification (tel-Telu)": 50.09,
-                    "MassiveIntentClassification (vie-Latn)": 59.61,
-                    "MassiveIntentClassification (kan-Knda)": 48.63,
-                    "MassiveIntentClassification (ara-Arab)": 50.2,
-                    "MassiveIntentClassification (mya-Mymr)": 46.67,
-                    "MassiveIntentClassification (slv-Latn)": 53.84,
-                    "MassiveIntentClassification (jpn-Jpan)": 62.3,
-                    "MassiveIntentClassification (mon-Cyrl)": 46.8,
-                    "MassiveIntentClassification (jav-Latn)": 43.23,
-                    "MassiveIntentClassification (lav-Latn)": 51.17,
-                    "MassiveIntentClassification (ron-Latn)": 56.83,
-                    "MassiveIntentClassification (dan-Latn)": 60.69,
-                    "MassiveIntentClassification (nob-Latn)": 60.06,
-                    "MassiveIntentClassification (tgl-Latn)": 48.99,
-                    "MassiveIntentClassification (aze-Latn)": 51.36,
-                    "MassiveIntentClassification (ind-Latn)": 58.7,
-                    "MassiveIntentClassification (amh-Ethi)": 42.4,
-                    "MassiveIntentClassification (ben-Beng)": 51.69,
-                    "MassiveIntentClassification (ell-Grek)": 58.07,
-                    "MassiveIntentClassification (hin-Deva)": 56.75,
-                    "MassiveIntentClassification (nld-Latn)": 61.23,
-                    "MassiveIntentClassification (pol-Latn)": 60.98,
-                    "MassiveIntentClassification (swe-Latn)": 62.43,
-                    "MassiveIntentClassification (isl-Latn)": 44.52,
-                    "MassiveIntentClassification (mal-Mlym)": 53.75,
-                    "MassiveIntentClassification (msa-Latn)": 52.84,
-                    "MassiveIntentClassification (kat-Geor)": 37.56,
-                    "MassiveIntentClassification (tur-Latn)": 60.69,
-                    "MassiveIntentClassification (kor-Kore)": 59.97,
-                    "MassiveIntentClassification (ita-Latn)": 61.29,
-                    "MassiveIntentClassification (cmo-Hans)": 63.22,
-                    "MassiveIntentClassification (en)": 65.71,
-                    "MassiveIntentClassification (fra-Latn)": 61.32,
-                    "MassiveIntentClassification (swa-Latn)": 45.24,
-                    "MassiveIntentClassification (spa-Latn)": 61.13,
-                    "MassiveIntentClassification (da)": 60.16,
-                    "MassiveIntentClassification (nb)": 59.83,
-                    "MassiveIntentClassification (sv)": 61.78,
-                    "MassiveIntentClassification (pl)": 61.04,
-                    "MassiveScenarioClassification (ind-Latn)": 63.6,
-                    "MassiveScenarioClassification (tha-Thai)": 67.37,
-                    "MassiveScenarioClassification (cmo-Hans)": 70.24,
-                    "MassiveScenarioClassification (ben-Beng)": 57.0,
-                    "MassiveScenarioClassification (kan-Knda)": 53.49,
-                    "MassiveScenarioClassification (tel-Telu)": 54.24,
-                    "MassiveScenarioClassification (aze-Latn)": 55.15,
-                    "MassiveScenarioClassification (ell-Grek)": 65.38,
-                    "MassiveScenarioClassification (swa-Latn)": 52.64,
-                    "MassiveScenarioClassification (hin-Deva)": 62.91,
-                    "MassiveScenarioClassification (tur-Latn)": 65.18,
-                    "MassiveScenarioClassification (dan-Latn)": 67.97,
-                    "MassiveScenarioClassification (msa-Latn)": 58.35,
-                    "MassiveScenarioClassification (mya-Mymr)": 50.77,
-                    "MassiveScenarioClassification (mon-Cyrl)": 51.87,
-                    "MassiveScenarioClassification (tgl-Latn)": 54.36,
-                    "MassiveScenarioClassification (cmo-Hant)": 63.73,
-                    "MassiveScenarioClassification (ara-Arab)": 58.0,
-                    "MassiveScenarioClassification (slv-Latn)": 58.3,
-                    "MassiveScenarioClassification (spa-Latn)": 66.47,
-                    "MassiveScenarioClassification (urd-Arab)": 56.74,
-                    "MassiveScenarioClassification (fin-Latn)": 64.94,
-                    "MassiveScenarioClassification (tam-Taml)": 53.86,
-                    "MassiveScenarioClassification (ron-Latn)": 63.5,
-                    "MassiveScenarioClassification (hye-Armn)": 53.63,
-                    "MassiveScenarioClassification (vie-Latn)": 66.35,
-                    "MassiveScenarioClassification (deu-Latn)": 68.4,
-                    "MassiveScenarioClassification (afr-Latn)": 58.95,
-                    "MassiveScenarioClassification (en)": 71.57,
-                    "MassiveScenarioClassification (fra-Latn)": 67.37,
-                    "MassiveScenarioClassification (jpn-Jpan)": 69.89,
-                    "MassiveScenarioClassification (nld-Latn)": 68.62,
-                    "MassiveScenarioClassification (cym-Latn)": 43.84,
-                    "MassiveScenarioClassification (heb-Hebr)": 62.53,
-                    "MassiveScenarioClassification (pol-Latn)": 66.12,
-                    "MassiveScenarioClassification (fas-Arab)": 63.92,
-                    "MassiveScenarioClassification (lav-Latn)": 56.42,
-                    "MassiveScenarioClassification (por-Latn)": 65.49,
-                    "MassiveScenarioClassification (rus-Cyrl)": 68.21,
-                    "MassiveScenarioClassification (mal-Mlym)": 59.89,
-                    "MassiveScenarioClassification (hun-Latn)": 65.75,
-                    "MassiveScenarioClassification (nob-Latn)": 66.57,
-                    "MassiveScenarioClassification (kor-Kore)": 67.9,
-                    "MassiveScenarioClassification (isl-Latn)": 53.28,
-                    "MassiveScenarioClassification (khm-Khmr)": 38.45,
-                    "MassiveScenarioClassification (sqi-Latn)": 57.92,
-                    "MassiveScenarioClassification (jav-Latn)": 51.94,
-                    "MassiveScenarioClassification (amh-Ethi)": 50.33,
-                    "MassiveScenarioClassification (ita-Latn)": 66.17,
-                    "MassiveScenarioClassification (kat-Geor)": 43.38,
-                    "MassiveScenarioClassification (swe-Latn)": 69.35,
-                    "MassiveScenarioClassification (da)": 67.46,
-                    "MassiveScenarioClassification (nb)": 66.18,
-                    "MassiveScenarioClassification (sv)": 69.15,
-                    "MassiveScenarioClassification (pl)": 66.11,
-                    "MultilingualSentiment (cmn-Hans)": 67.56,
-                    "MultilingualSentiment": 65.28,
-                    "NoRecClassification (nob-Latn)": 53.74,
-                    "NoRecClassification": 57.58,
-                    "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 75.85,
-                    "NordicLangClassification": 75.94,
-                    "NorwegianParliament": 59.94,
-                    "OnlineShopping (cmn-Hans)": 88.66,
-                    "OnlineShopping": 88.4,
-                    "PAC (pol-Latn)": 70.87,
-                    "PAC": 70.87,
-                    "PolEmo2.0-IN (pol-Latn)": 67.59,
-                    "PolEmo2.0-IN": 67.66,
-                    "PolEmo2.0-OUT (pol-Latn)": 43.93,
-                    "PolEmo2.0-OUT": 43.91,
-                    "RuReviewsClassification (rus-Cyrl)": 62.99,
-                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 56.28,
-                    "RuSciBenchOECDClassification (rus-Cyrl)": 42.69,
-                    "ScalaDaClassification": 50.79,
-                    "ScalaNbClassification": 50.32,
-                    "TNews (cmn-Hans)": 47.52,
-                    "TNews": 47.06,
-                    "ToxicConversationsClassification": 64.33,
-                    "TweetSentimentExtractionClassification": 62.8,
-                    "Waimai (cmn-Hans)": 85.98,
-                    "Waimai": 84.42
+                    "Model": "gte-Qwen2-7B-instruct"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "multilingual-e5-base",
-                    "8TagsClustering": 24.97,
-                    "AlloProfClusteringP2P": 62.09,
-                    "AlloProfClusteringS2S": 32.98,
-                    "ArxivClusteringP2P": 43.35,
-                    "ArxivClusteringS2S": 36.0,
-                    "BiorxivClusteringP2P": 37.55,
-                    "BiorxivClusteringS2S": 30.33,
-                    "CLSClusteringP2P": 32.41,
-                    "CLSClusteringS2S": 36.99,
-                    "GeoreviewClusteringP2P (rus-Cyrl)": 54.46,
-                    "HALClusteringS2S": 22.48,
-                    "MLSUMClusteringP2P (rus-Cyrl)": 43.47,
-                    "MLSUMClusteringP2P": 43.48,
-                    "MLSUMClusteringS2S (rus-Cyrl)": 40.87,
-                    "MLSUMClusteringS2S": 38.53,
-                    "MasakhaNEWSClusteringP2P (amh-Ethi)": 58.05,
-                    "MasakhaNEWSClusteringP2P (eng)": 43.8,
-                    "MasakhaNEWSClusteringP2P (fra-Latn)": 58.28,
-                    "MasakhaNEWSClusteringP2P (hau-Latn)": 44.78,
-                    "MasakhaNEWSClusteringP2P (ibo-Latn)": 44.97,
-                    "MasakhaNEWSClusteringP2P (lin-Latn)": 48.08,
-                    "MasakhaNEWSClusteringP2P (lug-Latn)": 50.15,
-                    "MasakhaNEWSClusteringP2P (orm-Ethi)": 38.02,
-                    "MasakhaNEWSClusteringP2P (pcm-Latn)": 71.03,
-                    "MasakhaNEWSClusteringP2P (run-Latn)": 58.28,
-                    "MasakhaNEWSClusteringP2P (sna-Latn)": 59.25,
-                    "MasakhaNEWSClusteringP2P (som-Latn)": 37.27,
-                    "MasakhaNEWSClusteringP2P (swa-Latn)": 34.54,
-                    "MasakhaNEWSClusteringP2P (tir-Ethi)": 53.44,
-                    "MasakhaNEWSClusteringP2P (xho-Latn)": 40.32,
-                    "MasakhaNEWSClusteringP2P (yor-Latn)": 37.97,
-                    "MasakhaNEWSClusteringP2P (fra)": 47.91,
-                    "MasakhaNEWSClusteringS2S (amh-Ethi)": 49.38,
-                    "MasakhaNEWSClusteringS2S (eng)": 45.76,
-                    "MasakhaNEWSClusteringS2S (fra-Latn)": 55.43,
-                    "MasakhaNEWSClusteringS2S (hau-Latn)": 16.11,
-                    "MasakhaNEWSClusteringS2S (ibo-Latn)": 24.38,
-                    "MasakhaNEWSClusteringS2S (lin-Latn)": 44.8,
-                    "MasakhaNEWSClusteringS2S (lug-Latn)": 45.67,
-                    "MasakhaNEWSClusteringS2S (orm-Ethi)": 26.41,
-                    "MasakhaNEWSClusteringS2S (pcm-Latn)": 83.26,
-                    "MasakhaNEWSClusteringS2S (run-Latn)": 48.77,
-                    "MasakhaNEWSClusteringS2S (sna-Latn)": 43.9,
-                    "MasakhaNEWSClusteringS2S (som-Latn)": 25.43,
-                    "MasakhaNEWSClusteringS2S (swa-Latn)": 9.87,
-                    "MasakhaNEWSClusteringS2S (tir-Ethi)": 51.66,
-                    "MasakhaNEWSClusteringS2S (xho-Latn)": 29.65,
-                    "MasakhaNEWSClusteringS2S (yor-Latn)": 30.12,
-                    "MasakhaNEWSClusteringS2S (fra)": 51.16,
-                    "MedrxivClusteringP2P": 30.6,
-                    "MedrxivClusteringS2S": 28.73,
-                    "RedditClustering": 43.15,
-                    "RedditClusteringP2P": 61.69,
-                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 51.56,
-                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 44.78,
-                    "StackExchangeClustering": 55.31,
-                    "StackExchangeClusteringP2P": 33.51,
-                    "ThuNewsClusteringP2P": 40.98,
-                    "ThuNewsClusteringS2S": 52.36,
-                    "TwentyNewsgroupsClustering": 35.55
+                    "Model": "gte-Qwen2-7B-instruct"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "multilingual-e5-base",
-                    "CDSC-E (pol-Latn)": 72.7,
-                    "CDSC-E": 72.67,
-                    "Cmnli": 74.51,
-                    "Ocnli": 59.63,
-                    "OpusparcusPC (deu-Latn)": 95.83,
-                    "OpusparcusPC (en)": 98.71,
-                    "OpusparcusPC (fin-Latn)": 90.3,
-                    "OpusparcusPC (fra-Latn)": 92.12,
-                    "OpusparcusPC (rus-Cyrl)": 86.82,
-                    "OpusparcusPC (swe-Latn)": 93.05,
-                    "OpusparcusPC (fr)": 92.72,
-                    "PPC": 88.01,
-                    "PSC (pol-Latn)": 99.14,
-                    "PSC": 99.14,
-                    "PawsXPairClassification (deu-Latn)": 54.11,
-                    "PawsXPairClassification (en)": 55.79,
-                    "PawsXPairClassification (spa-Latn)": 54.13,
-                    "PawsXPairClassification (fra-Latn)": 56.01,
-                    "PawsXPairClassification (jpn-Hira)": 49.02,
-                    "PawsXPairClassification (kor-Hang)": 51.01,
-                    "PawsXPairClassification (cmn-Hans)": 55.13,
-                    "PawsXPairClassification (fr)": 56.93,
-                    "SICK-E-PL (pol-Latn)": 68.76,
-                    "SICK-E-PL": 68.77,
-                    "SprintDuplicateQuestions": 93.02,
-                    "TERRa (rus-Cyrl)": 54.96,
-                    "TwitterSemEval2015": 72.21,
-                    "TwitterURLCorpus": 85.48
+                    "Model": "gte-Qwen2-7B-instruct"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "multilingual-e5-base",
-                    "AlloprofReranking (fra-Latn)": 65.9,
-                    "AlloprofReranking": 58.1,
-                    "AskUbuntuDupQuestions": 59.28,
-                    "CMedQAv1": 65.21,
-                    "CMedQAv2": 66.06,
-                    "MMarcoReranking (cmn-Hans)": 30.52,
-                    "MMarcoReranking": 21.76,
-                    "MindSmallReranking": 29.28,
-                    "RuBQReranking (rus-Cyrl)": 72.01,
-                    "SciDocsRR": 81.81,
-                    "StackOverflowDupQuestions": 49.75,
-                    "SyntecReranking (fra-Latn)": 85.31,
-                    "SyntecReranking": 85.43,
-                    "T2Reranking (cmn-Hans)": 64.86,
-                    "T2Reranking": 64.39
+                    "Model": "gte-Qwen2-7B-instruct"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "multilingual-e5-base",
-                    "AILACasedocs": 26.05,
-                    "AILAStatutes": 20.37,
-                    "ARCChallenge": 9.61,
-                    "AlloprofRetrieval (fra-Latn)": 34.45,
-                    "AlloprofRetrieval": 36.21,
-                    "AlphaNLI": 16.44,
-                    "ArguAna": 44.21,
-                    "ArguAna-PL (pol-Latn)": 42.86,
-                    "ArguAna-PL": 42.81,
-                    "BSARDRetrieval (fra-Latn)": 18.83,
-                    "BSARDRetrieval": 0.0,
-                    "CmedqaRetrieval (cmn-Hans)": 27.2,
-                    "CmedqaRetrieval": 27.2,
-                    "CovidRetrieval (cmn-Hans)": 73.48,
-                    "CovidRetrieval": 73.45,
-                    "DBPedia-PL": 30.23,
-                    "DuRetrieval (cmn-Hans)": 81.66,
-                    "DuRetrieval": 81.64,
-                    "EcomRetrieval (cmn-Hans)": 54.01,
-                    "EcomRetrieval": 54.17,
-                    "FiQA-PL (pol-Latn)": 25.59,
-                    "FiQA-PL": 25.52,
-                    "FiQA2018": 38.15,
-                    "GerDaLIRSmall (deu-Latn)": 15.3,
-                    "HellaSwag": 24.79,
-                    "HotpotQA-PL": 63.52,
-                    "LEMBNarrativeQARetrieval": 23.6,
-                    "LEMBNeedleRetrieval": 32.0,
-                    "LEMBPasskeyRetrieval": 38.25,
-                    "LEMBQMSumRetrieval": 25.16,
-                    "LEMBSummScreenFDRetrieval": 68.21,
-                    "LEMBWikimQARetrieval": 56.04,
-                    "LeCaRDv2 (zho-Hans)": 59.0,
-                    "LegalBenchConsumerContractsQA": 69.02,
-                    "LegalBenchCorporateLobbying": 88.97,
-                    "LegalQuAD (deu-Latn)": 47.85,
-                    "LegalSummarization": 61.69,
-                    "MMarcoRetrieval (cmn-Hans)": 76.01,
-                    "MMarcoRetrieval": 76.04,
-                    "MSMARCO-PL": 29.52,
-                    "MedicalRetrieval (cmn-Hans)": 48.33,
-                    "MedicalRetrieval": 48.35,
-                    "MintakaRetrieval (ara-Arab)": 23.06,
-                    "MintakaRetrieval (deu-Latn)": 29.8,
-                    "MintakaRetrieval (spa-Latn)": 29.88,
-                    "MintakaRetrieval (fra-Latn)": 30.96,
-                    "MintakaRetrieval (hin-Deva)": 22.68,
-                    "MintakaRetrieval (ita-Latn)": 29.77,
-                    "MintakaRetrieval (jpn-Hira)": 22.98,
-                    "MintakaRetrieval (por-Latn)": 30.62,
-                    "MintakaRetrieval (fr)": 23.46,
-                    "NFCorpus": 32.49,
-                    "NFCorpus-PL (pol-Latn)": 25.99,
-                    "NFCorpus-PL": 25.98,
-                    "NQ-PL": 44.8,
-                    "PIQA": 25.09,
-                    "Quail": 3.52,
-                    "Quora-PL": 81.22,
-                    "RARbCode": 52.16,
-                    "RARbMath": 65.35,
-                    "RiaNewsRetrieval (rus-Cyrl)": 70.24,
-                    "RuBQRetrieval (rus-Cyrl)": 69.58,
-                    "SCIDOCS": 17.17,
-                    "SCIDOCS-PL (pol-Latn)": 12.36,
-                    "SCIDOCS-PL": 12.35,
-                    "SIQA": 3.72,
-                    "SciFact": 69.39,
-                    "SciFact-PL (pol-Latn)": 62.26,
-                    "SciFact-PL": 62.11,
-                    "SpartQA": 7.91,
-                    "SyntecRetrieval (fra-Latn)": 82.86,
-                    "SyntecRetrieval": 80.49,
-                    "T2Retrieval (cmn-Hans)": 70.77,
-                    "T2Retrieval": 70.86,
-                    "TRECCOVID": 69.5,
-                    "TRECCOVID-PL (pol-Latn)": 65.94,
-                    "TRECCOVID-PL": 66.06,
-                    "TempReasonL1": 0.72,
-                    "TempReasonL2Fact": 38.76,
-                    "TempReasonL2Pure": 1.63,
-                    "TempReasonL3Fact": 35.85,
-                    "TempReasonL3Pure": 7.11,
-                    "Touche2020": 21.5,
-                    "VideoRetrieval (cmn-Hans)": 61.26,
-                    "VideoRetrieval": 61.3,
-                    "WinoGrande": 56.18,
-                    "XPQARetrieval (ara-Arab_ara-Arab)": 39.97,
-                    "XPQARetrieval (eng-Latn_ara-Arab)": 17.23,
-                    "XPQARetrieval (ara-Arab_eng-Latn)": 34.35,
-                    "XPQARetrieval (deu-Latn_deu-Latn)": 72.11,
-                    "XPQARetrieval (eng-Latn_deu-Latn)": 28.91,
-                    "XPQARetrieval (deu-Latn_eng-Latn)": 61.46,
-                    "XPQARetrieval (spa-Latn_spa-Latn)": 58.35,
-                    "XPQARetrieval (eng-Latn_spa-Latn)": 25.27,
-                    "XPQARetrieval (spa-Latn_eng-Latn)": 51.07,
-                    "XPQARetrieval (fra-Latn_fra-Latn)": 59.56,
-                    "XPQARetrieval (eng-Latn_fra-Latn)": 23.69,
-                    "XPQARetrieval (fra-Latn_eng-Latn)": 53.9,
-                    "XPQARetrieval (hin-Deva_hin-Deva)": 70.56,
-                    "XPQARetrieval (eng-Latn_hin-Deva)": 27.57,
-                    "XPQARetrieval (hin-Deva_eng-Latn)": 63.68,
-                    "XPQARetrieval (ita-Latn_ita-Latn)": 70.38,
-                    "XPQARetrieval (eng-Latn_ita-Latn)": 26.06,
-                    "XPQARetrieval (ita-Latn_eng-Latn)": 56.2,
-                    "XPQARetrieval (jpn-Hira_jpn-Hira)": 71.97,
-                    "XPQARetrieval (eng-Latn_jpn-Hira)": 17.63,
-                    "XPQARetrieval (jpn-Hira_eng-Latn)": 61.03,
-                    "XPQARetrieval (kor-Hang_kor-Hang)": 36.12,
-                    "XPQARetrieval (eng-Latn_kor-Hang)": 20.27,
-                    "XPQARetrieval (kor-Hang_eng-Latn)": 29.26,
-                    "XPQARetrieval (pol-Latn_pol-Latn)": 48.1,
-                    "XPQARetrieval (eng-Latn_pol-Latn)": 19.48,
-                    "XPQARetrieval (pol-Latn_eng-Latn)": 40.18,
-                    "XPQARetrieval (por-Latn_por-Latn)": 44.76,
-                    "XPQARetrieval (eng-Latn_por-Latn)": 17.66,
-                    "XPQARetrieval (por-Latn_eng-Latn)": 40.52,
-                    "XPQARetrieval (tam-Taml_tam-Taml)": 35.25,
-                    "XPQARetrieval (eng-Latn_tam-Taml)": 12.64,
-                    "XPQARetrieval (tam-Taml_eng-Latn)": 26.73,
-                    "XPQARetrieval (cmn-Hans_cmn-Hans)": 67.06,
-                    "XPQARetrieval (eng-Latn_cmn-Hans)": 12.72,
-                    "XPQARetrieval (cmn-Hans_eng-Latn)": 53.53,
-                    "XPQARetrieval (fr)": 65.81
+                    "Model": "gte-Qwen2-7B-instruct",
+                    "BrightRetrieval (earth_science)": 40.66,
+                    "BrightRetrieval (sustainable_living)": 20.82,
+                    "BrightRetrieval (theoremqa_theorems)": 28.15,
+                    "BrightRetrieval (aops)": 15.1,
+                    "BrightRetrieval (economics)": 16.18,
+                    "BrightRetrieval (pony)": 1.25,
+                    "BrightRetrieval (stackoverflow)": 13.95,
+                    "BrightRetrieval (leetcode)": 31.07,
+                    "BrightRetrieval (biology)": 32.09,
+                    "BrightRetrieval (theoremqa_questions)": 29.9,
+                    "BrightRetrieval (robotics)": 12.82,
+                    "BrightRetrieval (psychology)": 26.58
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "multilingual-e5-base",
-                    "AFQMC (cmn-Hans)": 29.66,
-                    "AFQMC": 29.67,
-                    "ATEC (cmn-Hans)": 37.01,
-                    "ATEC": 37.01,
-                    "BIOSSES": 85.05,
-                    "BQ (cmn-Hans)": 45.45,
-                    "BQ": 45.45,
-                    "CDSC-R (pol-Latn)": 90.09,
-                    "CDSC-R": 90.08,
-                    "LCQMC (cmn-Hans)": 74.15,
-                    "LCQMC": 74.15,
-                    "PAWSX (cmn-Hans)": 12.13,
-                    "PAWSX": 12.14,
-                    "QBQTC": 28.81,
-                    "RUParaPhraserSTS (rus-Cyrl)": 70.17,
-                    "RuSTSBenchmarkSTS (rus-Cyrl)": 79.64,
-                    "SICK-R": 78.51,
-                    "SICK-R-PL (pol-Latn)": 71.23,
-                    "SICK-R-PL": 71.23,
-                    "SICKFr (fra-Latn)": 75.76,
-                    "SICKFr": 76.23,
-                    "STS12": 76.7,
-                    "STS13": 78.02,
-                    "STS14": 76.6,
-                    "STS15": 88.16,
-                    "STS16": 84.28,
-                    "STS17 (fra-Latn_eng-Latn)": 80.18,
-                    "STS17 (ita-Latn_eng-Latn)": 80.16,
-                    "STS17 (eng-Latn_ara-Arab)": 71.27,
-                    "STS17 (kor-Hang)": 79.95,
-                    "STS17 (eng-Latn_tur-Latn)": 63.3,
-                    "STS17 (spa-Latn_eng-Latn)": 76.56,
-                    "STS17 (spa-Latn)": 86.74,
-                    "STS17 (en-en)": 87.84,
-                    "STS17 (ara-Arab)": 74.48,
-                    "STS17 (nld-Latn_eng-Latn)": 79.29,
-                    "STS17 (eng-Latn_deu-Latn)": 82.08,
-                    "STS22 (fra-Latn)": 75.04,
-                    "STS22 (ara-Arab)": 57.82,
-                    "STS22 (en)": 62.26,
-                    "STS22 (spa-Latn)": 66.67,
-                    "STS22 (fra-Latn_pol-Latn)": 73.25,
-                    "STS22 (ita-Latn)": 77.76,
-                    "STS22 (pol-Latn_eng-Latn)": 70.37,
-                    "STS22 (tur-Latn)": 63.71,
-                    "STS22 (rus-Cyrl)": 60.67,
-                    "STS22 (deu-Latn)": 55.95,
-                    "STS22 (deu-Latn_fra-Latn)": 59.68,
-                    "STS22 (spa-Latn_eng-Latn)": 74.0,
-                    "STS22 (cmn-Hans_eng-Latn)": 69.8,
-                    "STS22 (pol-Latn)": 34.08,
-                    "STS22 (spa-Latn_ita-Latn)": 66.43,
-                    "STS22 (cmn-Hans)": 65.63,
-                    "STS22 (deu-Latn_pol-Latn)": 39.35,
-                    "STS22 (deu-Latn_eng-Latn)": 54.89,
-                    "STS22 (zh)": 65.64,
-                    "STS22 (pl)": 34.07,
-                    "STSB (cmn-Hans)": 79.04,
-                    "STSB": 79.05,
-                    "STSBenchmark": 85.64,
-                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 79.33,
-                    "STSBenchmarkMultilingualSTS (pol-Latn)": 74.93,
-                    "STSBenchmarkMultilingualSTS (spa-Latn)": 81.75,
-                    "STSBenchmarkMultilingualSTS (en)": 85.64,
-                    "STSBenchmarkMultilingualSTS (cmn-Hans)": 79.87,
-                    "STSBenchmarkMultilingualSTS (fra-Latn)": 80.85,
-                    "STSBenchmarkMultilingualSTS (deu-Latn)": 79.68,
-                    "STSBenchmarkMultilingualSTS (nld-Latn)": 75.96,
-                    "STSBenchmarkMultilingualSTS (por-Latn)": 67.16,
-                    "STSBenchmarkMultilingualSTS (ita-Latn)": 78.09,
-                    "STSBenchmarkMultilingualSTS (fr)": 80.62
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "multilingual-e5-base",
-                    "SummEval": 30.23,
-                    "SummEvalFr (fra-Latn)": 32.96,
-                    "SummEvalFr": 30.76
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "multilingual-e5-base"
-                }
-            ]
-        }
-    },
-    "nomic-embed-text-v1.5-128": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "nomic-embed-text-v1.5-128"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "nomic-embed-text-v1.5-128",
-                    "AmazonCounterfactualClassification (en)": 69.78,
-                    "AmazonPolarityClassification": 88.74,
-                    "AmazonReviewsClassification (en)": 43.11,
-                    "Banking77Classification": 82.78,
-                    "EmotionClassification": 42.92,
-                    "ImdbClassification": 80.87,
-                    "MTOPDomainClassification (en)": 89.61,
-                    "MTOPIntentClassification (en)": 68.9,
-                    "MassiveIntentClassification (en)": 69.34,
-                    "MassiveScenarioClassification (en)": 74.21,
-                    "ToxicConversationsClassification": 68.16,
-                    "TweetSentimentExtractionClassification": 57.99
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "nomic-embed-text-v1.5-128",
-                    "ArxivClusteringP2P": 43.87,
-                    "ArxivClusteringS2S": 34.57,
-                    "BiorxivClusteringP2P": 36.79,
-                    "BiorxivClusteringS2S": 30.68,
-                    "MedrxivClusteringP2P": 34.09,
-                    "MedrxivClusteringS2S": 31.3,
-                    "RedditClustering": 53.31,
-                    "RedditClusteringP2P": 58.96,
-                    "StackExchangeClustering": 59.92,
-                    "StackExchangeClusteringP2P": 33.88,
-                    "TwentyNewsgroupsClustering": 47.29
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "nomic-embed-text-v1.5-128",
-                    "SprintDuplicateQuestions": 91.45,
-                    "TwitterSemEval2015": 73.23,
-                    "TwitterURLCorpus": 85.93
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "nomic-embed-text-v1.5-128",
-                    "AskUbuntuDupQuestions": 61.16,
-                    "MindSmallReranking": 30.02,
-                    "SciDocsRR": 78.05,
-                    "StackOverflowDupQuestions": 49.0
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "nomic-embed-text-v1.5-128",
-                    "ArguAna": 43.4,
-                    "CQADupstackRetrieval": 34.67,
-                    "ClimateFEVER": 36.52,
-                    "DBPedia": 36.22,
-                    "FEVER": 80.48,
-                    "FiQA2018": 32.08,
-                    "HotpotQA": 60.09,
-                    "MSMARCO": 39.99,
-                    "NFCorpus": 30.72,
-                    "NQ": 53.62,
-                    "QuoraRetrieval": 87.07,
-                    "SCIDOCS": 15.56,
-                    "SciFact": 64.28,
-                    "TRECCOVID": 74.58,
-                    "Touche2020": 26.99
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "nomic-embed-text-v1.5-128",
-                    "BIOSSES": 80.19,
-                    "SICK-R": 79.09,
-                    "STS12": 77.49,
-                    "STS13": 85.62,
-                    "STS14": 80.5,
-                    "STS15": 85.84,
-                    "STS16": 83.9,
-                    "STS17 (en-en)": 86.27,
-                    "STS22 (en)": 64.24,
-                    "STSBenchmark": 84.28
+                    "Model": "gte-Qwen2-7B-instruct"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "nomic-embed-text-v1.5-128",
-                    "SummEval": 29.59
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "nomic-embed-text-v1.5-128"
-                }
-            ]
-        }
-    },
-    "SFR-Embedding-Mistral": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "SFR-Embedding-Mistral"
+                    "Model": "gte-Qwen2-7B-instruct"
                 }
             ]
         },
-        "Classification": {
+        "MultilabelClassification": {
             "accuracy": [
                 {
-                    "Model": "SFR-Embedding-Mistral"
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "SFR-Embedding-Mistral"
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "SFR-Embedding-Mistral"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "SFR-Embedding-Mistral"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "SFR-Embedding-Mistral",
-                    "BrightRetrieval (sustainable_living)": 19.79,
-                    "BrightRetrieval (economics)": 17.84,
-                    "BrightRetrieval (theoremqa_theorems)": 24.05,
-                    "BrightRetrieval (aops)": 7.43,
-                    "BrightRetrieval (theoremqa_questions)": 23.05,
-                    "BrightRetrieval (psychology)": 18.97,
-                    "BrightRetrieval (stackoverflow)": 12.72,
-                    "BrightRetrieval (pony)": 1.97,
-                    "BrightRetrieval (leetcode)": 27.35,
-                    "BrightRetrieval (biology)": 19.49,
-                    "BrightRetrieval (earth_science)": 26.63,
-                    "BrightRetrieval (robotics)": 16.7
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "SFR-Embedding-Mistral"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "SFR-Embedding-Mistral"
+                    "Model": "gte-Qwen2-7B-instruct"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "SFR-Embedding-Mistral"
+                    "Model": "gte-Qwen2-7B-instruct"
                 }
             ]
         }
     },
-    "glove.6B.300d": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "glove.6B.300d",
-                    "BUCC (de-en)": 0.18,
-                    "BUCC (fr-en)": 0.19,
-                    "BUCC (ru-en)": 0.1,
-                    "BUCC (zh-en)": 0.0
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "glove.6B.300d",
-                    "AmazonCounterfactualClassification (en)": 56.91,
-                    "AmazonPolarityClassification": 60.32,
-                    "AmazonReviewsClassification (en)": 29.67,
-                    "Banking77Classification": 67.69,
-                    "EmotionClassification": 36.93,
-                    "ImdbClassification": 62.57,
-                    "MTOPDomainClassification (en)": 79.11,
-                    "MTOPIntentClassification (en)": 55.85,
-                    "MassiveIntentClassification (en)": 56.19,
-                    "MassiveScenarioClassification (en)": 66.03,
-                    "ToxicConversationsClassification": 65.4,
-                    "TweetSentimentExtractionClassification": 50.8
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "glove.6B.300d",
-                    "ArxivClusteringP2P": 32.56,
-                    "ArxivClusteringS2S": 23.14,
-                    "BiorxivClusteringP2P": 29.27,
-                    "BiorxivClusteringS2S": 19.18,
-                    "MedrxivClusteringP2P": 26.12,
-                    "MedrxivClusteringS2S": 20.38,
-                    "RedditClustering": 28.46,
-                    "RedditClusteringP2P": 35.82,
-                    "StackExchangeClustering": 35.8,
-                    "StackExchangeClusteringP2P": 28.51,
-                    "TwentyNewsgroupsClustering": 25.83
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "glove.6B.300d",
-                    "SprintDuplicateQuestions": 86.96,
-                    "TwitterSemEval2015": 48.45,
-                    "TwitterURLCorpus": 77.35
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "glove.6B.300d",
-                    "AskUbuntuDupQuestions": 49.57,
-                    "MindSmallReranking": 27.01,
-                    "SciDocsRR": 62.56,
-                    "StackOverflowDupQuestions": 34.03
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "glove.6B.300d",
-                    "ArguAna": 36.3,
-                    "CQADupstackRetrieval": 15.47,
-                    "ClimateFEVER": 14.44,
-                    "DBPedia": 18.28,
-                    "FEVER": 14.99,
-                    "FiQA2018": 10.09,
-                    "HotpotQA": 19.18,
-                    "MSMARCO": 9.6,
-                    "NFCorpus": 13.87,
-                    "NQ": 12.87,
-                    "QuoraRetrieval": 71.32,
-                    "SCIDOCS": 8.04,
-                    "SciFact": 29.58,
-                    "TRECCOVID": 36.22,
-                    "Touche2020": 13.99
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "glove.6B.300d",
-                    "BIOSSES": 44.93,
-                    "SICK-R": 55.43,
-                    "STS12": 54.64,
-                    "STS13": 69.16,
-                    "STS14": 60.81,
-                    "STS15": 72.31,
-                    "STS16": 65.34,
-                    "STS17 (en-en)": 77.95,
-                    "STS22 (en)": 56.35,
-                    "STSBenchmark": 61.54
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "glove.6B.300d",
-                    "SummEval": 28.87
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "glove.6B.300d"
-                }
-            ]
-        }
-    },
-    "multi-qa-MiniLM-L6-cos-v1": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "multi-qa-MiniLM-L6-cos-v1"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "multi-qa-MiniLM-L6-cos-v1",
-                    "AmazonReviewsClassification (fr)": 27.05,
-                    "MTOPDomainClassification (fr)": 72.97,
-                    "MTOPIntentClassification (fr)": 37.18,
-                    "MasakhaNEWSClassification (fra)": 75.62,
-                    "MassiveIntentClassification (fr)": 42.64,
-                    "MassiveScenarioClassification (fr)": 49.92
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "multi-qa-MiniLM-L6-cos-v1",
-                    "AlloProfClusteringP2P": 49.13,
-                    "AlloProfClusteringS2S": 26.16,
-                    "HALClusteringS2S": 12.49,
-                    "MLSUMClusteringP2P": 35.15,
-                    "MLSUMClusteringS2S": 25.95,
-                    "MasakhaNEWSClusteringP2P (fra)": 53.73,
-                    "MasakhaNEWSClusteringS2S (fra)": 27.27
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "multi-qa-MiniLM-L6-cos-v1",
-                    "OpusparcusPC (fr)": 88.07,
-                    "PawsXPairClassification (fr)": 57.36
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "multi-qa-MiniLM-L6-cos-v1",
-                    "AlloprofReranking": 40.28,
-                    "SyntecReranking": 65.08
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "multi-qa-MiniLM-L6-cos-v1",
-                    "AlloprofRetrieval": 30.23,
-                    "BSARDRetrieval": 0.0,
-                    "MintakaRetrieval (fr)": 16.31,
-                    "SyntecRetrieval": 58.07,
-                    "XPQARetrieval (fr)": 48.83
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "multi-qa-MiniLM-L6-cos-v1",
-                    "SICKFr": 62.11,
-                    "STS22 (fr)": 74.62,
-                    "STSBenchmarkMultilingualSTS (fr)": 63.85
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "multi-qa-MiniLM-L6-cos-v1",
-                    "SummEvalFr": 27.59
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "multi-qa-MiniLM-L6-cos-v1"
-                }
-            ]
-        }
-    },
-    "paraphrase-multilingual-MiniLM-L12-v2": {
+    "paraphrase-multilingual-MiniLM-L12-v2": {
         "BitextMining": {
             "f1": [
                 {
@@ -3745,24 +2452,24 @@
                     "STS17 (it-en)": 82.35,
                     "STS17 (ko-ko)": 77.03,
                     "STS17 (nl-en)": 81.71,
-                    "STS22 (ara-Arab)": 46.2,
-                    "STS22 (spa-Latn_eng-Latn)": 67.33,
-                    "STS22 (cmn-Hans)": 58.75,
-                    "STS22 (fra-Latn)": 70.55,
-                    "STS22 (en)": 62.07,
-                    "STS22 (deu-Latn)": 44.64,
                     "STS22 (pol-Latn)": 33.74,
-                    "STS22 (rus-Cyrl)": 57.08,
+                    "STS22 (spa-Latn)": 56.56,
+                    "STS22 (en)": 62.07,
+                    "STS22 (fra-Latn)": 70.55,
+                    "STS22 (spa-Latn_eng-Latn)": 67.33,
+                    "STS22 (fra-Latn_pol-Latn)": 50.71,
                     "STS22 (pol-Latn_eng-Latn)": 69.02,
-                    "STS22 (deu-Latn_eng-Latn)": 52.65,
+                    "STS22 (deu-Latn_fra-Latn)": 51.73,
+                    "STS22 (deu-Latn)": 44.64,
+                    "STS22 (ara-Arab)": 46.2,
+                    "STS22 (deu-Latn_pol-Latn)": 44.22,
                     "STS22 (cmn-Hans_eng-Latn)": 65.71,
+                    "STS22 (ita-Latn)": 55.22,
                     "STS22 (tur-Latn)": 53.39,
-                    "STS22 (spa-Latn)": 56.56,
-                    "STS22 (deu-Latn_pol-Latn)": 44.22,
+                    "STS22 (rus-Cyrl)": 57.08,
+                    "STS22 (cmn-Hans)": 58.75,
                     "STS22 (spa-Latn_ita-Latn)": 47.67,
-                    "STS22 (deu-Latn_fra-Latn)": 51.73,
-                    "STS22 (fra-Latn_pol-Latn)": 50.71,
-                    "STS22 (ita-Latn)": 55.22,
+                    "STS22 (deu-Latn_eng-Latn)": 52.65,
                     "STS22 (pl)": 33.73,
                     "STS22 (fr)": 70.55,
                     "STSB (cmn-Hans)": 78.91,
@@ -3791,6 +2498,15 @@
                 }
             ]
         },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "paraphrase-multilingual-MiniLM-L12-v2",
+                    "CEDRClassification (rus-Cyrl)": 37.76,
+                    "SensitiveTopicsClassification (rus-Cyrl)": 24.84
+                }
+            ]
+        },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
@@ -3799,162 +2515,299 @@
             ]
         }
     },
-    "bge-m3-instruct": {
+    "text-search-babbage-001": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "bge-m3-instruct"
+                    "Model": "text-search-babbage-001"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "bge-m3-instruct"
+                    "Model": "text-search-babbage-001"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "bge-m3-instruct"
+                    "Model": "text-search-babbage-001"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "bge-m3-instruct"
+                    "Model": "text-search-babbage-001"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "bge-m3-instruct"
+                    "Model": "text-search-babbage-001"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "bge-m3-instruct",
-                    "ARCChallenge": 9.03,
-                    "AlphaNLI": 24.69,
-                    "HellaSwag": 25.55,
-                    "PIQA": 19.03,
-                    "Quail": 7.08,
-                    "RARbCode": 39.58,
-                    "RARbMath": 64.51,
-                    "SIQA": 4.77,
-                    "SpartQA": 7.0,
-                    "TempReasonL1": 0.8,
-                    "TempReasonL2Fact": 34.99,
-                    "TempReasonL2Pure": 0.62,
-                    "TempReasonL3Fact": 32.47,
-                    "TempReasonL3Pure": 7.01,
-                    "WinoGrande": 35.33
+                    "Model": "text-search-babbage-001",
+                    "ArguAna": 49.2,
+                    "ClimateFEVER": 19.9,
+                    "FEVER": 77.0,
+                    "FiQA2018": 42.2,
+                    "HotpotQA": 63.1,
+                    "NFCorpus": 36.7,
+                    "QuoraRetrieval": 69.7,
+                    "SciFact": 70.4,
+                    "TRECCOVID": 58.5,
+                    "Touche2020": 29.7
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "bge-m3-instruct"
+                    "Model": "text-search-babbage-001"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "bge-m3-instruct"
+                    "Model": "text-search-babbage-001"
                 }
             ]
         },
-        "InstructionRetrieval": {
-            "p-MRR": [
+        "MultilabelClassification": {
+            "accuracy": [
                 {
-                    "Model": "bge-m3-instruct"
+                    "Model": "text-search-babbage-001"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "text-search-babbage-001"
                 }
             ]
         }
     },
-    "text-embedding-ada-002-instruct": {
+    "text-embedding-3-large-instruct": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "text-embedding-ada-002-instruct"
+                    "Model": "text-embedding-3-large-instruct"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "text-embedding-ada-002-instruct"
+                    "Model": "text-embedding-3-large-instruct"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "text-embedding-ada-002-instruct"
+                    "Model": "text-embedding-3-large-instruct"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "text-embedding-ada-002-instruct"
+                    "Model": "text-embedding-3-large-instruct"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "text-embedding-ada-002-instruct"
+                    "Model": "text-embedding-3-large-instruct"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "text-embedding-ada-002-instruct",
-                    "ARCChallenge": 11.85,
-                    "AlphaNLI": 10.62,
-                    "HellaSwag": 24.8,
-                    "PIQA": 23.87,
-                    "Quail": 5.79,
-                    "RARbCode": 82.36,
-                    "RARbMath": 67.26,
-                    "SIQA": 2.64,
-                    "SpartQA": 4.75,
-                    "TempReasonL1": 1.44,
-                    "TempReasonL2Fact": 19.38,
-                    "TempReasonL2Pure": 2.43,
-                    "TempReasonL3Fact": 17.58,
-                    "TempReasonL3Pure": 7.31,
-                    "WinoGrande": 11.36
+                    "Model": "text-embedding-3-large-instruct",
+                    "ARCChallenge": 21.22,
+                    "AlphaNLI": 34.23,
+                    "HellaSwag": 31.4,
+                    "PIQA": 37.52,
+                    "Quail": 13.6,
+                    "RARbCode": 89.41,
+                    "RARbMath": 87.73,
+                    "SIQA": 4.99,
+                    "SpartQA": 7.45,
+                    "TempReasonL1": 2.07,
+                    "TempReasonL2Fact": 39.77,
+                    "TempReasonL2Pure": 11.04,
+                    "TempReasonL3Fact": 37.04,
+                    "TempReasonL3Pure": 15.51,
+                    "WinoGrande": 33.92
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "text-embedding-ada-002-instruct"
+                    "Model": "text-embedding-3-large-instruct"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "text-embedding-ada-002-instruct"
+                    "Model": "text-embedding-3-large-instruct"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "text-embedding-3-large-instruct"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "text-embedding-ada-002-instruct"
+                    "Model": "text-embedding-3-large-instruct"
+                }
+            ]
+        }
+    },
+    "LLM2Vec-Sheared-Llama-unsupervised": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "LLM2Vec-Sheared-Llama-unsupervised"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "LLM2Vec-Sheared-Llama-unsupervised",
+                    "AmazonCounterfactualClassification (en)": 72.93,
+                    "AmazonPolarityClassification": 74.28,
+                    "AmazonReviewsClassification (en)": 36.14,
+                    "Banking77Classification": 79.0,
+                    "EmotionClassification": 42.85,
+                    "ImdbClassification": 71.92,
+                    "MTOPDomainClassification (en)": 91.24,
+                    "MTOPIntentClassification (en)": 74.08,
+                    "MassiveIntentClassification (en)": 69.99,
+                    "MassiveScenarioClassification (en)": 75.15,
+                    "ToxicConversationsClassification": 68.4,
+                    "TweetSentimentExtractionClassification": 56.08
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "LLM2Vec-Sheared-Llama-unsupervised",
+                    "ArxivClusteringP2P": 42.92,
+                    "ArxivClusteringS2S": 35.2,
+                    "BiorxivClusteringP2P": 35.02,
+                    "BiorxivClusteringS2S": 27.21,
+                    "MedrxivClusteringP2P": 30.15,
+                    "MedrxivClusteringS2S": 26.96,
+                    "RedditClustering": 38.67,
+                    "RedditClusteringP2P": 53.42,
+                    "StackExchangeClustering": 59.35,
+                    "StackExchangeClusteringP2P": 31.47,
+                    "TwentyNewsgroupsClustering": 31.54
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "LLM2Vec-Sheared-Llama-unsupervised",
+                    "SprintDuplicateQuestions": 77.36,
+                    "TwitterSemEval2015": 61.54,
+                    "TwitterURLCorpus": 77.73
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "LLM2Vec-Sheared-Llama-unsupervised",
+                    "AskUbuntuDupQuestions": 52.7,
+                    "MindSmallReranking": 29.52,
+                    "SciDocsRR": 67.76,
+                    "StackOverflowDupQuestions": 40.82
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "LLM2Vec-Sheared-Llama-unsupervised",
+                    "ArguAna": 43.64,
+                    "CQADupstackRetrieval": 18.5,
+                    "ClimateFEVER": 18.95,
+                    "DBPedia": 13.21,
+                    "FEVER": 16.96,
+                    "FiQA2018": 16.99,
+                    "HotpotQA": 22.64,
+                    "MSMARCO": 7.03,
+                    "NFCorpus": 15.73,
+                    "NQ": 17.96,
+                    "QuoraRetrieval": 78.23,
+                    "SCIDOCS": 5.53,
+                    "SciFact": 38.31,
+                    "TRECCOVID": 56.04,
+                    "Touche2020": 19.17
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "LLM2Vec-Sheared-Llama-unsupervised",
+                    "BIOSSES": 75.12,
+                    "SICK-R": 69.34,
+                    "STS12": 60.09,
+                    "STS13": 72.52,
+                    "STS14": 66.7,
+                    "STS15": 77.69,
+                    "STS16": 75.94,
+                    "STS17 (en-en)": 81.67,
+                    "STS22 (en)": 63.7,
+                    "STSBenchmark": 73.36
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "LLM2Vec-Sheared-Llama-unsupervised",
+                    "SummEval": 31.23
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "LLM2Vec-Sheared-Llama-unsupervised"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "LLM2Vec-Sheared-Llama-unsupervised"
                 }
             ]
         }
@@ -4042,6 +2895,13 @@
                 }
             ]
         },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "silver-retriever-base-v1"
+                }
+            ]
+        },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
@@ -4050,12351 +2910,2361 @@
             ]
         }
     },
-    "unsup-simcse-bert-base-uncased": {
+    "google-gecko.text-embedding-preview-0409": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "unsup-simcse-bert-base-uncased"
+                    "Model": "google-gecko.text-embedding-preview-0409"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "unsup-simcse-bert-base-uncased",
-                    "AmazonCounterfactualClassification (en)": 67.09,
-                    "AmazonPolarityClassification": 74.48,
-                    "AmazonReviewsClassification (en)": 33.85,
-                    "Banking77Classification": 73.55,
-                    "EmotionClassification": 42.22,
-                    "ImdbClassification": 69.63,
-                    "MTOPDomainClassification (en)": 81.71,
-                    "MTOPIntentClassification (en)": 59.23,
-                    "MassiveIntentClassification (en)": 59.84,
-                    "MassiveScenarioClassification (en)": 66.25,
-                    "ToxicConversationsClassification": 68.82,
-                    "TweetSentimentExtractionClassification": 53.36
+                    "Model": "google-gecko.text-embedding-preview-0409",
+                    "AmazonCounterfactualClassification (en)": 75.34,
+                    "AmazonPolarityClassification": 97.34,
+                    "AmazonReviewsClassification (en)": 51.17,
+                    "Banking77Classification": 88.62,
+                    "EmotionClassification": 52.51,
+                    "ImdbClassification": 95.65,
+                    "MTOPDomainClassification (en)": 98.35,
+                    "MTOPIntentClassification (en)": 83.43,
+                    "MassiveIntentClassification (en)": 80.22,
+                    "MassiveScenarioClassification (en)": 87.19,
+                    "ToxicConversationsClassification": 89.67,
+                    "TweetSentimentExtractionClassification": 74.52
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "unsup-simcse-bert-base-uncased",
-                    "ArxivClusteringP2P": 32.61,
-                    "ArxivClusteringS2S": 24.68,
-                    "BiorxivClusteringP2P": 24.9,
-                    "BiorxivClusteringS2S": 19.55,
-                    "MedrxivClusteringP2P": 23.6,
-                    "MedrxivClusteringS2S": 21.97,
-                    "RedditClustering": 32.18,
-                    "RedditClusteringP2P": 45.14,
-                    "StackExchangeClustering": 43.07,
-                    "StackExchangeClusteringP2P": 28.5,
-                    "TwentyNewsgroupsClustering": 23.21
+                    "Model": "google-gecko.text-embedding-preview-0409",
+                    "ArxivClusteringP2P": 46.27,
+                    "ArxivClusteringS2S": 38.36,
+                    "BiorxivClusteringP2P": 37.87,
+                    "BiorxivClusteringS2S": 35.67,
+                    "MedrxivClusteringP2P": 33.11,
+                    "MedrxivClusteringS2S": 31.54,
+                    "RedditClustering": 65.81,
+                    "RedditClusteringP2P": 66.62,
+                    "StackExchangeClustering": 74.52,
+                    "StackExchangeClusteringP2P": 37.63,
+                    "TwentyNewsgroupsClustering": 54.87
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "unsup-simcse-bert-base-uncased",
-                    "SprintDuplicateQuestions": 69.41,
-                    "TwitterSemEval2015": 60.21,
-                    "TwitterURLCorpus": 81.37
-                }
+                    "Model": "google-gecko.text-embedding-preview-0409",
+                    "SprintDuplicateQuestions": 96.26,
+                    "TwitterSemEval2015": 79.04,
+                    "TwitterURLCorpus": 87.53
+                }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "unsup-simcse-bert-base-uncased",
-                    "AskUbuntuDupQuestions": 51.57,
-                    "MindSmallReranking": 28.62,
-                    "SciDocsRR": 66.33,
-                    "StackOverflowDupQuestions": 39.35
+                    "Model": "google-gecko.text-embedding-preview-0409",
+                    "AskUbuntuDupQuestions": 64.4,
+                    "MindSmallReranking": 33.07,
+                    "SciDocsRR": 83.59,
+                    "StackOverflowDupQuestions": 54.56
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "unsup-simcse-bert-base-uncased",
-                    "ArguAna": 38.34,
-                    "CQADupstackRetrieval": 13.22,
-                    "ClimateFEVER": 11.8,
-                    "DBPedia": 15.04,
-                    "FEVER": 21.06,
-                    "FiQA2018": 9.84,
-                    "HotpotQA": 19.75,
-                    "MSMARCO": 9.35,
-                    "NFCorpus": 9.88,
-                    "NQ": 11.69,
-                    "QuoraRetrieval": 78.03,
-                    "SCIDOCS": 5.5,
-                    "SciFact": 25.72,
-                    "TRECCOVID": 26.2,
-                    "Touche2020": 8.9
+                    "Model": "google-gecko.text-embedding-preview-0409",
+                    "ArguAna": 62.18,
+                    "BrightRetrieval (earth_science)": 34.38,
+                    "BrightRetrieval (leetcode)": 29.64,
+                    "BrightRetrieval (theoremqa_questions)": 21.51,
+                    "BrightRetrieval (aops)": 9.33,
+                    "BrightRetrieval (sustainable_living)": 17.25,
+                    "BrightRetrieval (pony)": 3.59,
+                    "BrightRetrieval (theoremqa_theorems)": 16.77,
+                    "BrightRetrieval (stackoverflow)": 17.93,
+                    "BrightRetrieval (biology)": 22.98,
+                    "BrightRetrieval (robotics)": 15.98,
+                    "BrightRetrieval (economics)": 19.5,
+                    "BrightRetrieval (psychology)": 27.86,
+                    "CQADupstackRetrieval": 48.89,
+                    "ClimateFEVER": 33.21,
+                    "DBPedia": 47.12,
+                    "FEVER": 86.96,
+                    "FiQA2018": 59.24,
+                    "HotpotQA": 71.33,
+                    "MSMARCO": 32.58,
+                    "NFCorpus": 40.33,
+                    "NQ": 61.28,
+                    "QuoraRetrieval": 88.18,
+                    "SCIDOCS": 20.34,
+                    "SciFact": 75.42,
+                    "TRECCOVID": 82.62,
+                    "Touche2020": 25.86
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "unsup-simcse-bert-base-uncased",
-                    "BIOSSES": 72.31,
-                    "SICK-R": 72.24,
-                    "STS12": 66.05,
-                    "STS13": 81.49,
-                    "STS14": 73.61,
-                    "STS15": 79.72,
-                    "STS16": 78.12,
-                    "STS17 (en-en)": 83.58,
-                    "STS22 (en)": 59.65,
-                    "STSBenchmark": 76.52
+                    "Model": "google-gecko.text-embedding-preview-0409",
+                    "BIOSSES": 89.46,
+                    "SICK-R": 81.93,
+                    "STS12": 77.59,
+                    "STS13": 90.36,
+                    "STS14": 85.25,
+                    "STS15": 89.66,
+                    "STS16": 87.34,
+                    "STS17 (en-en)": 92.06,
+                    "STS22 (en)": 68.02,
+                    "STSBenchmark": 88.99
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "unsup-simcse-bert-base-uncased",
-                    "SummEval": 31.15
+                    "Model": "google-gecko.text-embedding-preview-0409",
+                    "SummEval": 32.63
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "google-gecko.text-embedding-preview-0409"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "unsup-simcse-bert-base-uncased"
+                    "Model": "google-gecko.text-embedding-preview-0409",
+                    "Core17InstructionRetrieval": 5.44,
+                    "News21InstructionRetrieval": 3.94,
+                    "Robust04InstructionRetrieval": -2.4
                 }
             ]
         }
     },
-    "voyage-large-2-instruct": {
+    "sup-simcse-bert-base-uncased": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "voyage-large-2-instruct"
+                    "Model": "sup-simcse-bert-base-uncased"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "voyage-large-2-instruct",
-                    "AmazonCounterfactualClassification (en)": 77.6,
-                    "AmazonPolarityClassification": 96.58,
-                    "AmazonReviewsClassification (en)": 50.77,
-                    "Banking77Classification": 86.96,
-                    "EmotionClassification": 59.81,
-                    "ImdbClassification": 96.13,
-                    "MTOPDomainClassification (en)": 98.86,
-                    "MTOPIntentClassification (en)": 86.97,
-                    "MassiveIntentClassification (en)": 81.08,
-                    "MassiveScenarioClassification (en)": 87.95,
-                    "ToxicConversationsClassification": 83.58,
-                    "TweetSentimentExtractionClassification": 71.55
+                    "Model": "sup-simcse-bert-base-uncased",
+                    "AmazonCounterfactualClassification (en)": 75.75,
+                    "AmazonPolarityClassification": 82.47,
+                    "AmazonReviewsClassification (en)": 39.6,
+                    "Banking77Classification": 75.76,
+                    "EmotionClassification": 44.81,
+                    "ImdbClassification": 73.53,
+                    "MTOPDomainClassification (en)": 84.29,
+                    "MTOPIntentClassification (en)": 63.14,
+                    "MassiveIntentClassification (en)": 65.95,
+                    "MassiveScenarioClassification (en)": 70.78,
+                    "ToxicConversationsClassification": 72.04,
+                    "TweetSentimentExtractionClassification": 59.73
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "voyage-large-2-instruct",
-                    "ArxivClusteringP2P": 51.81,
-                    "ArxivClusteringS2S": 44.73,
-                    "BiorxivClusteringP2P": 46.07,
-                    "BiorxivClusteringS2S": 40.64,
-                    "MedrxivClusteringP2P": 42.94,
-                    "MedrxivClusteringS2S": 41.44,
-                    "RedditClustering": 68.5,
-                    "RedditClusteringP2P": 64.86,
-                    "StackExchangeClustering": 74.16,
-                    "StackExchangeClusteringP2P": 45.1,
-                    "TwentyNewsgroupsClustering": 66.62
+                    "Model": "sup-simcse-bert-base-uncased",
+                    "ArxivClusteringP2P": 35.18,
+                    "ArxivClusteringS2S": 27.54,
+                    "BiorxivClusteringP2P": 30.15,
+                    "BiorxivClusteringS2S": 24.67,
+                    "MedrxivClusteringP2P": 26.25,
+                    "MedrxivClusteringS2S": 24.12,
+                    "RedditClustering": 40.23,
+                    "RedditClusteringP2P": 47.74,
+                    "StackExchangeClustering": 47.55,
+                    "StackExchangeClusteringP2P": 29.45,
+                    "TwentyNewsgroupsClustering": 34.86
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "voyage-large-2-instruct",
-                    "SprintDuplicateQuestions": 94.5,
-                    "TwitterSemEval2015": 86.32,
-                    "TwitterURLCorpus": 86.9
+                    "Model": "sup-simcse-bert-base-uncased",
+                    "SprintDuplicateQuestions": 69.39,
+                    "TwitterSemEval2015": 67.75,
+                    "TwitterURLCorpus": 83.89
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "voyage-large-2-instruct",
-                    "AskUbuntuDupQuestions": 64.92,
-                    "MindSmallReranking": 30.97,
-                    "SciDocsRR": 89.34,
-                    "StackOverflowDupQuestions": 55.11
+                    "Model": "sup-simcse-bert-base-uncased",
+                    "AskUbuntuDupQuestions": 51.8,
+                    "MindSmallReranking": 29.3,
+                    "SciDocsRR": 70.14,
+                    "StackOverflowDupQuestions": 38.9
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "voyage-large-2-instruct",
-                    "ArguAna": 64.06,
-                    "BrightRetrieval (theoremqa_questions)": 26.06,
-                    "BrightRetrieval (earth_science)": 25.09,
-                    "BrightRetrieval (leetcode)": 30.6,
-                    "BrightRetrieval (economics)": 19.85,
-                    "BrightRetrieval (robotics)": 11.21,
-                    "BrightRetrieval (psychology)": 24.79,
-                    "BrightRetrieval (aops)": 7.45,
-                    "BrightRetrieval (sustainable_living)": 15.58,
-                    "BrightRetrieval (pony)": 1.48,
-                    "BrightRetrieval (theoremqa_theorems)": 10.13,
-                    "BrightRetrieval (biology)": 23.55,
-                    "BrightRetrieval (stackoverflow)": 15.03,
-                    "CQADupstackRetrieval": 46.6,
-                    "ClimateFEVER": 32.65,
-                    "DBPedia": 46.03,
-                    "FEVER": 91.47,
-                    "FiQA2018": 59.76,
-                    "HotpotQA": 70.86,
-                    "MSMARCO": 40.6,
-                    "NFCorpus": 40.32,
-                    "NQ": 65.92,
-                    "QuoraRetrieval": 87.4,
-                    "SCIDOCS": 24.32,
-                    "SciFact": 79.99,
-                    "TRECCOVID": 85.07,
-                    "Touche2020": 39.16
+                    "Model": "sup-simcse-bert-base-uncased",
+                    "ArguAna": 38.33,
+                    "CQADupstackRetrieval": 14.5,
+                    "ClimateFEVER": 11.98,
+                    "DBPedia": 19.73,
+                    "FEVER": 20.41,
+                    "FiQA2018": 10.41,
+                    "HotpotQA": 22.9,
+                    "MSMARCO": 11.0,
+                    "NFCorpus": 12.42,
+                    "NQ": 16.08,
+                    "QuoraRetrieval": 79.62,
+                    "SCIDOCS": 7.53,
+                    "SciFact": 29.59,
+                    "TRECCOVID": 22.93,
+                    "Touche2020": 9.9
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "voyage-large-2-instruct",
-                    "BIOSSES": 89.24,
-                    "SICK-R": 83.16,
-                    "STS12": 73.34,
-                    "STS13": 88.49,
-                    "STS14": 86.49,
-                    "STS15": 91.13,
-                    "STS16": 85.68,
-                    "STS17 (en-en)": 90.06,
-                    "STS22 (en)": 66.32,
-                    "STSBenchmark": 89.22
+                    "Model": "sup-simcse-bert-base-uncased",
+                    "BIOSSES": 68.38,
+                    "SICK-R": 80.77,
+                    "STS12": 75.3,
+                    "STS13": 84.67,
+                    "STS14": 80.19,
+                    "STS15": 85.4,
+                    "STS16": 80.82,
+                    "STS17 (en-en)": 89.44,
+                    "STS22 (en)": 61.96,
+                    "STSBenchmark": 84.25
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "voyage-large-2-instruct",
-                    "SummEval": 30.84
+                    "Model": "sup-simcse-bert-base-uncased",
+                    "SummEval": 31.17
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "sup-simcse-bert-base-uncased"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "voyage-large-2-instruct"
+                    "Model": "sup-simcse-bert-base-uncased"
                 }
             ]
         }
     },
-    "rubert-base-cased": {
+    "gtr-t5-base": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "rubert-base-cased",
-                    "Tatoeba (rus-Cyrl_eng-Latn)": 16.76
+                    "Model": "gtr-t5-base"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "rubert-base-cased",
-                    "GeoreviewClassification (rus-Cyrl)": 37.22,
-                    "HeadlineClassification (rus-Cyrl)": 75.23,
-                    "InappropriatenessClassification (rus-Cyrl)": 57.34,
-                    "KinopoiskClassification (rus-Cyrl)": 49.91,
-                    "MassiveIntentClassification (rus-Cyrl)": 53.02,
-                    "MassiveScenarioClassification (rus-Cyrl)": 56.79,
-                    "RuReviewsClassification (rus-Cyrl)": 50.74,
-                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 48.03,
-                    "RuSciBenchOECDClassification (rus-Cyrl)": 36.13
+                    "Model": "gtr-t5-base",
+                    "AmazonCounterfactualClassification (en)": 69.33,
+                    "AmazonPolarityClassification": 67.82,
+                    "AmazonReviewsClassification (en)": 38.48,
+                    "Banking77Classification": 79.26,
+                    "EmotionClassification": 42.2,
+                    "ImdbClassification": 65.99,
+                    "MTOPDomainClassification (en)": 92.42,
+                    "MTOPIntentClassification (en)": 62.44,
+                    "MassiveIntentClassification (en)": 67.05,
+                    "MassiveScenarioClassification (en)": 75.4,
+                    "ToxicConversationsClassification": 66.6,
+                    "TweetSentimentExtractionClassification": 56.02
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "rubert-base-cased",
-                    "GeoreviewClusteringP2P (rus-Cyrl)": 28.77,
-                    "MLSUMClusteringP2P (rus-Cyrl)": 41.42,
-                    "MLSUMClusteringS2S (rus-Cyrl)": 40.52,
-                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 28.29,
-                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 26.67
+                    "Model": "gtr-t5-base",
+                    "ArxivClusteringP2P": 35.49,
+                    "ArxivClusteringS2S": 27.18,
+                    "BiorxivClusteringP2P": 27.66,
+                    "BiorxivClusteringS2S": 23.25,
+                    "MedrxivClusteringP2P": 27.57,
+                    "MedrxivClusteringS2S": 25.13,
+                    "RedditClustering": 56.13,
+                    "RedditClusteringP2P": 58.53,
+                    "StackExchangeClustering": 64.21,
+                    "StackExchangeClusteringP2P": 33.01,
+                    "TwentyNewsgroupsClustering": 46.72
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "rubert-base-cased",
-                    "OpusparcusPC (rus-Cyrl)": 81.65,
-                    "TERRa (rus-Cyrl)": 52.12
+                    "Model": "gtr-t5-base",
+                    "SprintDuplicateQuestions": 94.55,
+                    "TwitterSemEval2015": 72.23,
+                    "TwitterURLCorpus": 84.77
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "rubert-base-cased",
-                    "RuBQReranking (rus-Cyrl)": 41.65
+                    "Model": "gtr-t5-base",
+                    "AskUbuntuDupQuestions": 60.86,
+                    "MindSmallReranking": 31.33,
+                    "SciDocsRR": 73.71,
+                    "StackOverflowDupQuestions": 51.01
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "rubert-base-cased",
-                    "RiaNewsRetrieval (rus-Cyrl)": 5.58,
-                    "RuBQRetrieval (rus-Cyrl)": 9.52
+                    "Model": "gtr-t5-base",
+                    "ArguAna": 50.83,
+                    "CQADupstackRetrieval": 34.55,
+                    "ClimateFEVER": 24.88,
+                    "DBPedia": 35.24,
+                    "FEVER": 68.93,
+                    "FiQA2018": 35.15,
+                    "HotpotQA": 54.93,
+                    "MSMARCO": 41.16,
+                    "NFCorpus": 30.22,
+                    "NQ": 50.47,
+                    "QuoraRetrieval": 87.98,
+                    "SCIDOCS": 14.0,
+                    "SciFact": 59.74,
+                    "TRECCOVID": 56.05,
+                    "Touche2020": 25.89
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "rubert-base-cased",
-                    "RUParaPhraserSTS (rus-Cyrl)": 49.72,
-                    "RuSTSBenchmarkSTS (rus-Cyrl)": 53.95,
-                    "STS22 (rus-Cyrl)": 34.98,
-                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 53.76
+                    "Model": "gtr-t5-base",
+                    "BIOSSES": 79.0,
+                    "SICK-R": 71.45,
+                    "STS12": 68.59,
+                    "STS13": 79.09,
+                    "STS14": 74.64,
+                    "STS15": 84.85,
+                    "STS16": 81.57,
+                    "STS17 (en-en)": 85.8,
+                    "STS22 (en)": 66.17,
+                    "STSBenchmark": 79.58
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "rubert-base-cased"
+                    "Model": "gtr-t5-base",
+                    "SummEval": 29.67
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "gtr-t5-base"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "rubert-base-cased"
+                    "Model": "gtr-t5-base"
                 }
             ]
         }
     },
-    "text-embedding-3-small-instruct": {
+    "flan-t5-large": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "text-embedding-3-small-instruct"
+                    "Model": "flan-t5-large"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "text-embedding-3-small-instruct"
+                    "Model": "flan-t5-large"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "text-embedding-3-small-instruct"
+                    "Model": "flan-t5-large"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "text-embedding-3-small-instruct"
+                    "Model": "flan-t5-large"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "text-embedding-3-small-instruct"
+                    "Model": "flan-t5-large"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "text-embedding-3-small-instruct",
-                    "ARCChallenge": 13.76,
-                    "AlphaNLI": 21.14,
-                    "HellaSwag": 27.2,
-                    "PIQA": 29.59,
-                    "Quail": 6.64,
-                    "RARbCode": 72.14,
-                    "RARbMath": 64.31,
-                    "SIQA": 2.98,
-                    "SpartQA": 3.58,
-                    "TempReasonL1": 2.29,
-                    "TempReasonL2Fact": 26.34,
-                    "TempReasonL2Pure": 3.17,
-                    "TempReasonL3Fact": 22.72,
-                    "TempReasonL3Pure": 9.98,
-                    "WinoGrande": 25.49
+                    "Model": "flan-t5-large"
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "text-embedding-3-small-instruct"
+                    "Model": "flan-t5-large"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "text-embedding-3-small-instruct"
+                    "Model": "flan-t5-large"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "flan-t5-large"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "text-embedding-3-small-instruct"
+                    "Model": "flan-t5-large",
+                    "Core17InstructionRetrieval": 1.32,
+                    "News21InstructionRetrieval": 8.95,
+                    "Robust04InstructionRetrieval": 3.9
                 }
             ]
         }
     },
-    "deberta-v1-base": {
+    "bge-m3-instruct": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "deberta-v1-base",
-                    "Tatoeba (rus-Cyrl_eng-Latn)": 13.21
+                    "Model": "bge-m3-instruct"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "deberta-v1-base",
-                    "GeoreviewClassification (rus-Cyrl)": 40.19,
-                    "HeadlineClassification (rus-Cyrl)": 78.75,
-                    "InappropriatenessClassification (rus-Cyrl)": 61.33,
-                    "KinopoiskClassification (rus-Cyrl)": 48.78,
-                    "MassiveIntentClassification (rus-Cyrl)": 61.32,
-                    "MassiveScenarioClassification (rus-Cyrl)": 64.71,
-                    "RuReviewsClassification (rus-Cyrl)": 55.66,
-                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 53.53,
-                    "RuSciBenchOECDClassification (rus-Cyrl)": 41.34
+                    "Model": "bge-m3-instruct"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "deberta-v1-base",
-                    "GeoreviewClusteringP2P (rus-Cyrl)": 58.79,
-                    "MLSUMClusteringP2P (rus-Cyrl)": 47.33,
-                    "MLSUMClusteringS2S (rus-Cyrl)": 44.6,
-                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 36.66,
-                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 33.31
+                    "Model": "bge-m3-instruct"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "deberta-v1-base",
-                    "OpusparcusPC (rus-Cyrl)": 83.31,
-                    "TERRa (rus-Cyrl)": 53.78
+                    "Model": "bge-m3-instruct"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "deberta-v1-base",
-                    "RuBQReranking (rus-Cyrl)": 34.01
+                    "Model": "bge-m3-instruct"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "deberta-v1-base",
-                    "RiaNewsRetrieval (rus-Cyrl)": 4.84,
-                    "RuBQRetrieval (rus-Cyrl)": 10.15
+                    "Model": "bge-m3-instruct",
+                    "ARCChallenge": 9.03,
+                    "AlphaNLI": 24.69,
+                    "HellaSwag": 25.55,
+                    "PIQA": 19.03,
+                    "Quail": 7.08,
+                    "RARbCode": 39.58,
+                    "RARbMath": 64.51,
+                    "SIQA": 4.77,
+                    "SpartQA": 7.0,
+                    "TempReasonL1": 0.8,
+                    "TempReasonL2Fact": 34.99,
+                    "TempReasonL2Pure": 0.62,
+                    "TempReasonL3Fact": 32.47,
+                    "TempReasonL3Pure": 7.01,
+                    "WinoGrande": 35.33
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "deberta-v1-base",
-                    "RUParaPhraserSTS (rus-Cyrl)": 54.03,
-                    "RuSTSBenchmarkSTS (rus-Cyrl)": 58.47,
-                    "STS22 (rus-Cyrl)": 47.67,
-                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 58.45
+                    "Model": "bge-m3-instruct"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "deberta-v1-base"
+                    "Model": "bge-m3-instruct"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "bge-m3-instruct"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "deberta-v1-base"
+                    "Model": "bge-m3-instruct"
                 }
             ]
         }
     },
-    "text-search-ada-doc-001": {
+    "contriever-base-msmarco": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "text-search-ada-doc-001"
+                    "Model": "contriever-base-msmarco"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "text-search-ada-doc-001"
+                    "Model": "contriever-base-msmarco",
+                    "AmazonCounterfactualClassification (en)": 72.19,
+                    "AmazonPolarityClassification": 68.63,
+                    "AmazonReviewsClassification (en)": 37.42,
+                    "Banking77Classification": 80.02,
+                    "EmotionClassification": 44.77,
+                    "ImdbClassification": 67.04,
+                    "MTOPDomainClassification (en)": 93.18,
+                    "MTOPIntentClassification (en)": 69.31,
+                    "MassiveIntentClassification (en)": 67.78,
+                    "MassiveScenarioClassification (en)": 76.0,
+                    "ToxicConversationsClassification": 67.77,
+                    "TweetSentimentExtractionClassification": 56.1
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "text-search-ada-doc-001",
-                    "TwentyNewsgroupsClustering": 32.92
+                    "Model": "contriever-base-msmarco",
+                    "ArxivClusteringP2P": 42.61,
+                    "ArxivClusteringS2S": 32.32,
+                    "BiorxivClusteringP2P": 34.97,
+                    "BiorxivClusteringS2S": 29.08,
+                    "MedrxivClusteringP2P": 31.19,
+                    "MedrxivClusteringS2S": 27.27,
+                    "RedditClustering": 54.89,
+                    "RedditClusteringP2P": 57.58,
+                    "StackExchangeClustering": 63.15,
+                    "StackExchangeClusteringP2P": 32.25,
+                    "TwentyNewsgroupsClustering": 46.82
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "text-search-ada-doc-001"
+                    "Model": "contriever-base-msmarco",
+                    "SprintDuplicateQuestions": 95.55,
+                    "TwitterSemEval2015": 66.85,
+                    "TwitterURLCorpus": 85.21
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "text-search-ada-doc-001"
+                    "Model": "contriever-base-msmarco",
+                    "AskUbuntuDupQuestions": 56.69,
+                    "MindSmallReranking": 31.58,
+                    "SciDocsRR": 76.51,
+                    "StackOverflowDupQuestions": 47.78
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "text-search-ada-doc-001"
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "text-search-ada-doc-001"
+                    "Model": "contriever-base-msmarco",
+                    "ArguAna": 48.32,
+                    "CQADupstackRetrieval": 33.67,
+                    "ClimateFEVER": 24.79,
+                    "DBPedia": 38.1,
+                    "FEVER": 59.29,
+                    "FiQA2018": 27.42,
+                    "HotpotQA": 56.81,
+                    "MSMARCO": 36.77,
+                    "NFCorpus": 31.32,
+                    "NQ": 41.83,
+                    "QuoraRetrieval": 86.72,
+                    "SCIDOCS": 17.12,
+                    "SciFact": 65.51,
+                    "TRECCOVID": 44.77,
+                    "Touche2020": 15.79
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "contriever-base-msmarco",
+                    "BIOSSES": 83.32,
+                    "SICK-R": 70.2,
+                    "STS12": 64.34,
+                    "STS13": 80.03,
+                    "STS14": 74.51,
+                    "STS15": 83.3,
+                    "STS16": 79.67,
+                    "STS17 (en-en)": 86.32,
+                    "STS22 (en)": 64.64,
+                    "STSBenchmark": 78.81
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "text-search-ada-doc-001"
+                    "Model": "contriever-base-msmarco",
+                    "SummEval": 30.36
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "contriever-base-msmarco"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "text-search-ada-doc-001"
+                    "Model": "contriever-base-msmarco",
+                    "Core17InstructionRetrieval": -2.48,
+                    "News21InstructionRetrieval": -2.83,
+                    "Robust04InstructionRetrieval": -6.12
                 }
             ]
         }
     },
-    "all-MiniLM-L12-v2": {
+    "tart-dual-contriever-msmarco": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "all-MiniLM-L12-v2",
-                    "BornholmBitextMining (dan-Latn)": 35.25,
-                    "Tatoeba (spa-Latn_eng-Latn)": 11.26,
-                    "Tatoeba (bos-Latn_eng-Latn)": 7.05,
-                    "Tatoeba (xho-Latn_eng-Latn)": 3.66,
-                    "Tatoeba (fry-Latn_eng-Latn)": 14.53,
-                    "Tatoeba (tur-Latn_eng-Latn)": 3.69,
-                    "Tatoeba (fao-Latn_eng-Latn)": 5.92,
-                    "Tatoeba (vie-Latn_eng-Latn)": 5.06,
-                    "Tatoeba (ind-Latn_eng-Latn)": 5.3,
-                    "Tatoeba (pol-Latn_eng-Latn)": 4.29,
-                    "Tatoeba (swe-Latn_eng-Latn)": 7.31,
-                    "Tatoeba (ita-Latn_eng-Latn)": 12.57,
-                    "Tatoeba (dtp-Latn_eng-Latn)": 3.31,
-                    "Tatoeba (ron-Latn_eng-Latn)": 8.77,
-                    "Tatoeba (isl-Latn_eng-Latn)": 3.44,
-                    "Tatoeba (hrv-Latn_eng-Latn)": 5.68,
-                    "Tatoeba (cha-Latn_eng-Latn)": 13.07,
-                    "Tatoeba (cor-Latn_eng-Latn)": 2.47,
-                    "Tatoeba (cym-Latn_eng-Latn)": 5.13,
-                    "Tatoeba (jpn-Jpan_eng-Latn)": 2.18,
-                    "Tatoeba (lfn-Latn_eng-Latn)": 7.52,
-                    "Tatoeba (hun-Latn_eng-Latn)": 3.93,
-                    "Tatoeba (lat-Latn_eng-Latn)": 7.14,
-                    "Tatoeba (tgl-Latn_eng-Latn)": 3.34,
-                    "Tatoeba (kur-Latn_eng-Latn)": 7.3,
-                    "Tatoeba (war-Latn_eng-Latn)": 6.18,
-                    "Tatoeba (kab-Latn_eng-Latn)": 0.91,
-                    "Tatoeba (kaz-Cyrl_eng-Latn)": 0.82,
-                    "Tatoeba (slv-Latn_eng-Latn)": 4.52,
-                    "Tatoeba (nds-Latn_eng-Latn)": 11.35,
-                    "Tatoeba (pam-Latn_eng-Latn)": 4.73,
-                    "Tatoeba (bul-Cyrl_eng-Latn)": 0.23,
-                    "Tatoeba (ces-Latn_eng-Latn)": 4.2,
-                    "Tatoeba (nno-Latn_eng-Latn)": 7.45,
-                    "Tatoeba (ben-Beng_eng-Latn)": 0.02,
-                    "Tatoeba (amh-Ethi_eng-Latn)": 0.01,
-                    "Tatoeba (lit-Latn_eng-Latn)": 1.56,
-                    "Tatoeba (pes-Arab_eng-Latn)": 0.3,
-                    "Tatoeba (jav-Latn_eng-Latn)": 3.5,
-                    "Tatoeba (mal-Mlym_eng-Latn)": 0.24,
-                    "Tatoeba (lvs-Latn_eng-Latn)": 3.45,
-                    "Tatoeba (gsw-Latn_eng-Latn)": 9.9,
-                    "Tatoeba (fra-Latn_eng-Latn)": 17.53,
-                    "Tatoeba (orv-Cyrl_eng-Latn)": 0.15,
-                    "Tatoeba (kat-Geor_eng-Latn)": 0.45,
-                    "Tatoeba (awa-Deva_eng-Latn)": 0.44,
-                    "Tatoeba (epo-Latn_eng-Latn)": 8.5,
-                    "Tatoeba (mhr-Cyrl_eng-Latn)": 0.0,
-                    "Tatoeba (dan-Latn_eng-Latn)": 10.21,
-                    "Tatoeba (bel-Cyrl_eng-Latn)": 0.85,
-                    "Tatoeba (nld-Latn_eng-Latn)": 12.56,
-                    "Tatoeba (mkd-Cyrl_eng-Latn)": 0.01,
-                    "Tatoeba (mon-Cyrl_eng-Latn)": 0.06,
-                    "Tatoeba (ast-Latn_eng-Latn)": 9.99,
-                    "Tatoeba (cat-Latn_eng-Latn)": 11.79,
-                    "Tatoeba (oci-Latn_eng-Latn)": 8.72,
-                    "Tatoeba (khm-Khmr_eng-Latn)": 0.42,
-                    "Tatoeba (urd-Arab_eng-Latn)": 0.0,
-                    "Tatoeba (tzl-Latn_eng-Latn)": 6.87,
-                    "Tatoeba (arq-Arab_eng-Latn)": 0.28,
-                    "Tatoeba (uig-Arab_eng-Latn)": 0.4,
-                    "Tatoeba (dsb-Latn_eng-Latn)": 3.06,
-                    "Tatoeba (hsb-Latn_eng-Latn)": 2.89,
-                    "Tatoeba (kzj-Latn_eng-Latn)": 3.64,
-                    "Tatoeba (cbk-Latn_eng-Latn)": 9.76,
-                    "Tatoeba (afr-Latn_eng-Latn)": 7.59,
-                    "Tatoeba (gle-Latn_eng-Latn)": 3.08,
-                    "Tatoeba (csb-Latn_eng-Latn)": 5.21,
-                    "Tatoeba (mar-Deva_eng-Latn)": 0.04,
-                    "Tatoeba (arz-Arab_eng-Latn)": 0.0,
-                    "Tatoeba (tat-Cyrl_eng-Latn)": 0.75,
-                    "Tatoeba (hin-Deva_eng-Latn)": 0.0,
-                    "Tatoeba (ang-Latn_eng-Latn)": 14.63,
-                    "Tatoeba (heb-Hebr_eng-Latn)": 0.3,
-                    "Tatoeba (tuk-Latn_eng-Latn)": 2.66,
-                    "Tatoeba (ile-Latn_eng-Latn)": 17.43,
-                    "Tatoeba (zsm-Latn_eng-Latn)": 5.99,
-                    "Tatoeba (kor-Hang_eng-Latn)": 0.9,
-                    "Tatoeba (uzb-Latn_eng-Latn)": 2.2,
-                    "Tatoeba (fin-Latn_eng-Latn)": 3.65,
-                    "Tatoeba (hye-Armn_eng-Latn)": 0.5,
-                    "Tatoeba (ukr-Cyrl_eng-Latn)": 0.57,
-                    "Tatoeba (swh-Latn_eng-Latn)": 5.82,
-                    "Tatoeba (gla-Latn_eng-Latn)": 2.58,
-                    "Tatoeba (aze-Latn_eng-Latn)": 1.47,
-                    "Tatoeba (ara-Arab_eng-Latn)": 0.43,
-                    "Tatoeba (eus-Latn_eng-Latn)": 6.58,
-                    "Tatoeba (deu-Latn_eng-Latn)": 13.89,
-                    "Tatoeba (por-Latn_eng-Latn)": 11.36,
-                    "Tatoeba (ber-Tfng_eng-Latn)": 4.72,
-                    "Tatoeba (sqi-Latn_eng-Latn)": 5.86,
-                    "Tatoeba (pms-Latn_eng-Latn)": 8.94,
-                    "Tatoeba (ina-Latn_eng-Latn)": 25.36,
-                    "Tatoeba (ido-Latn_eng-Latn)": 11.08,
-                    "Tatoeba (slk-Latn_eng-Latn)": 4.2,
-                    "Tatoeba (glg-Latn_eng-Latn)": 12.6,
-                    "Tatoeba (nov-Latn_eng-Latn)": 19.45,
-                    "Tatoeba (tel-Telu_eng-Latn)": 0.67,
-                    "Tatoeba (tam-Taml_eng-Latn)": 0.33,
-                    "Tatoeba (bre-Latn_eng-Latn)": 3.68,
-                    "Tatoeba (tha-Thai_eng-Latn)": 0.67,
-                    "Tatoeba (nob-Latn_eng-Latn)": 8.02,
-                    "Tatoeba (est-Latn_eng-Latn)": 2.6,
-                    "Tatoeba (wuu-Hans_eng-Latn)": 1.89,
-                    "Tatoeba (swg-Latn_eng-Latn)": 11.9,
-                    "Tatoeba (max-Deva_eng-Latn)": 8.4,
-                    "Tatoeba (srp-Cyrl_eng-Latn)": 2.22,
-                    "Tatoeba (yue-Hant_eng-Latn)": 1.89,
-                    "Tatoeba (rus-Cyrl_eng-Latn)": 0.07,
-                    "Tatoeba (ell-Grek_eng-Latn)": 0.2,
-                    "Tatoeba (ceb-Latn_eng-Latn)": 3.95,
-                    "Tatoeba (yid-Hebr_eng-Latn)": 0.19,
-                    "Tatoeba (cmn-Hans_eng-Latn)": 2.45
+                    "Model": "tart-dual-contriever-msmarco"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "all-MiniLM-L12-v2",
-                    "AllegroReviews (pol-Latn)": 23.85,
-                    "AmazonCounterfactualClassification (en-ext)": 67.24,
-                    "AmazonCounterfactualClassification (en)": 65.28,
-                    "AmazonCounterfactualClassification (deu-Latn)": 57.13,
-                    "AmazonCounterfactualClassification (jpn-Jpan)": 59.94,
-                    "AmazonCounterfactualClassification (de)": 57.1,
-                    "AmazonCounterfactualClassification (ja)": 59.91,
-                    "AmazonPolarityClassification": 62.98,
-                    "AmazonReviewsClassification (en)": 30.79,
-                    "AmazonReviewsClassification (deu-Latn)": 25.92,
-                    "AmazonReviewsClassification (spa-Latn)": 27.64,
-                    "AmazonReviewsClassification (fra-Latn)": 27.53,
-                    "AmazonReviewsClassification (jpn-Jpan)": 23.57,
-                    "AmazonReviewsClassification (cmn-Hans)": 22.99,
-                    "AmazonReviewsClassification (de)": 25.91,
-                    "AmazonReviewsClassification (es)": 27.63,
-                    "AmazonReviewsClassification (fr)": 27.54,
-                    "AmazonReviewsClassification (ja)": 23.57,
-                    "AmazonReviewsClassification (zh)": 22.99,
-                    "AngryTweetsClassification (dan-Latn)": 42.87,
-                    "Banking77Classification": 80.4,
-                    "CBD (pol-Latn)": 48.46,
-                    "DanishPoliticalCommentsClassification (dan-Latn)": 27.07,
-                    "EmotionClassification": 41.17,
-                    "GeoreviewClassification (rus-Cyrl)": 23.49,
-                    "HeadlineClassification (rus-Cyrl)": 28.49,
-                    "IFlyTek (cmn-Hans)": 15.31,
-                    "ImdbClassification": 59.76,
-                    "InappropriatenessClassification (rus-Cyrl)": 50.85,
-                    "JDReview (cmn-Hans)": 59.57,
-                    "KinopoiskClassification (rus-Cyrl)": 34.17,
-                    "LccSentimentClassification (dan-Latn)": 41.93,
-                    "MTOPDomainClassification (en)": 91.9,
-                    "MTOPDomainClassification (deu-Latn)": 72.04,
-                    "MTOPDomainClassification (spa-Latn)": 72.99,
-                    "MTOPDomainClassification (fra-Latn)": 75.57,
-                    "MTOPDomainClassification (hin-Deva)": 40.4,
-                    "MTOPDomainClassification (tha-Thai)": 16.36,
-                    "MTOPDomainClassification (de)": 72.04,
-                    "MTOPDomainClassification (es)": 72.99,
-                    "MTOPDomainClassification (fr)": 75.59,
-                    "MTOPDomainClassification (hi)": 40.36,
-                    "MTOPDomainClassification (th)": 17.1,
-                    "MTOPIntentClassification (en)": 62.84,
-                    "MTOPIntentClassification (deu-Latn)": 43.42,
-                    "MTOPIntentClassification (spa-Latn)": 41.91,
-                    "MTOPIntentClassification (fra-Latn)": 38.96,
-                    "MTOPIntentClassification (hin-Deva)": 17.76,
-                    "MTOPIntentClassification (tha-Thai)": 6.13,
-                    "MTOPIntentClassification (de)": 43.41,
-                    "MTOPIntentClassification (es)": 41.88,
-                    "MTOPIntentClassification (fr)": 38.94,
-                    "MTOPIntentClassification (hi)": 17.75,
-                    "MTOPIntentClassification (th)": 5.63,
-                    "MasakhaNEWSClassification (amh-Ethi)": 30.64,
-                    "MasakhaNEWSClassification (eng)": 76.62,
-                    "MasakhaNEWSClassification (fra-Latn)": 67.18,
-                    "MasakhaNEWSClassification (hau-Latn)": 52.59,
-                    "MasakhaNEWSClassification (ibo-Latn)": 54.26,
-                    "MasakhaNEWSClassification (lin-Latn)": 62.23,
-                    "MasakhaNEWSClassification (lug-Latn)": 47.62,
-                    "MasakhaNEWSClassification (orm-Ethi)": 47.17,
-                    "MasakhaNEWSClassification (pcm-Latn)": 91.77,
-                    "MasakhaNEWSClassification (run-Latn)": 54.47,
-                    "MasakhaNEWSClassification (sna-Latn)": 66.53,
-                    "MasakhaNEWSClassification (som-Latn)": 40.27,
-                    "MasakhaNEWSClassification (swa-Latn)": 47.77,
-                    "MasakhaNEWSClassification (tir-Ethi)": 21.18,
-                    "MasakhaNEWSClassification (xho-Latn)": 54.34,
-                    "MasakhaNEWSClassification (yor-Latn)": 58.61,
-                    "MasakhaNEWSClassification (fra)": 72.2,
-                    "MassiveIntentClassification (jpn-Jpan)": 30.89,
-                    "MassiveIntentClassification (khm-Khmr)": 4.99,
-                    "MassiveIntentClassification (slv-Latn)": 38.48,
-                    "MassiveIntentClassification (hye-Armn)": 8.69,
-                    "MassiveIntentClassification (ita-Latn)": 43.16,
-                    "MassiveIntentClassification (fin-Latn)": 39.19,
-                    "MassiveIntentClassification (afr-Latn)": 38.84,
-                    "MassiveIntentClassification (kor-Kore)": 19.97,
-                    "MassiveIntentClassification (ben-Beng)": 13.7,
-                    "MassiveIntentClassification (heb-Hebr)": 23.71,
-                    "MassiveIntentClassification (dan-Latn)": 44.35,
-                    "MassiveIntentClassification (fra-Latn)": 44.75,
-                    "MassiveIntentClassification (pol-Latn)": 37.59,
-                    "MassiveIntentClassification (por-Latn)": 45.08,
-                    "MassiveIntentClassification (tha-Thai)": 10.46,
-                    "MassiveIntentClassification (nob-Latn)": 41.79,
-                    "MassiveIntentClassification (kat-Geor)": 9.17,
-                    "MassiveIntentClassification (tgl-Latn)": 38.63,
-                    "MassiveIntentClassification (swe-Latn)": 40.33,
-                    "MassiveIntentClassification (hun-Latn)": 37.95,
-                    "MassiveIntentClassification (cmo-Hant)": 22.38,
-                    "MassiveIntentClassification (hin-Deva)": 18.0,
-                    "MassiveIntentClassification (tur-Latn)": 35.93,
-                    "MassiveIntentClassification (vie-Latn)": 37.35,
-                    "MassiveIntentClassification (mal-Mlym)": 2.83,
-                    "MassiveIntentClassification (aze-Latn)": 34.3,
-                    "MassiveIntentClassification (amh-Ethi)": 2.45,
-                    "MassiveIntentClassification (kan-Knda)": 3.07,
-                    "MassiveIntentClassification (deu-Latn)": 44.12,
-                    "MassiveIntentClassification (rus-Cyrl)": 26.29,
-                    "MassiveIntentClassification (ara-Arab)": 21.02,
-                    "MassiveIntentClassification (msa-Latn)": 36.16,
-                    "MassiveIntentClassification (nld-Latn)": 41.77,
-                    "MassiveIntentClassification (fas-Arab)": 23.56,
-                    "MassiveIntentClassification (isl-Latn)": 35.17,
-                    "MassiveIntentClassification (cym-Latn)": 35.65,
-                    "MassiveIntentClassification (cmo-Hans)": 23.74,
-                    "MassiveIntentClassification (ell-Grek)": 28.68,
-                    "MassiveIntentClassification (spa-Latn)": 40.82,
-                    "MassiveIntentClassification (ind-Latn)": 39.65,
-                    "MassiveIntentClassification (jav-Latn)": 36.67,
-                    "MassiveIntentClassification (mon-Cyrl)": 23.27,
-                    "MassiveIntentClassification (mya-Mymr)": 4.36,
-                    "MassiveIntentClassification (sqi-Latn)": 41.47,
-                    "MassiveIntentClassification (tel-Telu)": 2.54,
-                    "MassiveIntentClassification (en)": 67.15,
-                    "MassiveIntentClassification (ron-Latn)": 41.64,
-                    "MassiveIntentClassification (tam-Taml)": 13.12,
-                    "MassiveIntentClassification (swa-Latn)": 35.26,
-                    "MassiveIntentClassification (urd-Arab)": 16.26,
-                    "MassiveIntentClassification (lav-Latn)": 38.54,
-                    "MassiveIntentClassification (af)": 38.94,
-                    "MassiveIntentClassification (am)": 2.45,
-                    "MassiveIntentClassification (ar)": 20.94,
-                    "MassiveIntentClassification (az)": 34.25,
-                    "MassiveIntentClassification (bn)": 13.67,
-                    "MassiveIntentClassification (cy)": 35.71,
-                    "MassiveIntentClassification (da)": 44.43,
-                    "MassiveIntentClassification (de)": 44.17,
-                    "MassiveIntentClassification (el)": 28.7,
-                    "MassiveIntentClassification (es)": 40.91,
-                    "MassiveIntentClassification (fa)": 23.52,
-                    "MassiveIntentClassification (fi)": 39.27,
-                    "MassiveIntentClassification (fr)": 44.82,
-                    "MassiveIntentClassification (he)": 23.65,
-                    "MassiveIntentClassification (hi)": 17.98,
-                    "MassiveIntentClassification (hu)": 38.0,
-                    "MassiveIntentClassification (hy)": 8.69,
-                    "MassiveIntentClassification (id)": 39.66,
-                    "MassiveIntentClassification (is)": 35.14,
-                    "MassiveIntentClassification (it)": 43.17,
-                    "MassiveIntentClassification (ja)": 30.94,
-                    "MassiveIntentClassification (jv)": 36.69,
-                    "MassiveIntentClassification (ka)": 9.17,
-                    "MassiveIntentClassification (km)": 4.99,
-                    "MassiveIntentClassification (kn)": 3.08,
-                    "MassiveIntentClassification (ko)": 19.97,
-                    "MassiveIntentClassification (lv)": 38.61,
-                    "MassiveIntentClassification (ml)": 2.85,
-                    "MassiveIntentClassification (mn)": 23.25,
-                    "MassiveIntentClassification (ms)": 36.21,
-                    "MassiveIntentClassification (my)": 4.38,
-                    "MassiveIntentClassification (nb)": 41.91,
-                    "MassiveIntentClassification (nl)": 41.85,
-                    "MassiveIntentClassification (pl)": 37.63,
-                    "MassiveIntentClassification (pt)": 45.12,
-                    "MassiveIntentClassification (ro)": 41.71,
-                    "MassiveIntentClassification (ru)": 26.33,
-                    "MassiveIntentClassification (sl)": 38.52,
-                    "MassiveIntentClassification (sq)": 41.62,
-                    "MassiveIntentClassification (sv)": 40.42,
-                    "MassiveIntentClassification (sw)": 35.28,
-                    "MassiveIntentClassification (ta)": 13.1,
-                    "MassiveIntentClassification (te)": 2.56,
-                    "MassiveIntentClassification (th)": 10.54,
-                    "MassiveIntentClassification (tl)": 38.56,
-                    "MassiveIntentClassification (tr)": 35.9,
-                    "MassiveIntentClassification (ur)": 16.18,
-                    "MassiveIntentClassification (vi)": 37.38,
-                    "MassiveIntentClassification (zh-CN)": 23.74,
-                    "MassiveIntentClassification (zh-TW)": 22.39,
-                    "MassiveScenarioClassification (jav-Latn)": 44.54,
-                    "MassiveScenarioClassification (aze-Latn)": 39.62,
-                    "MassiveScenarioClassification (cmo-Hans)": 33.19,
-                    "MassiveScenarioClassification (swa-Latn)": 43.18,
-                    "MassiveScenarioClassification (fra-Latn)": 53.77,
-                    "MassiveScenarioClassification (mon-Cyrl)": 29.01,
-                    "MassiveScenarioClassification (kat-Geor)": 14.85,
-                    "MassiveScenarioClassification (ben-Beng)": 18.98,
-                    "MassiveScenarioClassification (ind-Latn)": 44.37,
-                    "MassiveScenarioClassification (kor-Kore)": 25.72,
-                    "MassiveScenarioClassification (lav-Latn)": 42.75,
-                    "MassiveScenarioClassification (deu-Latn)": 52.08,
-                    "MassiveScenarioClassification (hun-Latn)": 44.1,
-                    "MassiveScenarioClassification (tam-Taml)": 19.4,
-                    "MassiveScenarioClassification (afr-Latn)": 45.72,
-                    "MassiveScenarioClassification (nob-Latn)": 47.35,
-                    "MassiveScenarioClassification (urd-Arab)": 24.45,
-                    "MassiveScenarioClassification (tha-Thai)": 18.32,
-                    "MassiveScenarioClassification (ita-Latn)": 51.7,
-                    "MassiveScenarioClassification (en)": 74.58,
-                    "MassiveScenarioClassification (sqi-Latn)": 49.12,
-                    "MassiveScenarioClassification (mya-Mymr)": 10.06,
-                    "MassiveScenarioClassification (ara-Arab)": 27.66,
-                    "MassiveScenarioClassification (tur-Latn)": 41.8,
-                    "MassiveScenarioClassification (khm-Khmr)": 9.75,
-                    "MassiveScenarioClassification (cym-Latn)": 41.43,
-                    "MassiveScenarioClassification (cmo-Hant)": 31.14,
-                    "MassiveScenarioClassification (hye-Armn)": 14.87,
-                    "MassiveScenarioClassification (ell-Grek)": 35.55,
-                    "MassiveScenarioClassification (ron-Latn)": 49.94,
-                    "MassiveScenarioClassification (kan-Knda)": 8.32,
-                    "MassiveScenarioClassification (jpn-Jpan)": 36.77,
-                    "MassiveScenarioClassification (fin-Latn)": 45.8,
-                    "MassiveScenarioClassification (swe-Latn)": 46.81,
-                    "MassiveScenarioClassification (dan-Latn)": 49.5,
-                    "MassiveScenarioClassification (msa-Latn)": 44.67,
-                    "MassiveScenarioClassification (hin-Deva)": 23.03,
-                    "MassiveScenarioClassification (tgl-Latn)": 48.29,
-                    "MassiveScenarioClassification (pol-Latn)": 44.74,
-                    "MassiveScenarioClassification (isl-Latn)": 43.11,
-                    "MassiveScenarioClassification (por-Latn)": 53.0,
-                    "MassiveScenarioClassification (slv-Latn)": 42.24,
-                    "MassiveScenarioClassification (rus-Cyrl)": 28.77,
-                    "MassiveScenarioClassification (tel-Telu)": 7.74,
-                    "MassiveScenarioClassification (heb-Hebr)": 25.73,
-                    "MassiveScenarioClassification (fas-Arab)": 29.0,
-                    "MassiveScenarioClassification (vie-Latn)": 40.97,
-                    "MassiveScenarioClassification (nld-Latn)": 49.14,
-                    "MassiveScenarioClassification (spa-Latn)": 50.73,
-                    "MassiveScenarioClassification (mal-Mlym)": 7.25,
-                    "MassiveScenarioClassification (amh-Ethi)": 7.41,
-                    "MassiveScenarioClassification (af)": 45.71,
-                    "MassiveScenarioClassification (am)": 7.41,
-                    "MassiveScenarioClassification (ar)": 27.62,
-                    "MassiveScenarioClassification (az)": 39.58,
-                    "MassiveScenarioClassification (bn)": 18.98,
-                    "MassiveScenarioClassification (cy)": 41.4,
-                    "MassiveScenarioClassification (da)": 49.47,
-                    "MassiveScenarioClassification (de)": 52.07,
-                    "MassiveScenarioClassification (el)": 35.51,
-                    "MassiveScenarioClassification (es)": 50.74,
-                    "MassiveScenarioClassification (fa)": 29.0,
-                    "MassiveScenarioClassification (fi)": 45.8,
-                    "MassiveScenarioClassification (fr)": 53.76,
-                    "MassiveScenarioClassification (he)": 25.68,
-                    "MassiveScenarioClassification (hi)": 23.02,
-                    "MassiveScenarioClassification (hu)": 44.09,
-                    "MassiveScenarioClassification (hy)": 14.83,
-                    "MassiveScenarioClassification (id)": 44.35,
-                    "MassiveScenarioClassification (is)": 43.08,
-                    "MassiveScenarioClassification (it)": 51.71,
-                    "MassiveScenarioClassification (ja)": 36.75,
-                    "MassiveScenarioClassification (jv)": 44.57,
-                    "MassiveScenarioClassification (ka)": 14.84,
-                    "MassiveScenarioClassification (km)": 9.75,
-                    "MassiveScenarioClassification (kn)": 8.32,
-                    "MassiveScenarioClassification (ko)": 25.72,
-                    "MassiveScenarioClassification (lv)": 42.75,
-                    "MassiveScenarioClassification (ml)": 7.25,
-                    "MassiveScenarioClassification (mn)": 29.03,
-                    "MassiveScenarioClassification (ms)": 44.65,
-                    "MassiveScenarioClassification (my)": 10.07,
-                    "MassiveScenarioClassification (nb)": 47.36,
-                    "MassiveScenarioClassification (nl)": 49.15,
-                    "MassiveScenarioClassification (pl)": 44.72,
-                    "MassiveScenarioClassification (pt)": 53.0,
-                    "MassiveScenarioClassification (ro)": 49.97,
-                    "MassiveScenarioClassification (ru)": 28.75,
-                    "MassiveScenarioClassification (sl)": 42.26,
-                    "MassiveScenarioClassification (sq)": 49.14,
-                    "MassiveScenarioClassification (sv)": 46.83,
-                    "MassiveScenarioClassification (sw)": 43.18,
-                    "MassiveScenarioClassification (ta)": 19.38,
-                    "MassiveScenarioClassification (te)": 7.74,
-                    "MassiveScenarioClassification (th)": 18.32,
-                    "MassiveScenarioClassification (tl)": 48.31,
-                    "MassiveScenarioClassification (tr)": 41.79,
-                    "MassiveScenarioClassification (ur)": 24.46,
-                    "MassiveScenarioClassification (vi)": 40.94,
-                    "MassiveScenarioClassification (zh-CN)": 33.18,
-                    "MassiveScenarioClassification (zh-TW)": 31.16,
-                    "MultilingualSentiment (cmn-Hans)": 40.52,
-                    "NoRecClassification (nob-Latn)": 37.73,
-                    "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 54.17,
-                    "OnlineShopping (cmn-Hans)": 58.65,
-                    "PAC (pol-Latn)": 59.53,
-                    "PolEmo2.0-IN (pol-Latn)": 38.32,
-                    "PolEmo2.0-OUT (pol-Latn)": 22.98,
-                    "RuReviewsClassification (rus-Cyrl)": 42.49,
-                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 10.49,
-                    "RuSciBenchOECDClassification (rus-Cyrl)": 8.31,
-                    "TNews (cmn-Hans)": 20.37,
-                    "ToxicConversationsClassification": 67.47,
-                    "TweetSentimentExtractionClassification": 54.25,
-                    "Waimai (cmn-Hans)": 63.48
+                    "Model": "tart-dual-contriever-msmarco"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "all-MiniLM-L12-v2",
-                    "AlloProfClusteringP2P": 46.03,
-                    "AlloProfClusteringS2S": 31.83,
-                    "ArxivClusteringP2P": 46.07,
-                    "ArxivClusteringS2S": 37.5,
-                    "BiorxivClusteringP2P": 36.99,
-                    "BiorxivClusteringS2S": 33.21,
-                    "GeoreviewClusteringP2P (rus-Cyrl)": 20.76,
-                    "HALClusteringS2S": 19.58,
-                    "MLSUMClusteringP2P": 34.35,
-                    "MLSUMClusteringS2S": 29.3,
-                    "MasakhaNEWSClusteringP2P (amh-Ethi)": 40.5,
-                    "MasakhaNEWSClusteringP2P (eng)": 55.86,
-                    "MasakhaNEWSClusteringP2P (fra-Latn)": 42.72,
-                    "MasakhaNEWSClusteringP2P (hau-Latn)": 26.61,
-                    "MasakhaNEWSClusteringP2P (ibo-Latn)": 44.26,
-                    "MasakhaNEWSClusteringP2P (lin-Latn)": 54.52,
-                    "MasakhaNEWSClusteringP2P (lug-Latn)": 43.87,
-                    "MasakhaNEWSClusteringP2P (orm-Ethi)": 24.87,
-                    "MasakhaNEWSClusteringP2P (pcm-Latn)": 74.42,
-                    "MasakhaNEWSClusteringP2P (run-Latn)": 51.73,
-                    "MasakhaNEWSClusteringP2P (sna-Latn)": 46.89,
-                    "MasakhaNEWSClusteringP2P (som-Latn)": 31.17,
-                    "MasakhaNEWSClusteringP2P (swa-Latn)": 23.72,
-                    "MasakhaNEWSClusteringP2P (tir-Ethi)": 44.08,
-                    "MasakhaNEWSClusteringP2P (xho-Latn)": 26.97,
-                    "MasakhaNEWSClusteringP2P (yor-Latn)": 32.51,
-                    "MasakhaNEWSClusteringP2P (fra)": 42.72,
-                    "MasakhaNEWSClusteringS2S (amh-Ethi)": 44.11,
-                    "MasakhaNEWSClusteringS2S (eng)": 40.71,
-                    "MasakhaNEWSClusteringS2S (fra-Latn)": 32.47,
-                    "MasakhaNEWSClusteringS2S (hau-Latn)": 20.63,
-                    "MasakhaNEWSClusteringS2S (ibo-Latn)": 35.33,
-                    "MasakhaNEWSClusteringS2S (lin-Latn)": 54.52,
-                    "MasakhaNEWSClusteringS2S (lug-Latn)": 51.42,
-                    "MasakhaNEWSClusteringS2S (orm-Ethi)": 24.84,
-                    "MasakhaNEWSClusteringS2S (pcm-Latn)": 70.72,
-                    "MasakhaNEWSClusteringS2S (run-Latn)": 50.88,
-                    "MasakhaNEWSClusteringS2S (sna-Latn)": 46.6,
-                    "MasakhaNEWSClusteringS2S (som-Latn)": 29.87,
-                    "MasakhaNEWSClusteringS2S (swa-Latn)": 10.82,
-                    "MasakhaNEWSClusteringS2S (tir-Ethi)": 43.63,
-                    "MasakhaNEWSClusteringS2S (xho-Latn)": 24.55,
-                    "MasakhaNEWSClusteringS2S (yor-Latn)": 32.85,
-                    "MasakhaNEWSClusteringS2S (fra)": 32.47,
-                    "MedrxivClusteringP2P": 34.25,
-                    "MedrxivClusteringS2S": 32.24,
-                    "RedditClustering": 51.18,
-                    "RedditClusteringP2P": 54.8,
-                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 10.65,
-                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 10.19,
-                    "StackExchangeClustering": 53.05,
-                    "StackExchangeClusteringP2P": 33.13,
-                    "TwentyNewsgroupsClustering": 47.47
+                    "Model": "tart-dual-contriever-msmarco"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "all-MiniLM-L12-v2",
-                    "CDSC-E (pol-Latn)": 49.04,
-                    "OpusparcusPC (deu-Latn)": 91.2,
-                    "OpusparcusPC (en)": 97.41,
-                    "OpusparcusPC (fin-Latn)": 85.99,
-                    "OpusparcusPC (fra-Latn)": 87.35,
-                    "OpusparcusPC (rus-Cyrl)": 79.23,
-                    "OpusparcusPC (swe-Latn)": 84.87,
-                    "PSC (pol-Latn)": 87.92,
-                    "PawsXPairClassification (deu-Latn)": 50.83,
-                    "PawsXPairClassification (en)": 58.62,
-                    "PawsXPairClassification (spa-Latn)": 52.08,
-                    "PawsXPairClassification (fra-Latn)": 55.54,
-                    "PawsXPairClassification (jpn-Hira)": 47.75,
-                    "PawsXPairClassification (kor-Hang)": 49.59,
-                    "PawsXPairClassification (cmn-Hans)": 52.8,
-                    "SICK-E-PL (pol-Latn)": 49.63,
-                    "SprintDuplicateQuestions": 92.45,
-                    "TERRa (rus-Cyrl)": 46.4,
-                    "TwitterSemEval2015": 70.02,
-                    "TwitterURLCorpus": 84.77
+                    "Model": "tart-dual-contriever-msmarco"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "all-MiniLM-L12-v2",
-                    "AlloprofReranking (fra-Latn)": 67.01,
-                    "AskUbuntuDupQuestions": 64.06,
-                    "MMarcoReranking (cmn-Hans)": 5.27,
-                    "MindSmallReranking": 31.02,
-                    "RuBQReranking (rus-Cyrl)": 38.51,
-                    "SciDocsRR": 87.2,
-                    "StackOverflowDupQuestions": 51.47,
-                    "SyntecReranking (fra-Latn)": 69.17,
-                    "T2Reranking (cmn-Hans)": 60.32
+                    "Model": "tart-dual-contriever-msmarco"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "all-MiniLM-L12-v2",
-                    "AILACasedocs": 16.8,
-                    "AILAStatutes": 20.71,
-                    "ARCChallenge": 10.23,
-                    "AlloprofRetrieval (fra-Latn)": 33.2,
-                    "AlloprofRetrieval": 33.2,
-                    "AlphaNLI": 25.35,
-                    "ArguAna": 47.13,
-                    "ArguAna-PL (pol-Latn)": 13.4,
-                    "BSARDRetrieval (fra-Latn)": 6.24,
-                    "CQADupstackRetrieval": 42.53,
-                    "ClimateFEVER": 21.57,
-                    "CmedqaRetrieval (cmn-Hans)": 2.58,
-                    "CovidRetrieval (cmn-Hans)": 10.79,
-                    "DBPedia": 33.35,
-                    "DuRetrieval (cmn-Hans)": 6.62,
-                    "EcomRetrieval (cmn-Hans)": 4.01,
-                    "FEVER": 55.9,
-                    "FiQA-PL (pol-Latn)": 5.82,
-                    "FiQA2018": 37.27,
-                    "GerDaLIRSmall (deu-Latn)": 1.35,
-                    "HellaSwag": 24.08,
-                    "HotpotQA": 44.59,
-                    "LEMBNarrativeQARetrieval": 19.64,
-                    "LEMBNeedleRetrieval": 12.25,
-                    "LEMBPasskeyRetrieval": 14.75,
-                    "LEMBQMSumRetrieval": 13.08,
-                    "LEMBSummScreenFDRetrieval": 46.98,
-                    "LEMBWikimQARetrieval": 44.88,
-                    "LeCaRDv2 (zho-Hans)": 18.77,
-                    "LegalBenchConsumerContractsQA": 60.21,
-                    "LegalBenchCorporateLobbying": 88.69,
-                    "LegalQuAD (deu-Latn)": 7.44,
-                    "LegalSummarization": 57.43,
-                    "MMarcoRetrieval (cmn-Hans)": 7.46,
-                    "MSMARCO": 39.03,
-                    "MedicalRetrieval (cmn-Hans)": 2.3,
-                    "MintakaRetrieval (ara-Arab)": 2.74,
-                    "MintakaRetrieval (deu-Latn)": 20.04,
-                    "MintakaRetrieval (spa-Latn)": 11.76,
-                    "MintakaRetrieval (fra-Latn)": 16.08,
-                    "MintakaRetrieval (hin-Deva)": 3.04,
-                    "MintakaRetrieval (ita-Latn)": 11.83,
-                    "MintakaRetrieval (jpn-Hira)": 7.31,
-                    "MintakaRetrieval (por-Latn)": 13.66,
-                    "NFCorpus": 32.25,
-                    "NFCorpus-PL (pol-Latn)": 15.43,
-                    "NQ": 46.47,
-                    "PIQA": 26.44,
-                    "Quail": 3.08,
-                    "QuoraRetrieval": 87.75,
-                    "RARbCode": 42.44,
-                    "RARbMath": 66.36,
-                    "RuBQRetrieval (rus-Cyrl)": 8.84,
-                    "SCIDOCS": 21.82,
-                    "SCIDOCS-PL (pol-Latn)": 5.34,
-                    "SIQA": 2.09,
-                    "SciFact": 62.64,
-                    "SciFact-PL (pol-Latn)": 22.48,
-                    "SpartQA": 2.67,
-                    "SyntecRetrieval (fra-Latn)": 60.8,
-                    "T2Retrieval (cmn-Hans)": 4.82,
-                    "TRECCOVID": 50.82,
-                    "TRECCOVID-PL (pol-Latn)": 16.52,
-                    "TempReasonL1": 1.66,
-                    "TempReasonL2Fact": 10.31,
-                    "TempReasonL2Pure": 0.63,
-                    "TempReasonL3Fact": 11.11,
-                    "TempReasonL3Pure": 6.63,
-                    "Touche2020": 17.22,
-                    "VideoRetrieval (cmn-Hans)": 9.38,
-                    "WinoGrande": 27.2,
-                    "XPQARetrieval (ara-Arab_ara-Arab)": 7.83,
-                    "XPQARetrieval (eng-Latn_ara-Arab)": 2.52,
-                    "XPQARetrieval (ara-Arab_eng-Latn)": 8.88,
-                    "XPQARetrieval (deu-Latn_deu-Latn)": 56.77,
-                    "XPQARetrieval (eng-Latn_deu-Latn)": 18.2,
-                    "XPQARetrieval (deu-Latn_eng-Latn)": 30.06,
-                    "XPQARetrieval (spa-Latn_spa-Latn)": 42.22,
-                    "XPQARetrieval (eng-Latn_spa-Latn)": 7.53,
-                    "XPQARetrieval (spa-Latn_eng-Latn)": 26.27,
-                    "XPQARetrieval (fra-Latn_fra-Latn)": 55.9,
-                    "XPQARetrieval (eng-Latn_fra-Latn)": 14.89,
-                    "XPQARetrieval (fra-Latn_eng-Latn)": 34.2,
-                    "XPQARetrieval (hin-Deva_hin-Deva)": 33.26,
-                    "XPQARetrieval (eng-Latn_hin-Deva)": 6.44,
-                    "XPQARetrieval (hin-Deva_eng-Latn)": 6.98,
-                    "XPQARetrieval (ita-Latn_ita-Latn)": 58.68,
-                    "XPQARetrieval (eng-Latn_ita-Latn)": 8.56,
-                    "XPQARetrieval (ita-Latn_eng-Latn)": 28.71,
-                    "XPQARetrieval (jpn-Hira_jpn-Hira)": 39.53,
-                    "XPQARetrieval (eng-Latn_jpn-Hira)": 5.7,
-                    "XPQARetrieval (jpn-Hira_eng-Latn)": 13.75,
-                    "XPQARetrieval (kor-Hang_kor-Hang)": 13.48,
-                    "XPQARetrieval (eng-Latn_kor-Hang)": 7.43,
-                    "XPQARetrieval (kor-Hang_eng-Latn)": 7.34,
-                    "XPQARetrieval (pol-Latn_pol-Latn)": 28.07,
-                    "XPQARetrieval (eng-Latn_pol-Latn)": 10.03,
-                    "XPQARetrieval (pol-Latn_eng-Latn)": 16.58,
-                    "XPQARetrieval (por-Latn_por-Latn)": 34.09,
-                    "XPQARetrieval (eng-Latn_por-Latn)": 7.38,
-                    "XPQARetrieval (por-Latn_eng-Latn)": 22.59,
-                    "XPQARetrieval (tam-Taml_tam-Taml)": 9.13,
-                    "XPQARetrieval (eng-Latn_tam-Taml)": 4.15,
-                    "XPQARetrieval (tam-Taml_eng-Latn)": 3.76,
-                    "XPQARetrieval (cmn-Hans_cmn-Hans)": 21.09,
-                    "XPQARetrieval (eng-Latn_cmn-Hans)": 6.58,
-                    "XPQARetrieval (cmn-Hans_eng-Latn)": 9.39,
-                    "XPQARetrieval (fr)": 55.9
+                    "Model": "tart-dual-contriever-msmarco"
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "all-MiniLM-L12-v2",
-                    "AFQMC (cmn-Hans)": 7.94,
-                    "ATEC (cmn-Hans)": 12.97,
-                    "BIOSSES": 83.57,
-                    "BQ (cmn-Hans)": 23.31,
-                    "CDSC-R (pol-Latn)": 82.5,
-                    "LCQMC (cmn-Hans)": 21.04,
-                    "PAWSX (cmn-Hans)": 7.31,
-                    "RUParaPhraserSTS (rus-Cyrl)": 45.47,
-                    "RuSTSBenchmarkSTS (rus-Cyrl)": 56.33,
-                    "SICK-R": 79.32,
-                    "SICK-R-PL (pol-Latn)": 54.26,
-                    "SICKFr (fra-Latn)": 63.16,
-                    "STS12": 73.08,
-                    "STS13": 82.13,
-                    "STS14": 76.73,
-                    "STS15": 85.58,
-                    "STS16": 80.23,
-                    "STS17 (nld-Latn_eng-Latn)": 24.51,
-                    "STS17 (eng-Latn_ara-Arab)": 0.54,
-                    "STS17 (ara-Arab)": 58.71,
-                    "STS17 (kor-Hang)": 43.37,
-                    "STS17 (eng-Latn_tur-Latn)": 0.43,
-                    "STS17 (ita-Latn_eng-Latn)": 24.28,
-                    "STS17 (eng-Latn_deu-Latn)": 27.54,
-                    "STS17 (fra-Latn_eng-Latn)": 30.7,
-                    "STS17 (spa-Latn)": 78.37,
-                    "STS17 (en-en)": 88.63,
-                    "STS17 (spa-Latn_eng-Latn)": 22.01,
-                    "STS17 (ar-ar)": 58.71,
-                    "STS17 (en-ar)": 0.54,
-                    "STS17 (en-de)": 27.54,
-                    "STS17 (en-tr)": 0.43,
-                    "STS17 (es-en)": 22.01,
-                    "STS17 (es-es)": 78.37,
-                    "STS17 (fr-en)": 30.7,
-                    "STS17 (it-en)": 24.28,
-                    "STS17 (ko-ko)": 43.37,
-                    "STS17 (nl-en)": 24.51,
-                    "STS22 (ara-Arab)": 17.54,
-                    "STS22 (cmn-Hans)": 33.15,
-                    "STS22 (fra-Latn)": 69.51,
-                    "STS22 (deu-Latn_eng-Latn)": 42.86,
-                    "STS22 (pol-Latn)": 19.22,
-                    "STS22 (spa-Latn_eng-Latn)": 53.99,
-                    "STS22 (pol-Latn_eng-Latn)": 42.67,
-                    "STS22 (tur-Latn)": 21.6,
-                    "STS22 (deu-Latn_fra-Latn)": 43.52,
-                    "STS22 (fra-Latn_pol-Latn)": 16.9,
-                    "STS22 (deu-Latn)": 22.53,
-                    "STS22 (deu-Latn_pol-Latn)": 1.63,
-                    "STS22 (en)": 65.67,
-                    "STS22 (spa-Latn)": 43.98,
-                    "STS22 (cmn-Hans_eng-Latn)": 44.39,
-                    "STS22 (spa-Latn_ita-Latn)": 40.71,
-                    "STS22 (ita-Latn)": 47.48,
-                    "STS22 (rus-Cyrl)": 11.19,
-                    "STS22 (ar)": 17.54,
-                    "STS22 (de)": 22.53,
-                    "STS22 (de-en)": 42.86,
-                    "STS22 (de-fr)": 43.52,
-                    "STS22 (de-pl)": 1.63,
-                    "STS22 (es)": 43.98,
-                    "STS22 (es-en)": 53.99,
-                    "STS22 (es-it)": 40.71,
-                    "STS22 (fr)": 69.51,
-                    "STS22 (fr-pl)": 16.9,
-                    "STS22 (it)": 47.48,
-                    "STS22 (pl)": 19.22,
-                    "STS22 (pl-en)": 42.67,
-                    "STS22 (ru)": 11.19,
-                    "STS22 (tr)": 21.6,
-                    "STS22 (zh)": 33.15,
-                    "STS22 (zh-en)": 44.39,
-                    "STSB (cmn-Hans)": 36.66,
-                    "STSBenchmark": 83.09,
-                    "STSBenchmarkMultilingualSTS (nld-Latn)": 60.03,
-                    "STSBenchmarkMultilingualSTS (spa-Latn)": 65.33,
-                    "STSBenchmarkMultilingualSTS (ita-Latn)": 60.71,
-                    "STSBenchmarkMultilingualSTS (cmn-Hans)": 38.93,
-                    "STSBenchmarkMultilingualSTS (en)": 83.09,
-                    "STSBenchmarkMultilingualSTS (por-Latn)": 63.85,
-                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 56.09,
-                    "STSBenchmarkMultilingualSTS (fra-Latn)": 66.68,
-                    "STSBenchmarkMultilingualSTS (pol-Latn)": 60.2,
-                    "STSBenchmarkMultilingualSTS (deu-Latn)": 63.28
+                    "Model": "tart-dual-contriever-msmarco"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "all-MiniLM-L12-v2",
-                    "SummEval": 27.9,
-                    "SummEvalFr (fra-Latn)": 26.63
+                    "Model": "tart-dual-contriever-msmarco"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "tart-dual-contriever-msmarco"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "all-MiniLM-L12-v2"
+                    "Model": "tart-dual-contriever-msmarco",
+                    "Core17InstructionRetrieval": -3.04,
+                    "News21InstructionRetrieval": -2.98,
+                    "Robust04InstructionRetrieval": -8.98
                 }
             ]
         }
     },
-    "Cohere-embed-english-v3.0-instruct": {
+    "voyage-law-2": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "Cohere-embed-english-v3.0-instruct"
+                    "Model": "voyage-law-2"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "Cohere-embed-english-v3.0-instruct"
+                    "Model": "voyage-law-2",
+                    "AmazonReviewsClassification (fr)": 41.98,
+                    "MTOPDomainClassification (fr)": 90.12,
+                    "MTOPIntentClassification (fr)": 62.44,
+                    "MasakhaNEWSClassification (fra)": 76.42,
+                    "MassiveIntentClassification (fr)": 66.94,
+                    "MassiveScenarioClassification (fr)": 72.78
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "Cohere-embed-english-v3.0-instruct"
-                }
-            ]
+                    "Model": "voyage-law-2",
+                    "AlloProfClusteringP2P": 62.5,
+                    "AlloProfClusteringS2S": 44.28,
+                    "HALClusteringS2S": 26.36,
+                    "MLSUMClusteringP2P (fr)": 44.03,
+                    "MLSUMClusteringS2S (fr)": 42.95,
+                    "MasakhaNEWSClusteringP2P (fra)": 50.68,
+                    "MasakhaNEWSClusteringS2S (fra)": 38.79
+                }
+            ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "Cohere-embed-english-v3.0-instruct"
+                    "Model": "voyage-law-2",
+                    "OpusparcusPC (fr)": 93.06,
+                    "PawsXPairClassification (fr)": 61.54
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "Cohere-embed-english-v3.0-instruct"
+                    "Model": "voyage-law-2",
+                    "AlloprofReranking": 72.92,
+                    "SyntecReranking": 91.2
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "Cohere-embed-english-v3.0-instruct",
-                    "ARCChallenge": 10.1,
-                    "AlphaNLI": 18.75,
-                    "HellaSwag": 29.02,
-                    "PIQA": 27.89,
-                    "Quail": 7.77,
-                    "RARbCode": 56.56,
-                    "RARbMath": 72.05,
-                    "SIQA": 5.03,
-                    "SpartQA": 3.33,
-                    "TempReasonL1": 1.43,
-                    "TempReasonL2Fact": 40.46,
-                    "TempReasonL2Pure": 2.39,
-                    "TempReasonL3Fact": 33.87,
-                    "TempReasonL3Pure": 7.52,
-                    "WinoGrande": 65.02
+                    "Model": "voyage-law-2",
+                    "AILACasedocs": 44.56,
+                    "AILAStatutes": 45.51,
+                    "AlloprofRetrieval": 57.28,
+                    "BSARDRetrieval": 11.83,
+                    "GerDaLIRSmall": 44.91,
+                    "LEMBNarrativeQARetrieval": 55.78,
+                    "LEMBNeedleRetrieval": 80.5,
+                    "LEMBPasskeyRetrieval": 93.75,
+                    "LEMBQMSumRetrieval": 57.26,
+                    "LEMBSummScreenFDRetrieval": 98.72,
+                    "LEMBWikimQARetrieval": 87.08,
+                    "LeCaRDv2": 72.75,
+                    "LegalBenchConsumerContractsQA": 83.27,
+                    "LegalBenchCorporateLobbying": 95.66,
+                    "LegalQuAD": 67.47,
+                    "LegalSummarization": 68.96,
+                    "MintakaRetrieval (fr)": 34.92,
+                    "SyntecRetrieval": 87.33,
+                    "XPQARetrieval (fr)": 73.56
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "Cohere-embed-english-v3.0-instruct"
+                    "Model": "voyage-law-2",
+                    "SICKFr": 74.09,
+                    "STS22 (fr)": 83.75,
+                    "STSBenchmarkMultilingualSTS (fr)": 83.02
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "Cohere-embed-english-v3.0-instruct"
+                    "Model": "voyage-law-2",
+                    "SummEvalFr": 30.34
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "voyage-law-2"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "Cohere-embed-english-v3.0-instruct"
+                    "Model": "voyage-law-2"
                 }
             ]
         }
     },
-    "Baichuan-text-embedding": {
+    "sentence-t5-base": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "Baichuan-text-embedding"
+                    "Model": "sentence-t5-base"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "Baichuan-text-embedding",
-                    "AmazonReviewsClassification (zh)": 48.3,
-                    "IFlyTek": 50.75,
-                    "JDReview": 87.69,
-                    "MassiveIntentClassification (zh-CN)": 74.91,
-                    "MassiveScenarioClassification (zh-CN)": 81.28,
-                    "MultilingualSentiment": 76.83,
-                    "OnlineShopping": 94.42,
-                    "TNews": 52.62,
-                    "Waimai": 88.77
+                    "Model": "sentence-t5-base",
+                    "AmazonCounterfactualClassification (de)": 69.98,
+                    "AmazonCounterfactualClassification (en)": 75.82,
+                    "AmazonCounterfactualClassification (en-ext)": 76.81,
+                    "AmazonCounterfactualClassification (ja)": 46.05,
+                    "AmazonPolarityClassification": 85.12,
+                    "AmazonReviewsClassification (de)": 37.9,
+                    "AmazonReviewsClassification (en)": 44.94,
+                    "AmazonReviewsClassification (es)": 37.33,
+                    "AmazonReviewsClassification (fr)": 37.35,
+                    "AmazonReviewsClassification (ja)": 22.29,
+                    "AmazonReviewsClassification (zh)": 21.53,
+                    "Banking77Classification": 76.48,
+                    "EmotionClassification": 51.35,
+                    "ImdbClassification": 77.34,
+                    "MTOPDomainClassification (de)": 76.98,
+                    "MTOPDomainClassification (en)": 90.34,
+                    "MTOPDomainClassification (es)": 73.61,
+                    "MTOPDomainClassification (fr)": 75.03,
+                    "MTOPDomainClassification (hi)": 21.4,
+                    "MTOPDomainClassification (th)": 16.21,
+                    "MTOPIntentClassification (de)": 44.43,
+                    "MTOPIntentClassification (en)": 63.32,
+                    "MTOPIntentClassification (es)": 42.03,
+                    "MTOPIntentClassification (fr)": 43.85,
+                    "MTOPIntentClassification (hi)": 3.8,
+                    "MTOPIntentClassification (th)": 5.21,
+                    "MasakhaNEWSClassification (fra)": 81.21,
+                    "MassiveIntentClassification (af)": 34.32,
+                    "MassiveIntentClassification (am)": 2.38,
+                    "MassiveIntentClassification (ar)": 4.53,
+                    "MassiveIntentClassification (az)": 31.76,
+                    "MassiveIntentClassification (bn)": 2.58,
+                    "MassiveIntentClassification (cy)": 28.94,
+                    "MassiveIntentClassification (da)": 38.82,
+                    "MassiveIntentClassification (de)": 45.23,
+                    "MassiveIntentClassification (el)": 10.05,
+                    "MassiveIntentClassification (en)": 69.74,
+                    "MassiveIntentClassification (es)": 45.32,
+                    "MassiveIntentClassification (fa)": 3.58,
+                    "MassiveIntentClassification (fi)": 33.52,
+                    "MassiveIntentClassification (fr)": 51.13,
+                    "MassiveIntentClassification (he)": 2.63,
+                    "MassiveIntentClassification (hi)": 2.68,
+                    "MassiveIntentClassification (hu)": 32.31,
+                    "MassiveIntentClassification (hy)": 3.33,
+                    "MassiveIntentClassification (id)": 35.5,
+                    "MassiveIntentClassification (is)": 29.82,
+                    "MassiveIntentClassification (it)": 45.59,
+                    "MassiveIntentClassification (ja)": 3.67,
+                    "MassiveIntentClassification (jv)": 31.15,
+                    "MassiveIntentClassification (ka)": 2.77,
+                    "MassiveIntentClassification (km)": 5.66,
+                    "MassiveIntentClassification (kn)": 2.59,
+                    "MassiveIntentClassification (ko)": 2.34,
+                    "MassiveIntentClassification (lv)": 33.97,
+                    "MassiveIntentClassification (ml)": 2.55,
+                    "MassiveIntentClassification (mn)": 14.7,
+                    "MassiveIntentClassification (ms)": 33.12,
+                    "MassiveIntentClassification (my)": 4.42,
+                    "MassiveIntentClassification (nb)": 38.53,
+                    "MassiveIntentClassification (nl)": 37.96,
+                    "MassiveIntentClassification (pl)": 34.41,
+                    "MassiveIntentClassification (pt)": 43.35,
+                    "MassiveIntentClassification (ro)": 42.69,
+                    "MassiveIntentClassification (ru)": 14.82,
+                    "MassiveIntentClassification (sl)": 34.54,
+                    "MassiveIntentClassification (sq)": 38.54,
+                    "MassiveIntentClassification (sv)": 35.98,
+                    "MassiveIntentClassification (sw)": 32.14,
+                    "MassiveIntentClassification (ta)": 1.41,
+                    "MassiveIntentClassification (te)": 2.5,
+                    "MassiveIntentClassification (th)": 3.71,
+                    "MassiveIntentClassification (tl)": 36.04,
+                    "MassiveIntentClassification (tr)": 33.77,
+                    "MassiveIntentClassification (ur)": 2.99,
+                    "MassiveIntentClassification (vi)": 22.62,
+                    "MassiveIntentClassification (zh-CN)": 1.12,
+                    "MassiveIntentClassification (zh-TW)": 4.63,
+                    "MassiveScenarioClassification (af)": 44.45,
+                    "MassiveScenarioClassification (am)": 7.51,
+                    "MassiveScenarioClassification (ar)": 12.32,
+                    "MassiveScenarioClassification (az)": 38.41,
+                    "MassiveScenarioClassification (bn)": 8.45,
+                    "MassiveScenarioClassification (cy)": 35.04,
+                    "MassiveScenarioClassification (da)": 48.36,
+                    "MassiveScenarioClassification (de)": 59.12,
+                    "MassiveScenarioClassification (el)": 17.68,
+                    "MassiveScenarioClassification (en)": 72.32,
+                    "MassiveScenarioClassification (es)": 55.61,
+                    "MassiveScenarioClassification (fa)": 6.86,
+                    "MassiveScenarioClassification (fi)": 41.34,
+                    "MassiveScenarioClassification (fr)": 59.92,
+                    "MassiveScenarioClassification (he)": 7.86,
+                    "MassiveScenarioClassification (hi)": 7.63,
+                    "MassiveScenarioClassification (hu)": 41.31,
+                    "MassiveScenarioClassification (hy)": 9.23,
+                    "MassiveScenarioClassification (id)": 44.64,
+                    "MassiveScenarioClassification (is)": 39.63,
+                    "MassiveScenarioClassification (it)": 54.58,
+                    "MassiveScenarioClassification (ja)": 4.96,
+                    "MassiveScenarioClassification (jv)": 40.73,
+                    "MassiveScenarioClassification (ka)": 7.51,
+                    "MassiveScenarioClassification (km)": 8.73,
+                    "MassiveScenarioClassification (kn)": 7.99,
+                    "MassiveScenarioClassification (ko)": 6.03,
+                    "MassiveScenarioClassification (lv)": 36.42,
+                    "MassiveScenarioClassification (ml)": 6.96,
+                    "MassiveScenarioClassification (mn)": 19.85,
+                    "MassiveScenarioClassification (ms)": 43.18,
+                    "MassiveScenarioClassification (my)": 9.46,
+                    "MassiveScenarioClassification (nb)": 46.6,
+                    "MassiveScenarioClassification (nl)": 50.0,
+                    "MassiveScenarioClassification (pl)": 42.3,
+                    "MassiveScenarioClassification (pt)": 52.24,
+                    "MassiveScenarioClassification (ro)": 53.7,
+                    "MassiveScenarioClassification (ru)": 20.69,
+                    "MassiveScenarioClassification (sl)": 39.79,
+                    "MassiveScenarioClassification (sq)": 50.16,
+                    "MassiveScenarioClassification (sv)": 46.69,
+                    "MassiveScenarioClassification (sw)": 40.48,
+                    "MassiveScenarioClassification (ta)": 7.47,
+                    "MassiveScenarioClassification (te)": 6.87,
+                    "MassiveScenarioClassification (th)": 8.26,
+                    "MassiveScenarioClassification (tl)": 48.94,
+                    "MassiveScenarioClassification (tr)": 41.83,
+                    "MassiveScenarioClassification (ur)": 9.77,
+                    "MassiveScenarioClassification (vi)": 30.01,
+                    "MassiveScenarioClassification (zh-CN)": 4.17,
+                    "MassiveScenarioClassification (zh-TW)": 7.91,
+                    "ToxicConversationsClassification": 68.2,
+                    "TweetSentimentExtractionClassification": 62.71
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "Baichuan-text-embedding",
-                    "CLSClusteringP2P": 60.37,
-                    "CLSClusteringS2S": 51.09,
-                    "ThuNewsClusteringP2P": 58.23,
-                    "ThuNewsClusteringS2S": 57.83
+                    "Model": "sentence-t5-base",
+                    "AlloProfClusteringP2P": 58.44,
+                    "AlloProfClusteringS2S": 35.93,
+                    "ArxivClusteringP2P": 39.28,
+                    "ArxivClusteringS2S": 27.26,
+                    "BiorxivClusteringP2P": 33.99,
+                    "BiorxivClusteringS2S": 22.92,
+                    "BlurbsClusteringP2P": 30.59,
+                    "BlurbsClusteringS2S": 11.57,
+                    "HALClusteringS2S": 17.72,
+                    "MLSUMClusteringP2P": 40.77,
+                    "MLSUMClusteringS2S": 30.06,
+                    "MasakhaNEWSClusteringP2P (fra)": 61.9,
+                    "MasakhaNEWSClusteringS2S (fra)": 35.64,
+                    "MedrxivClusteringP2P": 33.2,
+                    "MedrxivClusteringS2S": 26.13,
+                    "RedditClustering": 52.93,
+                    "RedditClusteringP2P": 59.67,
+                    "StackExchangeClustering": 63.13,
+                    "StackExchangeClusteringP2P": 35.68,
+                    "TenKGnadClusteringP2P": 44.88,
+                    "TenKGnadClusteringS2S": 18.11,
+                    "TwentyNewsgroupsClustering": 48.1
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "Baichuan-text-embedding",
-                    "Cmnli": 85.31,
-                    "Ocnli": 79.33
+                    "Model": "sentence-t5-base",
+                    "OpusparcusPC (fr)": 89.4,
+                    "PawsXPairClassification (fr)": 55.35,
+                    "SprintDuplicateQuestions": 91.23,
+                    "TwitterSemEval2015": 78.25,
+                    "TwitterURLCorpus": 86.05
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "Baichuan-text-embedding",
-                    "CMedQAv1": 88.06,
-                    "CMedQAv2": 88.46,
-                    "MMarcoReranking": 34.3,
-                    "T2Reranking": 67.85
+                    "Model": "sentence-t5-base",
+                    "AlloprofReranking": 50.12,
+                    "AskUbuntuDupQuestions": 59.73,
+                    "MindSmallReranking": 30.2,
+                    "SciDocsRR": 73.96,
+                    "StackOverflowDupQuestions": 48.46,
+                    "SyntecReranking": 78.05
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "Baichuan-text-embedding",
-                    "CmedqaRetrieval": 47.64,
-                    "CovidRetrieval": 86.86,
-                    "DuRetrieval": 88.43,
-                    "EcomRetrieval": 66.39,
-                    "MMarcoRetrieval": 80.17,
-                    "MedicalRetrieval": 61.1,
-                    "T2Retrieval": 80.11,
-                    "VideoRetrieval": 74.28
+                    "Model": "sentence-t5-base",
+                    "AlloprofRetrieval": 27.52,
+                    "ArguAna": 44.85,
+                    "BSARDRetrieval": 0.16,
+                    "CQADupstackRetrieval": 35.23,
+                    "ClimateFEVER": 10.37,
+                    "DBPedia": 27.77,
+                    "FEVER": 26.17,
+                    "FiQA2018": 34.83,
+                    "HotpotQA": 33.2,
+                    "MSMARCO": 20.7,
+                    "MintakaRetrieval (fr)": 21.04,
+                    "NFCorpus": 28.65,
+                    "NQ": 36.32,
+                    "QuoraRetrieval": 85.49,
+                    "SCIDOCS": 14.15,
+                    "SciFact": 45.76,
+                    "SyntecRetrieval": 67.0,
+                    "TRECCOVID": 40.7,
+                    "Touche2020": 20.3,
+                    "XPQARetrieval (fr)": 45.19
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "Baichuan-text-embedding",
-                    "AFQMC": 50.8,
-                    "ATEC": 53.23,
-                    "BQ": 66.49,
-                    "LCQMC": 76.6,
-                    "PAWSX": 47.56,
-                    "QBQTC": 39.96,
-                    "STS22 (zh)": 65.78,
-                    "STSB": 80.14
+                    "Model": "sentence-t5-base",
+                    "BIOSSES": 75.89,
+                    "SICK-R": 80.18,
+                    "SICKFr": 71.74,
+                    "STS12": 78.05,
+                    "STS13": 85.85,
+                    "STS14": 82.19,
+                    "STS15": 87.46,
+                    "STS16": 84.03,
+                    "STS17 (ar-ar)": 13.36,
+                    "STS17 (en-ar)": -5.65,
+                    "STS17 (en-de)": 67.11,
+                    "STS17 (en-en)": 89.57,
+                    "STS17 (en-tr)": -0.02,
+                    "STS17 (es-en)": 47.72,
+                    "STS17 (es-es)": 79.94,
+                    "STS17 (fr-en)": 56.61,
+                    "STS17 (it-en)": 30.46,
+                    "STS17 (ko-ko)": 10.06,
+                    "STS17 (nl-en)": 36.46,
+                    "STS22 (ar)": 31.2,
+                    "STS22 (de)": 42.08,
+                    "STS22 (de-en)": 46.9,
+                    "STS22 (de-fr)": 55.04,
+                    "STS22 (de-pl)": 33.94,
+                    "STS22 (en)": 62.66,
+                    "STS22 (es)": 53.81,
+                    "STS22 (es-en)": 65.19,
+                    "STS22 (es-it)": 55.29,
+                    "STS22 (fr)": 77.69,
+                    "STS22 (fr-pl)": 28.17,
+                    "STS22 (it)": 60.65,
+                    "STS22 (pl)": 24.42,
+                    "STS22 (pl-en)": 42.97,
+                    "STS22 (ru)": 12.13,
+                    "STS22 (tr)": 40.45,
+                    "STS22 (zh)": 32.9,
+                    "STS22 (zh-en)": 20.15,
+                    "STSBenchmark": 85.52,
+                    "STSBenchmarkMultilingualSTS (fr)": 74.04
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "Baichuan-text-embedding"
+                    "Model": "sentence-t5-base",
+                    "SummEval": 31.39,
+                    "SummEvalFr": 30.01
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "sentence-t5-base"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "Baichuan-text-embedding"
+                    "Model": "sentence-t5-base"
                 }
             ]
         }
     },
-    "LASER2": {
+    "glove.6B.300d": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "LASER2",
-                    "BUCC (de-en)": 99.21,
-                    "BUCC (fr-en)": 98.39,
-                    "BUCC (ru-en)": 97.62,
-                    "BUCC (zh-en)": 97.7,
-                    "Tatoeba (afr-eng)": 92.59,
-                    "Tatoeba (amh-eng)": 80.82,
-                    "Tatoeba (ang-eng)": 25.22,
-                    "Tatoeba (ara-eng)": 90.14,
-                    "Tatoeba (arq-eng)": 26.63,
-                    "Tatoeba (arz-eng)": 66.16,
-                    "Tatoeba (ast-eng)": 76.35,
-                    "Tatoeba (awa-eng)": 33.74,
-                    "Tatoeba (aze-eng)": 82.41,
-                    "Tatoeba (bel-eng)": 79.54,
-                    "Tatoeba (ben-eng)": 89.43,
-                    "Tatoeba (ber-eng)": 77.63,
-                    "Tatoeba (bos-eng)": 95.86,
-                    "Tatoeba (bre-eng)": 31.2,
-                    "Tatoeba (bul-eng)": 93.57,
-                    "Tatoeba (cat-eng)": 95.8,
-                    "Tatoeba (cbk-eng)": 77.17,
-                    "Tatoeba (ceb-eng)": 9.93,
-                    "Tatoeba (ces-eng)": 95.52,
-                    "Tatoeba (cha-eng)": 14.86,
-                    "Tatoeba (cmn-eng)": 85.62,
-                    "Tatoeba (cor-eng)": 4.45,
-                    "Tatoeba (csb-eng)": 27.03,
-                    "Tatoeba (cym-eng)": 5.85,
-                    "Tatoeba (dan-eng)": 95.22,
-                    "Tatoeba (deu-eng)": 99.07,
-                    "Tatoeba (dsb-eng)": 42.34,
-                    "Tatoeba (dtp-eng)": 7.39,
-                    "Tatoeba (ell-eng)": 96.2,
-                    "Tatoeba (epo-eng)": 96.61,
-                    "Tatoeba (est-eng)": 96.43,
-                    "Tatoeba (eus-eng)": 93.32,
-                    "Tatoeba (fao-eng)": 57.04,
-                    "Tatoeba (fin-eng)": 96.98,
-                    "Tatoeba (fra-eng)": 94.28,
-                    "Tatoeba (fry-eng)": 42.07,
-                    "Tatoeba (gla-eng)": 1.52,
-                    "Tatoeba (gle-eng)": 4.2,
-                    "Tatoeba (glg-eng)": 96.14,
-                    "Tatoeba (gsw-eng)": 27.52,
-                    "Tatoeba (heb-eng)": 0.0,
-                    "Tatoeba (hin-eng)": 95.32,
-                    "Tatoeba (hrv-eng)": 96.72,
-                    "Tatoeba (hsb-eng)": 45.75,
-                    "Tatoeba (hun-eng)": 95.2,
-                    "Tatoeba (hye-eng)": 88.72,
-                    "Tatoeba (ido-eng)": 80.86,
-                    "Tatoeba (ile-eng)": 87.88,
-                    "Tatoeba (ina-eng)": 93.93,
-                    "Tatoeba (ind-eng)": 92.98,
-                    "Tatoeba (isl-eng)": 94.32,
-                    "Tatoeba (ita-eng)": 94.32,
-                    "Tatoeba (jav-eng)": 9.95,
-                    "Tatoeba (jpn-eng)": 93.78,
-                    "Tatoeba (kab-eng)": 65.88,
-                    "Tatoeba (kat-eng)": 81.16,
-                    "Tatoeba (kaz-eng)": 53.3,
-                    "Tatoeba (khm-eng)": 74.19,
-                    "Tatoeba (kor-eng)": 87.97,
-                    "Tatoeba (kur-eng)": 19.09,
-                    "Tatoeba (kzj-eng)": 4.46,
-                    "Tatoeba (lat-eng)": 64.81,
-                    "Tatoeba (lfn-eng)": 63.39,
-                    "Tatoeba (lit-eng)": 96.2,
-                    "Tatoeba (lvs-eng)": 95.33,
-                    "Tatoeba (mal-eng)": 98.16,
-                    "Tatoeba (mar-eng)": 92.93,
-                    "Tatoeba (max-eng)": 36.96,
-                    "Tatoeba (mhr-eng)": 6.86,
-                    "Tatoeba (mkd-eng)": 93.63,
-                    "Tatoeba (mon-eng)": 3.42,
-                    "Tatoeba (nds-eng)": 77.13,
-                    "Tatoeba (nld-eng)": 95.35,
-                    "Tatoeba (nno-eng)": 72.75,
-                    "Tatoeba (nob-eng)": 95.77,
-                    "Tatoeba (nov-eng)": 60.02,
-                    "Tatoeba (oci-eng)": 58.13,
-                    "Tatoeba (orv-eng)": 23.24,
-                    "Tatoeba (pam-eng)": 3.24,
-                    "Tatoeba (pes-eng)": 93.13,
-                    "Tatoeba (pms-eng)": 36.23,
-                    "Tatoeba (pol-eng)": 97.32,
-                    "Tatoeba (por-eng)": 94.54,
-                    "Tatoeba (ron-eng)": 96.52,
-                    "Tatoeba (rus-eng)": 92.58,
-                    "Tatoeba (slk-eng)": 95.82,
-                    "Tatoeba (slv-eng)": 95.4,
-                    "Tatoeba (spa-eng)": 97.33,
-                    "Tatoeba (sqi-eng)": 97.22,
-                    "Tatoeba (srp-eng)": 93.64,
-                    "Tatoeba (swe-eng)": 95.31,
-                    "Tatoeba (swg-eng)": 33.1,
-                    "Tatoeba (swh-eng)": 55.66,
-                    "Tatoeba (tam-eng)": 87.32,
-                    "Tatoeba (tat-eng)": 34.74,
-                    "Tatoeba (tel-eng)": 96.72,
-                    "Tatoeba (tgl-eng)": 63.19,
-                    "Tatoeba (tha-eng)": 96.38,
-                    "Tatoeba (tuk-eng)": 16.35,
-                    "Tatoeba (tur-eng)": 98.03,
-                    "Tatoeba (tzl-eng)": 36.56,
-                    "Tatoeba (uig-eng)": 56.49,
-                    "Tatoeba (ukr-eng)": 93.52,
-                    "Tatoeba (urd-eng)": 84.23,
-                    "Tatoeba (uzb-eng)": 23.2,
-                    "Tatoeba (vie-eng)": 96.73,
-                    "Tatoeba (war-eng)": 8.25,
-                    "Tatoeba (wuu-eng)": 75.09,
-                    "Tatoeba (xho-eng)": 4.68,
-                    "Tatoeba (yid-eng)": 2.49,
-                    "Tatoeba (yue-eng)": 87.75,
-                    "Tatoeba (zsm-eng)": 95.41
+                    "Model": "glove.6B.300d",
+                    "BUCC (de-en)": 0.18,
+                    "BUCC (fr-en)": 0.19,
+                    "BUCC (ru-en)": 0.1,
+                    "BUCC (zh-en)": 0.0
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "LASER2",
-                    "AmazonCounterfactualClassification (de)": 67.82,
-                    "AmazonCounterfactualClassification (en)": 76.84,
-                    "AmazonCounterfactualClassification (en-ext)": 76.17,
-                    "AmazonCounterfactualClassification (ja)": 68.76,
-                    "AmazonPolarityClassification": 61.01,
-                    "AmazonReviewsClassification (de)": 31.07,
-                    "AmazonReviewsClassification (en)": 28.71,
-                    "AmazonReviewsClassification (es)": 32.72,
-                    "AmazonReviewsClassification (fr)": 31.12,
-                    "AmazonReviewsClassification (ja)": 28.94,
-                    "AmazonReviewsClassification (zh)": 30.89,
-                    "Banking77Classification": 57.76,
-                    "EmotionClassification": 24.83,
-                    "ImdbClassification": 57.58,
-                    "MTOPDomainClassification (de)": 74.08,
-                    "MTOPDomainClassification (en)": 75.36,
-                    "MTOPDomainClassification (es)": 73.47,
-                    "MTOPDomainClassification (fr)": 72.26,
-                    "MTOPDomainClassification (hi)": 72.95,
-                    "MTOPDomainClassification (th)": 72.68,
-                    "MTOPIntentClassification (de)": 51.62,
-                    "MTOPIntentClassification (en)": 49.47,
-                    "MTOPIntentClassification (es)": 52.75,
-                    "MTOPIntentClassification (fr)": 50.12,
-                    "MTOPIntentClassification (hi)": 45.55,
-                    "MTOPIntentClassification (th)": 50.07,
-                    "MasakhaNEWSClassification (fra)": 65.9,
-                    "MassiveIntentClassification (af)": 38.01,
-                    "MassiveIntentClassification (am)": 12.7,
-                    "MassiveIntentClassification (ar)": 37.16,
-                    "MassiveIntentClassification (az)": 19.98,
-                    "MassiveIntentClassification (bn)": 42.51,
-                    "MassiveIntentClassification (cy)": 17.33,
-                    "MassiveIntentClassification (da)": 45.61,
-                    "MassiveIntentClassification (de)": 44.79,
-                    "MassiveIntentClassification (el)": 46.71,
-                    "MassiveIntentClassification (en)": 47.91,
-                    "MassiveIntentClassification (es)": 45.44,
-                    "MassiveIntentClassification (fa)": 45.01,
-                    "MassiveIntentClassification (fi)": 45.94,
-                    "MassiveIntentClassification (fr)": 46.13,
-                    "MassiveIntentClassification (he)": 42.55,
-                    "MassiveIntentClassification (hi)": 40.2,
-                    "MassiveIntentClassification (hu)": 42.77,
-                    "MassiveIntentClassification (hy)": 28.07,
-                    "MassiveIntentClassification (id)": 45.81,
-                    "MassiveIntentClassification (is)": 39.86,
-                    "MassiveIntentClassification (it)": 48.25,
-                    "MassiveIntentClassification (ja)": 45.3,
-                    "MassiveIntentClassification (jv)": 24.3,
-                    "MassiveIntentClassification (ka)": 22.7,
-                    "MassiveIntentClassification (km)": 22.48,
-                    "MassiveIntentClassification (kn)": 4.32,
-                    "MassiveIntentClassification (ko)": 44.26,
-                    "MassiveIntentClassification (lv)": 39.75,
-                    "MassiveIntentClassification (ml)": 41.33,
-                    "MassiveIntentClassification (mn)": 16.2,
-                    "MassiveIntentClassification (ms)": 43.23,
-                    "MassiveIntentClassification (my)": 25.37,
-                    "MassiveIntentClassification (nb)": 37.74,
-                    "MassiveIntentClassification (nl)": 45.0,
-                    "MassiveIntentClassification (pl)": 44.99,
-                    "MassiveIntentClassification (pt)": 48.55,
-                    "MassiveIntentClassification (ro)": 44.3,
-                    "MassiveIntentClassification (ru)": 44.29,
-                    "MassiveIntentClassification (sl)": 44.72,
-                    "MassiveIntentClassification (sq)": 46.12,
-                    "MassiveIntentClassification (sv)": 45.95,
-                    "MassiveIntentClassification (sw)": 31.89,
-                    "MassiveIntentClassification (ta)": 29.63,
-                    "MassiveIntentClassification (te)": 36.03,
-                    "MassiveIntentClassification (th)": 43.39,
-                    "MassiveIntentClassification (tl)": 29.73,
-                    "MassiveIntentClassification (tr)": 43.93,
-                    "MassiveIntentClassification (ur)": 26.11,
-                    "MassiveIntentClassification (vi)": 44.33,
-                    "MassiveIntentClassification (zh-CN)": 40.62,
-                    "MassiveIntentClassification (zh-TW)": 32.93,
-                    "MassiveScenarioClassification (af)": 47.1,
-                    "MassiveScenarioClassification (am)": 17.7,
-                    "MassiveScenarioClassification (ar)": 45.21,
-                    "MassiveScenarioClassification (az)": 28.21,
-                    "MassiveScenarioClassification (bn)": 50.52,
-                    "MassiveScenarioClassification (cy)": 22.58,
-                    "MassiveScenarioClassification (da)": 54.87,
-                    "MassiveScenarioClassification (de)": 54.34,
-                    "MassiveScenarioClassification (el)": 55.47,
-                    "MassiveScenarioClassification (en)": 55.92,
-                    "MassiveScenarioClassification (es)": 52.77,
-                    "MassiveScenarioClassification (fa)": 52.5,
-                    "MassiveScenarioClassification (fi)": 52.63,
-                    "MassiveScenarioClassification (fr)": 54.32,
-                    "MassiveScenarioClassification (he)": 52.41,
-                    "MassiveScenarioClassification (hi)": 47.37,
-                    "MassiveScenarioClassification (hu)": 53.43,
-                    "MassiveScenarioClassification (hy)": 33.57,
-                    "MassiveScenarioClassification (id)": 54.38,
-                    "MassiveScenarioClassification (is)": 49.78,
-                    "MassiveScenarioClassification (it)": 54.84,
-                    "MassiveScenarioClassification (ja)": 54.12,
-                    "MassiveScenarioClassification (jv)": 32.71,
-                    "MassiveScenarioClassification (ka)": 26.92,
-                    "MassiveScenarioClassification (km)": 27.23,
-                    "MassiveScenarioClassification (kn)": 10.06,
-                    "MassiveScenarioClassification (ko)": 52.01,
-                    "MassiveScenarioClassification (lv)": 44.82,
-                    "MassiveScenarioClassification (ml)": 49.1,
-                    "MassiveScenarioClassification (mn)": 21.51,
-                    "MassiveScenarioClassification (ms)": 53.6,
-                    "MassiveScenarioClassification (my)": 29.72,
-                    "MassiveScenarioClassification (nb)": 43.9,
-                    "MassiveScenarioClassification (nl)": 53.33,
-                    "MassiveScenarioClassification (pl)": 52.92,
-                    "MassiveScenarioClassification (pt)": 53.41,
-                    "MassiveScenarioClassification (ro)": 50.48,
-                    "MassiveScenarioClassification (ru)": 51.84,
-                    "MassiveScenarioClassification (sl)": 51.29,
-                    "MassiveScenarioClassification (sq)": 55.65,
-                    "MassiveScenarioClassification (sv)": 54.64,
-                    "MassiveScenarioClassification (sw)": 42.04,
-                    "MassiveScenarioClassification (ta)": 36.72,
-                    "MassiveScenarioClassification (te)": 42.08,
-                    "MassiveScenarioClassification (th)": 52.15,
-                    "MassiveScenarioClassification (tl)": 37.34,
-                    "MassiveScenarioClassification (tr)": 52.56,
-                    "MassiveScenarioClassification (ur)": 32.6,
-                    "MassiveScenarioClassification (vi)": 50.97,
-                    "MassiveScenarioClassification (zh-CN)": 50.22,
-                    "MassiveScenarioClassification (zh-TW)": 42.32,
-                    "ToxicConversationsClassification": 54.05,
-                    "TweetSentimentExtractionClassification": 48.73
+                    "Model": "glove.6B.300d",
+                    "AmazonCounterfactualClassification (en)": 56.91,
+                    "AmazonPolarityClassification": 60.32,
+                    "AmazonReviewsClassification (en)": 29.67,
+                    "Banking77Classification": 67.69,
+                    "EmotionClassification": 36.93,
+                    "ImdbClassification": 62.57,
+                    "MTOPDomainClassification (en)": 79.11,
+                    "MTOPIntentClassification (en)": 55.85,
+                    "MassiveIntentClassification (en)": 56.19,
+                    "MassiveScenarioClassification (en)": 66.03,
+                    "ToxicConversationsClassification": 65.4,
+                    "TweetSentimentExtractionClassification": 50.8
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "LASER2",
-                    "AlloProfClusteringP2P": 48.45,
-                    "AlloProfClusteringS2S": 25.81,
-                    "ArxivClusteringP2P": 17.77,
-                    "ArxivClusteringS2S": 12.39,
-                    "BiorxivClusteringP2P": 12.4,
-                    "BiorxivClusteringS2S": 8.83,
-                    "HALClusteringS2S": 11.52,
-                    "MLSUMClusteringP2P": 34.53,
-                    "MLSUMClusteringS2S": 27.35,
-                    "MasakhaNEWSClusteringP2P (fra)": 32.04,
-                    "MasakhaNEWSClusteringS2S (fra)": 29.77,
-                    "MedrxivClusteringP2P": 17.91,
-                    "MedrxivClusteringS2S": 16.63,
-                    "RedditClustering": 9.96,
-                    "RedditClusteringP2P": 26.42,
-                    "StackExchangeClustering": 15.79,
-                    "StackExchangeClusteringP2P": 18.63,
-                    "TwentyNewsgroupsClustering": 11.38
+                    "Model": "glove.6B.300d",
+                    "ArxivClusteringP2P": 32.56,
+                    "ArxivClusteringS2S": 23.14,
+                    "BiorxivClusteringP2P": 29.27,
+                    "BiorxivClusteringS2S": 19.18,
+                    "MedrxivClusteringP2P": 26.12,
+                    "MedrxivClusteringS2S": 20.38,
+                    "RedditClustering": 28.46,
+                    "RedditClusteringP2P": 35.82,
+                    "StackExchangeClustering": 35.8,
+                    "StackExchangeClusteringP2P": 28.51,
+                    "TwentyNewsgroupsClustering": 25.83
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "LASER2",
-                    "OpusparcusPC (fr)": 93.77,
-                    "PawsXPairClassification (fr)": 69.53,
-                    "SprintDuplicateQuestions": 65.54,
-                    "TwitterSemEval2015": 59.57,
-                    "TwitterURLCorpus": 81.47
+                    "Model": "glove.6B.300d",
+                    "SprintDuplicateQuestions": 86.96,
+                    "TwitterSemEval2015": 48.45,
+                    "TwitterURLCorpus": 77.35
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "LASER2",
-                    "AlloprofReranking": 35.29,
-                    "AskUbuntuDupQuestions": 48.99,
-                    "MindSmallReranking": 24.79,
-                    "SciDocsRR": 54.99,
-                    "StackOverflowDupQuestions": 36.98,
-                    "SyntecReranking": 55.93
+                    "Model": "glove.6B.300d",
+                    "AskUbuntuDupQuestions": 49.57,
+                    "MindSmallReranking": 27.01,
+                    "SciDocsRR": 62.56,
+                    "StackOverflowDupQuestions": 34.03
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "LASER2",
-                    "AlloprofRetrieval": 3.1,
-                    "ArguAna": 12.86,
-                    "BSARDRetrieval": 0.36,
-                    "CQADupstackRetrieval": 4.12,
-                    "ClimateFEVER": 0.36,
-                    "DBPedia": 1.53,
-                    "FEVER": 0.77,
-                    "FiQA2018": 1.73,
-                    "HotpotQA": 5.5,
-                    "MSMARCO": 1.09,
-                    "MintakaRetrieval (fr)": 6.31,
-                    "NFCorpus": 2.44,
-                    "NQ": 0.64,
-                    "QuoraRetrieval": 71.14,
-                    "SCIDOCS": 0.78,
-                    "SciFact": 4.04,
-                    "SyntecRetrieval": 28.58,
-                    "TRECCOVID": 10.97,
-                    "Touche2020": 1.06,
-                    "XPQARetrieval (fr)": 42.59
+                    "Model": "glove.6B.300d",
+                    "ArguAna": 36.3,
+                    "CQADupstackRetrieval": 15.47,
+                    "ClimateFEVER": 14.44,
+                    "DBPedia": 18.28,
+                    "FEVER": 14.99,
+                    "FiQA2018": 10.09,
+                    "HotpotQA": 19.18,
+                    "MSMARCO": 9.6,
+                    "NFCorpus": 13.87,
+                    "NQ": 12.87,
+                    "QuoraRetrieval": 71.32,
+                    "SCIDOCS": 8.04,
+                    "SciFact": 29.58,
+                    "TRECCOVID": 36.22,
+                    "Touche2020": 13.99
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "LASER2",
-                    "BIOSSES": 62.01,
-                    "SICK-R": 62.86,
-                    "SICKFr": 64.95,
-                    "STS12": 62.6,
-                    "STS13": 59.62,
-                    "STS14": 57.03,
-                    "STS15": 71.57,
-                    "STS16": 70.75,
-                    "STS17 (ar-ar)": 67.47,
-                    "STS17 (en-ar)": 65.05,
-                    "STS17 (en-de)": 66.66,
-                    "STS17 (en-en)": 76.73,
-                    "STS17 (en-tr)": 70.05,
-                    "STS17 (es-en)": 55.3,
-                    "STS17 (es-es)": 79.67,
-                    "STS17 (fr-en)": 70.82,
-                    "STS17 (it-en)": 70.98,
-                    "STS17 (ko-ko)": 70.52,
-                    "STS17 (nl-en)": 68.12,
-                    "STS22 (ar)": 42.57,
-                    "STS22 (de)": 25.69,
-                    "STS22 (de-en)": 32.35,
-                    "STS22 (de-fr)": 37.41,
-                    "STS22 (de-pl)": 15.67,
-                    "STS22 (en)": 39.76,
-                    "STS22 (es)": 54.92,
-                    "STS22 (es-en)": 54.34,
-                    "STS22 (es-it)": 42.21,
-                    "STS22 (fr)": 58.61,
-                    "STS22 (fr-pl)": 39.44,
-                    "STS22 (it)": 60.31,
-                    "STS22 (pl)": 18.34,
-                    "STS22 (pl-en)": 53.63,
-                    "STS22 (ru)": 39.24,
-                    "STS22 (tr)": 36.97,
-                    "STS22 (zh)": 49.41,
-                    "STS22 (zh-en)": 46.19,
-                    "STSBenchmark": 69.77,
-                    "STSBenchmarkMultilingualSTS (fr)": 69.82
+                    "Model": "glove.6B.300d",
+                    "BIOSSES": 44.93,
+                    "SICK-R": 55.43,
+                    "STS12": 54.64,
+                    "STS13": 69.16,
+                    "STS14": 60.81,
+                    "STS15": 72.31,
+                    "STS16": 65.34,
+                    "STS17 (en-en)": 77.95,
+                    "STS22 (en)": 56.35,
+                    "STSBenchmark": 61.54
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "LASER2",
-                    "SummEval": 26.8,
-                    "SummEvalFr": 31.56
+                    "Model": "glove.6B.300d",
+                    "SummEval": 28.87
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "glove.6B.300d"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "LASER2"
+                    "Model": "glove.6B.300d"
                 }
             ]
         }
     },
-    "nomic-embed-text-v1": {
+    "bert-base-swedish-cased": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "nomic-embed-text-v1"
+                    "Model": "bert-base-swedish-cased",
+                    "BornholmBitextMining": 6.6
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "nomic-embed-text-v1"
+                    "Model": "bert-base-swedish-cased",
+                    "AngryTweetsClassification": 44.58,
+                    "DKHateClassification": 55.53,
+                    "DanishPoliticalCommentsClassification": 28.97,
+                    "LccSentimentClassification": 41.2,
+                    "MassiveIntentClassification (da)": 37.98,
+                    "MassiveIntentClassification (nb)": 35.75,
+                    "MassiveIntentClassification (sv)": 52.75,
+                    "MassiveScenarioClassification (da)": 40.44,
+                    "MassiveScenarioClassification (nb)": 35.76,
+                    "MassiveScenarioClassification (sv)": 56.09,
+                    "NoRecClassification": 43.91,
+                    "NordicLangClassification": 62.45,
+                    "NorwegianParliament": 57.56,
+                    "ScalaDaClassification": 53.53,
+                    "ScalaNbClassification": 53.63
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "nomic-embed-text-v1"
+                    "Model": "bert-base-swedish-cased"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "nomic-embed-text-v1"
+                    "Model": "bert-base-swedish-cased"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "nomic-embed-text-v1"
+                    "Model": "bert-base-swedish-cased"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "nomic-embed-text-v1",
-                    "LEMBNarrativeQARetrieval": 41.23,
-                    "LEMBNeedleRetrieval": 39.5,
-                    "LEMBPasskeyRetrieval": 44.75,
-                    "LEMBQMSumRetrieval": 36.65,
-                    "LEMBSummScreenFDRetrieval": 92.97,
-                    "LEMBWikimQARetrieval": 73.75
+                    "Model": "bert-base-swedish-cased"
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "nomic-embed-text-v1"
+                    "Model": "bert-base-swedish-cased"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "nomic-embed-text-v1"
+                    "Model": "bert-base-swedish-cased"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "bert-base-swedish-cased"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "nomic-embed-text-v1"
+                    "Model": "bert-base-swedish-cased"
                 }
             ]
         }
     },
-    "text-search-ada-001": {
+    "bge-large-zh-v1.5": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "text-search-ada-001"
+                    "Model": "bge-large-zh-v1.5"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "text-search-ada-001"
+                    "Model": "bge-large-zh-v1.5",
+                    "AmazonReviewsClassification (zh)": 41.38,
+                    "IFlyTek": 48.74,
+                    "JDReview": 85.14,
+                    "MassiveIntentClassification (zh-CN)": 68.84,
+                    "MassiveScenarioClassification (zh-CN)": 74.7,
+                    "MultilingualSentiment": 72.97,
+                    "OnlineShopping": 91.43,
+                    "TNews": 52.1,
+                    "Waimai": 86.9
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "text-search-ada-001",
-                    "BiorxivClusteringS2S": 26.05,
-                    "MedrxivClusteringS2S": 25.67,
-                    "TwentyNewsgroupsClustering": 44.92
+                    "Model": "bge-large-zh-v1.5",
+                    "CLSClusteringP2P": 41.44,
+                    "CLSClusteringS2S": 38.33,
+                    "ThuNewsClusteringP2P": 59.61,
+                    "ThuNewsClusteringS2S": 56.58
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "text-search-ada-001"
+                    "Model": "bge-large-zh-v1.5",
+                    "Cmnli": 85.27,
+                    "Ocnli": 77.94
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "text-search-ada-001"
+                    "Model": "bge-large-zh-v1.5",
+                    "CMedQAv1": 83.45,
+                    "CMedQAv2": 85.44,
+                    "MMarcoReranking": 28.74,
+                    "T2Reranking": 65.74
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "text-search-ada-001",
-                    "ArguAna": 46.91,
-                    "ClimateFEVER": 18.5,
-                    "DBPedia": 36.2,
-                    "FEVER": 72.1,
-                    "FiQA2018": 38.41,
-                    "HotpotQA": 59.39,
-                    "MSMARCO": 37.94,
-                    "NFCorpus": 33.17,
-                    "NQ": 42.81,
-                    "QuoraRetrieval": 70.57,
-                    "SCIDOCS": 14.83,
-                    "SciFact": 67.25,
-                    "TRECCOVID": 72.43,
-                    "Touche2020": 28.68
+                    "Model": "bge-large-zh-v1.5",
+                    "CmedqaRetrieval": 42.57,
+                    "CovidRetrieval": 73.35,
+                    "DuRetrieval": 86.32,
+                    "EcomRetrieval": 65.33,
+                    "MMarcoRetrieval": 79.23,
+                    "MedicalRetrieval": 59.59,
+                    "T2Retrieval": 83.99,
+                    "VideoRetrieval": 73.32
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "text-search-ada-001"
+                    "Model": "bge-large-zh-v1.5",
+                    "AFQMC": 44.36,
+                    "ATEC": 49.54,
+                    "BQ": 62.94,
+                    "LCQMC": 74.33,
+                    "PAWSX": 33.92,
+                    "QBQTC": 37.29,
+                    "STS22 (zh)": 68.94,
+                    "STSB": 78.7
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "text-search-ada-001"
+                    "Model": "bge-large-zh-v1.5"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "bge-large-zh-v1.5"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "text-search-ada-001"
+                    "Model": "bge-large-zh-v1.5"
                 }
             ]
         }
     },
-    "rubert-tiny": {
+    "LLM2Vec-Meta-Llama-3-supervised": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "rubert-tiny"
+                    "Model": "LLM2Vec-Meta-Llama-3-supervised"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "rubert-tiny",
-                    "GeoreviewClassification (rus-Cyrl)": 33.45,
-                    "HeadlineClassification (rus-Cyrl)": 57.65,
-                    "InappropriatenessClassification (rus-Cyrl)": 54.5,
-                    "KinopoiskClassification (rus-Cyrl)": 41.36,
-                    "MassiveIntentClassification (rus-Cyrl)": 50.1,
-                    "MassiveScenarioClassification (rus-Cyrl)": 52.15,
-                    "RuReviewsClassification (rus-Cyrl)": 49.56,
-                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 35.71,
-                    "RuSciBenchOECDClassification (rus-Cyrl)": 26.51
+                    "Model": "LLM2Vec-Meta-Llama-3-supervised",
+                    "AmazonCounterfactualClassification (en)": 79.94,
+                    "AmazonPolarityClassification": 86.07,
+                    "AmazonReviewsClassification (en)": 46.84,
+                    "Banking77Classification": 88.05,
+                    "EmotionClassification": 51.2,
+                    "ImdbClassification": 82.94,
+                    "MTOPDomainClassification (en)": 96.14,
+                    "MTOPIntentClassification (en)": 86.11,
+                    "MassiveIntentClassification (en)": 79.8,
+                    "MassiveScenarioClassification (en)": 81.52,
+                    "ToxicConversationsClassification": 70.59,
+                    "TweetSentimentExtractionClassification": 61.9
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "rubert-tiny",
-                    "GeoreviewClusteringP2P (rus-Cyrl)": 34.4,
-                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 29.89,
-                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 27.98
+                    "Model": "LLM2Vec-Meta-Llama-3-supervised",
+                    "ArxivClusteringP2P": 44.27,
+                    "ArxivClusteringS2S": 46.85,
+                    "BiorxivClusteringP2P": 32.35,
+                    "BiorxivClusteringS2S": 36.7,
+                    "MedrxivClusteringP2P": 30.71,
+                    "MedrxivClusteringS2S": 32.96,
+                    "RedditClustering": 61.72,
+                    "RedditClusteringP2P": 63.98,
+                    "StackExchangeClustering": 72.74,
+                    "StackExchangeClusteringP2P": 32.26,
+                    "TwentyNewsgroupsClustering": 56.41
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "rubert-tiny",
-                    "TERRa (rus-Cyrl)": 51.06
+                    "Model": "LLM2Vec-Meta-Llama-3-supervised",
+                    "SprintDuplicateQuestions": 95.09,
+                    "TwitterSemEval2015": 81.73,
+                    "TwitterURLCorpus": 86.56
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "rubert-tiny",
-                    "RuBQReranking (rus-Cyrl)": 35.44
+                    "Model": "LLM2Vec-Meta-Llama-3-supervised",
+                    "AskUbuntuDupQuestions": 65.19,
+                    "MindSmallReranking": 32.67,
+                    "SciDocsRR": 86.05,
+                    "StackOverflowDupQuestions": 54.82
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "rubert-tiny",
-                    "RiaNewsRetrieval (rus-Cyrl)": 0.79,
-                    "RuBQRetrieval (rus-Cyrl)": 3.24
+                    "Model": "LLM2Vec-Meta-Llama-3-supervised",
+                    "ArguAna": 62.78,
+                    "CQADupstackRetrieval": 48.25,
+                    "ClimateFEVER": 34.27,
+                    "DBPedia": 48.34,
+                    "FEVER": 90.2,
+                    "FiQA2018": 55.33,
+                    "HotpotQA": 71.76,
+                    "MSMARCO": 43.24,
+                    "NFCorpus": 41.83,
+                    "NQ": 64.21,
+                    "QuoraRetrieval": 87.16,
+                    "SCIDOCS": 22.96,
+                    "SciFact": 78.22,
+                    "TRECCOVID": 80.34,
+                    "Touche2020": 20.5
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "rubert-tiny",
-                    "RUParaPhraserSTS (rus-Cyrl)": 53.41,
-                    "RuSTSBenchmarkSTS (rus-Cyrl)": 58.16,
-                    "STS22 (rus-Cyrl)": 47.88
+                    "Model": "LLM2Vec-Meta-Llama-3-supervised",
+                    "BIOSSES": 84.92,
+                    "SICK-R": 83.94,
+                    "STS12": 79.27,
+                    "STS13": 84.83,
+                    "STS14": 82.94,
+                    "STS15": 88.09,
+                    "STS16": 86.54,
+                    "STS17 (en-en)": 89.58,
+                    "STS22 (en)": 67.67,
+                    "STSBenchmark": 88.05
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "rubert-tiny"
+                    "Model": "LLM2Vec-Meta-Llama-3-supervised",
+                    "SummEval": 30.94
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "LLM2Vec-Meta-Llama-3-supervised"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "rubert-tiny"
+                    "Model": "LLM2Vec-Meta-Llama-3-supervised"
                 }
             ]
         }
     },
-    "sbert_large_mt_nlu_ru": {
+    "text-similarity-davinci-001": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "sbert_large_mt_nlu_ru"
+                    "Model": "text-similarity-davinci-001"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "sbert_large_mt_nlu_ru",
-                    "GeoreviewClassification (rus-Cyrl)": 39.67,
-                    "HeadlineClassification (rus-Cyrl)": 77.19,
-                    "InappropriatenessClassification (rus-Cyrl)": 64.64,
-                    "KinopoiskClassification (rus-Cyrl)": 50.33,
-                    "MassiveIntentClassification (rus-Cyrl)": 61.42,
-                    "MassiveScenarioClassification (rus-Cyrl)": 68.13,
-                    "RuReviewsClassification (rus-Cyrl)": 58.29,
-                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 54.19,
-                    "RuSciBenchOECDClassification (rus-Cyrl)": 43.8
+                    "Model": "text-similarity-davinci-001"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "sbert_large_mt_nlu_ru",
-                    "GeoreviewClusteringP2P (rus-Cyrl)": 58.45,
-                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 52.2,
-                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 47.29
+                    "Model": "text-similarity-davinci-001",
+                    "RedditClustering": 31.78,
+                    "StackExchangeClustering": 36.86,
+                    "TwentyNewsgroupsClustering": 29.33
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "sbert_large_mt_nlu_ru",
-                    "TERRa (rus-Cyrl)": 51.97
+                    "Model": "text-similarity-davinci-001",
+                    "SprintDuplicateQuestions": 69.52,
+                    "TwitterSemEval2015": 74.42,
+                    "TwitterURLCorpus": 83.75
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "sbert_large_mt_nlu_ru",
-                    "RuBQReranking (rus-Cyrl)": 56.13
+                    "Model": "text-similarity-davinci-001",
+                    "AskUbuntuDupQuestions": 53.56,
+                    "SciDocsRR": 68.7,
+                    "StackOverflowDupQuestions": 39.41
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "sbert_large_mt_nlu_ru",
-                    "RiaNewsRetrieval (rus-Cyrl)": 21.4,
-                    "RuBQRetrieval (rus-Cyrl)": 29.8
+                    "Model": "text-similarity-davinci-001"
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "sbert_large_mt_nlu_ru",
-                    "RUParaPhraserSTS (rus-Cyrl)": 65.17,
-                    "RuSTSBenchmarkSTS (rus-Cyrl)": 71.22,
-                    "STS22 (rus-Cyrl)": 56.82
+                    "Model": "text-similarity-davinci-001",
+                    "BIOSSES": 68.95,
+                    "SICK-R": 78.72,
+                    "STSBenchmark": 84.08
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "sbert_large_mt_nlu_ru"
+                    "Model": "text-similarity-davinci-001"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "text-similarity-davinci-001"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "sbert_large_mt_nlu_ru"
+                    "Model": "text-similarity-davinci-001"
                 }
             ]
         }
     },
-    "gelectra-large": {
+    "bert-base-15lang-cased": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "gelectra-large"
+                    "Model": "bert-base-15lang-cased"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "gelectra-large"
+                    "Model": "bert-base-15lang-cased",
+                    "AmazonReviewsClassification (fr)": 29.35,
+                    "MTOPDomainClassification (fr)": 63.7,
+                    "MTOPIntentClassification (fr)": 37.85,
+                    "MasakhaNEWSClassification (fra)": 63.89,
+                    "MassiveIntentClassification (fr)": 37.28,
+                    "MassiveScenarioClassification (fr)": 44.47
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "gelectra-large",
-                    "BlurbsClusteringP2P": 13.96,
-                    "BlurbsClusteringS2S": 7.57,
-                    "TenKGnadClusteringP2P": 11.49,
-                    "TenKGnadClusteringS2S": 3.91
+                    "Model": "bert-base-15lang-cased",
+                    "AlloProfClusteringP2P": 53.16,
+                    "AlloProfClusteringS2S": 43.43,
+                    "HALClusteringS2S": 20.26,
+                    "MLSUMClusteringP2P": 41.22,
+                    "MLSUMClusteringS2S": 31.88,
+                    "MasakhaNEWSClusteringP2P (fra)": 24.23,
+                    "MasakhaNEWSClusteringS2S (fra)": 24.46
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "gelectra-large"
+                    "Model": "bert-base-15lang-cased",
+                    "OpusparcusPC (fr)": 86.78,
+                    "PawsXPairClassification (fr)": 53.38
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "gelectra-large"
+                    "Model": "bert-base-15lang-cased",
+                    "AlloprofReranking": 36.21,
+                    "SyntecReranking": 53.25
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "gelectra-large"
+                    "Model": "bert-base-15lang-cased",
+                    "AlloprofRetrieval": 1.61,
+                    "BSARDRetrieval": 0.0,
+                    "MintakaRetrieval (fr)": 3.55,
+                    "SyntecRetrieval": 18.95,
+                    "XPQARetrieval (fr)": 18.35
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "gelectra-large"
+                    "Model": "bert-base-15lang-cased",
+                    "SICKFr": 58.77,
+                    "STS22 (fr)": 40.4,
+                    "STSBenchmarkMultilingualSTS (fr)": 52.25
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "gelectra-large"
+                    "Model": "bert-base-15lang-cased",
+                    "SummEvalFr": 29.13
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "bert-base-15lang-cased"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "gelectra-large"
+                    "Model": "bert-base-15lang-cased"
                 }
             ]
         }
     },
-    "distilrubert-small-cased-conversational": {
+    "dfm-encoder-large-v1": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "distilrubert-small-cased-conversational",
-                    "Tatoeba (rus-Cyrl_eng-Latn)": 24.16
+                    "Model": "dfm-encoder-large-v1",
+                    "BornholmBitextMining": 11.65
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "distilrubert-small-cased-conversational",
-                    "GeoreviewClassification (rus-Cyrl)": 38.95,
-                    "HeadlineClassification (rus-Cyrl)": 75.59,
-                    "InappropriatenessClassification (rus-Cyrl)": 60.68,
-                    "KinopoiskClassification (rus-Cyrl)": 49.67,
-                    "MassiveIntentClassification (rus-Cyrl)": 63.12,
-                    "MassiveScenarioClassification (rus-Cyrl)": 68.08,
-                    "RuReviewsClassification (rus-Cyrl)": 54.05,
-                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 48.53,
-                    "RuSciBenchOECDClassification (rus-Cyrl)": 37.65
+                    "Model": "dfm-encoder-large-v1",
+                    "AngryTweetsClassification": 53.8,
+                    "DKHateClassification": 60.09,
+                    "DanishPoliticalCommentsClassification": 36.6,
+                    "LccSentimentClassification": 57.33,
+                    "MassiveIntentClassification (da)": 60.55,
+                    "MassiveIntentClassification (nb)": 52.49,
+                    "MassiveIntentClassification (sv)": 49.74,
+                    "MassiveScenarioClassification (da)": 64.16,
+                    "MassiveScenarioClassification (nb)": 54.59,
+                    "MassiveScenarioClassification (sv)": 50.1,
+                    "NoRecClassification": 48.3,
+                    "NordicLangClassification": 77.68,
+                    "NorwegianParliament": 58.78,
+                    "ScalaDaClassification": 63.08,
+                    "ScalaNbClassification": 58.95
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "distilrubert-small-cased-conversational",
-                    "GeoreviewClusteringP2P (rus-Cyrl)": 43.26,
-                    "MLSUMClusteringP2P (rus-Cyrl)": 50.08,
-                    "MLSUMClusteringS2S (rus-Cyrl)": 51.12,
-                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 37.84,
-                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 34.12
+                    "Model": "dfm-encoder-large-v1"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "distilrubert-small-cased-conversational",
-                    "OpusparcusPC (rus-Cyrl)": 84.35,
-                    "TERRa (rus-Cyrl)": 52.48
+                    "Model": "dfm-encoder-large-v1"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "distilrubert-small-cased-conversational",
-                    "RuBQReranking (rus-Cyrl)": 42.58
+                    "Model": "dfm-encoder-large-v1"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "distilrubert-small-cased-conversational",
-                    "RiaNewsRetrieval (rus-Cyrl)": 4.14,
-                    "RuBQRetrieval (rus-Cyrl)": 10.6
+                    "Model": "dfm-encoder-large-v1"
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "distilrubert-small-cased-conversational",
-                    "RUParaPhraserSTS (rus-Cyrl)": 55.01,
-                    "RuSTSBenchmarkSTS (rus-Cyrl)": 61.72,
-                    "STS22 (rus-Cyrl)": 51.87,
-                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 61.6
+                    "Model": "dfm-encoder-large-v1"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "distilrubert-small-cased-conversational"
+                    "Model": "dfm-encoder-large-v1"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "dfm-encoder-large-v1"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "distilrubert-small-cased-conversational"
+                    "Model": "dfm-encoder-large-v1"
                 }
             ]
         }
     },
-    "monot5-base-msmarco-10k": {
+    "nomic-embed-text-v1.5-512": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "monot5-base-msmarco-10k"
+                    "Model": "nomic-embed-text-v1.5-512"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "monot5-base-msmarco-10k"
+                    "Model": "nomic-embed-text-v1.5-512",
+                    "AmazonCounterfactualClassification (en)": 74.27,
+                    "AmazonPolarityClassification": 91.89,
+                    "AmazonReviewsClassification (en)": 46.97,
+                    "Banking77Classification": 84.15,
+                    "EmotionClassification": 47.73,
+                    "ImdbClassification": 85.47,
+                    "MTOPDomainClassification (en)": 92.62,
+                    "MTOPIntentClassification (en)": 74.27,
+                    "MassiveIntentClassification (en)": 73.07,
+                    "MassiveScenarioClassification (en)": 76.82,
+                    "ToxicConversationsClassification": 71.25,
+                    "TweetSentimentExtractionClassification": 60.4
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "monot5-base-msmarco-10k"
+                    "Model": "nomic-embed-text-v1.5-512",
+                    "ArxivClusteringP2P": 45.45,
+                    "ArxivClusteringS2S": 36.19,
+                    "BiorxivClusteringP2P": 38.41,
+                    "BiorxivClusteringS2S": 32.28,
+                    "MedrxivClusteringP2P": 34.47,
+                    "MedrxivClusteringS2S": 31.43,
+                    "RedditClustering": 55.9,
+                    "RedditClusteringP2P": 60.58,
+                    "StackExchangeClustering": 62.94,
+                    "StackExchangeClusteringP2P": 33.81,
+                    "TwentyNewsgroupsClustering": 49.36
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "monot5-base-msmarco-10k"
+                    "Model": "nomic-embed-text-v1.5-512",
+                    "SprintDuplicateQuestions": 92.91,
+                    "TwitterSemEval2015": 74.3,
+                    "TwitterURLCorpus": 86.57
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "monot5-base-msmarco-10k"
+                    "Model": "nomic-embed-text-v1.5-512",
+                    "AskUbuntuDupQuestions": 61.6,
+                    "MindSmallReranking": 30.34,
+                    "SciDocsRR": 80.33,
+                    "StackOverflowDupQuestions": 50.32
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "monot5-base-msmarco-10k"
+                    "Model": "nomic-embed-text-v1.5-512",
+                    "ArguAna": 47.45,
+                    "CQADupstackRetrieval": 39.06,
+                    "ClimateFEVER": 40.7,
+                    "DBPedia": 42.96,
+                    "FEVER": 85.7,
+                    "FiQA2018": 36.92,
+                    "HotpotQA": 71.48,
+                    "MSMARCO": 42.29,
+                    "NFCorpus": 33.31,
+                    "NQ": 58.83,
+                    "QuoraRetrieval": 87.87,
+                    "SCIDOCS": 17.88,
+                    "SciFact": 70.12,
+                    "TRECCOVID": 82.12,
+                    "Touche2020": 29.24
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "monot5-base-msmarco-10k"
+                    "Model": "nomic-embed-text-v1.5-512",
+                    "BIOSSES": 83.3,
+                    "SICK-R": 79.27,
+                    "STS12": 78.3,
+                    "STS13": 85.81,
+                    "STS14": 81.38,
+                    "STS15": 86.79,
+                    "STS16": 84.56,
+                    "STS17 (en-en)": 87.25,
+                    "STS22 (en)": 65.24,
+                    "STSBenchmark": 85.14
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "monot5-base-msmarco-10k"
+                    "Model": "nomic-embed-text-v1.5-512",
+                    "SummEval": 30.47
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "nomic-embed-text-v1.5-512"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "monot5-base-msmarco-10k",
-                    "Core17InstructionRetrieval": -4.06,
-                    "News21InstructionRetrieval": 5.02,
-                    "Robust04InstructionRetrieval": -6.2
+                    "Model": "nomic-embed-text-v1.5-512"
                 }
             ]
         }
     },
-    "all-MiniLM-L6-v2": {
+    "GritLM-7B-noinstruct": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "all-MiniLM-L6-v2",
-                    "BornholmBitextMining": 29.68,
-                    "BornholmBitextMining (dan-Latn)": 29.68,
-                    "Tatoeba (kab-Latn_eng-Latn)": 0.96,
-                    "Tatoeba (aze-Latn_eng-Latn)": 1.04,
-                    "Tatoeba (wuu-Hans_eng-Latn)": 0.6,
-                    "Tatoeba (fra-Latn_eng-Latn)": 8.17,
-                    "Tatoeba (nov-Latn_eng-Latn)": 13.97,
-                    "Tatoeba (slk-Latn_eng-Latn)": 3.27,
-                    "Tatoeba (mkd-Cyrl_eng-Latn)": 0.0,
-                    "Tatoeba (ukr-Cyrl_eng-Latn)": 0.3,
-                    "Tatoeba (kur-Latn_eng-Latn)": 5.21,
-                    "Tatoeba (hin-Deva_eng-Latn)": 0.0,
-                    "Tatoeba (tgl-Latn_eng-Latn)": 2.69,
-                    "Tatoeba (jav-Latn_eng-Latn)": 3.37,
-                    "Tatoeba (nob-Latn_eng-Latn)": 4.34,
-                    "Tatoeba (tam-Taml_eng-Latn)": 0.33,
-                    "Tatoeba (hsb-Latn_eng-Latn)": 2.65,
-                    "Tatoeba (srp-Cyrl_eng-Latn)": 1.28,
-                    "Tatoeba (cat-Latn_eng-Latn)": 6.93,
-                    "Tatoeba (jpn-Jpan_eng-Latn)": 0.97,
-                    "Tatoeba (kzj-Latn_eng-Latn)": 2.78,
-                    "Tatoeba (uig-Arab_eng-Latn)": 0.2,
-                    "Tatoeba (max-Deva_eng-Latn)": 6.93,
-                    "Tatoeba (dtp-Latn_eng-Latn)": 1.88,
-                    "Tatoeba (cbk-Latn_eng-Latn)": 7.04,
-                    "Tatoeba (bre-Latn_eng-Latn)": 3.22,
-                    "Tatoeba (arz-Arab_eng-Latn)": 0.0,
-                    "Tatoeba (heb-Hebr_eng-Latn)": 0.22,
-                    "Tatoeba (kat-Geor_eng-Latn)": 0.3,
-                    "Tatoeba (yid-Hebr_eng-Latn)": 0.14,
-                    "Tatoeba (lit-Latn_eng-Latn)": 0.92,
-                    "Tatoeba (ber-Tfng_eng-Latn)": 4.69,
-                    "Tatoeba (hun-Latn_eng-Latn)": 3.56,
-                    "Tatoeba (mhr-Cyrl_eng-Latn)": 0.0,
-                    "Tatoeba (isl-Latn_eng-Latn)": 2.37,
-                    "Tatoeba (ind-Latn_eng-Latn)": 3.86,
-                    "Tatoeba (tuk-Latn_eng-Latn)": 3.52,
-                    "Tatoeba (kor-Hang_eng-Latn)": 0.45,
-                    "Tatoeba (ara-Arab_eng-Latn)": 0.0,
-                    "Tatoeba (tzl-Latn_eng-Latn)": 4.58,
-                    "Tatoeba (swe-Latn_eng-Latn)": 6.06,
-                    "Tatoeba (ang-Latn_eng-Latn)": 15.64,
-                    "Tatoeba (mon-Cyrl_eng-Latn)": 0.38,
-                    "Tatoeba (urd-Arab_eng-Latn)": 0.1,
-                    "Tatoeba (vie-Latn_eng-Latn)": 3.07,
-                    "Tatoeba (ina-Latn_eng-Latn)": 17.63,
-                    "Tatoeba (hrv-Latn_eng-Latn)": 3.83,
-                    "Tatoeba (war-Latn_eng-Latn)": 4.94,
-                    "Tatoeba (cor-Latn_eng-Latn)": 2.41,
-                    "Tatoeba (tur-Latn_eng-Latn)": 3.59,
-                    "Tatoeba (bul-Cyrl_eng-Latn)": 0.21,
-                    "Tatoeba (spa-Latn_eng-Latn)": 5.63,
-                    "Tatoeba (tel-Telu_eng-Latn)": 0.46,
-                    "Tatoeba (nds-Latn_eng-Latn)": 9.56,
-                    "Tatoeba (lvs-Latn_eng-Latn)": 2.61,
-                    "Tatoeba (amh-Ethi_eng-Latn)": 0.25,
-                    "Tatoeba (pms-Latn_eng-Latn)": 7.62,
-                    "Tatoeba (xho-Latn_eng-Latn)": 4.01,
-                    "Tatoeba (epo-Latn_eng-Latn)": 5.46,
-                    "Tatoeba (por-Latn_eng-Latn)": 8.29,
-                    "Tatoeba (ile-Latn_eng-Latn)": 13.54,
-                    "Tatoeba (ell-Grek_eng-Latn)": 0.1,
-                    "Tatoeba (oci-Latn_eng-Latn)": 6.55,
-                    "Tatoeba (pes-Arab_eng-Latn)": 0.0,
-                    "Tatoeba (tat-Cyrl_eng-Latn)": 0.44,
-                    "Tatoeba (awa-Deva_eng-Latn)": 0.51,
-                    "Tatoeba (fao-Latn_eng-Latn)": 5.33,
-                    "Tatoeba (swg-Latn_eng-Latn)": 8.92,
-                    "Tatoeba (uzb-Latn_eng-Latn)": 2.34,
-                    "Tatoeba (cym-Latn_eng-Latn)": 6.09,
-                    "Tatoeba (mar-Deva_eng-Latn)": 0.0,
-                    "Tatoeba (fry-Latn_eng-Latn)": 11.22,
-                    "Tatoeba (ces-Latn_eng-Latn)": 3.04,
-                    "Tatoeba (afr-Latn_eng-Latn)": 5.89,
-                    "Tatoeba (csb-Latn_eng-Latn)": 3.78,
-                    "Tatoeba (pol-Latn_eng-Latn)": 2.58,
-                    "Tatoeba (gla-Latn_eng-Latn)": 2.7,
-                    "Tatoeba (deu-Latn_eng-Latn)": 7.89,
-                    "Tatoeba (cmn-Hans_eng-Latn)": 1.92,
-                    "Tatoeba (ita-Latn_eng-Latn)": 9.9,
-                    "Tatoeba (ben-Beng_eng-Latn)": 0.0,
-                    "Tatoeba (glg-Latn_eng-Latn)": 9.31,
-                    "Tatoeba (dsb-Latn_eng-Latn)": 2.9,
-                    "Tatoeba (pam-Latn_eng-Latn)": 3.54,
-                    "Tatoeba (ast-Latn_eng-Latn)": 6.84,
-                    "Tatoeba (bos-Latn_eng-Latn)": 5.58,
-                    "Tatoeba (nld-Latn_eng-Latn)": 10.16,
-                    "Tatoeba (bel-Cyrl_eng-Latn)": 0.5,
-                    "Tatoeba (orv-Cyrl_eng-Latn)": 0.0,
-                    "Tatoeba (gsw-Latn_eng-Latn)": 11.33,
-                    "Tatoeba (dan-Latn_eng-Latn)": 7.84,
-                    "Tatoeba (hye-Armn_eng-Latn)": 0.41,
-                    "Tatoeba (mal-Mlym_eng-Latn)": 0.15,
-                    "Tatoeba (arq-Arab_eng-Latn)": 0.11,
-                    "Tatoeba (kaz-Cyrl_eng-Latn)": 0.42,
-                    "Tatoeba (khm-Khmr_eng-Latn)": 0.42,
-                    "Tatoeba (tha-Thai_eng-Latn)": 0.3,
-                    "Tatoeba (swh-Latn_eng-Latn)": 5.8,
-                    "Tatoeba (gle-Latn_eng-Latn)": 2.75,
-                    "Tatoeba (ceb-Latn_eng-Latn)": 3.39,
-                    "Tatoeba (sqi-Latn_eng-Latn)": 3.58,
-                    "Tatoeba (slv-Latn_eng-Latn)": 3.25,
-                    "Tatoeba (ido-Latn_eng-Latn)": 7.48,
-                    "Tatoeba (yue-Hant_eng-Latn)": 0.86,
-                    "Tatoeba (nno-Latn_eng-Latn)": 5.38,
-                    "Tatoeba (est-Latn_eng-Latn)": 2.36,
-                    "Tatoeba (lfn-Latn_eng-Latn)": 4.55,
-                    "Tatoeba (lat-Latn_eng-Latn)": 5.04,
-                    "Tatoeba (cha-Latn_eng-Latn)": 13.29,
-                    "Tatoeba (eus-Latn_eng-Latn)": 5.54,
-                    "Tatoeba (fin-Latn_eng-Latn)": 2.79,
-                    "Tatoeba (rus-Cyrl_eng-Latn)": 0.07,
-                    "Tatoeba (ron-Latn_eng-Latn)": 6.82,
-                    "Tatoeba (zsm-Latn_eng-Latn)": 4.24
+                    "Model": "GritLM-7B-noinstruct"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "all-MiniLM-L6-v2",
-                    "AllegroReviews (pol-Latn)": 24.64,
-                    "AmazonCounterfactualClassification (en)": 63.64,
-                    "AmazonCounterfactualClassification (en-ext)": 65.59,
-                    "AmazonCounterfactualClassification (deu-Latn)": 57.82,
-                    "AmazonCounterfactualClassification (jpn-Jpan)": 60.9,
-                    "AmazonPolarityClassification": 64.26,
-                    "AmazonReviewsClassification (en)": 30.85,
-                    "AmazonReviewsClassification (deu-Latn)": 26.44,
-                    "AmazonReviewsClassification (spa-Latn)": 27.35,
-                    "AmazonReviewsClassification (fra-Latn)": 26.88,
-                    "AmazonReviewsClassification (jpn-Jpan)": 23.78,
-                    "AmazonReviewsClassification (cmn-Hans)": 23.67,
-                    "AngryTweetsClassification": 42.49,
-                    "AngryTweetsClassification (dan-Latn)": 42.48,
-                    "Banking77Classification": 80.04,
-                    "CBD (pol-Latn)": 50.9,
-                    "DKHateClassification": 55.05,
-                    "DanishPoliticalCommentsClassification": 26.96,
-                    "DanishPoliticalCommentsClassification (dan-Latn)": 26.7,
-                    "EmotionClassification": 40.83,
-                    "GeoreviewClassification (rus-Cyrl)": 27.08,
-                    "HeadlineClassification (rus-Cyrl)": 27.77,
-                    "IFlyTek (cmn-Hans)": 16.09,
-                    "ImdbClassification": 61.76,
-                    "InappropriatenessClassification (rus-Cyrl)": 51.73,
-                    "JDReview (cmn-Hans)": 59.98,
-                    "KinopoiskClassification (rus-Cyrl)": 33.93,
-                    "LccSentimentClassification": 38.47,
-                    "LccSentimentClassification (dan-Latn)": 38.53,
-                    "MTOPDomainClassification (en)": 91.68,
-                    "MTOPDomainClassification (deu-Latn)": 70.47,
-                    "MTOPDomainClassification (spa-Latn)": 72.99,
-                    "MTOPDomainClassification (fra-Latn)": 75.1,
-                    "MTOPDomainClassification (hin-Deva)": 40.74,
-                    "MTOPDomainClassification (tha-Thai)": 15.66,
-                    "MTOPIntentClassification (en)": 61.55,
-                    "MTOPIntentClassification (deu-Latn)": 45.7,
-                    "MTOPIntentClassification (spa-Latn)": 44.19,
-                    "MTOPIntentClassification (fra-Latn)": 39.67,
-                    "MTOPIntentClassification (hin-Deva)": 18.69,
-                    "MTOPIntentClassification (tha-Thai)": 5.78,
-                    "MasakhaNEWSClassification (fra)": 74.05,
-                    "MasakhaNEWSClassification (amh-Ethi)": 33.03,
-                    "MasakhaNEWSClassification (eng)": 77.11,
-                    "MasakhaNEWSClassification (fra-Latn)": 68.84,
-                    "MasakhaNEWSClassification (hau-Latn)": 50.49,
-                    "MasakhaNEWSClassification (ibo-Latn)": 52.15,
-                    "MasakhaNEWSClassification (lin-Latn)": 68.29,
-                    "MasakhaNEWSClassification (lug-Latn)": 47.58,
-                    "MasakhaNEWSClassification (orm-Ethi)": 50.68,
-                    "MasakhaNEWSClassification (pcm-Latn)": 92.56,
-                    "MasakhaNEWSClassification (run-Latn)": 54.81,
-                    "MasakhaNEWSClassification (sna-Latn)": 65.58,
-                    "MasakhaNEWSClassification (som-Latn)": 39.8,
-                    "MasakhaNEWSClassification (swa-Latn)": 47.25,
-                    "MasakhaNEWSClassification (tir-Ethi)": 28.97,
-                    "MasakhaNEWSClassification (xho-Latn)": 54.14,
-                    "MasakhaNEWSClassification (yor-Latn)": 55.01,
-                    "MassiveIntentClassification (en)": 66.94,
-                    "MassiveIntentClassification (da)": 40.99,
-                    "MassiveIntentClassification (nb)": 39.34,
-                    "MassiveIntentClassification (sv)": 38.1,
-                    "MassiveIntentClassification (aze-Latn)": 30.63,
-                    "MassiveIntentClassification (spa-Latn)": 39.88,
-                    "MassiveIntentClassification (tam-Taml)": 11.31,
-                    "MassiveIntentClassification (swe-Latn)": 38.09,
-                    "MassiveIntentClassification (fas-Arab)": 19.1,
-                    "MassiveIntentClassification (khm-Khmr)": 4.89,
-                    "MassiveIntentClassification (mon-Cyrl)": 20.35,
-                    "MassiveIntentClassification (hye-Armn)": 7.62,
-                    "MassiveIntentClassification (kan-Knda)": 3.14,
-                    "MassiveIntentClassification (cmo-Hans)": 24.4,
-                    "MassiveIntentClassification (rus-Cyrl)": 27.58,
-                    "MassiveIntentClassification (jpn-Jpan)": 31.87,
-                    "MassiveIntentClassification (deu-Latn)": 43.44,
-                    "MassiveIntentClassification (ind-Latn)": 39.02,
-                    "MassiveIntentClassification (cym-Latn)": 34.54,
-                    "MassiveIntentClassification (nld-Latn)": 40.2,
-                    "MassiveIntentClassification (hin-Deva)": 17.7,
-                    "MassiveIntentClassification (afr-Latn)": 37.45,
-                    "MassiveIntentClassification (ell-Grek)": 24.19,
-                    "MassiveIntentClassification (mal-Mlym)": 2.87,
-                    "MassiveIntentClassification (por-Latn)": 43.76,
-                    "MassiveIntentClassification (sqi-Latn)": 40.7,
-                    "MassiveIntentClassification (urd-Arab)": 14.42,
-                    "MassiveIntentClassification (vie-Latn)": 37.09,
-                    "MassiveIntentClassification (hun-Latn)": 35.69,
-                    "MassiveIntentClassification (ron-Latn)": 40.54,
-                    "MassiveIntentClassification (ara-Arab)": 19.05,
-                    "MassiveIntentClassification (nob-Latn)": 39.36,
-                    "MassiveIntentClassification (slv-Latn)": 36.7,
-                    "MassiveIntentClassification (lav-Latn)": 36.97,
-                    "MassiveIntentClassification (heb-Hebr)": 22.48,
-                    "MassiveIntentClassification (pol-Latn)": 36.07,
-                    "MassiveIntentClassification (ita-Latn)": 41.59,
-                    "MassiveIntentClassification (msa-Latn)": 35.07,
-                    "MassiveIntentClassification (mya-Mymr)": 4.24,
-                    "MassiveIntentClassification (isl-Latn)": 29.95,
-                    "MassiveIntentClassification (tel-Telu)": 2.46,
-                    "MassiveIntentClassification (swa-Latn)": 34.98,
-                    "MassiveIntentClassification (amh-Ethi)": 2.62,
-                    "MassiveIntentClassification (cmo-Hant)": 22.56,
-                    "MassiveIntentClassification (tha-Thai)": 11.26,
-                    "MassiveIntentClassification (ben-Beng)": 13.1,
-                    "MassiveIntentClassification (fin-Latn)": 38.37,
-                    "MassiveIntentClassification (fra-Latn)": 42.55,
-                    "MassiveIntentClassification (kor-Kore)": 16.05,
-                    "MassiveIntentClassification (kat-Geor)": 9.07,
-                    "MassiveIntentClassification (dan-Latn)": 41.0,
-                    "MassiveIntentClassification (tur-Latn)": 33.76,
-                    "MassiveIntentClassification (tgl-Latn)": 37.92,
-                    "MassiveIntentClassification (jav-Latn)": 35.91,
-                    "MassiveScenarioClassification (en)": 73.81,
-                    "MassiveScenarioClassification (da)": 47.01,
-                    "MassiveScenarioClassification (nb)": 44.67,
-                    "MassiveScenarioClassification (sv)": 42.93,
-                    "MassiveScenarioClassification (mal-Mlym)": 7.67,
-                    "MassiveScenarioClassification (khm-Khmr)": 9.25,
-                    "MassiveScenarioClassification (deu-Latn)": 51.47,
-                    "MassiveScenarioClassification (msa-Latn)": 43.67,
-                    "MassiveScenarioClassification (heb-Hebr)": 24.01,
-                    "MassiveScenarioClassification (mon-Cyrl)": 25.47,
-                    "MassiveScenarioClassification (mya-Mymr)": 10.61,
-                    "MassiveScenarioClassification (ind-Latn)": 43.46,
-                    "MassiveScenarioClassification (nob-Latn)": 44.67,
-                    "MassiveScenarioClassification (fra-Latn)": 51.14,
-                    "MassiveScenarioClassification (tgl-Latn)": 45.69,
-                    "MassiveScenarioClassification (amh-Ethi)": 7.57,
-                    "MassiveScenarioClassification (fas-Arab)": 23.97,
-                    "MassiveScenarioClassification (vie-Latn)": 40.47,
-                    "MassiveScenarioClassification (sqi-Latn)": 47.21,
-                    "MassiveScenarioClassification (dan-Latn)": 47.02,
-                    "MassiveScenarioClassification (spa-Latn)": 49.0,
-                    "MassiveScenarioClassification (pol-Latn)": 43.82,
-                    "MassiveScenarioClassification (tel-Telu)": 7.95,
-                    "MassiveScenarioClassification (tha-Thai)": 19.5,
-                    "MassiveScenarioClassification (kor-Kore)": 20.3,
-                    "MassiveScenarioClassification (cmo-Hans)": 33.65,
-                    "MassiveScenarioClassification (urd-Arab)": 23.73,
-                    "MassiveScenarioClassification (aze-Latn)": 35.59,
-                    "MassiveScenarioClassification (ron-Latn)": 48.23,
-                    "MassiveScenarioClassification (jav-Latn)": 43.59,
-                    "MassiveScenarioClassification (slv-Latn)": 41.9,
-                    "MassiveScenarioClassification (kat-Geor)": 14.92,
-                    "MassiveScenarioClassification (lav-Latn)": 40.43,
-                    "MassiveScenarioClassification (cym-Latn)": 39.0,
-                    "MassiveScenarioClassification (swe-Latn)": 42.95,
-                    "MassiveScenarioClassification (rus-Cyrl)": 30.46,
-                    "MassiveScenarioClassification (ben-Beng)": 20.56,
-                    "MassiveScenarioClassification (por-Latn)": 50.72,
-                    "MassiveScenarioClassification (hye-Armn)": 13.03,
-                    "MassiveScenarioClassification (jpn-Jpan)": 37.3,
-                    "MassiveScenarioClassification (nld-Latn)": 48.43,
-                    "MassiveScenarioClassification (swa-Latn)": 43.32,
-                    "MassiveScenarioClassification (tam-Taml)": 17.37,
-                    "MassiveScenarioClassification (isl-Latn)": 36.12,
-                    "MassiveScenarioClassification (kan-Knda)": 7.85,
-                    "MassiveScenarioClassification (ell-Grek)": 31.3,
-                    "MassiveScenarioClassification (tur-Latn)": 38.85,
-                    "MassiveScenarioClassification (cmo-Hant)": 31.18,
-                    "MassiveScenarioClassification (fin-Latn)": 42.38,
-                    "MassiveScenarioClassification (hin-Deva)": 23.71,
-                    "MassiveScenarioClassification (ara-Arab)": 25.99,
-                    "MassiveScenarioClassification (hun-Latn)": 41.61,
-                    "MassiveScenarioClassification (afr-Latn)": 43.87,
-                    "MassiveScenarioClassification (ita-Latn)": 49.8,
-                    "MultilingualSentiment (cmn-Hans)": 41.28,
-                    "NoRecClassification": 40.02,
-                    "NoRecClassification (nob-Latn)": 37.93,
-                    "NordicLangClassification": 54.71,
-                    "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 54.7,
-                    "NorwegianParliament": 54.8,
-                    "OnlineShopping (cmn-Hans)": 57.74,
-                    "PAC (pol-Latn)": 59.78,
-                    "PolEmo2.0-IN (pol-Latn)": 40.29,
-                    "PolEmo2.0-OUT (pol-Latn)": 25.0,
-                    "RuReviewsClassification (rus-Cyrl)": 41.79,
-                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 10.08,
-                    "RuSciBenchOECDClassification (rus-Cyrl)": 8.3,
-                    "ScalaDaClassification": 50.03,
-                    "ScalaNbClassification": 50.17,
-                    "TNews (cmn-Hans)": 20.12,
-                    "ToxicConversationsClassification": 62.09,
-                    "TweetSentimentExtractionClassification": 54.04,
-                    "Waimai (cmn-Hans)": 62.72
+                    "Model": "GritLM-7B-noinstruct"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "all-MiniLM-L6-v2",
-                    "AlloProfClusteringP2P": 51.83,
-                    "AlloProfClusteringS2S": 32.07,
-                    "ArxivClusteringP2P": 46.55,
-                    "ArxivClusteringS2S": 37.86,
-                    "BiorxivClusteringP2P": 38.37,
-                    "BiorxivClusteringS2S": 32.88,
-                    "GeoreviewClusteringP2P (rus-Cyrl)": 20.25,
-                    "HALClusteringS2S": 18.84,
-                    "MLSUMClusteringP2P": 36.74,
-                    "MLSUMClusteringP2P (rus-Cyrl)": 23.91,
-                    "MLSUMClusteringS2S": 28.12,
-                    "MLSUMClusteringS2S (rus-Cyrl)": 19.07,
-                    "MasakhaNEWSClusteringP2P (fra)": 34.92,
-                    "MasakhaNEWSClusteringP2P (amh-Ethi)": 43.85,
-                    "MasakhaNEWSClusteringP2P (eng)": 48.88,
-                    "MasakhaNEWSClusteringP2P (fra-Latn)": 34.92,
-                    "MasakhaNEWSClusteringP2P (hau-Latn)": 24.77,
-                    "MasakhaNEWSClusteringP2P (ibo-Latn)": 45.94,
-                    "MasakhaNEWSClusteringP2P (lin-Latn)": 69.56,
-                    "MasakhaNEWSClusteringP2P (lug-Latn)": 49.4,
-                    "MasakhaNEWSClusteringP2P (orm-Ethi)": 25.34,
-                    "MasakhaNEWSClusteringP2P (pcm-Latn)": 85.57,
-                    "MasakhaNEWSClusteringP2P (run-Latn)": 50.75,
-                    "MasakhaNEWSClusteringP2P (sna-Latn)": 41.68,
-                    "MasakhaNEWSClusteringP2P (som-Latn)": 29.02,
-                    "MasakhaNEWSClusteringP2P (swa-Latn)": 21.87,
-                    "MasakhaNEWSClusteringP2P (tir-Ethi)": 42.93,
-                    "MasakhaNEWSClusteringP2P (xho-Latn)": 28.58,
-                    "MasakhaNEWSClusteringP2P (yor-Latn)": 31.45,
-                    "MasakhaNEWSClusteringS2S (fra)": 40.58,
-                    "MasakhaNEWSClusteringS2S (amh-Ethi)": 45.44,
-                    "MasakhaNEWSClusteringS2S (eng)": 41.09,
-                    "MasakhaNEWSClusteringS2S (fra-Latn)": 40.58,
-                    "MasakhaNEWSClusteringS2S (hau-Latn)": 15.42,
-                    "MasakhaNEWSClusteringS2S (ibo-Latn)": 37.02,
-                    "MasakhaNEWSClusteringS2S (lin-Latn)": 65.14,
-                    "MasakhaNEWSClusteringS2S (lug-Latn)": 44.21,
-                    "MasakhaNEWSClusteringS2S (orm-Ethi)": 24.79,
-                    "MasakhaNEWSClusteringS2S (pcm-Latn)": 61.48,
-                    "MasakhaNEWSClusteringS2S (run-Latn)": 51.25,
-                    "MasakhaNEWSClusteringS2S (sna-Latn)": 42.74,
-                    "MasakhaNEWSClusteringS2S (som-Latn)": 30.08,
-                    "MasakhaNEWSClusteringS2S (swa-Latn)": 9.55,
-                    "MasakhaNEWSClusteringS2S (tir-Ethi)": 46.04,
-                    "MasakhaNEWSClusteringS2S (xho-Latn)": 27.08,
-                    "MasakhaNEWSClusteringS2S (yor-Latn)": 31.04,
-                    "MedrxivClusteringP2P": 34.39,
-                    "MedrxivClusteringS2S": 31.86,
-                    "RedditClustering": 50.7,
-                    "RedditClusteringP2P": 54.8,
-                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 10.21,
-                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 9.43,
-                    "StackExchangeClustering": 53.14,
-                    "StackExchangeClusteringP2P": 34.26,
-                    "TwentyNewsgroupsClustering": 46.49
+                    "Model": "GritLM-7B-noinstruct"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "all-MiniLM-L6-v2",
-                    "CDSC-E (pol-Latn)": 47.27,
-                    "OpusparcusPC (fr)": 86.53,
-                    "OpusparcusPC (deu-Latn)": 89.91,
-                    "OpusparcusPC (en)": 97.46,
-                    "OpusparcusPC (fin-Latn)": 85.44,
-                    "OpusparcusPC (fra-Latn)": 86.53,
-                    "OpusparcusPC (rus-Cyrl)": 79.28,
-                    "OpusparcusPC (swe-Latn)": 83.78,
-                    "PSC (pol-Latn)": 81.87,
-                    "PawsXPairClassification (fr)": 55.4,
-                    "PawsXPairClassification (deu-Latn)": 51.22,
-                    "PawsXPairClassification (en)": 59.1,
-                    "PawsXPairClassification (spa-Latn)": 52.21,
-                    "PawsXPairClassification (fra-Latn)": 55.41,
-                    "PawsXPairClassification (jpn-Hira)": 48.97,
-                    "PawsXPairClassification (kor-Hang)": 50.53,
-                    "PawsXPairClassification (cmn-Hans)": 53.11,
-                    "SICK-E-PL (pol-Latn)": 47.32,
-                    "SprintDuplicateQuestions": 94.55,
-                    "TERRa (rus-Cyrl)": 45.03,
-                    "TwitterSemEval2015": 67.86,
-                    "TwitterURLCorpus": 84.7
+                    "Model": "GritLM-7B-noinstruct"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "all-MiniLM-L6-v2",
-                    "AlloprofReranking": 31.69,
-                    "AlloprofReranking (fra-Latn)": 62.62,
-                    "AskUbuntuDupQuestions": 63.48,
-                    "MMarcoReranking (cmn-Hans)": 4.74,
-                    "MindSmallReranking": 30.8,
-                    "RuBQReranking (rus-Cyrl)": 27.05,
-                    "SciDocsRR": 87.12,
-                    "StackOverflowDupQuestions": 50.76,
-                    "SyntecReranking": 59.57,
-                    "SyntecReranking (fra-Latn)": 67.31,
-                    "T2Reranking (cmn-Hans)": 56.26
+                    "Model": "GritLM-7B-noinstruct"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "all-MiniLM-L6-v2",
-                    "AILACasedocs": 19.72,
-                    "AILAStatutes": 20.52,
-                    "ARCChallenge": 9.48,
-                    "AlloprofRetrieval": 28.41,
-                    "AlloprofRetrieval (fra-Latn)": 28.41,
-                    "AlphaNLI": 28.19,
-                    "ArguAna": 50.17,
-                    "ArguAna-PL (pol-Latn)": 11.5,
-                    "BSARDRetrieval": 0.0,
-                    "BSARDRetrieval (fra-Latn)": 4.8,
-                    "CQADupstackRetrieval": 41.32,
-                    "ClimateFEVER": 20.27,
-                    "CmedqaRetrieval (cmn-Hans)": 2.03,
-                    "CovidRetrieval (cmn-Hans)": 0.8,
-                    "DBPedia": 32.33,
-                    "DuRetrieval (cmn-Hans)": 3.03,
-                    "EcomRetrieval (cmn-Hans)": 3.7,
-                    "FEVER": 51.93,
-                    "FiQA-PL (pol-Latn)": 2.29,
-                    "FiQA2018": 36.87,
-                    "GerDaLIRSmall (deu-Latn)": 2.41,
-                    "HellaSwag": 24.21,
-                    "HotpotQA": 46.51,
-                    "LEMBNarrativeQARetrieval": 18.27,
-                    "LEMBNeedleRetrieval": 20.0,
-                    "LEMBPasskeyRetrieval": 23.25,
-                    "LEMBQMSumRetrieval": 16.32,
-                    "LEMBSummScreenFDRetrieval": 54.8,
-                    "LEMBWikimQARetrieval": 46.23,
-                    "LeCaRDv2 (zho-Hans)": 17.5,
-                    "LegalBenchConsumerContractsQA": 65.6,
-                    "LegalBenchCorporateLobbying": 86.41,
-                    "LegalQuAD (deu-Latn)": 11.81,
-                    "LegalSummarization": 59.0,
-                    "MMarcoRetrieval (cmn-Hans)": 6.21,
-                    "MSMARCO": 36.54,
-                    "MedicalRetrieval (cmn-Hans)": 1.76,
-                    "MintakaRetrieval (fr)": 9.19,
-                    "MintakaRetrieval (ara-Arab)": 2.22,
-                    "MintakaRetrieval (deu-Latn)": 15.43,
-                    "MintakaRetrieval (spa-Latn)": 7.72,
-                    "MintakaRetrieval (fra-Latn)": 9.19,
-                    "MintakaRetrieval (hin-Deva)": 2.65,
-                    "MintakaRetrieval (ita-Latn)": 8.48,
-                    "MintakaRetrieval (jpn-Hira)": 6.7,
-                    "MintakaRetrieval (por-Latn)": 9.76,
-                    "NFCorpus": 31.59,
-                    "NFCorpus-PL (pol-Latn)": 10.62,
-                    "NQ": 43.87,
-                    "PIQA": 25.28,
-                    "Quail": 3.92,
-                    "QuoraRetrieval": 87.56,
-                    "RARbCode": 44.27,
-                    "RARbMath": 68.19,
-                    "RiaNewsRetrieval (rus-Cyrl)": 0.67,
-                    "RuBQRetrieval (rus-Cyrl)": 2.64,
-                    "SCIDOCS": 21.64,
-                    "SCIDOCS-PL (pol-Latn)": 3.75,
-                    "SIQA": 1.56,
-                    "SciFact": 64.51,
-                    "SciFact-PL (pol-Latn)": 16.14,
-                    "SpartQA": 1.65,
-                    "SyntecRetrieval": 60.15,
-                    "SyntecRetrieval (fra-Latn)": 60.15,
-                    "T2Retrieval (cmn-Hans)": 1.6,
-                    "TRECCOVID": 47.25,
-                    "TRECCOVID-PL (pol-Latn)": 8.66,
-                    "TempReasonL1": 1.53,
-                    "TempReasonL2Fact": 17.65,
-                    "TempReasonL2Pure": 0.46,
-                    "TempReasonL3Fact": 14.16,
-                    "TempReasonL3Pure": 6.33,
-                    "Touche2020": 16.9,
-                    "VideoRetrieval (cmn-Hans)": 9.79,
-                    "WinoGrande": 47.33,
-                    "XPQARetrieval (fr)": 51.79,
-                    "XPQARetrieval (ara-Arab_ara-Arab)": 8.03,
-                    "XPQARetrieval (eng-Latn_ara-Arab)": 1.86,
-                    "XPQARetrieval (ara-Arab_eng-Latn)": 6.87,
-                    "XPQARetrieval (deu-Latn_deu-Latn)": 53.25,
-                    "XPQARetrieval (eng-Latn_deu-Latn)": 10.99,
-                    "XPQARetrieval (deu-Latn_eng-Latn)": 27.59,
-                    "XPQARetrieval (spa-Latn_spa-Latn)": 38.87,
-                    "XPQARetrieval (eng-Latn_spa-Latn)": 5.46,
-                    "XPQARetrieval (spa-Latn_eng-Latn)": 22.2,
-                    "XPQARetrieval (fra-Latn_fra-Latn)": 51.79,
-                    "XPQARetrieval (eng-Latn_fra-Latn)": 8.57,
-                    "XPQARetrieval (fra-Latn_eng-Latn)": 31.36,
-                    "XPQARetrieval (hin-Deva_hin-Deva)": 35.3,
-                    "XPQARetrieval (eng-Latn_hin-Deva)": 6.28,
-                    "XPQARetrieval (hin-Deva_eng-Latn)": 6.0,
-                    "XPQARetrieval (ita-Latn_ita-Latn)": 54.57,
-                    "XPQARetrieval (eng-Latn_ita-Latn)": 6.79,
-                    "XPQARetrieval (ita-Latn_eng-Latn)": 24.13,
-                    "XPQARetrieval (jpn-Hira_jpn-Hira)": 39.23,
-                    "XPQARetrieval (eng-Latn_jpn-Hira)": 4.1,
-                    "XPQARetrieval (jpn-Hira_eng-Latn)": 13.05,
-                    "XPQARetrieval (kor-Hang_kor-Hang)": 10.24,
-                    "XPQARetrieval (eng-Latn_kor-Hang)": 5.72,
-                    "XPQARetrieval (kor-Hang_eng-Latn)": 6.37,
-                    "XPQARetrieval (pol-Latn_pol-Latn)": 22.33,
-                    "XPQARetrieval (eng-Latn_pol-Latn)": 7.58,
-                    "XPQARetrieval (pol-Latn_eng-Latn)": 14.43,
-                    "XPQARetrieval (por-Latn_por-Latn)": 31.93,
-                    "XPQARetrieval (eng-Latn_por-Latn)": 5.9,
-                    "XPQARetrieval (por-Latn_eng-Latn)": 20.74,
-                    "XPQARetrieval (tam-Taml_tam-Taml)": 7.43,
-                    "XPQARetrieval (eng-Latn_tam-Taml)": 3.42,
-                    "XPQARetrieval (tam-Taml_eng-Latn)": 2.91,
-                    "XPQARetrieval (cmn-Hans_cmn-Hans)": 19.39,
-                    "XPQARetrieval (eng-Latn_cmn-Hans)": 5.05,
-                    "XPQARetrieval (cmn-Hans_eng-Latn)": 8.77
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "all-MiniLM-L6-v2",
-                    "AFQMC (cmn-Hans)": 8.59,
-                    "ATEC (cmn-Hans)": 13.52,
-                    "BIOSSES": 81.64,
-                    "BQ (cmn-Hans)": 23.84,
-                    "CDSC-R (pol-Latn)": 79.45,
-                    "LCQMC (cmn-Hans)": 23.85,
-                    "PAWSX (cmn-Hans)": 7.21,
-                    "RUParaPhraserSTS (rus-Cyrl)": 43.93,
-                    "RuSTSBenchmarkSTS (rus-Cyrl)": 55.56,
-                    "SICK-R": 77.58,
-                    "SICK-R-PL (pol-Latn)": 52.43,
-                    "SICKFr": 62.48,
-                    "SICKFr (fra-Latn)": 62.48,
-                    "STS12": 72.37,
-                    "STS13": 80.6,
-                    "STS14": 75.59,
-                    "STS15": 85.39,
-                    "STS16": 78.99,
-                    "STS17 (ar-ar)": 50.89,
-                    "STS17 (en-ar)": -4.28,
-                    "STS17 (en-de)": 35.82,
-                    "STS17 (en-en)": 87.59,
-                    "STS17 (en-tr)": 4.5,
-                    "STS17 (es-en)": 16.31,
-                    "STS17 (es-es)": 76.12,
-                    "STS17 (fr-en)": 37.09,
-                    "STS17 (it-en)": 24.45,
-                    "STS17 (ko-ko)": 43.39,
-                    "STS17 (nl-en)": 29.0,
-                    "STS17 (ara-Arab)": 50.89,
-                    "STS17 (spa-Latn_eng-Latn)": 16.31,
-                    "STS17 (kor-Hang)": 43.39,
-                    "STS17 (eng-Latn_tur-Latn)": 4.5,
-                    "STS17 (fra-Latn_eng-Latn)": 37.09,
-                    "STS17 (nld-Latn_eng-Latn)": 29.0,
-                    "STS17 (eng-Latn_ara-Arab)": -4.28,
-                    "STS17 (spa-Latn)": 76.12,
-                    "STS17 (eng-Latn_deu-Latn)": 35.82,
-                    "STS17 (ita-Latn_eng-Latn)": 24.45,
-                    "STS22 (ar)": 22.64,
-                    "STS22 (de)": 31.04,
-                    "STS22 (de-en)": 44.04,
-                    "STS22 (de-fr)": 30.07,
-                    "STS22 (de-pl)": 4.93,
-                    "STS22 (en)": 67.71,
-                    "STS22 (es)": 54.78,
-                    "STS22 (es-en)": 53.42,
-                    "STS22 (es-it)": 44.27,
-                    "STS22 (fr)": 77.0,
-                    "STS22 (fr-pl)": 50.71,
-                    "STS22 (it)": 60.4,
-                    "STS22 (pl)": 26.77,
-                    "STS22 (pl-en)": 32.8,
-                    "STS22 (ru)": 14.72,
-                    "STS22 (tr)": 33.69,
-                    "STS22 (zh)": 44.93,
-                    "STS22 (zh-en)": 41.64,
-                    "STS22 (tur-Latn)": 33.69,
-                    "STS22 (spa-Latn)": 54.78,
-                    "STS22 (ara-Arab)": 22.64,
-                    "STS22 (deu-Latn_pol-Latn)": -4.93,
-                    "STS22 (spa-Latn_eng-Latn)": 53.42,
-                    "STS22 (cmn-Hans_eng-Latn)": 41.64,
-                    "STS22 (rus-Cyrl)": 14.72,
-                    "STS22 (spa-Latn_ita-Latn)": 44.27,
-                    "STS22 (deu-Latn_fra-Latn)": 30.07,
-                    "STS22 (deu-Latn)": 31.04,
-                    "STS22 (fra-Latn_pol-Latn)": 50.71,
-                    "STS22 (pol-Latn)": 26.77,
-                    "STS22 (pol-Latn_eng-Latn)": 32.8,
-                    "STS22 (deu-Latn_eng-Latn)": 44.04,
-                    "STS22 (ita-Latn)": 60.4,
-                    "STS22 (fra-Latn)": 77.0,
-                    "STS22 (cmn-Hans)": 44.93,
-                    "STSB (cmn-Hans)": 37.8,
-                    "STSBenchmark": 82.03,
-                    "STSBenchmarkMultilingualSTS (fr)": 64.93,
-                    "STSBenchmarkMultilingualSTS (pol-Latn)": 56.42,
-                    "STSBenchmarkMultilingualSTS (por-Latn)": 61.56,
-                    "STSBenchmarkMultilingualSTS (ita-Latn)": 59.24,
-                    "STSBenchmarkMultilingualSTS (fra-Latn)": 64.93,
-                    "STSBenchmarkMultilingualSTS (deu-Latn)": 62.4,
-                    "STSBenchmarkMultilingualSTS (cmn-Hans)": 39.74,
-                    "STSBenchmarkMultilingualSTS (spa-Latn)": 61.62,
-                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 55.55,
-                    "STSBenchmarkMultilingualSTS (en)": 82.03,
-                    "STSBenchmarkMultilingualSTS (nld-Latn)": 55.46
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "all-MiniLM-L6-v2",
-                    "SummEval": 30.81,
-                    "SummEvalFr": 28.28,
-                    "SummEvalFr (fra-Latn)": 28.29
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "all-MiniLM-L6-v2"
-                }
-            ]
-        }
-    },
-    "flan-t5-base": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "flan-t5-base"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "flan-t5-base"
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "flan-t5-base"
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "flan-t5-base"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "flan-t5-base"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "flan-t5-base"
+                    "Model": "GritLM-7B-noinstruct",
+                    "ARCChallenge": 16.57,
+                    "AlphaNLI": 29.56,
+                    "HellaSwag": 36.03,
+                    "PIQA": 35.8,
+                    "Quail": 8.68,
+                    "RARbCode": 83.14,
+                    "RARbMath": 83.01,
+                    "SIQA": 5.73,
+                    "SpartQA": 1.56,
+                    "TempReasonL1": 2.57,
+                    "TempReasonL2Fact": 48.25,
+                    "TempReasonL2Pure": 8.98,
+                    "TempReasonL3Fact": 34.11,
+                    "TempReasonL3Pure": 12.44,
+                    "WinoGrande": 52.12
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "flan-t5-base"
+                    "Model": "GritLM-7B-noinstruct"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "flan-t5-base"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "flan-t5-base",
-                    "Core17InstructionRetrieval": -3.31,
-                    "News21InstructionRetrieval": -0.12,
-                    "Robust04InstructionRetrieval": 5.35
-                }
-            ]
-        }
-    },
-    "gottbert-base": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "gottbert-base"
+                    "Model": "GritLM-7B-noinstruct"
                 }
             ]
         },
-        "Classification": {
+        "MultilabelClassification": {
             "accuracy": [
                 {
-                    "Model": "gottbert-base"
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "gottbert-base",
-                    "BlurbsClusteringP2P": 34.49,
-                    "BlurbsClusteringS2S": 8.37,
-                    "TenKGnadClusteringP2P": 33.66,
-                    "TenKGnadClusteringS2S": 9.34
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "gottbert-base"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "gottbert-base"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "gottbert-base"
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "gottbert-base"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "gottbert-base"
+                    "Model": "GritLM-7B-noinstruct"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "gottbert-base"
+                    "Model": "GritLM-7B-noinstruct"
                 }
             ]
         }
     },
-    "text2vec-base-multilingual": {
+    "LaBSE": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "text2vec-base-multilingual"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "text2vec-base-multilingual",
-                    "AmazonReviewsClassification (fr)": 34.25,
-                    "MTOPDomainClassification (fr)": 71.83,
-                    "MTOPIntentClassification (fr)": 44.53,
-                    "MasakhaNEWSClassification (fra)": 73.84,
-                    "MassiveIntentClassification (fr)": 51.93,
-                    "MassiveScenarioClassification (fr)": 58.31
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "text2vec-base-multilingual",
-                    "AlloProfClusteringP2P": 49.11,
-                    "AlloProfClusteringS2S": 32.72,
-                    "HALClusteringS2S": 16.19,
-                    "MLSUMClusteringP2P": 36.19,
-                    "MLSUMClusteringS2S": 30.39,
-                    "MasakhaNEWSClusteringP2P (fra)": 38.51,
-                    "MasakhaNEWSClusteringS2S (fra)": 32.51
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "text2vec-base-multilingual",
-                    "OpusparcusPC (fr)": 92.04,
-                    "PawsXPairClassification (fr)": 65.57
+                    "Model": "LaBSE",
+                    "BUCC (de-en)": 99.35,
+                    "BUCC (fr-en)": 98.72,
+                    "BUCC (ru-en)": 97.78,
+                    "BUCC (zh-en)": 99.16,
+                    "BornholmBitextMining (dan-Latn)": 45.63,
+                    "Tatoeba (ber-Tfng_eng-Latn)": 8.4,
+                    "Tatoeba (kab-Latn_eng-Latn)": 4.31,
+                    "Tatoeba (tur-Latn_eng-Latn)": 98.0,
+                    "Tatoeba (gle-Latn_eng-Latn)": 93.8,
+                    "Tatoeba (awa-Deva_eng-Latn)": 71.7,
+                    "Tatoeba (yue-Hant_eng-Latn)": 89.58,
+                    "Tatoeba (tzl-Latn_eng-Latn)": 58.88,
+                    "Tatoeba (tat-Cyrl_eng-Latn)": 85.92,
+                    "Tatoeba (fin-Latn_eng-Latn)": 96.37,
+                    "Tatoeba (cor-Latn_eng-Latn)": 10.11,
+                    "Tatoeba (hye-Armn_eng-Latn)": 94.09,
+                    "Tatoeba (ben-Beng_eng-Latn)": 88.55,
+                    "Tatoeba (epo-Latn_eng-Latn)": 98.2,
+                    "Tatoeba (ile-Latn_eng-Latn)": 85.58,
+                    "Tatoeba (nld-Latn_eng-Latn)": 96.07,
+                    "Tatoeba (mar-Deva_eng-Latn)": 92.65,
+                    "Tatoeba (cmn-Hans_eng-Latn)": 95.1,
+                    "Tatoeba (hin-Deva_eng-Latn)": 96.87,
+                    "Tatoeba (tgl-Latn_eng-Latn)": 96.02,
+                    "Tatoeba (mon-Cyrl_eng-Latn)": 95.91,
+                    "Tatoeba (oci-Latn_eng-Latn)": 65.81,
+                    "Tatoeba (dan-Latn_eng-Latn)": 95.71,
+                    "Tatoeba (mkd-Cyrl_eng-Latn)": 93.6,
+                    "Tatoeba (ces-Latn_eng-Latn)": 96.68,
+                    "Tatoeba (fra-Latn_eng-Latn)": 94.86,
+                    "Tatoeba (yid-Hebr_eng-Latn)": 88.79,
+                    "Tatoeba (est-Latn_eng-Latn)": 96.55,
+                    "Tatoeba (ast-Latn_eng-Latn)": 90.68,
+                    "Tatoeba (ind-Latn_eng-Latn)": 93.66,
+                    "Tatoeba (bre-Latn_eng-Latn)": 15.07,
+                    "Tatoeba (eus-Latn_eng-Latn)": 95.01,
+                    "Tatoeba (heb-Hebr_eng-Latn)": 91.53,
+                    "Tatoeba (rus-Cyrl_eng-Latn)": 93.75,
+                    "Tatoeba (lfn-Latn_eng-Latn)": 67.54,
+                    "Tatoeba (jav-Latn_eng-Latn)": 79.77,
+                    "Tatoeba (ukr-Cyrl_eng-Latn)": 93.97,
+                    "Tatoeba (ell-Grek_eng-Latn)": 95.35,
+                    "Tatoeba (nds-Latn_eng-Latn)": 79.42,
+                    "Tatoeba (arz-Arab_eng-Latn)": 76.0,
+                    "Tatoeba (gla-Latn_eng-Latn)": 85.66,
+                    "Tatoeba (cbk-Latn_eng-Latn)": 79.44,
+                    "Tatoeba (max-Deva_eng-Latn)": 63.26,
+                    "Tatoeba (ron-Latn_eng-Latn)": 96.92,
+                    "Tatoeba (ido-Latn_eng-Latn)": 89.42,
+                    "Tatoeba (lvs-Latn_eng-Latn)": 95.88,
+                    "Tatoeba (khm-Khmr_eng-Latn)": 78.37,
+                    "Tatoeba (urd-Arab_eng-Latn)": 93.22,
+                    "Tatoeba (glg-Latn_eng-Latn)": 96.82,
+                    "Tatoeba (gsw-Latn_eng-Latn)": 46.5,
+                    "Tatoeba (swe-Latn_eng-Latn)": 95.63,
+                    "Tatoeba (swh-Latn_eng-Latn)": 84.5,
+                    "Tatoeba (tha-Thai_eng-Latn)": 96.14,
+                    "Tatoeba (tam-Taml_eng-Latn)": 89.0,
+                    "Tatoeba (uzb-Latn_eng-Latn)": 84.23,
+                    "Tatoeba (bul-Cyrl_eng-Latn)": 94.58,
+                    "Tatoeba (kur-Latn_eng-Latn)": 83.59,
+                    "Tatoeba (ina-Latn_eng-Latn)": 95.37,
+                    "Tatoeba (nov-Latn_eng-Latn)": 74.38,
+                    "Tatoeba (afr-Latn_eng-Latn)": 96.18,
+                    "Tatoeba (csb-Latn_eng-Latn)": 52.57,
+                    "Tatoeba (war-Latn_eng-Latn)": 60.29,
+                    "Tatoeba (cha-Latn_eng-Latn)": 31.77,
+                    "Tatoeba (pes-Arab_eng-Latn)": 94.7,
+                    "Tatoeba (kat-Geor_eng-Latn)": 95.02,
+                    "Tatoeba (bos-Latn_eng-Latn)": 94.92,
+                    "Tatoeba (kor-Hang_eng-Latn)": 90.95,
+                    "Tatoeba (slk-Latn_eng-Latn)": 96.5,
+                    "Tatoeba (fry-Latn_eng-Latn)": 89.31,
+                    "Tatoeba (ara-Arab_eng-Latn)": 88.8,
+                    "Tatoeba (sqi-Latn_eng-Latn)": 96.76,
+                    "Tatoeba (ita-Latn_eng-Latn)": 92.72,
+                    "Tatoeba (lat-Latn_eng-Latn)": 80.07,
+                    "Tatoeba (hsb-Latn_eng-Latn)": 67.11,
+                    "Tatoeba (swg-Latn_eng-Latn)": 59.36,
+                    "Tatoeba (srp-Cyrl_eng-Latn)": 94.43,
+                    "Tatoeba (isl-Latn_eng-Latn)": 94.75,
+                    "Tatoeba (hrv-Latn_eng-Latn)": 96.95,
+                    "Tatoeba (wuu-Hans_eng-Latn)": 90.18,
+                    "Tatoeba (mhr-Cyrl_eng-Latn)": 15.74,
+                    "Tatoeba (vie-Latn_eng-Latn)": 97.2,
+                    "Tatoeba (cym-Latn_eng-Latn)": 92.0,
+                    "Tatoeba (dsb-Latn_eng-Latn)": 64.81,
+                    "Tatoeba (hun-Latn_eng-Latn)": 96.55,
+                    "Tatoeba (slv-Latn_eng-Latn)": 96.03,
+                    "Tatoeba (orv-Cyrl_eng-Latn)": 38.93,
+                    "Tatoeba (cat-Latn_eng-Latn)": 95.38,
+                    "Tatoeba (dtp-Latn_eng-Latn)": 10.85,
+                    "Tatoeba (por-Latn_eng-Latn)": 94.14,
+                    "Tatoeba (jpn-Jpan_eng-Latn)": 95.38,
+                    "Tatoeba (ang-Latn_eng-Latn)": 59.28,
+                    "Tatoeba (aze-Latn_eng-Latn)": 94.93,
+                    "Tatoeba (kzj-Latn_eng-Latn)": 11.33,
+                    "Tatoeba (deu-Latn_eng-Latn)": 99.2,
+                    "Tatoeba (uig-Arab_eng-Latn)": 92.4,
+                    "Tatoeba (tel-Telu_eng-Latn)": 97.86,
+                    "Tatoeba (tuk-Latn_eng-Latn)": 75.27,
+                    "Tatoeba (nob-Latn_eng-Latn)": 98.4,
+                    "Tatoeba (nno-Latn_eng-Latn)": 94.48,
+                    "Tatoeba (spa-Latn_eng-Latn)": 98.4,
+                    "Tatoeba (mal-Mlym_eng-Latn)": 98.45,
+                    "Tatoeba (pam-Latn_eng-Latn)": 10.73,
+                    "Tatoeba (xho-Latn_eng-Latn)": 91.55,
+                    "Tatoeba (arq-Arab_eng-Latn)": 42.69,
+                    "Tatoeba (kaz-Cyrl_eng-Latn)": 87.49,
+                    "Tatoeba (bel-Cyrl_eng-Latn)": 95.0,
+                    "Tatoeba (pol-Latn_eng-Latn)": 97.22,
+                    "Tatoeba (fao-Latn_eng-Latn)": 87.4,
+                    "Tatoeba (zsm-Latn_eng-Latn)": 95.62,
+                    "Tatoeba (lit-Latn_eng-Latn)": 96.47,
+                    "Tatoeba (ceb-Latn_eng-Latn)": 64.42,
+                    "Tatoeba (pms-Latn_eng-Latn)": 64.57,
+                    "Tatoeba (amh-Ethi_eng-Latn)": 91.47,
+                    "Tatoeba (afr-eng)": 96.18,
+                    "Tatoeba (amh-eng)": 91.47,
+                    "Tatoeba (ang-eng)": 59.28,
+                    "Tatoeba (ara-eng)": 88.8,
+                    "Tatoeba (arq-eng)": 42.69,
+                    "Tatoeba (arz-eng)": 76.0,
+                    "Tatoeba (ast-eng)": 90.68,
+                    "Tatoeba (awa-eng)": 71.7,
+                    "Tatoeba (aze-eng)": 94.93,
+                    "Tatoeba (bel-eng)": 95.0,
+                    "Tatoeba (ben-eng)": 88.55,
+                    "Tatoeba (ber-eng)": 8.4,
+                    "Tatoeba (bos-eng)": 94.92,
+                    "Tatoeba (bre-eng)": 15.07,
+                    "Tatoeba (bul-eng)": 94.58,
+                    "Tatoeba (cat-eng)": 95.38,
+                    "Tatoeba (cbk-eng)": 79.44,
+                    "Tatoeba (ceb-eng)": 64.42,
+                    "Tatoeba (ces-eng)": 96.68,
+                    "Tatoeba (cha-eng)": 31.77,
+                    "Tatoeba (cmn-eng)": 95.1,
+                    "Tatoeba (cor-eng)": 10.11,
+                    "Tatoeba (csb-eng)": 52.57,
+                    "Tatoeba (cym-eng)": 92.0,
+                    "Tatoeba (dan-eng)": 95.71,
+                    "Tatoeba (deu-eng)": 99.2,
+                    "Tatoeba (dsb-eng)": 64.81,
+                    "Tatoeba (dtp-eng)": 10.85,
+                    "Tatoeba (ell-eng)": 95.35,
+                    "Tatoeba (epo-eng)": 98.2,
+                    "Tatoeba (est-eng)": 96.55,
+                    "Tatoeba (eus-eng)": 95.01,
+                    "Tatoeba (fao-eng)": 87.4,
+                    "Tatoeba (fin-eng)": 96.37,
+                    "Tatoeba (fra-eng)": 94.86,
+                    "Tatoeba (fry-eng)": 89.31,
+                    "Tatoeba (gla-eng)": 85.66,
+                    "Tatoeba (gle-eng)": 93.8,
+                    "Tatoeba (glg-eng)": 96.82,
+                    "Tatoeba (gsw-eng)": 46.5,
+                    "Tatoeba (heb-eng)": 91.53,
+                    "Tatoeba (hin-eng)": 96.87,
+                    "Tatoeba (hrv-eng)": 96.95,
+                    "Tatoeba (hsb-eng)": 67.11,
+                    "Tatoeba (hun-eng)": 96.55,
+                    "Tatoeba (hye-eng)": 94.09,
+                    "Tatoeba (ido-eng)": 89.42,
+                    "Tatoeba (ile-eng)": 85.58,
+                    "Tatoeba (ina-eng)": 95.37,
+                    "Tatoeba (ind-eng)": 93.66,
+                    "Tatoeba (isl-eng)": 94.75,
+                    "Tatoeba (ita-eng)": 92.72,
+                    "Tatoeba (jav-eng)": 79.77,
+                    "Tatoeba (jpn-eng)": 95.38,
+                    "Tatoeba (kab-eng)": 4.31,
+                    "Tatoeba (kat-eng)": 95.02,
+                    "Tatoeba (kaz-eng)": 87.49,
+                    "Tatoeba (khm-eng)": 78.37,
+                    "Tatoeba (kor-eng)": 90.95,
+                    "Tatoeba (kur-eng)": 83.59,
+                    "Tatoeba (kzj-eng)": 11.33,
+                    "Tatoeba (lat-eng)": 80.07,
+                    "Tatoeba (lfn-eng)": 67.54,
+                    "Tatoeba (lit-eng)": 96.47,
+                    "Tatoeba (lvs-eng)": 95.88,
+                    "Tatoeba (mal-eng)": 98.45,
+                    "Tatoeba (mar-eng)": 92.65,
+                    "Tatoeba (max-eng)": 63.26,
+                    "Tatoeba (mhr-eng)": 15.74,
+                    "Tatoeba (mkd-eng)": 93.6,
+                    "Tatoeba (mon-eng)": 95.91,
+                    "Tatoeba (nds-eng)": 79.42,
+                    "Tatoeba (nld-eng)": 96.07,
+                    "Tatoeba (nno-eng)": 94.48,
+                    "Tatoeba (nob-eng)": 98.4,
+                    "Tatoeba (nov-eng)": 74.38,
+                    "Tatoeba (oci-eng)": 65.81,
+                    "Tatoeba (orv-eng)": 38.93,
+                    "Tatoeba (pam-eng)": 10.73,
+                    "Tatoeba (pes-eng)": 94.7,
+                    "Tatoeba (pms-eng)": 64.57,
+                    "Tatoeba (pol-eng)": 97.22,
+                    "Tatoeba (por-eng)": 94.14,
+                    "Tatoeba (ron-eng)": 96.92,
+                    "Tatoeba (rus-eng)": 93.75,
+                    "Tatoeba (slk-eng)": 96.5,
+                    "Tatoeba (slv-eng)": 96.03,
+                    "Tatoeba (spa-eng)": 98.4,
+                    "Tatoeba (sqi-eng)": 96.76,
+                    "Tatoeba (srp-eng)": 94.43,
+                    "Tatoeba (swe-eng)": 95.63,
+                    "Tatoeba (swg-eng)": 59.36,
+                    "Tatoeba (swh-eng)": 84.5,
+                    "Tatoeba (tam-eng)": 89.0,
+                    "Tatoeba (tat-eng)": 85.92,
+                    "Tatoeba (tel-eng)": 97.86,
+                    "Tatoeba (tgl-eng)": 96.02,
+                    "Tatoeba (tha-eng)": 96.14,
+                    "Tatoeba (tuk-eng)": 75.27,
+                    "Tatoeba (tur-eng)": 98.0,
+                    "Tatoeba (tzl-eng)": 58.88,
+                    "Tatoeba (uig-eng)": 92.4,
+                    "Tatoeba (ukr-eng)": 93.97,
+                    "Tatoeba (urd-eng)": 93.22,
+                    "Tatoeba (uzb-eng)": 84.23,
+                    "Tatoeba (vie-eng)": 97.2,
+                    "Tatoeba (war-eng)": 60.29,
+                    "Tatoeba (wuu-eng)": 90.18,
+                    "Tatoeba (xho-eng)": 91.55,
+                    "Tatoeba (yid-eng)": 88.79,
+                    "Tatoeba (yue-eng)": 89.58,
+                    "Tatoeba (zsm-eng)": 95.62
                 }
             ]
         },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "text2vec-base-multilingual",
-                    "AlloprofReranking": 51.48,
-                    "SyntecReranking": 70.28
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "text2vec-base-multilingual",
-                    "AlloprofRetrieval": 18.9,
-                    "BSARDRetrieval": 0.0,
-                    "MintakaRetrieval (fr)": 14.81,
-                    "SyntecRetrieval": 49.69,
-                    "XPQARetrieval (fr)": 40.4
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "text2vec-base-multilingual",
-                    "SICKFr": 77.25,
-                    "STS22 (fr)": 74.1,
-                    "STSBenchmarkMultilingualSTS (fr)": 83.48
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "text2vec-base-multilingual",
-                    "SummEvalFr": 29.33
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "text2vec-base-multilingual"
-                }
-            ]
-        }
-    },
-    "sbert_large_nlu_ru": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "sbert_large_nlu_ru"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "sbert_large_nlu_ru",
-                    "GeoreviewClassification (rus-Cyrl)": 39.97,
-                    "HeadlineClassification (rus-Cyrl)": 79.26,
-                    "InappropriatenessClassification (rus-Cyrl)": 62.52,
-                    "KinopoiskClassification (rus-Cyrl)": 49.51,
-                    "MassiveIntentClassification (rus-Cyrl)": 61.09,
-                    "MassiveScenarioClassification (rus-Cyrl)": 67.6,
-                    "RuReviewsClassification (rus-Cyrl)": 58.27,
-                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 53.9,
-                    "RuSciBenchOECDClassification (rus-Cyrl)": 43.04
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "sbert_large_nlu_ru",
-                    "GeoreviewClusteringP2P (rus-Cyrl)": 59.02,
-                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 50.4,
-                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 46.41
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "sbert_large_nlu_ru",
-                    "TERRa (rus-Cyrl)": 50.17
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "sbert_large_nlu_ru",
-                    "RuBQReranking (rus-Cyrl)": 46.81
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "sbert_large_nlu_ru",
-                    "RiaNewsRetrieval (rus-Cyrl)": 11.11,
-                    "RuBQRetrieval (rus-Cyrl)": 12.45
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "sbert_large_nlu_ru",
-                    "RUParaPhraserSTS (rus-Cyrl)": 62.06,
-                    "RuSTSBenchmarkSTS (rus-Cyrl)": 58.82,
-                    "STS22 (rus-Cyrl)": 50.75
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "sbert_large_nlu_ru"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "sbert_large_nlu_ru"
-                }
-            ]
-        }
-    },
-    "bert-base-10lang-cased": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "bert-base-10lang-cased"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "bert-base-10lang-cased",
-                    "AmazonReviewsClassification (fr)": 29.38,
-                    "MTOPDomainClassification (fr)": 63.65,
-                    "MTOPIntentClassification (fr)": 37.87,
-                    "MasakhaNEWSClassification (fra)": 63.93,
-                    "MassiveIntentClassification (fr)": 37.28,
-                    "MassiveScenarioClassification (fr)": 44.5
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "bert-base-10lang-cased",
-                    "AlloProfClusteringP2P": 53.22,
-                    "AlloProfClusteringS2S": 42.92,
-                    "HALClusteringS2S": 19.94,
-                    "MLSUMClusteringP2P": 40.96,
-                    "MLSUMClusteringS2S": 31.87,
-                    "MasakhaNEWSClusteringP2P (fra)": 24.23,
-                    "MasakhaNEWSClusteringS2S (fra)": 24.46
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "bert-base-10lang-cased",
-                    "OpusparcusPC (fr)": 86.79,
-                    "PawsXPairClassification (fr)": 53.4
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "bert-base-10lang-cased",
-                    "AlloprofReranking": 36.21,
-                    "SyntecReranking": 53.25
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "bert-base-10lang-cased",
-                    "AlloprofRetrieval": 1.6,
-                    "BSARDRetrieval": 0.0,
-                    "MintakaRetrieval (fr)": 3.55,
-                    "SyntecRetrieval": 18.95,
-                    "XPQARetrieval (fr)": 18.39
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "bert-base-10lang-cased",
-                    "SICKFr": 58.76,
-                    "STS22 (fr)": 40.31,
-                    "STSBenchmarkMultilingualSTS (fr)": 52.25
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "bert-base-10lang-cased",
-                    "SummEvalFr": 29.06
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "bert-base-10lang-cased"
-                }
-            ]
-        }
-    },
-    "instructor-large": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "instructor-large"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "instructor-large"
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "instructor-large"
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "instructor-large"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "instructor-large"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "instructor-large",
-                    "BrightRetrieval (pony)": 1.32,
-                    "BrightRetrieval (sustainable_living)": 13.16,
-                    "BrightRetrieval (aops)": 7.94,
-                    "BrightRetrieval (biology)": 15.61,
-                    "BrightRetrieval (stackoverflow)": 11.21,
-                    "BrightRetrieval (theoremqa_theorems)": 9.29,
-                    "BrightRetrieval (psychology)": 21.94,
-                    "BrightRetrieval (economics)": 15.99,
-                    "BrightRetrieval (robotics)": 11.45,
-                    "BrightRetrieval (leetcode)": 20.0,
-                    "BrightRetrieval (earth_science)": 21.52,
-                    "BrightRetrieval (theoremqa_questions)": 20.07
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "instructor-large"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "instructor-large"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "instructor-large"
-                }
-            ]
-        }
-    },
-    "bert-base-multilingual-uncased": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "bert-base-multilingual-uncased"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "bert-base-multilingual-uncased",
-                    "AmazonReviewsClassification (fr)": 29.02,
-                    "MTOPDomainClassification (fr)": 64.49,
-                    "MTOPIntentClassification (fr)": 39.4,
-                    "MasakhaNEWSClassification (fra)": 75.69,
-                    "MassiveIntentClassification (fr)": 38.01,
-                    "MassiveScenarioClassification (fr)": 43.63
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "bert-base-multilingual-uncased",
-                    "AlloProfClusteringP2P": 60.66,
-                    "AlloProfClusteringS2S": 35.05,
-                    "HALClusteringS2S": 20.9,
-                    "MLSUMClusteringP2P": 43.5,
-                    "MLSUMClusteringS2S": 30.99,
-                    "MasakhaNEWSClusteringP2P (fra)": 49.71,
-                    "MasakhaNEWSClusteringS2S (fra)": 42.23
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "bert-base-multilingual-uncased",
-                    "OpusparcusPC (fr)": 87.43,
-                    "PawsXPairClassification (fr)": 53.22
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "bert-base-multilingual-uncased",
-                    "AlloprofReranking": 38.85,
-                    "SyntecReranking": 66.4
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "bert-base-multilingual-uncased",
-                    "AlloprofRetrieval": 5.51,
-                    "BSARDRetrieval": 0.0,
-                    "MintakaRetrieval (fr)": 2.87,
-                    "SyntecRetrieval": 34.95,
-                    "XPQARetrieval (fr)": 26.12
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "bert-base-multilingual-uncased",
-                    "SICKFr": 58.26,
-                    "STS22 (fr)": 56.47,
-                    "STSBenchmarkMultilingualSTS (fr)": 54.97
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "bert-base-multilingual-uncased",
-                    "SummEvalFr": 30.72
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "bert-base-multilingual-uncased"
-                }
-            ]
-        }
-    },
-    "gtr-t5-xl": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "gtr-t5-xl",
-                    "BUCC (de-en)": 90.99,
-                    "BUCC (fr-en)": 88.55,
-                    "BUCC (ru-en)": 2.07,
-                    "BUCC (zh-en)": 1.49,
-                    "Tatoeba (afr-eng)": 33.47,
-                    "Tatoeba (amh-eng)": 0.01,
-                    "Tatoeba (ang-eng)": 30.74,
-                    "Tatoeba (ara-eng)": 0.47,
-                    "Tatoeba (arq-eng)": 0.34,
-                    "Tatoeba (arz-eng)": 0.14,
-                    "Tatoeba (ast-eng)": 51.74,
-                    "Tatoeba (awa-eng)": 0.49,
-                    "Tatoeba (aze-eng)": 7.43,
-                    "Tatoeba (bel-eng)": 3.45,
-                    "Tatoeba (ben-eng)": 0.06,
-                    "Tatoeba (ber-eng)": 5.79,
-                    "Tatoeba (bos-eng)": 17.43,
-                    "Tatoeba (bre-eng)": 5.69,
-                    "Tatoeba (bul-eng)": 7.55,
-                    "Tatoeba (cat-eng)": 48.06,
-                    "Tatoeba (cbk-eng)": 54.56,
-                    "Tatoeba (ceb-eng)": 8.72,
-                    "Tatoeba (ces-eng)": 8.76,
-                    "Tatoeba (cha-eng)": 27.56,
-                    "Tatoeba (cmn-eng)": 2.26,
-                    "Tatoeba (cor-eng)": 3.69,
-                    "Tatoeba (csb-eng)": 13.18,
-                    "Tatoeba (cym-eng)": 6.97,
-                    "Tatoeba (dan-eng)": 47.36,
-                    "Tatoeba (deu-eng)": 91.54,
-                    "Tatoeba (dsb-eng)": 13.2,
-                    "Tatoeba (dtp-eng)": 4.54,
-                    "Tatoeba (ell-eng)": 0.55,
-                    "Tatoeba (epo-eng)": 27.86,
-                    "Tatoeba (est-eng)": 5.13,
-                    "Tatoeba (eus-eng)": 10.23,
-                    "Tatoeba (fao-eng)": 21.44,
-                    "Tatoeba (fin-eng)": 6.62,
-                    "Tatoeba (fra-eng)": 79.66,
-                    "Tatoeba (fry-eng)": 32.92,
-                    "Tatoeba (gla-eng)": 2.87,
-                    "Tatoeba (gle-eng)": 3.26,
-                    "Tatoeba (glg-eng)": 63.81,
-                    "Tatoeba (gsw-eng)": 29.71,
-                    "Tatoeba (heb-eng)": 0.33,
-                    "Tatoeba (hin-eng)": 0.25,
-                    "Tatoeba (hrv-eng)": 17.16,
-                    "Tatoeba (hsb-eng)": 12.02,
-                    "Tatoeba (hun-eng)": 7.21,
-                    "Tatoeba (hye-eng)": 0.78,
-                    "Tatoeba (ido-eng)": 40.83,
-                    "Tatoeba (ile-eng)": 54.95,
-                    "Tatoeba (ina-eng)": 72.28,
-                    "Tatoeba (ind-eng)": 30.95,
-                    "Tatoeba (isl-eng)": 11.29,
-                    "Tatoeba (ita-eng)": 73.83,
-                    "Tatoeba (jav-eng)": 8.66,
-                    "Tatoeba (jpn-eng)": 0.61,
-                    "Tatoeba (kab-eng)": 1.78,
-                    "Tatoeba (kat-eng)": 0.79,
-                    "Tatoeba (kaz-eng)": 0.95,
-                    "Tatoeba (khm-eng)": 0.49,
-                    "Tatoeba (kor-eng)": 1.87,
-                    "Tatoeba (kur-eng)": 10.91,
-                    "Tatoeba (kzj-eng)": 5.72,
-                    "Tatoeba (lat-eng)": 18.24,
-                    "Tatoeba (lfn-eng)": 43.49,
-                    "Tatoeba (lit-eng)": 7.13,
-                    "Tatoeba (lvs-eng)": 7.04,
-                    "Tatoeba (mal-eng)": 0.44,
-                    "Tatoeba (mar-eng)": 0.03,
-                    "Tatoeba (max-eng)": 18.99,
-                    "Tatoeba (mhr-eng)": 1.11,
-                    "Tatoeba (mkd-eng)": 2.49,
-                    "Tatoeba (mon-eng)": 2.01,
-                    "Tatoeba (nds-eng)": 39.96,
-                    "Tatoeba (nld-eng)": 58.86,
-                    "Tatoeba (nno-eng)": 29.07,
-                    "Tatoeba (nob-eng)": 40.25,
-                    "Tatoeba (nov-eng)": 50.19,
-                    "Tatoeba (oci-eng)": 30.72,
-                    "Tatoeba (orv-eng)": 0.85,
-                    "Tatoeba (pam-eng)": 7.21,
-                    "Tatoeba (pes-eng)": 0.53,
-                    "Tatoeba (pms-eng)": 31.07,
-                    "Tatoeba (pol-eng)": 18.06,
-                    "Tatoeba (por-eng)": 81.92,
-                    "Tatoeba (ron-eng)": 62.6,
-                    "Tatoeba (rus-eng)": 22.24,
-                    "Tatoeba (slk-eng)": 10.59,
-                    "Tatoeba (slv-eng)": 11.4,
-                    "Tatoeba (spa-eng)": 85.78,
-                    "Tatoeba (sqi-eng)": 14.92,
-                    "Tatoeba (srp-eng)": 9.87,
-                    "Tatoeba (swe-eng)": 55.08,
-                    "Tatoeba (swg-eng)": 32.66,
-                    "Tatoeba (swh-eng)": 7.64,
-                    "Tatoeba (tam-eng)": 0.49,
-                    "Tatoeba (tat-eng)": 1.28,
-                    "Tatoeba (tel-eng)": 0.45,
-                    "Tatoeba (tgl-eng)": 23.63,
-                    "Tatoeba (tha-eng)": 0.61,
-                    "Tatoeba (tuk-eng)": 5.71,
-                    "Tatoeba (tur-eng)": 8.25,
-                    "Tatoeba (tzl-eng)": 28.4,
-                    "Tatoeba (uig-eng)": 0.57,
-                    "Tatoeba (ukr-eng)": 5.69,
-                    "Tatoeba (urd-eng)": 0.0,
-                    "Tatoeba (uzb-eng)": 4.19,
-                    "Tatoeba (vie-eng)": 9.07,
-                    "Tatoeba (war-eng)": 12.31,
-                    "Tatoeba (wuu-eng)": 1.38,
-                    "Tatoeba (xho-eng)": 7.6,
-                    "Tatoeba (yid-eng)": 0.41,
-                    "Tatoeba (yue-eng)": 1.31,
-                    "Tatoeba (zsm-eng)": 29.74
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "gtr-t5-xl",
-                    "AmazonCounterfactualClassification (de)": 59.79,
-                    "AmazonCounterfactualClassification (en)": 68.6,
-                    "AmazonCounterfactualClassification (en-ext)": 69.03,
-                    "AmazonCounterfactualClassification (ja)": 50.59,
-                    "AmazonPolarityClassification": 74.58,
-                    "AmazonReviewsClassification (de)": 35.06,
-                    "AmazonReviewsClassification (en)": 38.2,
-                    "AmazonReviewsClassification (es)": 37.18,
-                    "AmazonReviewsClassification (fr)": 35.48,
-                    "AmazonReviewsClassification (ja)": 22.24,
-                    "AmazonReviewsClassification (zh)": 21.89,
-                    "Banking77Classification": 82.22,
-                    "EmotionClassification": 45.54,
-                    "ImdbClassification": 68.15,
-                    "MTOPDomainClassification (de)": 85.42,
-                    "MTOPDomainClassification (en)": 93.6,
-                    "MTOPDomainClassification (es)": 88.2,
-                    "MTOPDomainClassification (fr)": 85.05,
-                    "MTOPDomainClassification (hi)": 21.74,
-                    "MTOPDomainClassification (th)": 15.87,
-                    "MTOPIntentClassification (de)": 55.75,
-                    "MTOPIntentClassification (en)": 65.93,
-                    "MTOPIntentClassification (es)": 57.73,
-                    "MTOPIntentClassification (fr)": 51.07,
-                    "MTOPIntentClassification (hi)": 3.19,
-                    "MTOPIntentClassification (th)": 5.55,
-                    "MassiveIntentClassification (af)": 42.6,
-                    "MassiveIntentClassification (am)": 2.12,
-                    "MassiveIntentClassification (ar)": 4.64,
-                    "MassiveIntentClassification (az)": 35.05,
-                    "MassiveIntentClassification (bn)": 2.84,
-                    "MassiveIntentClassification (cy)": 36.19,
-                    "MassiveIntentClassification (da)": 48.42,
-                    "MassiveIntentClassification (de)": 55.49,
-                    "MassiveIntentClassification (el)": 10.14,
-                    "MassiveIntentClassification (en)": 70.23,
-                    "MassiveIntentClassification (es)": 56.72,
-                    "MassiveIntentClassification (fa)": 3.54,
-                    "MassiveIntentClassification (fi)": 37.13,
-                    "MassiveIntentClassification (fr)": 57.67,
-                    "MassiveIntentClassification (he)": 2.56,
-                    "MassiveIntentClassification (hi)": 3.24,
-                    "MassiveIntentClassification (hu)": 34.22,
-                    "MassiveIntentClassification (hy)": 3.01,
-                    "MassiveIntentClassification (id)": 46.54,
-                    "MassiveIntentClassification (is)": 34.77,
-                    "MassiveIntentClassification (it)": 54.13,
-                    "MassiveIntentClassification (ja)": 4.27,
-                    "MassiveIntentClassification (jv)": 36.97,
-                    "MassiveIntentClassification (ka)": 2.72,
-                    "MassiveIntentClassification (km)": 5.35,
-                    "MassiveIntentClassification (kn)": 3.17,
-                    "MassiveIntentClassification (ko)": 2.64,
-                    "MassiveIntentClassification (lv)": 36.32,
-                    "MassiveIntentClassification (ml)": 3.18,
-                    "MassiveIntentClassification (mn)": 22.85,
-                    "MassiveIntentClassification (ms)": 42.87,
-                    "MassiveIntentClassification (my)": 4.04,
-                    "MassiveIntentClassification (nb)": 45.87,
-                    "MassiveIntentClassification (nl)": 49.53,
-                    "MassiveIntentClassification (pl)": 42.64,
-                    "MassiveIntentClassification (pt)": 57.03,
-                    "MassiveIntentClassification (ro)": 49.95,
-                    "MassiveIntentClassification (ru)": 36.58,
-                    "MassiveIntentClassification (sl)": 39.44,
-                    "MassiveIntentClassification (sq)": 41.78,
-                    "MassiveIntentClassification (sv)": 47.95,
-                    "MassiveIntentClassification (sw)": 35.85,
-                    "MassiveIntentClassification (ta)": 2.32,
-                    "MassiveIntentClassification (te)": 2.2,
-                    "MassiveIntentClassification (th)": 3.74,
-                    "MassiveIntentClassification (tl)": 43.12,
-                    "MassiveIntentClassification (tr)": 35.24,
-                    "MassiveIntentClassification (ur)": 3.0,
-                    "MassiveIntentClassification (vi)": 30.01,
-                    "MassiveIntentClassification (zh-CN)": 1.72,
-                    "MassiveIntentClassification (zh-TW)": 3.35,
-                    "MassiveScenarioClassification (af)": 52.54,
-                    "MassiveScenarioClassification (am)": 6.3,
-                    "MassiveScenarioClassification (ar)": 11.96,
-                    "MassiveScenarioClassification (az)": 40.17,
-                    "MassiveScenarioClassification (bn)": 8.29,
-                    "MassiveScenarioClassification (cy)": 42.24,
-                    "MassiveScenarioClassification (da)": 57.28,
-                    "MassiveScenarioClassification (de)": 68.09,
-                    "MassiveScenarioClassification (el)": 16.66,
-                    "MassiveScenarioClassification (en)": 75.94,
-                    "MassiveScenarioClassification (es)": 64.32,
-                    "MassiveScenarioClassification (fa)": 6.9,
-                    "MassiveScenarioClassification (fi)": 43.96,
-                    "MassiveScenarioClassification (fr)": 66.72,
-                    "MassiveScenarioClassification (he)": 7.51,
-                    "MassiveScenarioClassification (hi)": 7.82,
-                    "MassiveScenarioClassification (hu)": 42.16,
-                    "MassiveScenarioClassification (hy)": 9.33,
-                    "MassiveScenarioClassification (id)": 53.54,
-                    "MassiveScenarioClassification (is)": 42.84,
-                    "MassiveScenarioClassification (it)": 62.44,
-                    "MassiveScenarioClassification (ja)": 7.29,
-                    "MassiveScenarioClassification (jv)": 43.13,
-                    "MassiveScenarioClassification (ka)": 7.63,
-                    "MassiveScenarioClassification (km)": 9.08,
-                    "MassiveScenarioClassification (kn)": 8.1,
-                    "MassiveScenarioClassification (ko)": 6.35,
-                    "MassiveScenarioClassification (lv)": 40.24,
-                    "MassiveScenarioClassification (ml)": 7.65,
-                    "MassiveScenarioClassification (mn)": 27.98,
-                    "MassiveScenarioClassification (ms)": 52.41,
-                    "MassiveScenarioClassification (my)": 9.21,
-                    "MassiveScenarioClassification (nb)": 54.44,
-                    "MassiveScenarioClassification (nl)": 60.35,
-                    "MassiveScenarioClassification (pl)": 49.97,
-                    "MassiveScenarioClassification (pt)": 62.78,
-                    "MassiveScenarioClassification (ro)": 59.62,
-                    "MassiveScenarioClassification (ru)": 43.44,
-                    "MassiveScenarioClassification (sl)": 44.79,
-                    "MassiveScenarioClassification (sq)": 50.84,
-                    "MassiveScenarioClassification (sv)": 58.21,
-                    "MassiveScenarioClassification (sw)": 44.63,
-                    "MassiveScenarioClassification (ta)": 7.95,
-                    "MassiveScenarioClassification (te)": 7.5,
-                    "MassiveScenarioClassification (th)": 8.79,
-                    "MassiveScenarioClassification (tl)": 53.54,
-                    "MassiveScenarioClassification (tr)": 42.47,
-                    "MassiveScenarioClassification (ur)": 9.58,
-                    "MassiveScenarioClassification (vi)": 34.68,
-                    "MassiveScenarioClassification (zh-CN)": 5.21,
-                    "MassiveScenarioClassification (zh-TW)": 8.77,
-                    "ToxicConversationsClassification": 67.56,
-                    "TweetSentimentExtractionClassification": 54.77
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "gtr-t5-xl",
-                    "ArxivClusteringP2P": 37.9,
-                    "ArxivClusteringS2S": 30.45,
-                    "BiorxivClusteringP2P": 30.52,
-                    "BiorxivClusteringS2S": 26.06,
-                    "MedrxivClusteringP2P": 28.69,
-                    "MedrxivClusteringS2S": 26.69,
-                    "RedditClustering": 61.34,
-                    "RedditClusteringP2P": 61.11,
-                    "StackExchangeClustering": 69.95,
-                    "StackExchangeClusteringP2P": 32.73,
-                    "TwentyNewsgroupsClustering": 51.15
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "gtr-t5-xl",
-                    "SprintDuplicateQuestions": 95.45,
-                    "TwitterSemEval2015": 77.81,
-                    "TwitterURLCorpus": 85.14
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "gtr-t5-xl",
-                    "AskUbuntuDupQuestions": 63.08,
-                    "MindSmallReranking": 31.5,
-                    "SciDocsRR": 76.49,
-                    "StackOverflowDupQuestions": 52.79
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "gtr-t5-xl",
-                    "ArguAna": 52.81,
-                    "CQADupstackRetrieval": 37.35,
-                    "ClimateFEVER": 27.01,
-                    "DBPedia": 39.74,
-                    "FEVER": 72.18,
-                    "FiQA2018": 44.19,
-                    "HotpotQA": 58.91,
-                    "MSMARCO": 43.52,
-                    "NFCorpus": 33.34,
-                    "NQ": 56.16,
-                    "QuoraRetrieval": 88.91,
-                    "SCIDOCS": 15.71,
-                    "SciFact": 64.2,
-                    "TRECCOVID": 60.09,
-                    "Touche2020": 25.26
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "gtr-t5-xl",
-                    "BIOSSES": 78.94,
-                    "SICK-R": 73.63,
-                    "STS12": 69.11,
-                    "STS13": 81.82,
-                    "STS14": 77.07,
-                    "STS15": 86.01,
-                    "STS16": 82.23,
-                    "STS17 (ar-ar)": 9.06,
-                    "STS17 (en-ar)": -3.22,
-                    "STS17 (en-de)": 70.38,
-                    "STS17 (en-en)": 84.9,
-                    "STS17 (en-tr)": 17.17,
-                    "STS17 (es-en)": 60.24,
-                    "STS17 (es-es)": 81.93,
-                    "STS17 (fr-en)": 62.17,
-                    "STS17 (it-en)": 59.11,
-                    "STS17 (ko-ko)": 8.9,
-                    "STS17 (nl-en)": 56.91,
-                    "STS22 (ar)": 37.66,
-                    "STS22 (de)": 50.58,
-                    "STS22 (de-en)": 53.63,
-                    "STS22 (de-fr)": 55.72,
-                    "STS22 (de-pl)": 27.99,
-                    "STS22 (en)": 66.61,
-                    "STS22 (es)": 59.14,
-                    "STS22 (es-en)": 69.99,
-                    "STS22 (es-it)": 60.94,
-                    "STS22 (fr)": 79.43,
-                    "STS22 (fr-pl)": 61.98,
-                    "STS22 (it)": 67.14,
-                    "STS22 (pl)": 33.74,
-                    "STS22 (pl-en)": 60.18,
-                    "STS22 (ru)": 32.69,
-                    "STS22 (tr)": 55.79,
-                    "STS22 (zh)": 31.16,
-                    "STS22 (zh-en)": 28.85,
-                    "STSBenchmark": 77.65
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "gtr-t5-xl",
-                    "SummEval": 30.21
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "gtr-t5-xl"
-                }
-            ]
-        }
-    },
-    "flaubert_base_cased": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "flaubert_base_cased"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "flaubert_base_cased",
-                    "AmazonReviewsClassification (fr)": 24.9,
-                    "MTOPDomainClassification (fr)": 25.55,
-                    "MTOPIntentClassification (fr)": 9.49,
-                    "MasakhaNEWSClassification (fra)": 71.14,
-                    "MassiveIntentClassification (fr)": 6.98,
-                    "MassiveScenarioClassification (fr)": 11.41
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "flaubert_base_cased",
-                    "AlloProfClusteringP2P": 52.86,
-                    "AlloProfClusteringS2S": 14.46,
-                    "HALClusteringS2S": 3.85,
-                    "MLSUMClusteringP2P": 39.06,
-                    "MLSUMClusteringS2S": 17.13,
-                    "MasakhaNEWSClusteringP2P (fra)": 41.61,
-                    "MasakhaNEWSClusteringS2S (fra)": 21.26
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "flaubert_base_cased",
-                    "OpusparcusPC (fr)": 82.15,
-                    "PawsXPairClassification (fr)": 51.89
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "flaubert_base_cased",
-                    "AlloprofReranking": 34.81,
-                    "SyntecReranking": 55.88
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "flaubert_base_cased",
-                    "AlloprofRetrieval": 1.63,
-                    "BSARDRetrieval": 0.0,
-                    "MintakaRetrieval (fr)": 0.58,
-                    "SyntecRetrieval": 20.56,
-                    "XPQARetrieval (fr)": 6.59
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "flaubert_base_cased",
-                    "SICKFr": 53.86,
-                    "STS22 (fr)": 65.37,
-                    "STSBenchmarkMultilingualSTS (fr)": 37.14
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "flaubert_base_cased",
-                    "SummEvalFr": 31.26
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "flaubert_base_cased"
-                }
-            ]
-        }
-    },
-    "monot5-3b-msmarco-10k": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "monot5-3b-msmarco-10k"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "monot5-3b-msmarco-10k"
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "monot5-3b-msmarco-10k"
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "monot5-3b-msmarco-10k"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "monot5-3b-msmarco-10k"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "monot5-3b-msmarco-10k"
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "monot5-3b-msmarco-10k"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "monot5-3b-msmarco-10k"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "monot5-3b-msmarco-10k",
-                    "Core17InstructionRetrieval": 1.84,
-                    "News21InstructionRetrieval": 1.78,
-                    "Robust04InstructionRetrieval": 3.96
-                }
-            ]
-        }
-    },
-    "voyage-multilingual-2": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "voyage-multilingual-2"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "voyage-multilingual-2",
-                    "AmazonReviewsClassification (fr)": 43.36,
-                    "MTOPDomainClassification (fr)": 90.33,
-                    "MTOPIntentClassification (fr)": 60.52,
-                    "MasakhaNEWSClassification (fra)": 74.81,
-                    "MassiveIntentClassification (fr)": 68.06,
-                    "MassiveScenarioClassification (fr)": 74.29
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "voyage-multilingual-2",
-                    "AlloProfClusteringP2P": 65.37,
-                    "AlloProfClusteringS2S": 47.03,
-                    "HALClusteringS2S": 27.67,
-                    "MLSUMClusteringP2P (fr)": 45.99,
-                    "MLSUMClusteringS2S (fr)": 45.57,
-                    "MasakhaNEWSClusteringP2P (fra)": 44.53,
-                    "MasakhaNEWSClusteringS2S (fra)": 49.8
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "voyage-multilingual-2",
-                    "OpusparcusPC (fr)": 93.68,
-                    "PawsXPairClassification (fr)": 63.64
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "voyage-multilingual-2",
-                    "AlloprofReranking": 74.78,
-                    "SyntecReranking": 90.4
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "voyage-multilingual-2",
-                    "AlloprofRetrieval": 58.27,
-                    "BSARDRetrieval": 5.14,
-                    "LEMBNarrativeQARetrieval": 64.69,
-                    "LEMBNeedleRetrieval": 75.25,
-                    "LEMBPasskeyRetrieval": 97.0,
-                    "LEMBQMSumRetrieval": 51.49,
-                    "LEMBSummScreenFDRetrieval": 99.11,
-                    "LEMBWikimQARetrieval": 87.49,
-                    "MintakaRetrieval (fr)": 49.19,
-                    "SyntecRetrieval": 87.28,
-                    "XPQARetrieval (fr)": 72.92
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "voyage-multilingual-2",
-                    "SICKFr": 74.9,
-                    "STS22 (fr)": 82.76,
-                    "STSBenchmarkMultilingualSTS (fr)": 82.72
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "voyage-multilingual-2",
-                    "SummEvalFr": 29.96
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "voyage-multilingual-2"
-                }
-            ]
-        }
-    },
-    "xlm-roberta-large": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "xlm-roberta-large"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "xlm-roberta-large",
-                    "AmazonReviewsClassification (fr)": 26.62,
-                    "MTOPDomainClassification (fr)": 36.77,
-                    "MTOPIntentClassification (fr)": 15.37,
-                    "MasakhaNEWSClassification (fra)": 65.76,
-                    "MassiveIntentClassification (fr)": 15.82,
-                    "MassiveScenarioClassification (fr)": 23.92
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "xlm-roberta-large",
-                    "AlloProfClusteringP2P": 56.54,
-                    "AlloProfClusteringS2S": 21.18,
-                    "BlurbsClusteringP2P": 29.84,
-                    "BlurbsClusteringS2S": 7.29,
-                    "HALClusteringS2S": 5.94,
-                    "MLSUMClusteringP2P": 42.67,
-                    "MLSUMClusteringS2S": 18.5,
-                    "MasakhaNEWSClusteringP2P (fra)": 34.02,
-                    "MasakhaNEWSClusteringS2S (fra)": 21.52,
-                    "TenKGnadClusteringP2P": 32.46,
-                    "TenKGnadClusteringS2S": 6.16
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "xlm-roberta-large",
-                    "OpusparcusPC (fr)": 83.73,
-                    "PawsXPairClassification (fr)": 53.38
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "xlm-roberta-large",
-                    "AlloprofReranking": 28.62,
-                    "SyntecReranking": 49.4
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "xlm-roberta-large",
-                    "AlloprofRetrieval": 0.52,
-                    "BSARDRetrieval": 0.0,
-                    "MintakaRetrieval (fr)": 0.9,
-                    "SyntecRetrieval": 6.6,
-                    "XPQARetrieval (fr)": 12.7
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "xlm-roberta-large",
-                    "SICKFr": 50.01,
-                    "STS22 (fr)": 55.49,
-                    "STSBenchmarkMultilingualSTS (fr)": 42.32
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "xlm-roberta-large",
-                    "SummEvalFr": 28.89
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "xlm-roberta-large"
-                }
-            ]
-        }
-    },
-    "rubert-base-cased-sentence": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "rubert-base-cased-sentence",
-                    "Tatoeba (rus-Cyrl_eng-Latn)": 20.26
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "rubert-base-cased-sentence",
-                    "GeoreviewClassification (rus-Cyrl)": 38.05,
-                    "HeadlineClassification (rus-Cyrl)": 67.64,
-                    "InappropriatenessClassification (rus-Cyrl)": 58.27,
-                    "KinopoiskClassification (rus-Cyrl)": 45.86,
-                    "MassiveIntentClassification (rus-Cyrl)": 49.1,
-                    "MassiveScenarioClassification (rus-Cyrl)": 51.91,
-                    "RuReviewsClassification (rus-Cyrl)": 58.34,
-                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 52.18,
-                    "RuSciBenchOECDClassification (rus-Cyrl)": 40.11
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "rubert-base-cased-sentence",
-                    "GeoreviewClusteringP2P (rus-Cyrl)": 41.82,
-                    "MLSUMClusteringP2P (rus-Cyrl)": 43.71,
-                    "MLSUMClusteringS2S (rus-Cyrl)": 45.94,
-                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 46.29,
-                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 41.28
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "rubert-base-cased-sentence",
-                    "OpusparcusPC (rus-Cyrl)": 81.52,
-                    "TERRa (rus-Cyrl)": 59.12
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "rubert-base-cased-sentence",
-                    "RuBQReranking (rus-Cyrl)": 39.89
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "rubert-base-cased-sentence",
-                    "RiaNewsRetrieval (rus-Cyrl)": 6.72,
-                    "RuBQRetrieval (rus-Cyrl)": 12.63
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "rubert-base-cased-sentence",
-                    "RUParaPhraserSTS (rus-Cyrl)": 66.24,
-                    "RuSTSBenchmarkSTS (rus-Cyrl)": 66.03,
-                    "STS22 (rus-Cyrl)": 51.27,
-                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 66.71
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "rubert-base-cased-sentence"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "rubert-base-cased-sentence"
-                }
-            ]
-        }
-    },
-    "bge-base-zh-v1.5": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "bge-base-zh-v1.5"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "bge-base-zh-v1.5",
-                    "AmazonReviewsClassification (zh)": 40.15,
-                    "IFlyTek": 48.62,
-                    "JDReview": 83.62,
-                    "MassiveIntentClassification (zh-CN)": 67.93,
-                    "MassiveScenarioClassification (zh-CN)": 73.98,
-                    "MultilingualSentiment": 70.67,
-                    "OnlineShopping": 91.26,
-                    "TNews": 51.08,
-                    "Waimai": 85.36
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "bge-base-zh-v1.5",
-                    "CLSClusteringP2P": 39.91,
-                    "CLSClusteringS2S": 37.63,
-                    "ThuNewsClusteringP2P": 58.45,
-                    "ThuNewsClusteringS2S": 54.12
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "bge-base-zh-v1.5",
-                    "Cmnli": 84.1,
-                    "Ocnli": 75.41
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "bge-base-zh-v1.5",
-                    "CMedQAv1": 80.47,
-                    "CMedQAv2": 84.88,
-                    "MMarcoReranking": 29.74,
-                    "T2Reranking": 66.49
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "bge-base-zh-v1.5",
-                    "CmedqaRetrieval": 41.61,
-                    "CovidRetrieval": 74.7,
-                    "DuRetrieval": 85.07,
-                    "EcomRetrieval": 64.25,
-                    "MMarcoRetrieval": 77.69,
-                    "MedicalRetrieval": 56.51,
-                    "T2Retrieval": 83.71,
-                    "VideoRetrieval": 72.35
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "bge-base-zh-v1.5",
-                    "AFQMC": 42.4,
-                    "ATEC": 48.17,
-                    "BQ": 61.78,
-                    "LCQMC": 74.45,
-                    "PAWSX": 20.4,
-                    "QBQTC": 36.22,
-                    "STS22 (zh)": 68.01,
-                    "STSB": 78.31
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "bge-base-zh-v1.5"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "bge-base-zh-v1.5"
-                }
-            ]
-        }
-    },
-    "rubert-tiny-turbo": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "rubert-tiny-turbo",
-                    "Tatoeba (rus-Cyrl_eng-Latn)": 83.14
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "rubert-tiny-turbo",
-                    "AmazonPolarityClassification": 68.36,
-                    "Banking77Classification": 59.86,
-                    "EmotionClassification": 29.5,
-                    "GeoreviewClassification (rus-Cyrl)": 41.36,
-                    "HeadlineClassification (rus-Cyrl)": 68.9,
-                    "ImdbClassification": 58.36,
-                    "InappropriatenessClassification (rus-Cyrl)": 59.11,
-                    "KinopoiskClassification (rus-Cyrl)": 50.47,
-                    "MassiveIntentClassification (cmo-Hans)": 5.21,
-                    "MassiveIntentClassification (kor-Kore)": 2.53,
-                    "MassiveIntentClassification (hin-Deva)": 2.56,
-                    "MassiveIntentClassification (kan-Knda)": 2.06,
-                    "MassiveIntentClassification (kat-Geor)": 2.64,
-                    "MassiveIntentClassification (amh-Ethi)": 2.28,
-                    "MassiveIntentClassification (mya-Mymr)": 3.96,
-                    "MassiveIntentClassification (ell-Grek)": 9.66,
-                    "MassiveIntentClassification (lav-Latn)": 22.32,
-                    "MassiveIntentClassification (mal-Mlym)": 2.39,
-                    "MassiveIntentClassification (mon-Cyrl)": 28.99,
-                    "MassiveIntentClassification (urd-Arab)": 2.45,
-                    "MassiveIntentClassification (fas-Arab)": 3.34,
-                    "MassiveIntentClassification (ron-Latn)": 31.72,
-                    "MassiveIntentClassification (isl-Latn)": 24.85,
-                    "MassiveIntentClassification (en)": 50.16,
-                    "MassiveIntentClassification (hun-Latn)": 25.52,
-                    "MassiveIntentClassification (fra-Latn)": 31.51,
-                    "MassiveIntentClassification (tha-Thai)": 3.74,
-                    "MassiveIntentClassification (deu-Latn)": 32.1,
-                    "MassiveIntentClassification (tur-Latn)": 27.56,
-                    "MassiveIntentClassification (por-Latn)": 34.35,
-                    "MassiveIntentClassification (sqi-Latn)": 32.38,
-                    "MassiveIntentClassification (cmo-Hant)": 6.81,
-                    "MassiveIntentClassification (hye-Armn)": 2.72,
-                    "MassiveIntentClassification (dan-Latn)": 33.95,
-                    "MassiveIntentClassification (afr-Latn)": 30.4,
-                    "MassiveIntentClassification (ara-Arab)": 3.8,
-                    "MassiveIntentClassification (jav-Latn)": 28.53,
-                    "MassiveIntentClassification (tel-Telu)": 2.21,
-                    "MassiveIntentClassification (tgl-Latn)": 32.02,
-                    "MassiveIntentClassification (swa-Latn)": 27.79,
-                    "MassiveIntentClassification (jpn-Jpan)": 5.61,
-                    "MassiveIntentClassification (msa-Latn)": 28.94,
-                    "MassiveIntentClassification (nob-Latn)": 32.3,
-                    "MassiveIntentClassification (fin-Latn)": 31.13,
-                    "MassiveIntentClassification (ind-Latn)": 33.56,
-                    "MassiveIntentClassification (cym-Latn)": 31.68,
-                    "MassiveIntentClassification (slv-Latn)": 31.39,
-                    "MassiveIntentClassification (spa-Latn)": 31.03,
-                    "MassiveIntentClassification (ben-Beng)": 3.08,
-                    "MassiveIntentClassification (swe-Latn)": 30.23,
-                    "MassiveIntentClassification (rus-Cyrl)": 57.98,
-                    "MassiveIntentClassification (aze-Latn)": 23.58,
-                    "MassiveIntentClassification (ita-Latn)": 35.24,
-                    "MassiveIntentClassification (pol-Latn)": 26.82,
-                    "MassiveIntentClassification (vie-Latn)": 23.72,
-                    "MassiveIntentClassification (tam-Taml)": 1.5,
-                    "MassiveIntentClassification (heb-Hebr)": 2.25,
-                    "MassiveIntentClassification (nld-Latn)": 32.44,
-                    "MassiveIntentClassification (khm-Khmr)": 5.14,
-                    "MassiveScenarioClassification (cmo-Hans)": 10.6,
-                    "MassiveScenarioClassification (kor-Kore)": 5.63,
-                    "MassiveScenarioClassification (hin-Deva)": 7.41,
-                    "MassiveScenarioClassification (kan-Knda)": 7.6,
-                    "MassiveScenarioClassification (kat-Geor)": 7.01,
-                    "MassiveScenarioClassification (amh-Ethi)": 7.68,
-                    "MassiveScenarioClassification (mya-Mymr)": 10.73,
-                    "MassiveScenarioClassification (ell-Grek)": 17.95,
-                    "MassiveScenarioClassification (lav-Latn)": 29.29,
-                    "MassiveScenarioClassification (mal-Mlym)": 6.92,
-                    "MassiveScenarioClassification (mon-Cyrl)": 33.7,
-                    "MassiveScenarioClassification (urd-Arab)": 8.53,
-                    "MassiveScenarioClassification (fas-Arab)": 6.62,
-                    "MassiveScenarioClassification (ron-Latn)": 40.02,
-                    "MassiveScenarioClassification (isl-Latn)": 33.1,
-                    "MassiveScenarioClassification (en)": 61.29,
-                    "MassiveScenarioClassification (hun-Latn)": 36.41,
-                    "MassiveScenarioClassification (fra-Latn)": 42.9,
-                    "MassiveScenarioClassification (tha-Thai)": 8.26,
-                    "MassiveScenarioClassification (deu-Latn)": 42.07,
-                    "MassiveScenarioClassification (tur-Latn)": 34.85,
-                    "MassiveScenarioClassification (por-Latn)": 40.79,
-                    "MassiveScenarioClassification (sqi-Latn)": 42.66,
-                    "MassiveScenarioClassification (cmo-Hant)": 11.93,
-                    "MassiveScenarioClassification (hye-Armn)": 8.78,
-                    "MassiveScenarioClassification (dan-Latn)": 43.69,
-                    "MassiveScenarioClassification (afr-Latn)": 40.84,
-                    "MassiveScenarioClassification (ara-Arab)": 11.86,
-                    "MassiveScenarioClassification (jav-Latn)": 37.23,
-                    "MassiveScenarioClassification (tel-Telu)": 6.91,
-                    "MassiveScenarioClassification (tgl-Latn)": 38.16,
-                    "MassiveScenarioClassification (swa-Latn)": 35.66,
-                    "MassiveScenarioClassification (jpn-Jpan)": 10.6,
-                    "MassiveScenarioClassification (msa-Latn)": 38.97,
-                    "MassiveScenarioClassification (nob-Latn)": 39.05,
-                    "MassiveScenarioClassification (fin-Latn)": 35.19,
-                    "MassiveScenarioClassification (ind-Latn)": 39.54,
-                    "MassiveScenarioClassification (cym-Latn)": 39.85,
-                    "MassiveScenarioClassification (slv-Latn)": 35.98,
-                    "MassiveScenarioClassification (spa-Latn)": 37.13,
-                    "MassiveScenarioClassification (ben-Beng)": 8.85,
-                    "MassiveScenarioClassification (swe-Latn)": 36.12,
-                    "MassiveScenarioClassification (rus-Cyrl)": 62.9,
-                    "MassiveScenarioClassification (aze-Latn)": 30.32,
-                    "MassiveScenarioClassification (ita-Latn)": 42.69,
-                    "MassiveScenarioClassification (pol-Latn)": 31.62,
-                    "MassiveScenarioClassification (vie-Latn)": 31.89,
-                    "MassiveScenarioClassification (tam-Taml)": 7.01,
-                    "MassiveScenarioClassification (heb-Hebr)": 7.61,
-                    "MassiveScenarioClassification (nld-Latn)": 40.94,
-                    "MassiveScenarioClassification (khm-Khmr)": 8.51,
-                    "RuReviewsClassification (rus-Cyrl)": 60.66,
-                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 52.93,
-                    "RuSciBenchOECDClassification (rus-Cyrl)": 40.79,
-                    "ToxicConversationsClassification": 57.77,
-                    "TweetSentimentExtractionClassification": 55.3
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "rubert-tiny-turbo",
-                    "ArxivClusteringP2P": 24.83,
-                    "ArxivClusteringS2S": 16.68,
-                    "BiorxivClusteringP2P": 20.0,
-                    "BiorxivClusteringS2S": 12.67,
-                    "GeoreviewClusteringP2P (rus-Cyrl)": 59.71,
-                    "MLSUMClusteringP2P (rus-Cyrl)": 40.02,
-                    "MLSUMClusteringS2S (rus-Cyrl)": 41.36,
-                    "MedrxivClusteringP2P": 20.79,
-                    "MedrxivClusteringS2S": 18.18,
-                    "RedditClustering": 26.28,
-                    "RedditClusteringP2P": 40.48,
-                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 47.55,
-                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 41.44,
-                    "StackExchangeClustering": 33.51,
-                    "StackExchangeClusteringP2P": 27.98,
-                    "TwentyNewsgroupsClustering": 19.9
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "rubert-tiny-turbo",
-                    "OpusparcusPC (rus-Cyrl)": 87.58,
-                    "TERRa (rus-Cyrl)": 56.09
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "rubert-tiny-turbo",
-                    "RuBQReranking (rus-Cyrl)": 62.15
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "rubert-tiny-turbo",
-                    "AILACasedocs": 7.43,
-                    "AILAStatutes": 13.62,
-                    "ARCChallenge": 3.85,
-                    "AlphaNLI": 14.15,
-                    "ArguAna": 32.03,
-                    "ClimateFEVER": 5.56,
-                    "DBPedia": 9.61,
-                    "RiaNewsRetrieval (rus-Cyrl)": 51.27,
-                    "RuBQRetrieval (rus-Cyrl)": 51.73
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "rubert-tiny-turbo",
-                    "RUParaPhraserSTS (rus-Cyrl)": 72.15,
-                    "RuSTSBenchmarkSTS (rus-Cyrl)": 78.48,
-                    "STS22 (cmn-Hans)": 32.83,
-                    "STS22 (deu-Latn_fra-Latn)": 17.5,
-                    "STS22 (pol-Latn_eng-Latn)": 42.08,
-                    "STS22 (rus-Cyrl)": 60.06,
-                    "STS22 (fra-Latn)": 42.0,
-                    "STS22 (deu-Latn)": 8.16,
-                    "STS22 (tur-Latn)": 15.46,
-                    "STS22 (deu-Latn_eng-Latn)": 21.55,
-                    "STS22 (ita-Latn)": 39.69,
-                    "STS22 (pol-Latn)": 9.71,
-                    "STS22 (fra-Latn_pol-Latn)": 39.44,
-                    "STS22 (deu-Latn_pol-Latn)": 25.53,
-                    "STS22 (ara-Arab)": 27.95,
-                    "STS22 (spa-Latn_eng-Latn)": 42.77,
-                    "STS22 (spa-Latn_ita-Latn)": 32.83,
-                    "STS22 (spa-Latn)": 45.31,
-                    "STS22 (cmn-Hans_eng-Latn)": 31.25,
-                    "STS22 (en)": 47.06,
-                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 78.12
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "rubert-tiny-turbo"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "rubert-tiny-turbo"
-                }
-            ]
-        }
-    },
-    "LLM2Vec-Llama-2-supervised": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "LLM2Vec-Llama-2-supervised"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "LLM2Vec-Llama-2-supervised",
-                    "AmazonCounterfactualClassification (en)": 82.22,
-                    "AmazonPolarityClassification": 89.69,
-                    "AmazonReviewsClassification (en)": 48.47,
-                    "Banking77Classification": 88.17,
-                    "EmotionClassification": 51.71,
-                    "ImdbClassification": 85.78,
-                    "MTOPDomainClassification (en)": 95.57,
-                    "MTOPIntentClassification (en)": 82.81,
-                    "MassiveIntentClassification (en)": 78.06,
-                    "MassiveScenarioClassification (en)": 81.35,
-                    "ToxicConversationsClassification": 71.01,
-                    "TweetSentimentExtractionClassification": 61.11
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "LLM2Vec-Llama-2-supervised",
-                    "ArxivClusteringP2P": 43.14,
-                    "ArxivClusteringS2S": 42.38,
-                    "BiorxivClusteringP2P": 35.88,
-                    "BiorxivClusteringS2S": 34.81,
-                    "MedrxivClusteringP2P": 32.23,
-                    "MedrxivClusteringS2S": 31.37,
-                    "RedditClustering": 61.1,
-                    "RedditClusteringP2P": 64.52,
-                    "StackExchangeClustering": 67.98,
-                    "StackExchangeClusteringP2P": 33.2,
-                    "TwentyNewsgroupsClustering": 51.04
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "LLM2Vec-Llama-2-supervised",
-                    "SprintDuplicateQuestions": 96.83,
-                    "TwitterSemEval2015": 80.7,
-                    "TwitterURLCorpus": 86.56
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "LLM2Vec-Llama-2-supervised",
-                    "AskUbuntuDupQuestions": 63.13,
-                    "MindSmallReranking": 31.34,
-                    "SciDocsRR": 84.03,
-                    "StackOverflowDupQuestions": 51.02
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "LLM2Vec-Llama-2-supervised",
-                    "ArguAna": 56.53,
-                    "CQADupstackRetrieval": 45.94,
-                    "ClimateFEVER": 30.7,
-                    "DBPedia": 48.42,
-                    "FEVER": 89.93,
-                    "FiQA2018": 51.28,
-                    "HotpotQA": 72.99,
-                    "MSMARCO": 41.46,
-                    "NFCorpus": 40.33,
-                    "NQ": 61.24,
-                    "QuoraRetrieval": 85.59,
-                    "SCIDOCS": 21.05,
-                    "SciFact": 77.3,
-                    "TRECCOVID": 79.25,
-                    "Touche2020": 16.92
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "LLM2Vec-Llama-2-supervised",
-                    "BIOSSES": 82.13,
-                    "SICK-R": 83.01,
-                    "STS12": 78.85,
-                    "STS13": 86.84,
-                    "STS14": 84.04,
-                    "STS15": 88.72,
-                    "STS16": 86.79,
-                    "STS17 (en-en)": 90.63,
-                    "STS22 (en)": 67.55,
-                    "STSBenchmark": 88.72
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "LLM2Vec-Llama-2-supervised",
-                    "SummEval": 28.49
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "LLM2Vec-Llama-2-supervised"
-                }
-            ]
-        }
-    },
-    "LLM2Vec-Mistral-unsupervised": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "LLM2Vec-Mistral-unsupervised"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "LLM2Vec-Mistral-unsupervised",
-                    "AmazonCounterfactualClassification (en)": 76.94,
-                    "AmazonPolarityClassification": 85.29,
-                    "AmazonReviewsClassification (en)": 47.09,
-                    "Banking77Classification": 86.16,
-                    "EmotionClassification": 48.88,
-                    "ImdbClassification": 77.95,
-                    "MTOPDomainClassification (en)": 95.48,
-                    "MTOPIntentClassification (en)": 82.84,
-                    "MassiveIntentClassification (en)": 76.65,
-                    "MassiveScenarioClassification (en)": 79.99,
-                    "ToxicConversationsClassification": 70.71,
-                    "TweetSentimentExtractionClassification": 60.9
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "LLM2Vec-Mistral-unsupervised",
-                    "ArxivClusteringP2P": 47.56,
-                    "ArxivClusteringS2S": 39.92,
-                    "BiorxivClusteringP2P": 36.14,
-                    "BiorxivClusteringS2S": 30.26,
-                    "MedrxivClusteringP2P": 30.11,
-                    "MedrxivClusteringS2S": 26.93,
-                    "RedditClustering": 41.83,
-                    "RedditClusteringP2P": 62.08,
-                    "StackExchangeClustering": 67.34,
-                    "StackExchangeClusteringP2P": 34.5,
-                    "TwentyNewsgroupsClustering": 30.26
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "LLM2Vec-Mistral-unsupervised",
-                    "SprintDuplicateQuestions": 91.3,
-                    "TwitterSemEval2015": 68.76,
-                    "TwitterURLCorpus": 82.76
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "LLM2Vec-Mistral-unsupervised",
-                    "AskUbuntuDupQuestions": 58.6,
-                    "MindSmallReranking": 29.73,
-                    "SciDocsRR": 77.81,
-                    "StackOverflowDupQuestions": 49.8
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "LLM2Vec-Mistral-unsupervised",
-                    "ArguAna": 51.0,
-                    "CQADupstackRetrieval": 33.37,
-                    "ClimateFEVER": 22.97,
-                    "DBPedia": 25.48,
-                    "FEVER": 45.11,
-                    "FiQA2018": 27.24,
-                    "HotpotQA": 54.54,
-                    "MSMARCO": 19.13,
-                    "NFCorpus": 27.16,
-                    "NQ": 34.16,
-                    "QuoraRetrieval": 84.4,
-                    "SCIDOCS": 15.35,
-                    "SciFact": 68.68,
-                    "TRECCOVID": 55.67,
-                    "Touche2020": 6.54
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "LLM2Vec-Mistral-unsupervised",
-                    "BIOSSES": 83.29,
-                    "SICK-R": 75.55,
-                    "STS12": 67.65,
-                    "STS13": 83.9,
-                    "STS14": 76.97,
-                    "STS15": 83.8,
-                    "STS16": 81.91,
-                    "STS17 (en-en)": 85.58,
-                    "STS22 (en)": 65.93,
-                    "STSBenchmark": 80.42
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "LLM2Vec-Mistral-unsupervised",
-                    "SummEval": 30.19
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "LLM2Vec-Mistral-unsupervised"
-                }
-            ]
-        }
-    },
-    "paraphrase-multilingual-mpnet-base-v2": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "paraphrase-multilingual-mpnet-base-v2",
-                    "BUCC (de-en)": 98.59,
-                    "BUCC (fr-en)": 96.89,
-                    "BUCC (ru-en)": 96.44,
-                    "BUCC (zh-en)": 97.56,
-                    "BornholmBitextMining (dan-Latn)": 18.18,
-                    "Tatoeba (afr-eng)": 72.96,
-                    "Tatoeba (amh-eng)": 53.49,
-                    "Tatoeba (ang-eng)": 16.72,
-                    "Tatoeba (ara-eng)": 90.19,
-                    "Tatoeba (arq-eng)": 19.84,
-                    "Tatoeba (arz-eng)": 55.69,
-                    "Tatoeba (ast-eng)": 70.08,
-                    "Tatoeba (awa-eng)": 42.83,
-                    "Tatoeba (aze-eng)": 76.36,
-                    "Tatoeba (bel-eng)": 79.94,
-                    "Tatoeba (ben-eng)": 64.9,
-                    "Tatoeba (ber-eng)": 4.88,
-                    "Tatoeba (bos-eng)": 94.02,
-                    "Tatoeba (bre-eng)": 6.42,
-                    "Tatoeba (bul-eng)": 93.52,
-                    "Tatoeba (cat-eng)": 96.05,
-                    "Tatoeba (cbk-eng)": 58.68,
-                    "Tatoeba (ceb-eng)": 7.39,
-                    "Tatoeba (ces-eng)": 95.73,
-                    "Tatoeba (cha-eng)": 12.59,
-                    "Tatoeba (cmn-eng)": 95.83,
-                    "Tatoeba (cor-eng)": 3.53,
-                    "Tatoeba (csb-eng)": 23.73,
-                    "Tatoeba (cym-eng)": 22.31,
-                    "Tatoeba (dan-eng)": 96.17,
-                    "Tatoeba (deu-eng)": 97.73,
-                    "Tatoeba (dsb-eng)": 36.85,
-                    "Tatoeba (dtp-eng)": 5.03,
-                    "Tatoeba (ell-eng)": 94.93,
-                    "Tatoeba (epo-eng)": 55.12,
-                    "Tatoeba (est-eng)": 98.4,
-                    "Tatoeba (eus-eng)": 31.33,
-                    "Tatoeba (fao-eng)": 38.24,
-                    "Tatoeba (fin-eng)": 95.92,
-                    "Tatoeba (fra-eng)": 93.12,
-                    "Tatoeba (fry-eng)": 43.54,
-                    "Tatoeba (gla-eng)": 4.72,
-                    "Tatoeba (gle-eng)": 16.85,
-                    "Tatoeba (glg-eng)": 95.32,
-                    "Tatoeba (gsw-eng)": 25.12,
-                    "Tatoeba (heb-eng)": 88.26,
-                    "Tatoeba (hin-eng)": 97.75,
-                    "Tatoeba (hrv-eng)": 97.0,
-                    "Tatoeba (hsb-eng)": 44.32,
-                    "Tatoeba (hun-eng)": 94.18,
-                    "Tatoeba (hye-eng)": 94.38,
-                    "Tatoeba (ido-eng)": 43.91,
-                    "Tatoeba (ile-eng)": 60.36,
-                    "Tatoeba (ina-eng)": 84.32,
-                    "Tatoeba (ind-eng)": 93.5,
-                    "Tatoeba (isl-eng)": 59.25,
-                    "Tatoeba (ita-eng)": 93.76,
-                    "Tatoeba (jav-eng)": 23.39,
-                    "Tatoeba (jpn-eng)": 92.51,
-                    "Tatoeba (kab-eng)": 1.41,
-                    "Tatoeba (kat-eng)": 95.46,
-                    "Tatoeba (kaz-eng)": 61.49,
-                    "Tatoeba (khm-eng)": 58.8,
-                    "Tatoeba (kor-eng)": 93.07,
-                    "Tatoeba (kur-eng)": 61.44,
-                    "Tatoeba (kzj-eng)": 5.88,
-                    "Tatoeba (lat-eng)": 24.25,
-                    "Tatoeba (lfn-eng)": 49.56,
-                    "Tatoeba (lit-eng)": 95.37,
-                    "Tatoeba (lvs-eng)": 97.53,
-                    "Tatoeba (mal-eng)": 88.46,
-                    "Tatoeba (mar-eng)": 93.83,
-                    "Tatoeba (max-eng)": 48.77,
-                    "Tatoeba (mhr-eng)": 7.57,
-                    "Tatoeba (mkd-eng)": 93.02,
-                    "Tatoeba (mon-eng)": 96.14,
-                    "Tatoeba (nds-eng)": 38.88,
-                    "Tatoeba (nld-eng)": 95.5,
-                    "Tatoeba (nno-eng)": 81.41,
-                    "Tatoeba (nob-eng)": 98.53,
-                    "Tatoeba (nov-eng)": 50.23,
-                    "Tatoeba (oci-eng)": 43.49,
-                    "Tatoeba (orv-eng)": 23.77,
-                    "Tatoeba (pam-eng)": 5.39,
-                    "Tatoeba (pes-eng)": 93.47,
-                    "Tatoeba (pms-eng)": 34.19,
-                    "Tatoeba (pol-eng)": 96.95,
-                    "Tatoeba (por-eng)": 93.02,
-                    "Tatoeba (ron-eng)": 96.43,
-                    "Tatoeba (rus-eng)": 92.92,
-                    "Tatoeba (slk-eng)": 96.62,
-                    "Tatoeba (slv-eng)": 97.08,
-                    "Tatoeba (spa-eng)": 97.0,
-                    "Tatoeba (sqi-eng)": 98.57,
-                    "Tatoeba (srp-eng)": 94.12,
-                    "Tatoeba (swe-eng)": 95.45,
-                    "Tatoeba (swg-eng)": 22.8,
-                    "Tatoeba (swh-eng)": 16.02,
-                    "Tatoeba (tam-eng)": 73.6,
-                    "Tatoeba (tat-eng)": 10.89,
-                    "Tatoeba (tel-eng)": 79.73,
-                    "Tatoeba (tgl-eng)": 17.67,
-                    "Tatoeba (tha-eng)": 95.99,
-                    "Tatoeba (tuk-eng)": 14.91,
-                    "Tatoeba (tur-eng)": 96.17,
-                    "Tatoeba (tzl-eng)": 34.21,
-                    "Tatoeba (uig-eng)": 48.35,
-                    "Tatoeba (ukr-eng)": 92.67,
-                    "Tatoeba (urd-eng)": 95.12,
-                    "Tatoeba (uzb-eng)": 23.19,
-                    "Tatoeba (vie-eng)": 97.23,
-                    "Tatoeba (war-eng)": 7.42,
-                    "Tatoeba (wuu-eng)": 78.25,
-                    "Tatoeba (xho-eng)": 6.53,
-                    "Tatoeba (yid-eng)": 30.73,
-                    "Tatoeba (yue-eng)": 77.58,
-                    "Tatoeba (zsm-eng)": 95.8,
-                    "Tatoeba (gsw-Latn_eng-Latn)": 25.12,
-                    "Tatoeba (spa-Latn_eng-Latn)": 97.0,
-                    "Tatoeba (lat-Latn_eng-Latn)": 24.25,
-                    "Tatoeba (hun-Latn_eng-Latn)": 94.18,
-                    "Tatoeba (eus-Latn_eng-Latn)": 31.33,
-                    "Tatoeba (heb-Hebr_eng-Latn)": 88.26,
-                    "Tatoeba (ang-Latn_eng-Latn)": 16.72,
-                    "Tatoeba (swe-Latn_eng-Latn)": 95.45,
-                    "Tatoeba (slk-Latn_eng-Latn)": 96.62,
-                    "Tatoeba (ell-Grek_eng-Latn)": 94.93,
-                    "Tatoeba (nld-Latn_eng-Latn)": 95.5,
-                    "Tatoeba (cym-Latn_eng-Latn)": 22.31,
-                    "Tatoeba (sqi-Latn_eng-Latn)": 98.57,
-                    "Tatoeba (csb-Latn_eng-Latn)": 23.73,
-                    "Tatoeba (ben-Beng_eng-Latn)": 64.9,
-                    "Tatoeba (bre-Latn_eng-Latn)": 6.42,
-                    "Tatoeba (mkd-Cyrl_eng-Latn)": 93.02,
-                    "Tatoeba (cmn-Hans_eng-Latn)": 95.83,
-                    "Tatoeba (deu-Latn_eng-Latn)": 97.73,
-                    "Tatoeba (fao-Latn_eng-Latn)": 38.24,
-                    "Tatoeba (afr-Latn_eng-Latn)": 72.96,
-                    "Tatoeba (nno-Latn_eng-Latn)": 81.41,
-                    "Tatoeba (jpn-Jpan_eng-Latn)": 92.51,
-                    "Tatoeba (tzl-Latn_eng-Latn)": 34.21,
-                    "Tatoeba (arz-Arab_eng-Latn)": 55.69,
-                    "Tatoeba (ita-Latn_eng-Latn)": 93.76,
-                    "Tatoeba (arq-Arab_eng-Latn)": 19.84,
-                    "Tatoeba (uzb-Latn_eng-Latn)": 23.19,
-                    "Tatoeba (rus-Cyrl_eng-Latn)": 92.92,
-                    "Tatoeba (tat-Cyrl_eng-Latn)": 10.89,
-                    "Tatoeba (fin-Latn_eng-Latn)": 95.92,
-                    "Tatoeba (nob-Latn_eng-Latn)": 98.53,
-                    "Tatoeba (tam-Taml_eng-Latn)": 73.6,
-                    "Tatoeba (kur-Latn_eng-Latn)": 61.44,
-                    "Tatoeba (wuu-Hans_eng-Latn)": 78.25,
-                    "Tatoeba (cor-Latn_eng-Latn)": 3.53,
-                    "Tatoeba (cha-Latn_eng-Latn)": 12.59,
-                    "Tatoeba (hsb-Latn_eng-Latn)": 44.32,
-                    "Tatoeba (max-Deva_eng-Latn)": 48.77,
-                    "Tatoeba (kat-Geor_eng-Latn)": 95.46,
-                    "Tatoeba (mal-Mlym_eng-Latn)": 88.46,
-                    "Tatoeba (ina-Latn_eng-Latn)": 84.32,
-                    "Tatoeba (cbk-Latn_eng-Latn)": 58.68,
-                    "Tatoeba (yid-Hebr_eng-Latn)": 30.73,
-                    "Tatoeba (swg-Latn_eng-Latn)": 22.8,
-                    "Tatoeba (dtp-Latn_eng-Latn)": 5.03,
-                    "Tatoeba (ber-Tfng_eng-Latn)": 4.88,
-                    "Tatoeba (epo-Latn_eng-Latn)": 55.12,
-                    "Tatoeba (mar-Deva_eng-Latn)": 93.83,
-                    "Tatoeba (kaz-Cyrl_eng-Latn)": 61.49,
-                    "Tatoeba (tgl-Latn_eng-Latn)": 17.67,
-                    "Tatoeba (hrv-Latn_eng-Latn)": 97.0,
-                    "Tatoeba (bel-Cyrl_eng-Latn)": 79.94,
-                    "Tatoeba (pam-Latn_eng-Latn)": 5.39,
-                    "Tatoeba (zsm-Latn_eng-Latn)": 95.8,
-                    "Tatoeba (ces-Latn_eng-Latn)": 95.73,
-                    "Tatoeba (gla-Latn_eng-Latn)": 4.72,
-                    "Tatoeba (hin-Deva_eng-Latn)": 97.75,
-                    "Tatoeba (slv-Latn_eng-Latn)": 97.08,
-                    "Tatoeba (cat-Latn_eng-Latn)": 96.05,
-                    "Tatoeba (war-Latn_eng-Latn)": 7.42,
-                    "Tatoeba (hye-Armn_eng-Latn)": 94.38,
-                    "Tatoeba (ind-Latn_eng-Latn)": 93.5,
-                    "Tatoeba (kor-Hang_eng-Latn)": 93.07,
-                    "Tatoeba (por-Latn_eng-Latn)": 93.02,
-                    "Tatoeba (fry-Latn_eng-Latn)": 43.54,
-                    "Tatoeba (dan-Latn_eng-Latn)": 96.17,
-                    "Tatoeba (nov-Latn_eng-Latn)": 50.23,
-                    "Tatoeba (vie-Latn_eng-Latn)": 97.23,
-                    "Tatoeba (kzj-Latn_eng-Latn)": 5.88,
-                    "Tatoeba (ido-Latn_eng-Latn)": 43.91,
-                    "Tatoeba (tuk-Latn_eng-Latn)": 14.91,
-                    "Tatoeba (glg-Latn_eng-Latn)": 95.32,
-                    "Tatoeba (bos-Latn_eng-Latn)": 94.02,
-                    "Tatoeba (gle-Latn_eng-Latn)": 16.85,
-                    "Tatoeba (fra-Latn_eng-Latn)": 93.12,
-                    "Tatoeba (lvs-Latn_eng-Latn)": 97.53,
-                    "Tatoeba (mon-Cyrl_eng-Latn)": 96.14,
-                    "Tatoeba (lit-Latn_eng-Latn)": 95.37,
-                    "Tatoeba (ron-Latn_eng-Latn)": 96.43,
-                    "Tatoeba (pms-Latn_eng-Latn)": 34.19,
-                    "Tatoeba (lfn-Latn_eng-Latn)": 49.56,
-                    "Tatoeba (isl-Latn_eng-Latn)": 59.25,
-                    "Tatoeba (xho-Latn_eng-Latn)": 6.53,
-                    "Tatoeba (orv-Cyrl_eng-Latn)": 23.77,
-                    "Tatoeba (ukr-Cyrl_eng-Latn)": 92.67,
-                    "Tatoeba (dsb-Latn_eng-Latn)": 36.85,
-                    "Tatoeba (nds-Latn_eng-Latn)": 38.88,
-                    "Tatoeba (amh-Ethi_eng-Latn)": 53.49,
-                    "Tatoeba (yue-Hant_eng-Latn)": 77.58,
-                    "Tatoeba (urd-Arab_eng-Latn)": 95.12,
-                    "Tatoeba (tel-Telu_eng-Latn)": 79.73,
-                    "Tatoeba (ile-Latn_eng-Latn)": 60.36,
-                    "Tatoeba (jav-Latn_eng-Latn)": 23.39,
-                    "Tatoeba (ast-Latn_eng-Latn)": 70.08,
-                    "Tatoeba (tha-Thai_eng-Latn)": 95.99,
-                    "Tatoeba (ara-Arab_eng-Latn)": 90.19,
-                    "Tatoeba (pes-Arab_eng-Latn)": 93.47,
-                    "Tatoeba (awa-Deva_eng-Latn)": 42.83,
-                    "Tatoeba (tur-Latn_eng-Latn)": 96.17,
-                    "Tatoeba (ceb-Latn_eng-Latn)": 7.39,
-                    "Tatoeba (swh-Latn_eng-Latn)": 16.02,
-                    "Tatoeba (srp-Cyrl_eng-Latn)": 94.12,
-                    "Tatoeba (est-Latn_eng-Latn)": 98.4,
-                    "Tatoeba (aze-Latn_eng-Latn)": 76.36,
-                    "Tatoeba (bul-Cyrl_eng-Latn)": 93.52,
-                    "Tatoeba (oci-Latn_eng-Latn)": 43.49,
-                    "Tatoeba (pol-Latn_eng-Latn)": 96.95,
-                    "Tatoeba (kab-Latn_eng-Latn)": 1.41,
-                    "Tatoeba (khm-Khmr_eng-Latn)": 58.8,
-                    "Tatoeba (uig-Arab_eng-Latn)": 48.35,
-                    "Tatoeba (mhr-Cyrl_eng-Latn)": 7.57
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "paraphrase-multilingual-mpnet-base-v2",
-                    "AllegroReviews": 33.86,
-                    "AllegroReviews (pol-Latn)": 33.89,
-                    "AmazonCounterfactualClassification (de)": 69.95,
-                    "AmazonCounterfactualClassification (en)": 75.81,
-                    "AmazonCounterfactualClassification (en-ext)": 76.25,
-                    "AmazonCounterfactualClassification (ja)": 69.79,
-                    "AmazonCounterfactualClassification (deu-Latn)": 69.96,
-                    "AmazonCounterfactualClassification (jpn-Jpan)": 69.78,
-                    "AmazonPolarityClassification": 76.41,
-                    "AmazonReviewsClassification (de)": 39.52,
-                    "AmazonReviewsClassification (en)": 38.52,
-                    "AmazonReviewsClassification (es)": 39.99,
-                    "AmazonReviewsClassification (fr)": 39.0,
-                    "AmazonReviewsClassification (ja)": 36.64,
-                    "AmazonReviewsClassification (zh)": 37.74,
-                    "AmazonReviewsClassification (deu-Latn)": 39.53,
-                    "AmazonReviewsClassification (spa-Latn)": 39.97,
-                    "AmazonReviewsClassification (fra-Latn)": 38.98,
-                    "AmazonReviewsClassification (jpn-Jpan)": 36.65,
-                    "AmazonReviewsClassification (cmn-Hans)": 37.74,
-                    "AngryTweetsClassification (dan-Latn)": 54.84,
-                    "Banking77Classification": 81.1,
-                    "CBD": 65.0,
-                    "CBD (pol-Latn)": 64.97,
-                    "DanishPoliticalCommentsClassification (dan-Latn)": 40.96,
-                    "EmotionClassification": 45.85,
-                    "GeoreviewClassification (rus-Cyrl)": 42.33,
-                    "HeadlineClassification (rus-Cyrl)": 70.35,
-                    "IFlyTek (cmn-Hans)": 43.98,
-                    "ImdbClassification": 64.58,
-                    "InappropriatenessClassification (rus-Cyrl)": 59.32,
-                    "JDReview (cmn-Hans)": 70.34,
-                    "KinopoiskClassification (rus-Cyrl)": 44.31,
-                    "LccSentimentClassification (dan-Latn)": 58.4,
-                    "MTOPDomainClassification (de)": 85.73,
-                    "MTOPDomainClassification (en)": 89.24,
-                    "MTOPDomainClassification (es)": 86.96,
-                    "MTOPDomainClassification (fr)": 81.21,
-                    "MTOPDomainClassification (hi)": 84.76,
-                    "MTOPDomainClassification (th)": 82.51,
-                    "MTOPDomainClassification (deu-Latn)": 85.73,
-                    "MTOPDomainClassification (spa-Latn)": 86.98,
-                    "MTOPDomainClassification (fra-Latn)": 81.21,
-                    "MTOPDomainClassification (hin-Deva)": 84.76,
-                    "MTOPDomainClassification (tha-Thai)": 82.51,
-                    "MTOPIntentClassification (de)": 61.27,
-                    "MTOPIntentClassification (en)": 68.69,
-                    "MTOPIntentClassification (es)": 66.59,
-                    "MTOPIntentClassification (fr)": 59.76,
-                    "MTOPIntentClassification (hi)": 62.37,
-                    "MTOPIntentClassification (th)": 64.8,
-                    "MTOPIntentClassification (deu-Latn)": 61.26,
-                    "MTOPIntentClassification (spa-Latn)": 66.6,
-                    "MTOPIntentClassification (fra-Latn)": 59.75,
-                    "MTOPIntentClassification (hin-Deva)": 62.38,
-                    "MTOPIntentClassification (tha-Thai)": 64.77,
-                    "MasakhaNEWSClassification (fra)": 78.1,
-                    "MasakhaNEWSClassification (amh-Ethi)": 78.83,
-                    "MasakhaNEWSClassification (eng)": 75.39,
-                    "MasakhaNEWSClassification (fra-Latn)": 72.94,
-                    "MasakhaNEWSClassification (hau-Latn)": 54.49,
-                    "MasakhaNEWSClassification (ibo-Latn)": 46.79,
-                    "MasakhaNEWSClassification (lin-Latn)": 69.77,
-                    "MasakhaNEWSClassification (lug-Latn)": 43.05,
-                    "MasakhaNEWSClassification (orm-Ethi)": 41.97,
-                    "MasakhaNEWSClassification (pcm-Latn)": 90.2,
-                    "MasakhaNEWSClassification (run-Latn)": 49.97,
-                    "MasakhaNEWSClassification (sna-Latn)": 59.78,
-                    "MasakhaNEWSClassification (som-Latn)": 47.65,
-                    "MasakhaNEWSClassification (swa-Latn)": 60.42,
-                    "MasakhaNEWSClassification (tir-Ethi)": 45.04,
-                    "MasakhaNEWSClassification (xho-Latn)": 48.82,
-                    "MasakhaNEWSClassification (yor-Latn)": 58.3,
-                    "MassiveIntentClassification (pl)": 64.29,
-                    "MassiveIntentClassification (fr)": 61.88,
-                    "MassiveIntentClassification (mal-Mlym)": 54.34,
-                    "MassiveIntentClassification (tel-Telu)": 52.85,
-                    "MassiveIntentClassification (jpn-Jpan)": 63.76,
-                    "MassiveIntentClassification (nld-Latn)": 63.57,
-                    "MassiveIntentClassification (jav-Latn)": 36.49,
-                    "MassiveIntentClassification (heb-Hebr)": 58.25,
-                    "MassiveIntentClassification (tam-Taml)": 50.18,
-                    "MassiveIntentClassification (slv-Latn)": 63.5,
-                    "MassiveIntentClassification (tha-Thai)": 61.12,
-                    "MassiveIntentClassification (fra-Latn)": 64.8,
-                    "MassiveIntentClassification (ind-Latn)": 65.43,
-                    "MassiveIntentClassification (amh-Ethi)": 41.56,
-                    "MassiveIntentClassification (en)": 69.32,
-                    "MassiveIntentClassification (nob-Latn)": 62.62,
-                    "MassiveIntentClassification (kan-Knda)": 50.62,
-                    "MassiveIntentClassification (dan-Latn)": 62.8,
-                    "MassiveIntentClassification (ell-Grek)": 62.63,
-                    "MassiveIntentClassification (msa-Latn)": 60.72,
-                    "MassiveIntentClassification (ita-Latn)": 64.69,
-                    "MassiveIntentClassification (tur-Latn)": 64.58,
-                    "MassiveIntentClassification (ben-Beng)": 48.79,
-                    "MassiveIntentClassification (aze-Latn)": 56.98,
-                    "MassiveIntentClassification (tgl-Latn)": 38.83,
-                    "MassiveIntentClassification (mon-Cyrl)": 56.61,
-                    "MassiveIntentClassification (urd-Arab)": 56.36,
-                    "MassiveIntentClassification (vie-Latn)": 59.71,
-                    "MassiveIntentClassification (cmo-Hans)": 65.32,
-                    "MassiveIntentClassification (cym-Latn)": 27.89,
-                    "MassiveIntentClassification (rus-Cyrl)": 63.23,
-                    "MassiveIntentClassification (mya-Mymr)": 57.08,
-                    "MassiveIntentClassification (hun-Latn)": 63.85,
-                    "MassiveIntentClassification (hin-Deva)": 62.79,
-                    "MassiveIntentClassification (hye-Armn)": 57.76,
-                    "MassiveIntentClassification (kat-Geor)": 49.88,
-                    "MassiveIntentClassification (fin-Latn)": 62.26,
-                    "MassiveIntentClassification (ara-Arab)": 51.43,
-                    "MassiveIntentClassification (por-Latn)": 64.88,
-                    "MassiveIntentClassification (pol-Latn)": 64.32,
-                    "MassiveIntentClassification (isl-Latn)": 37.09,
-                    "MassiveIntentClassification (afr-Latn)": 52.35,
-                    "MassiveIntentClassification (fas-Arab)": 65.33,
-                    "MassiveIntentClassification (khm-Khmr)": 45.48,
-                    "MassiveIntentClassification (kor-Kore)": 61.84,
-                    "MassiveIntentClassification (spa-Latn)": 64.45,
-                    "MassiveIntentClassification (cmo-Hant)": 62.33,
-                    "MassiveIntentClassification (ron-Latn)": 62.83,
-                    "MassiveIntentClassification (sqi-Latn)": 62.48,
-                    "MassiveIntentClassification (swa-Latn)": 31.93,
-                    "MassiveIntentClassification (swe-Latn)": 64.71,
-                    "MassiveIntentClassification (deu-Latn)": 59.56,
-                    "MassiveIntentClassification (lav-Latn)": 61.29,
-                    "MassiveScenarioClassification (pl)": 68.98,
-                    "MassiveScenarioClassification (fr)": 67.9,
-                    "MassiveScenarioClassification (tam-Taml)": 55.97,
-                    "MassiveScenarioClassification (heb-Hebr)": 65.16,
-                    "MassiveScenarioClassification (ind-Latn)": 70.73,
-                    "MassiveScenarioClassification (afr-Latn)": 59.68,
-                    "MassiveScenarioClassification (fin-Latn)": 67.58,
-                    "MassiveScenarioClassification (vie-Latn)": 65.7,
-                    "MassiveScenarioClassification (mon-Cyrl)": 60.84,
-                    "MassiveScenarioClassification (sqi-Latn)": 69.62,
-                    "MassiveScenarioClassification (nob-Latn)": 70.23,
-                    "MassiveScenarioClassification (por-Latn)": 70.08,
-                    "MassiveScenarioClassification (aze-Latn)": 61.52,
-                    "MassiveScenarioClassification (nld-Latn)": 70.37,
-                    "MassiveScenarioClassification (spa-Latn)": 70.4,
-                    "MassiveScenarioClassification (mal-Mlym)": 60.14,
-                    "MassiveScenarioClassification (cmo-Hant)": 68.71,
-                    "MassiveScenarioClassification (fra-Latn)": 70.71,
-                    "MassiveScenarioClassification (ita-Latn)": 69.74,
-                    "MassiveScenarioClassification (hun-Latn)": 70.31,
-                    "MassiveScenarioClassification (urd-Arab)": 62.92,
-                    "MassiveScenarioClassification (cym-Latn)": 35.27,
-                    "MassiveScenarioClassification (khm-Khmr)": 53.13,
-                    "MassiveScenarioClassification (swa-Latn)": 37.26,
-                    "MassiveScenarioClassification (mya-Mymr)": 63.03,
-                    "MassiveScenarioClassification (isl-Latn)": 44.16,
-                    "MassiveScenarioClassification (tha-Thai)": 69.44,
-                    "MassiveScenarioClassification (kat-Geor)": 57.3,
-                    "MassiveScenarioClassification (pol-Latn)": 68.99,
-                    "MassiveScenarioClassification (ell-Grek)": 68.81,
-                    "MassiveScenarioClassification (cmo-Hans)": 71.25,
-                    "MassiveScenarioClassification (tgl-Latn)": 43.98,
-                    "MassiveScenarioClassification (lav-Latn)": 66.28,
-                    "MassiveScenarioClassification (jpn-Jpan)": 69.68,
-                    "MassiveScenarioClassification (deu-Latn)": 67.35,
-                    "MassiveScenarioClassification (ara-Arab)": 57.79,
-                    "MassiveScenarioClassification (en)": 75.35,
-                    "MassiveScenarioClassification (msa-Latn)": 65.85,
-                    "MassiveScenarioClassification (tel-Telu)": 58.79,
-                    "MassiveScenarioClassification (ben-Beng)": 54.52,
-                    "MassiveScenarioClassification (kan-Knda)": 56.08,
-                    "MassiveScenarioClassification (tur-Latn)": 70.41,
-                    "MassiveScenarioClassification (kor-Kore)": 68.51,
-                    "MassiveScenarioClassification (hye-Armn)": 63.03,
-                    "MassiveScenarioClassification (jav-Latn)": 44.22,
-                    "MassiveScenarioClassification (rus-Cyrl)": 69.92,
-                    "MassiveScenarioClassification (hin-Deva)": 67.94,
-                    "MassiveScenarioClassification (amh-Ethi)": 48.96,
-                    "MassiveScenarioClassification (dan-Latn)": 71.04,
-                    "MassiveScenarioClassification (fas-Arab)": 69.88,
-                    "MassiveScenarioClassification (slv-Latn)": 70.81,
-                    "MassiveScenarioClassification (swe-Latn)": 71.6,
-                    "MassiveScenarioClassification (ron-Latn)": 67.94,
-                    "MultilingualSentiment (cmn-Hans)": 66.49,
-                    "NoRecClassification (nob-Latn)": 50.32,
-                    "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 41.57,
-                    "OnlineShopping (cmn-Hans)": 87.75,
-                    "PAC": 63.76,
-                    "PAC (pol-Latn)": 63.76,
-                    "PolEmo2.0-IN": 62.78,
-                    "PolEmo2.0-IN (pol-Latn)": 62.74,
-                    "PolEmo2.0-OUT": 19.98,
-                    "PolEmo2.0-OUT (pol-Latn)": 19.92,
-                    "RuReviewsClassification (rus-Cyrl)": 62.33,
-                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 56.01,
-                    "RuSciBenchOECDClassification (rus-Cyrl)": 44.14,
-                    "TNews (cmn-Hans)": 43.73,
-                    "ToxicConversationsClassification": 65.56,
-                    "TweetSentimentExtractionClassification": 59.04,
-                    "Waimai (cmn-Hans)": 83.97
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "paraphrase-multilingual-mpnet-base-v2",
-                    "8TagsClustering": 25.62,
-                    "AlloProfClusteringP2P": 54.49,
-                    "AlloProfClusteringS2S": 44.79,
-                    "ArxivClusteringP2P": 37.78,
-                    "ArxivClusteringS2S": 31.68,
-                    "BiorxivClusteringP2P": 33.02,
-                    "BiorxivClusteringS2S": 29.45,
-                    "BlurbsClusteringP2P": 34.38,
-                    "BlurbsClusteringS2S": 15.81,
-                    "GeoreviewClusteringP2P (rus-Cyrl)": 56.18,
-                    "HALClusteringS2S": 23.97,
-                    "MLSUMClusteringP2P": 40.55,
-                    "MLSUMClusteringP2P (rus-Cyrl)": 35.95,
-                    "MLSUMClusteringS2S": 37.53,
-                    "MLSUMClusteringS2S (rus-Cyrl)": 38.88,
-                    "MasakhaNEWSClusteringP2P (fra)": 41.57,
-                    "MasakhaNEWSClusteringP2P (amh-Ethi)": 46.85,
-                    "MasakhaNEWSClusteringP2P (eng)": 47.3,
-                    "MasakhaNEWSClusteringP2P (fra-Latn)": 53.3,
-                    "MasakhaNEWSClusteringP2P (hau-Latn)": 27.61,
-                    "MasakhaNEWSClusteringP2P (ibo-Latn)": 41.32,
-                    "MasakhaNEWSClusteringP2P (lin-Latn)": 58.37,
-                    "MasakhaNEWSClusteringP2P (lug-Latn)": 47.56,
-                    "MasakhaNEWSClusteringP2P (orm-Ethi)": 24.53,
-                    "MasakhaNEWSClusteringP2P (pcm-Latn)": 66.55,
-                    "MasakhaNEWSClusteringP2P (run-Latn)": 51.97,
-                    "MasakhaNEWSClusteringP2P (sna-Latn)": 45.55,
-                    "MasakhaNEWSClusteringP2P (som-Latn)": 33.98,
-                    "MasakhaNEWSClusteringP2P (swa-Latn)": 25.03,
-                    "MasakhaNEWSClusteringP2P (tir-Ethi)": 48.33,
-                    "MasakhaNEWSClusteringP2P (xho-Latn)": 29.47,
-                    "MasakhaNEWSClusteringP2P (yor-Latn)": 28.25,
-                    "MasakhaNEWSClusteringS2S (fra)": 30.88,
-                    "MasakhaNEWSClusteringS2S (amh-Ethi)": 51.54,
-                    "MasakhaNEWSClusteringS2S (eng)": 43.28,
-                    "MasakhaNEWSClusteringS2S (fra-Latn)": 37.92,
-                    "MasakhaNEWSClusteringS2S (hau-Latn)": 17.97,
-                    "MasakhaNEWSClusteringS2S (ibo-Latn)": 34.56,
-                    "MasakhaNEWSClusteringS2S (lin-Latn)": 57.43,
-                    "MasakhaNEWSClusteringS2S (lug-Latn)": 45.22,
-                    "MasakhaNEWSClusteringS2S (orm-Ethi)": 21.9,
-                    "MasakhaNEWSClusteringS2S (pcm-Latn)": 62.1,
-                    "MasakhaNEWSClusteringS2S (run-Latn)": 46.81,
-                    "MasakhaNEWSClusteringS2S (sna-Latn)": 43.15,
-                    "MasakhaNEWSClusteringS2S (som-Latn)": 29.44,
-                    "MasakhaNEWSClusteringS2S (swa-Latn)": 10.31,
-                    "MasakhaNEWSClusteringS2S (tir-Ethi)": 51.95,
-                    "MasakhaNEWSClusteringS2S (xho-Latn)": 21.26,
-                    "MasakhaNEWSClusteringS2S (yor-Latn)": 28.88,
-                    "MedrxivClusteringP2P": 31.93,
-                    "MedrxivClusteringS2S": 31.53,
-                    "RedditClustering": 45.65,
-                    "RedditClusteringP2P": 52.05,
-                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 48.47,
-                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 42.9,
-                    "StackExchangeClustering": 52.99,
-                    "StackExchangeClusteringP2P": 33.06,
-                    "TenKGnadClusteringP2P": 35.96,
-                    "TenKGnadClusteringS2S": 22.0,
-                    "TwentyNewsgroupsClustering": 44.36
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "paraphrase-multilingual-mpnet-base-v2",
-                    "CDSC-E": 75.76,
-                    "CDSC-E (pol-Latn)": 75.77,
-                    "OpusparcusPC (fr)": 93.45,
-                    "OpusparcusPC (deu-Latn)": 97.34,
-                    "OpusparcusPC (en)": 98.59,
-                    "OpusparcusPC (fin-Latn)": 95.33,
-                    "OpusparcusPC (fra-Latn)": 93.45,
-                    "OpusparcusPC (rus-Cyrl)": 90.47,
-                    "OpusparcusPC (swe-Latn)": 95.16,
-                    "PPC": 93.67,
-                    "PSC": 98.26,
-                    "PSC (pol-Latn)": 98.26,
-                    "PawsXPairClassification (fr)": 58.14,
-                    "PawsXPairClassification (deu-Latn)": 55.69,
-                    "PawsXPairClassification (en)": 60.12,
-                    "PawsXPairClassification (spa-Latn)": 56.94,
-                    "PawsXPairClassification (fra-Latn)": 58.14,
-                    "PawsXPairClassification (jpn-Hira)": 49.37,
-                    "PawsXPairClassification (kor-Hang)": 50.66,
-                    "PawsXPairClassification (cmn-Hans)": 55.47,
-                    "SICK-E-PL": 77.22,
-                    "SICK-E-PL (pol-Latn)": 77.22,
-                    "SprintDuplicateQuestions": 90.55,
-                    "TERRa (rus-Cyrl)": 64.57,
-                    "TwitterSemEval2015": 66.75,
-                    "TwitterURLCorpus": 85.14
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "paraphrase-multilingual-mpnet-base-v2",
-                    "AlloprofReranking": 54.34,
-                    "AlloprofReranking (fra-Latn)": 67.2,
-                    "AskUbuntuDupQuestions": 60.16,
-                    "MMarcoReranking (cmn-Hans)": 14.57,
-                    "MindSmallReranking": 30.15,
-                    "RuBQReranking (rus-Cyrl)": 58.77,
-                    "SciDocsRR": 78.09,
-                    "StackOverflowDupQuestions": 46.78,
-                    "SyntecReranking": 83.23,
-                    "SyntecReranking (fra-Latn)": 80.97,
-                    "T2Reranking (cmn-Hans)": 64.49
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "paraphrase-multilingual-mpnet-base-v2",
-                    "AILACasedocs": 17.45,
-                    "AILAStatutes": 22.24,
-                    "ARCChallenge": 7.19,
-                    "AlloprofRetrieval": 30.8,
-                    "AlloprofRetrieval (fra-Latn)": 30.8,
-                    "AlphaNLI": 21.87,
-                    "ArguAna": 48.91,
-                    "ArguAna-PL": 42.62,
-                    "ArguAna-PL (pol-Latn)": 42.61,
-                    "BSARDRetrieval": 0.0,
-                    "BSARDRetrieval (fra-Latn)": 13.19,
-                    "CQADupstackRetrieval": 31.32,
-                    "ClimateFEVER": 15.27,
-                    "CmedqaRetrieval (cmn-Hans)": 10.15,
-                    "CovidRetrieval (cmn-Hans)": 28.85,
-                    "DBPedia": 26.22,
-                    "DBPedia-PL": 20.18,
-                    "DuRetrieval (cmn-Hans)": 33.41,
-                    "EcomRetrieval (cmn-Hans)": 9.69,
-                    "FEVER": 56.76,
-                    "FiQA-PL": 14.68,
-                    "FiQA-PL (pol-Latn)": 14.71,
-                    "FiQA2018": 22.96,
-                    "GerDaLIRSmall (deu-Latn)": 3.0,
-                    "HellaSwag": 17.53,
-                    "HotpotQA": 37.03,
-                    "HotpotQA-PL": 29.36,
-                    "LEMBNarrativeQARetrieval": 16.02,
-                    "LEMBNeedleRetrieval": 14.0,
-                    "LEMBPasskeyRetrieval": 7.75,
-                    "LEMBQMSumRetrieval": 12.23,
-                    "LEMBSummScreenFDRetrieval": 41.15,
-                    "LEMBWikimQARetrieval": 38.86,
-                    "LeCaRDv2 (zho-Hans)": 33.91,
-                    "LegalBenchConsumerContractsQA": 52.37,
-                    "LegalBenchCorporateLobbying": 87.62,
-                    "LegalQuAD (deu-Latn)": 17.8,
-                    "LegalSummarization": 56.8,
-                    "MMarcoRetrieval (cmn-Hans)": 44.62,
-                    "MSMARCO": 26.6,
-                    "MSMARCO-PL": 12.45,
-                    "MedicalRetrieval (cmn-Hans)": 14.1,
-                    "MintakaRetrieval (fr)": 24.45,
-                    "MintakaRetrieval (ara-Arab)": 14.55,
-                    "MintakaRetrieval (deu-Latn)": 25.43,
-                    "MintakaRetrieval (spa-Latn)": 24.94,
-                    "MintakaRetrieval (fra-Latn)": 24.45,
-                    "MintakaRetrieval (hin-Deva)": 18.67,
-                    "MintakaRetrieval (ita-Latn)": 25.62,
-                    "MintakaRetrieval (jpn-Hira)": 15.46,
-                    "MintakaRetrieval (por-Latn)": 26.15,
-                    "NFCorpus": 25.49,
-                    "NFCorpus-PL": 18.53,
-                    "NFCorpus-PL (pol-Latn)": 18.54,
-                    "NQ": 33.6,
-                    "NQ-PL": 15.64,
-                    "PIQA": 18.65,
-                    "Quail": 2.98,
-                    "Quora-PL": 79.18,
-                    "QuoraRetrieval": 86.4,
-                    "RARbCode": 11.02,
-                    "RARbMath": 30.93,
-                    "RiaNewsRetrieval (rus-Cyrl)": 51.75,
-                    "RuBQRetrieval (rus-Cyrl)": 37.04,
-                    "SCIDOCS": 13.97,
-                    "SCIDOCS-PL": 11.18,
-                    "SCIDOCS-PL (pol-Latn)": 11.17,
-                    "SIQA": 1.21,
-                    "SciFact": 50.3,
-                    "SciFact-PL": 41.53,
-                    "SciFact-PL (pol-Latn)": 41.55,
-                    "SpartQA": 5.69,
-                    "SyntecRetrieval": 76.0,
-                    "SyntecRetrieval (fra-Latn)": 76.0,
-                    "T2Retrieval (cmn-Hans)": 28.35,
-                    "TRECCOVID": 37.87,
-                    "TRECCOVID-PL": 35.38,
-                    "TRECCOVID-PL (pol-Latn)": 35.43,
-                    "TempReasonL1": 1.94,
-                    "TempReasonL2Fact": 5.34,
-                    "TempReasonL2Pure": 0.33,
-                    "TempReasonL3Fact": 6.79,
-                    "TempReasonL3Pure": 3.19,
-                    "Touche2020": 17.4,
-                    "VideoRetrieval (cmn-Hans)": 14.18,
-                    "WinoGrande": 49.01,
-                    "XPQARetrieval (fr)": 46.22,
-                    "XPQARetrieval (ara-Arab_ara-Arab)": 24.86,
-                    "XPQARetrieval (eng-Latn_ara-Arab)": 19.6,
-                    "XPQARetrieval (ara-Arab_eng-Latn)": 28.21,
-                    "XPQARetrieval (deu-Latn_deu-Latn)": 48.81,
-                    "XPQARetrieval (eng-Latn_deu-Latn)": 31.93,
-                    "XPQARetrieval (deu-Latn_eng-Latn)": 53.26,
-                    "XPQARetrieval (spa-Latn_spa-Latn)": 41.08,
-                    "XPQARetrieval (eng-Latn_spa-Latn)": 30.05,
-                    "XPQARetrieval (spa-Latn_eng-Latn)": 43.4,
-                    "XPQARetrieval (fra-Latn_fra-Latn)": 46.22,
-                    "XPQARetrieval (eng-Latn_fra-Latn)": 29.55,
-                    "XPQARetrieval (fra-Latn_eng-Latn)": 47.3,
-                    "XPQARetrieval (hin-Deva_hin-Deva)": 50.74,
-                    "XPQARetrieval (eng-Latn_hin-Deva)": 24.97,
-                    "XPQARetrieval (hin-Deva_eng-Latn)": 49.24,
-                    "XPQARetrieval (ita-Latn_ita-Latn)": 52.87,
-                    "XPQARetrieval (eng-Latn_ita-Latn)": 33.44,
-                    "XPQARetrieval (ita-Latn_eng-Latn)": 51.49,
-                    "XPQARetrieval (jpn-Hira_jpn-Hira)": 53.17,
-                    "XPQARetrieval (eng-Latn_jpn-Hira)": 26.66,
-                    "XPQARetrieval (jpn-Hira_eng-Latn)": 49.86,
-                    "XPQARetrieval (kor-Hang_kor-Hang)": 24.9,
-                    "XPQARetrieval (eng-Latn_kor-Hang)": 24.5,
-                    "XPQARetrieval (kor-Hang_eng-Latn)": 24.57,
-                    "XPQARetrieval (pol-Latn_pol-Latn)": 29.36,
-                    "XPQARetrieval (eng-Latn_pol-Latn)": 20.48,
-                    "XPQARetrieval (pol-Latn_eng-Latn)": 29.31,
-                    "XPQARetrieval (por-Latn_por-Latn)": 34.26,
-                    "XPQARetrieval (eng-Latn_por-Latn)": 21.72,
-                    "XPQARetrieval (por-Latn_eng-Latn)": 37.62,
-                    "XPQARetrieval (tam-Taml_tam-Taml)": 19.8,
-                    "XPQARetrieval (eng-Latn_tam-Taml)": 13.93,
-                    "XPQARetrieval (tam-Taml_eng-Latn)": 18.26,
-                    "XPQARetrieval (cmn-Hans_cmn-Hans)": 42.54,
-                    "XPQARetrieval (eng-Latn_cmn-Hans)": 20.91,
-                    "XPQARetrieval (cmn-Hans_eng-Latn)": 42.81
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "paraphrase-multilingual-mpnet-base-v2",
-                    "AFQMC (cmn-Hans)": 15.69,
-                    "ATEC (cmn-Hans)": 20.27,
-                    "BIOSSES": 76.27,
-                    "BQ (cmn-Hans)": 36.33,
-                    "CDSC-R": 88.8,
-                    "CDSC-R (pol-Latn)": 88.8,
-                    "LCQMC (cmn-Hans)": 63.3,
-                    "PAWSX (cmn-Hans)": 12.16,
-                    "RUParaPhraserSTS (rus-Cyrl)": 65.74,
-                    "RuSTSBenchmarkSTS (rus-Cyrl)": 82.46,
-                    "SICK-R": 79.62,
-                    "SICK-R-PL": 73.13,
-                    "SICK-R-PL (pol-Latn)": 73.13,
-                    "SICKFr": 75.56,
-                    "SICKFr (fra-Latn)": 75.56,
-                    "STS12": 77.9,
-                    "STS13": 85.11,
-                    "STS14": 80.81,
-                    "STS15": 87.48,
-                    "STS16": 83.2,
-                    "STS17 (ar-ar)": 79.1,
-                    "STS17 (en-ar)": 80.85,
-                    "STS17 (en-de)": 83.28,
-                    "STS17 (en-en)": 86.99,
-                    "STS17 (en-tr)": 74.9,
-                    "STS17 (es-en)": 86.11,
-                    "STS17 (es-es)": 85.14,
-                    "STS17 (fr-en)": 81.17,
-                    "STS17 (it-en)": 84.24,
-                    "STS17 (ko-ko)": 83.41,
-                    "STS17 (nl-en)": 82.51,
-                    "STS17 (eng-Latn_deu-Latn)": 83.28,
-                    "STS17 (eng-Latn_tur-Latn)": 74.9,
-                    "STS17 (eng-Latn_ara-Arab)": 80.85,
-                    "STS17 (ara-Arab)": 79.1,
-                    "STS17 (nld-Latn_eng-Latn)": 82.51,
-                    "STS17 (fra-Latn_eng-Latn)": 81.17,
-                    "STS17 (ita-Latn_eng-Latn)": 84.24,
-                    "STS17 (spa-Latn_eng-Latn)": 86.11,
-                    "STS17 (spa-Latn)": 85.14,
-                    "STS17 (kor-Hang)": 83.41,
-                    "STS22 (pl)": 33.64,
-                    "STS22 (fr)": 74.3,
-                    "STS22 (spa-Latn)": 59.91,
-                    "STS22 (en)": 63.52,
-                    "STS22 (spa-Latn_ita-Latn)": 53.7,
-                    "STS22 (pol-Latn)": 33.65,
-                    "STS22 (ara-Arab)": 52.19,
-                    "STS22 (deu-Latn)": 46.7,
-                    "STS22 (fra-Latn)": 74.3,
-                    "STS22 (deu-Latn_pol-Latn)": 40.53,
-                    "STS22 (tur-Latn)": 56.3,
-                    "STS22 (cmn-Hans_eng-Latn)": 67.96,
-                    "STS22 (pol-Latn_eng-Latn)": 73.07,
-                    "STS22 (rus-Cyrl)": 58.74,
-                    "STS22 (cmn-Hans)": 61.75,
-                    "STS22 (spa-Latn_eng-Latn)": 70.26,
-                    "STS22 (fra-Latn_pol-Latn)": 84.52,
-                    "STS22 (deu-Latn_eng-Latn)": 50.81,
-                    "STS22 (deu-Latn_fra-Latn)": 62.34,
-                    "STS22 (ita-Latn)": 60.65,
-                    "STSB (cmn-Hans)": 80.84,
-                    "STSBenchmark": 86.82,
-                    "STSBenchmarkMultilingualSTS (fr)": 84.69,
-                    "STSBenchmarkMultilingualSTS (nld-Latn)": 83.36,
-                    "STSBenchmarkMultilingualSTS (deu-Latn)": 83.56,
-                    "STSBenchmarkMultilingualSTS (fra-Latn)": 84.69,
-                    "STSBenchmarkMultilingualSTS (spa-Latn)": 84.61,
-                    "STSBenchmarkMultilingualSTS (cmn-Hans)": 81.98,
-                    "STSBenchmarkMultilingualSTS (en)": 86.82,
-                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 82.45,
-                    "STSBenchmarkMultilingualSTS (por-Latn)": 84.0,
-                    "STSBenchmarkMultilingualSTS (ita-Latn)": 84.09,
-                    "STSBenchmarkMultilingualSTS (pol-Latn)": 81.46
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "paraphrase-multilingual-mpnet-base-v2",
-                    "SummEval": 31.57,
-                    "SummEvalFr": 29.47,
-                    "SummEvalFr (fra-Latn)": 29.47
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "paraphrase-multilingual-mpnet-base-v2"
-                }
-            ]
-        }
-    },
-    "text-similarity-babbage-001": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "text-similarity-babbage-001"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "text-similarity-babbage-001"
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "text-similarity-babbage-001",
-                    "RedditClustering": 45.64,
-                    "StackExchangeClustering": 53.01,
-                    "TwentyNewsgroupsClustering": 42.01
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "text-similarity-babbage-001",
-                    "SprintDuplicateQuestions": 76.46,
-                    "TwitterSemEval2015": 70.85,
-                    "TwitterURLCorpus": 85.08
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "text-similarity-babbage-001",
-                    "AskUbuntuDupQuestions": 54.68,
-                    "SciDocsRR": 72.78,
-                    "StackOverflowDupQuestions": 40.65
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "text-similarity-babbage-001"
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "text-similarity-babbage-001",
-                    "BIOSSES": 78.12,
-                    "SICK-R": 77.02,
-                    "STSBenchmark": 84.32
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "text-similarity-babbage-001"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "text-similarity-babbage-001"
-                }
-            ]
-        }
-    },
-    "tart-dual-contriever-msmarco": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "tart-dual-contriever-msmarco"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "tart-dual-contriever-msmarco"
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "tart-dual-contriever-msmarco"
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "tart-dual-contriever-msmarco"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "tart-dual-contriever-msmarco"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "tart-dual-contriever-msmarco"
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "tart-dual-contriever-msmarco"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "tart-dual-contriever-msmarco"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "tart-dual-contriever-msmarco",
-                    "Core17InstructionRetrieval": -3.04,
-                    "News21InstructionRetrieval": -2.98,
-                    "Robust04InstructionRetrieval": -8.98
-                }
-            ]
-        }
-    },
-    "USER-bge-m3": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "USER-bge-m3",
-                    "Tatoeba (rus-Cyrl_eng-Latn)": 93.52
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "USER-bge-m3",
-                    "GeoreviewClassification (rus-Cyrl)": 50.98,
-                    "HeadlineClassification (rus-Cyrl)": 70.09,
-                    "InappropriatenessClassification (rus-Cyrl)": 60.76,
-                    "KinopoiskClassification (rus-Cyrl)": 63.33,
-                    "MassiveIntentClassification (rus-Cyrl)": 68.85,
-                    "MassiveScenarioClassification (rus-Cyrl)": 72.9,
-                    "RuReviewsClassification (rus-Cyrl)": 68.52,
-                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 57.67,
-                    "RuSciBenchOECDClassification (rus-Cyrl)": 44.2
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "USER-bge-m3",
-                    "GeoreviewClusteringP2P (rus-Cyrl)": 62.79,
-                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 53.11,
-                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 44.93
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "USER-bge-m3",
-                    "OpusparcusPC (rus-Cyrl)": 90.73,
-                    "TERRa (rus-Cyrl)": 64.99
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "USER-bge-m3",
-                    "RuBQReranking (rus-Cyrl)": 73.08
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "USER-bge-m3",
-                    "RiaNewsRetrieval (rus-Cyrl)": 83.53,
-                    "RuBQRetrieval (rus-Cyrl)": 70.03
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "USER-bge-m3",
-                    "RUParaPhraserSTS (rus-Cyrl)": 76.36,
-                    "RuSTSBenchmarkSTS (rus-Cyrl)": 83.35,
-                    "STS22 (rus-Cyrl)": 66.42,
-                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 82.96
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "USER-bge-m3"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "USER-bge-m3"
-                }
-            ]
-        }
-    },
-    "bge-large-en-v1.5": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "bge-large-en-v1.5"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "bge-large-en-v1.5"
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "bge-large-en-v1.5"
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "bge-large-en-v1.5"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "bge-large-en-v1.5"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "bge-large-en-v1.5",
-                    "AILACasedocs": 25.15,
-                    "AILAStatutes": 20.74,
-                    "ARCChallenge": 9.99,
-                    "AlphaNLI": 13.13,
-                    "BrightRetrieval (stackoverflow)": 9.51,
-                    "BrightRetrieval (earth_science)": 24.15,
-                    "BrightRetrieval (aops)": 6.08,
-                    "BrightRetrieval (sustainable_living)": 13.27,
-                    "BrightRetrieval (psychology)": 17.44,
-                    "BrightRetrieval (robotics)": 12.21,
-                    "BrightRetrieval (theoremqa_theorems)": 5.51,
-                    "BrightRetrieval (pony)": 5.64,
-                    "BrightRetrieval (biology)": 11.96,
-                    "BrightRetrieval (theoremqa_questions)": 12.56,
-                    "BrightRetrieval (leetcode)": 26.68,
-                    "BrightRetrieval (economics)": 16.59,
-                    "GerDaLIRSmall": 3.96,
-                    "HellaSwag": 28.5,
-                    "LeCaRDv2": 22.68,
-                    "LegalBenchConsumerContractsQA": 73.52,
-                    "LegalBenchCorporateLobbying": 91.51,
-                    "LegalQuAD": 16.22,
-                    "LegalSummarization": 59.99,
-                    "PIQA": 27.99,
-                    "Quail": 1.83,
-                    "RARbCode": 48.12,
-                    "RARbMath": 57.36,
-                    "SIQA": 1.04,
-                    "SpartQA": 2.99,
-                    "TempReasonL1": 1.46,
-                    "TempReasonL2Fact": 24.25,
-                    "TempReasonL2Pure": 2.35,
-                    "TempReasonL3Fact": 20.64,
-                    "TempReasonL3Pure": 6.67,
-                    "WinoGrande": 19.18
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "bge-large-en-v1.5"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "bge-large-en-v1.5"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "bge-large-en-v1.5"
-                }
-            ]
-        }
-    },
-    "allenai-specter": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "allenai-specter"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "allenai-specter",
-                    "AmazonCounterfactualClassification (de)": 54.46,
-                    "AmazonCounterfactualClassification (en)": 58.7,
-                    "AmazonCounterfactualClassification (en-ext)": 59.28,
-                    "AmazonCounterfactualClassification (ja)": 43.87,
-                    "AmazonPolarityClassification": 57.77,
-                    "AmazonReviewsClassification (de)": 24.08,
-                    "AmazonReviewsClassification (en)": 26.26,
-                    "AmazonReviewsClassification (es)": 23.88,
-                    "AmazonReviewsClassification (fr)": 23.31,
-                    "AmazonReviewsClassification (ja)": 20.25,
-                    "AmazonReviewsClassification (zh)": 20.49,
-                    "Banking77Classification": 66.66,
-                    "EmotionClassification": 24.82,
-                    "ImdbClassification": 56.35,
-                    "MTOPDomainClassification (de)": 48.55,
-                    "MTOPDomainClassification (en)": 74.53,
-                    "MTOPDomainClassification (es)": 58.39,
-                    "MTOPDomainClassification (fr)": 54.61,
-                    "MTOPDomainClassification (hi)": 21.22,
-                    "MTOPDomainClassification (th)": 14.98,
-                    "MTOPIntentClassification (de)": 35.55,
-                    "MTOPIntentClassification (en)": 50.05,
-                    "MTOPIntentClassification (es)": 36.72,
-                    "MTOPIntentClassification (fr)": 34.71,
-                    "MTOPIntentClassification (hi)": 4.44,
-                    "MTOPIntentClassification (th)": 4.67,
-                    "MassiveIntentClassification (af)": 33.68,
-                    "MassiveIntentClassification (am)": 2.94,
-                    "MassiveIntentClassification (ar)": 10.04,
-                    "MassiveIntentClassification (az)": 30.74,
-                    "MassiveIntentClassification (bn)": 3.02,
-                    "MassiveIntentClassification (cy)": 33.94,
-                    "MassiveIntentClassification (da)": 38.47,
-                    "MassiveIntentClassification (de)": 36.06,
-                    "MassiveIntentClassification (el)": 27.7,
-                    "MassiveIntentClassification (en)": 51.73,
-                    "MassiveIntentClassification (es)": 35.6,
-                    "MassiveIntentClassification (fa)": 17.97,
-                    "MassiveIntentClassification (fi)": 35.53,
-                    "MassiveIntentClassification (fr)": 38.41,
-                    "MassiveIntentClassification (he)": 2.69,
-                    "MassiveIntentClassification (hi)": 3.43,
-                    "MassiveIntentClassification (hu)": 34.05,
-                    "MassiveIntentClassification (hy)": 3.11,
-                    "MassiveIntentClassification (id)": 40.02,
-                    "MassiveIntentClassification (is)": 32.63,
-                    "MassiveIntentClassification (it)": 39.28,
-                    "MassiveIntentClassification (ja)": 4.95,
-                    "MassiveIntentClassification (jv)": 34.95,
-                    "MassiveIntentClassification (ka)": 2.57,
-                    "MassiveIntentClassification (km)": 4.73,
-                    "MassiveIntentClassification (kn)": 3.54,
-                    "MassiveIntentClassification (ko)": 2.68,
-                    "MassiveIntentClassification (lv)": 37.91,
-                    "MassiveIntentClassification (ml)": 2.88,
-                    "MassiveIntentClassification (mn)": 16.94,
-                    "MassiveIntentClassification (ms)": 36.6,
-                    "MassiveIntentClassification (my)": 3.96,
-                    "MassiveIntentClassification (nb)": 34.75,
-                    "MassiveIntentClassification (nl)": 33.95,
-                    "MassiveIntentClassification (pl)": 35.77,
-                    "MassiveIntentClassification (pt)": 43.05,
-                    "MassiveIntentClassification (ro)": 36.2,
-                    "MassiveIntentClassification (ru)": 25.3,
-                    "MassiveIntentClassification (sl)": 35.9,
-                    "MassiveIntentClassification (sq)": 36.6,
-                    "MassiveIntentClassification (sv)": 36.0,
-                    "MassiveIntentClassification (sw)": 34.81,
-                    "MassiveIntentClassification (ta)": 3.11,
-                    "MassiveIntentClassification (te)": 2.53,
-                    "MassiveIntentClassification (th)": 4.38,
-                    "MassiveIntentClassification (tl)": 35.51,
-                    "MassiveIntentClassification (tr)": 32.02,
-                    "MassiveIntentClassification (ur)": 9.61,
-                    "MassiveIntentClassification (vi)": 37.07,
-                    "MassiveIntentClassification (zh-CN)": 2.81,
-                    "MassiveIntentClassification (zh-TW)": 4.79,
-                    "MassiveScenarioClassification (af)": 36.17,
-                    "MassiveScenarioClassification (am)": 7.64,
-                    "MassiveScenarioClassification (ar)": 15.26,
-                    "MassiveScenarioClassification (az)": 30.73,
-                    "MassiveScenarioClassification (bn)": 7.15,
-                    "MassiveScenarioClassification (cy)": 34.73,
-                    "MassiveScenarioClassification (da)": 39.93,
-                    "MassiveScenarioClassification (de)": 38.62,
-                    "MassiveScenarioClassification (el)": 27.18,
-                    "MassiveScenarioClassification (en)": 58.58,
-                    "MassiveScenarioClassification (es)": 39.44,
-                    "MassiveScenarioClassification (fa)": 21.43,
-                    "MassiveScenarioClassification (fi)": 33.21,
-                    "MassiveScenarioClassification (fr)": 40.26,
-                    "MassiveScenarioClassification (he)": 7.42,
-                    "MassiveScenarioClassification (hi)": 8.06,
-                    "MassiveScenarioClassification (hu)": 34.54,
-                    "MassiveScenarioClassification (hy)": 8.61,
-                    "MassiveScenarioClassification (id)": 40.04,
-                    "MassiveScenarioClassification (is)": 33.57,
-                    "MassiveScenarioClassification (it)": 40.1,
-                    "MassiveScenarioClassification (ja)": 9.96,
-                    "MassiveScenarioClassification (jv)": 36.11,
-                    "MassiveScenarioClassification (ka)": 7.13,
-                    "MassiveScenarioClassification (km)": 9.66,
-                    "MassiveScenarioClassification (kn)": 7.55,
-                    "MassiveScenarioClassification (ko)": 7.27,
-                    "MassiveScenarioClassification (lv)": 37.03,
-                    "MassiveScenarioClassification (ml)": 7.22,
-                    "MassiveScenarioClassification (mn)": 21.53,
-                    "MassiveScenarioClassification (ms)": 37.57,
-                    "MassiveScenarioClassification (my)": 9.54,
-                    "MassiveScenarioClassification (nb)": 35.71,
-                    "MassiveScenarioClassification (nl)": 34.62,
-                    "MassiveScenarioClassification (pl)": 36.87,
-                    "MassiveScenarioClassification (pt)": 44.68,
-                    "MassiveScenarioClassification (ro)": 37.29,
-                    "MassiveScenarioClassification (ru)": 28.16,
-                    "MassiveScenarioClassification (sl)": 37.95,
-                    "MassiveScenarioClassification (sq)": 37.82,
-                    "MassiveScenarioClassification (sv)": 35.35,
-                    "MassiveScenarioClassification (sw)": 35.37,
-                    "MassiveScenarioClassification (ta)": 7.19,
-                    "MassiveScenarioClassification (te)": 7.29,
-                    "MassiveScenarioClassification (th)": 9.47,
-                    "MassiveScenarioClassification (tl)": 37.31,
-                    "MassiveScenarioClassification (tr)": 34.57,
-                    "MassiveScenarioClassification (ur)": 16.17,
-                    "MassiveScenarioClassification (vi)": 35.91,
-                    "MassiveScenarioClassification (zh-CN)": 9.19,
-                    "MassiveScenarioClassification (zh-TW)": 10.19,
-                    "ToxicConversationsClassification": 57.44,
-                    "TweetSentimentExtractionClassification": 45.52
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "allenai-specter",
-                    "ArxivClusteringP2P": 44.75,
-                    "ArxivClusteringS2S": 35.27,
-                    "BiorxivClusteringP2P": 39.52,
-                    "BiorxivClusteringS2S": 34.53,
-                    "MedrxivClusteringP2P": 35.04,
-                    "MedrxivClusteringS2S": 31.66,
-                    "RedditClustering": 24.13,
-                    "RedditClusteringP2P": 35.06,
-                    "StackExchangeClustering": 39.01,
-                    "StackExchangeClusteringP2P": 31.46,
-                    "TwentyNewsgroupsClustering": 24.22
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "allenai-specter",
-                    "SprintDuplicateQuestions": 71.63,
-                    "TwitterSemEval2015": 43.25,
-                    "TwitterURLCorpus": 69.22
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "allenai-specter",
-                    "AskUbuntuDupQuestions": 50.07,
-                    "MindSmallReranking": 24.8,
-                    "SciDocsRR": 81.31,
-                    "StackOverflowDupQuestions": 36.22
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "allenai-specter",
-                    "ArguAna": 32.67,
-                    "CQADupstackRetrieval": 14.6,
-                    "ClimateFEVER": 6.86,
-                    "DBPedia": 4.14,
-                    "FEVER": 5.45,
-                    "FiQA2018": 5.64,
-                    "HotpotQA": 5.46,
-                    "MSMARCO": 5.59,
-                    "NFCorpus": 0.85,
-                    "NQ": 5.99,
-                    "QuoraRetrieval": 64.65,
-                    "SCIDOCS": 0.0,
-                    "SciFact": 47.88,
-                    "TRECCOVID": 29.91,
-                    "Touche2020": 8.46
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "allenai-specter",
-                    "BIOSSES": 64.95,
-                    "SICK-R": 56.39,
-                    "STS12": 62.49,
-                    "STS13": 58.7,
-                    "STS14": 54.87,
-                    "STS15": 62.54,
-                    "STS16": 64.27,
-                    "STS17 (ar-ar)": 27.14,
-                    "STS17 (en-ar)": 6.9,
-                    "STS17 (en-de)": 11.59,
-                    "STS17 (en-en)": 69.63,
-                    "STS17 (en-tr)": 6.46,
-                    "STS17 (es-en)": 10.86,
-                    "STS17 (es-es)": 55.45,
-                    "STS17 (fr-en)": 16.02,
-                    "STS17 (it-en)": 19.87,
-                    "STS17 (ko-ko)": 8.08,
-                    "STS17 (nl-en)": 24.92,
-                    "STS22 (ar)": 19.57,
-                    "STS22 (de)": 17.31,
-                    "STS22 (de-en)": 26.03,
-                    "STS22 (de-fr)": 10.26,
-                    "STS22 (de-pl)": 16.94,
-                    "STS22 (en)": 55.06,
-                    "STS22 (es)": 48.89,
-                    "STS22 (es-en)": 51.79,
-                    "STS22 (es-it)": 25.24,
-                    "STS22 (fr)": 53.92,
-                    "STS22 (fr-pl)": 39.44,
-                    "STS22 (it)": 39.43,
-                    "STS22 (pl)": 13.56,
-                    "STS22 (pl-en)": 25.36,
-                    "STS22 (ru)": 1.11,
-                    "STS22 (tr)": 31.73,
-                    "STS22 (zh)": 16.35,
-                    "STS22 (zh-en)": 8.44,
-                    "STSBenchmark": 61.26
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "allenai-specter",
-                    "SummEval": 27.66
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "allenai-specter"
-                }
-            ]
-        }
-    },
-    "bert-base-25lang-cased": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "bert-base-25lang-cased"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "bert-base-25lang-cased",
-                    "AmazonReviewsClassification (fr)": 29.39,
-                    "MTOPDomainClassification (fr)": 63.63,
-                    "MTOPIntentClassification (fr)": 37.86,
-                    "MasakhaNEWSClassification (fra)": 63.91,
-                    "MassiveIntentClassification (fr)": 37.3,
-                    "MassiveScenarioClassification (fr)": 44.47
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "bert-base-25lang-cased",
-                    "AlloProfClusteringP2P": 53.49,
-                    "AlloProfClusteringS2S": 43.1,
-                    "HALClusteringS2S": 19.78,
-                    "MLSUMClusteringP2P": 40.73,
-                    "MLSUMClusteringS2S": 31.94,
-                    "MasakhaNEWSClusteringP2P (fra)": 24.23,
-                    "MasakhaNEWSClusteringS2S (fra)": 24.46
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "bert-base-25lang-cased",
-                    "OpusparcusPC (fr)": 86.79,
-                    "PawsXPairClassification (fr)": 53.39
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "bert-base-25lang-cased",
-                    "AlloprofReranking": 36.25,
-                    "SyntecReranking": 53.25
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "bert-base-25lang-cased",
-                    "AlloprofRetrieval": 1.6,
-                    "BSARDRetrieval": 0.0,
-                    "MintakaRetrieval (fr)": 3.55,
-                    "SyntecRetrieval": 18.95,
-                    "XPQARetrieval (fr)": 18.46
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "bert-base-25lang-cased",
-                    "SICKFr": 58.76,
-                    "STS22 (fr)": 38.77,
-                    "STSBenchmarkMultilingualSTS (fr)": 52.25
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "bert-base-25lang-cased",
-                    "SummEvalFr": 28.84
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "bert-base-25lang-cased"
-                }
-            ]
-        }
-    },
-    "bge-base-en-v1.5": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "bge-base-en-v1.5"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "bge-base-en-v1.5"
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "bge-base-en-v1.5",
-                    "BiorxivClusteringP2P": 39.44,
-                    "BiorxivClusteringS2S": 36.62,
-                    "MedrxivClusteringP2P": 33.21,
-                    "MedrxivClusteringS2S": 31.68,
-                    "RedditClustering": 56.61,
-                    "RedditClusteringP2P": 62.66,
-                    "StackExchangeClustering": 66.11,
-                    "StackExchangeClusteringP2P": 35.24,
-                    "TwentyNewsgroupsClustering": 50.75
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "bge-base-en-v1.5"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "bge-base-en-v1.5"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "bge-base-en-v1.5",
-                    "ARCChallenge": 9.66,
-                    "AlphaNLI": 10.99,
-                    "HellaSwag": 26.64,
-                    "PIQA": 25.69,
-                    "Quail": 1.42,
-                    "RARbCode": 46.47,
-                    "RARbMath": 46.86,
-                    "SIQA": 0.94,
-                    "SpartQA": 3.37,
-                    "TempReasonL1": 1.07,
-                    "TempReasonL2Fact": 17.23,
-                    "TempReasonL2Pure": 1.29,
-                    "TempReasonL3Fact": 13.36,
-                    "TempReasonL3Pure": 5.2,
-                    "WinoGrande": 13.76
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "bge-base-en-v1.5"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "bge-base-en-v1.5"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "bge-base-en-v1.5"
-                }
-            ]
-        }
-    },
-    "cross-en-de-roberta-sentence-transformer": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "cross-en-de-roberta-sentence-transformer"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "cross-en-de-roberta-sentence-transformer"
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "cross-en-de-roberta-sentence-transformer",
-                    "BlurbsClusteringP2P": 30.82,
-                    "BlurbsClusteringS2S": 12.69,
-                    "TenKGnadClusteringP2P": 23.5,
-                    "TenKGnadClusteringS2S": 10.94
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "cross-en-de-roberta-sentence-transformer"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "cross-en-de-roberta-sentence-transformer"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "cross-en-de-roberta-sentence-transformer"
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "cross-en-de-roberta-sentence-transformer"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "cross-en-de-roberta-sentence-transformer"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "cross-en-de-roberta-sentence-transformer"
-                }
-            ]
-        }
-    },
-    "all-MiniLM-L6-v2-instruct": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "all-MiniLM-L6-v2-instruct"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "all-MiniLM-L6-v2-instruct"
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "all-MiniLM-L6-v2-instruct"
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "all-MiniLM-L6-v2-instruct"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "all-MiniLM-L6-v2-instruct"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "all-MiniLM-L6-v2-instruct",
-                    "ARCChallenge": 9.4,
-                    "AlphaNLI": 15.09,
-                    "HellaSwag": 20.51,
-                    "PIQA": 24.68,
-                    "Quail": 3.46,
-                    "RARbCode": 42.47,
-                    "RARbMath": 62.39,
-                    "SIQA": 1.53,
-                    "SpartQA": 0.57,
-                    "TempReasonL1": 1.05,
-                    "TempReasonL2Fact": 16.57,
-                    "TempReasonL2Pure": 0.49,
-                    "TempReasonL3Fact": 14.01,
-                    "TempReasonL3Pure": 6.27,
-                    "WinoGrande": 20.73
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "all-MiniLM-L6-v2-instruct"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "all-MiniLM-L6-v2-instruct"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "all-MiniLM-L6-v2-instruct"
-                }
-            ]
-        }
-    },
-    "gtr-t5-base": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "gtr-t5-base"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "gtr-t5-base",
-                    "AmazonCounterfactualClassification (en)": 69.33,
-                    "AmazonPolarityClassification": 67.82,
-                    "AmazonReviewsClassification (en)": 38.48,
-                    "Banking77Classification": 79.26,
-                    "EmotionClassification": 42.2,
-                    "ImdbClassification": 65.99,
-                    "MTOPDomainClassification (en)": 92.42,
-                    "MTOPIntentClassification (en)": 62.44,
-                    "MassiveIntentClassification (en)": 67.05,
-                    "MassiveScenarioClassification (en)": 75.4,
-                    "ToxicConversationsClassification": 66.6,
-                    "TweetSentimentExtractionClassification": 56.02
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "gtr-t5-base",
-                    "ArxivClusteringP2P": 35.49,
-                    "ArxivClusteringS2S": 27.18,
-                    "BiorxivClusteringP2P": 27.66,
-                    "BiorxivClusteringS2S": 23.25,
-                    "MedrxivClusteringP2P": 27.57,
-                    "MedrxivClusteringS2S": 25.13,
-                    "RedditClustering": 56.13,
-                    "RedditClusteringP2P": 58.53,
-                    "StackExchangeClustering": 64.21,
-                    "StackExchangeClusteringP2P": 33.01,
-                    "TwentyNewsgroupsClustering": 46.72
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "gtr-t5-base",
-                    "SprintDuplicateQuestions": 94.55,
-                    "TwitterSemEval2015": 72.23,
-                    "TwitterURLCorpus": 84.77
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "gtr-t5-base",
-                    "AskUbuntuDupQuestions": 60.86,
-                    "MindSmallReranking": 31.33,
-                    "SciDocsRR": 73.71,
-                    "StackOverflowDupQuestions": 51.01
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "gtr-t5-base",
-                    "ArguAna": 50.83,
-                    "CQADupstackRetrieval": 34.55,
-                    "ClimateFEVER": 24.88,
-                    "DBPedia": 35.24,
-                    "FEVER": 68.93,
-                    "FiQA2018": 35.15,
-                    "HotpotQA": 54.93,
-                    "MSMARCO": 41.16,
-                    "NFCorpus": 30.22,
-                    "NQ": 50.47,
-                    "QuoraRetrieval": 87.98,
-                    "SCIDOCS": 14.0,
-                    "SciFact": 59.74,
-                    "TRECCOVID": 56.05,
-                    "Touche2020": 25.89
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "gtr-t5-base",
-                    "BIOSSES": 79.0,
-                    "SICK-R": 71.45,
-                    "STS12": 68.59,
-                    "STS13": 79.09,
-                    "STS14": 74.64,
-                    "STS15": 84.85,
-                    "STS16": 81.57,
-                    "STS17 (en-en)": 85.8,
-                    "STS22 (en)": 66.17,
-                    "STSBenchmark": 79.58
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "gtr-t5-base",
-                    "SummEval": 29.67
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "gtr-t5-base"
-                }
-            ]
-        }
-    },
-    "electra-small-nordic": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "electra-small-nordic",
-                    "BornholmBitextMining": 1.44
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "electra-small-nordic",
-                    "AngryTweetsClassification": 47.91,
-                    "DKHateClassification": 59.45,
-                    "DanishPoliticalCommentsClassification": 31.89,
-                    "LccSentimentClassification": 47.93,
-                    "MassiveIntentClassification (da)": 26.3,
-                    "MassiveIntentClassification (nb)": 24.6,
-                    "MassiveIntentClassification (sv)": 27.58,
-                    "MassiveScenarioClassification (da)": 28.93,
-                    "MassiveScenarioClassification (nb)": 27.3,
-                    "MassiveScenarioClassification (sv)": 29.93,
-                    "NoRecClassification": 45.44,
-                    "NordicLangClassification": 57.82,
-                    "NorwegianParliament": 53.25,
-                    "ScalaDaClassification": 70.41,
-                    "ScalaNbClassification": 75.28
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "electra-small-nordic"
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "electra-small-nordic"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "electra-small-nordic"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "electra-small-nordic"
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "electra-small-nordic"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "electra-small-nordic"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "electra-small-nordic"
-                }
-            ]
-        }
-    },
-    "electra-small-swedish-cased-discriminator": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "electra-small-swedish-cased-discriminator",
-                    "BornholmBitextMining": 0.85
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "electra-small-swedish-cased-discriminator",
-                    "AngryTweetsClassification": 40.52,
-                    "DKHateClassification": 52.28,
-                    "DanishPoliticalCommentsClassification": 25.17,
-                    "LccSentimentClassification": 36.67,
-                    "MassiveIntentClassification (da)": 6.51,
-                    "MassiveIntentClassification (nb)": 5.66,
-                    "MassiveIntentClassification (sv)": 6.6,
-                    "MassiveScenarioClassification (da)": 11.5,
-                    "MassiveScenarioClassification (nb)": 11.26,
-                    "MassiveScenarioClassification (sv)": 12.16,
-                    "NoRecClassification": 39.72,
-                    "NordicLangClassification": 44.53,
-                    "NorwegianParliament": 52.44,
-                    "ScalaDaClassification": 51.66,
-                    "ScalaNbClassification": 52.41
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "electra-small-swedish-cased-discriminator"
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "electra-small-swedish-cased-discriminator"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "electra-small-swedish-cased-discriminator"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "electra-small-swedish-cased-discriminator"
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "electra-small-swedish-cased-discriminator"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "electra-small-swedish-cased-discriminator"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "electra-small-swedish-cased-discriminator"
-                }
-            ]
-        }
-    },
-    "text-embedding-ada-002": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "text-embedding-ada-002"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "text-embedding-ada-002",
-                    "AmazonCounterfactualClassification (en)": 75.94,
-                    "AmazonPolarityClassification": 86.72,
-                    "AmazonReviewsClassification (zh)": 38.3,
-                    "AmazonReviewsClassification (en)": 44.78,
-                    "AmazonReviewsClassification (fr)": 43.76,
-                    "Banking77Classification": 80.66,
-                    "EmotionClassification": 48.74,
-                    "IFlyTek": 44.62,
-                    "ImdbClassification": 77.98,
-                    "JDReview": 74.6,
-                    "MTOPDomainClassification (en)": 92.13,
-                    "MTOPDomainClassification (fr)": 89.38,
-                    "MTOPIntentClassification (en)": 64.68,
-                    "MTOPIntentClassification (fr)": 64.45,
-                    "MasakhaNEWSClassification (fra)": 81.52,
-                    "MassiveIntentClassification (zh-CN)": 64.81,
-                    "MassiveIntentClassification (en)": 70.15,
-                    "MassiveIntentClassification (fr)": 65.42,
-                    "MassiveScenarioClassification (zh-CN)": 71.4,
-                    "MassiveScenarioClassification (en)": 75.33,
-                    "MassiveScenarioClassification (fr)": 71.11,
-                    "MultilingualSentiment": 67.99,
-                    "OnlineShopping": 88.94,
-                    "TNews": 45.77,
-                    "ToxicConversationsClassification": 72.29,
-                    "TweetSentimentExtractionClassification": 61.81,
-                    "Waimai": 82.37
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "text-embedding-ada-002",
-                    "AlloProfClusteringP2P": 64.83,
-                    "AlloProfClusteringS2S": 53.52,
-                    "ArxivClusteringP2P": 45.01,
-                    "ArxivClusteringS2S": 36.85,
-                    "BiorxivClusteringP2P": 36.66,
-                    "BiorxivClusteringS2S": 34.21,
-                    "CLSClusteringP2P": 38.26,
-                    "CLSClusteringS2S": 35.91,
-                    "HALClusteringS2S": 26.18,
-                    "MLSUMClusteringP2P": 44.59,
-                    "MLSUMClusteringS2S": 41.67,
-                    "MasakhaNEWSClusteringP2P (fra)": 68.35,
-                    "MasakhaNEWSClusteringS2S (fra)": 48.58,
-                    "MedrxivClusteringP2P": 32.6,
-                    "MedrxivClusteringS2S": 30.8,
-                    "RedditClustering": 61.42,
-                    "RedditClusteringP2P": 64.13,
-                    "StackExchangeClustering": 72.22,
-                    "StackExchangeClusteringP2P": 38.49,
-                    "ThuNewsClusteringP2P": 58.71,
-                    "ThuNewsClusteringS2S": 49.86,
-                    "TwentyNewsgroupsClustering": 52.56
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "text-embedding-ada-002",
-                    "Cmnli": 76.03,
-                    "Ocnli": 63.08,
-                    "OpusparcusPC (fr)": 94.12,
-                    "PawsXPairClassification (fr)": 60.16,
-                    "SprintDuplicateQuestions": 92.17,
-                    "TwitterSemEval2015": 75.28,
-                    "TwitterURLCorpus": 87.22
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "text-embedding-ada-002",
-                    "AskUbuntuDupQuestions": 62.05,
-                    "CMedQAv1": 63.08,
-                    "CMedQAv2": 64.02,
-                    "MMarcoReranking": 23.39,
-                    "MindSmallReranking": 31.45,
-                    "SciDocsRR": 81.22,
-                    "StackOverflowDupQuestions": 50.54,
-                    "SyntecReranking": 89.87,
-                    "T2Reranking": 66.65
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "text-embedding-ada-002",
-                    "ARCChallenge": 13.3,
-                    "AlloprofRetrieval": 51.64,
-                    "AlphaNLI": 25.65,
-                    "ArguAna": 57.44,
-                    "BSARDRetrieval": 0.61,
-                    "CQADupstackRetrieval": 41.69,
-                    "ClimateFEVER": 21.64,
-                    "CmedqaRetrieval": 22.36,
-                    "CovidRetrieval": 57.21,
-                    "DBPedia": 39.39,
-                    "DuRetrieval": 71.17,
-                    "EcomRetrieval": 44.49,
-                    "FEVER": 74.99,
-                    "FiQA2018": 44.41,
-                    "HellaSwag": 29.29,
-                    "HotpotQA": 60.9,
-                    "MMarcoRetrieval": 69.86,
-                    "MSMARCO": 40.91,
-                    "MedicalRetrieval": 37.92,
-                    "MintakaRetrieval (fr)": 29.94,
-                    "NFCorpus": 36.97,
-                    "NQ": 51.58,
-                    "PIQA": 31.02,
-                    "Quail": 5.83,
-                    "QuoraRetrieval": 87.6,
-                    "RARbCode": 83.39,
-                    "RARbMath": 73.21,
-                    "SCIDOCS": 18.36,
-                    "SIQA": 3.14,
-                    "SciFact": 72.75,
-                    "SpartQA": 4.23,
-                    "SyntecRetrieval": 85.97,
-                    "T2Retrieval": 69.14,
-                    "TRECCOVID": 68.47,
-                    "TempReasonL1": 1.68,
-                    "TempReasonL2Fact": 19.93,
-                    "TempReasonL2Pure": 2.6,
-                    "TempReasonL3Fact": 18.02,
-                    "TempReasonL3Pure": 7.58,
-                    "Touche2020": 21.61,
-                    "VideoRetrieval": 43.85,
-                    "WinoGrande": 19.65,
-                    "XPQARetrieval (fr)": 73.0
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "text-embedding-ada-002",
-                    "AFQMC": 23.88,
-                    "ATEC": 29.25,
-                    "BIOSSES": 86.35,
-                    "BQ": 45.33,
-                    "LCQMC": 68.41,
-                    "PAWSX": 16.55,
-                    "QBQTC": 30.27,
-                    "SICK-R": 80.6,
-                    "SICKFr": 76.28,
-                    "STS12": 69.8,
-                    "STS13": 83.27,
-                    "STS14": 76.09,
-                    "STS15": 86.12,
-                    "STS16": 85.96,
-                    "STS17 (en-en)": 90.25,
-                    "STS22 (zh)": 62.53,
-                    "STS22 (en)": 68.12,
-                    "STS22 (tr)": 64.5,
-                    "STS22 (fr)": 81.09,
-                    "STSB": 70.61,
-                    "STSBenchmark": 83.17,
-                    "STSBenchmarkMultilingualSTS (fr)": 77.55
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "text-embedding-ada-002",
-                    "SummEval": 30.8,
-                    "SummEvalFr": 30.5
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "text-embedding-ada-002"
-                }
-            ]
-        }
-    },
-    "GritLM-7B": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "GritLM-7B"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "GritLM-7B"
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "GritLM-7B"
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "GritLM-7B"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "GritLM-7B"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "GritLM-7B",
-                    "ARCChallenge": 26.68,
-                    "AlphaNLI": 34.0,
-                    "BrightRetrieval (pony)": 21.98,
-                    "BrightRetrieval (robotics)": 17.31,
-                    "BrightRetrieval (economics)": 19.0,
-                    "BrightRetrieval (theoremqa_questions)": 23.34,
-                    "BrightRetrieval (leetcode)": 29.85,
-                    "BrightRetrieval (earth_science)": 32.77,
-                    "BrightRetrieval (stackoverflow)": 11.62,
-                    "BrightRetrieval (sustainable_living)": 18.04,
-                    "BrightRetrieval (biology)": 25.04,
-                    "BrightRetrieval (psychology)": 19.92,
-                    "BrightRetrieval (theoremqa_theorems)": 17.41,
-                    "BrightRetrieval (aops)": 8.91,
-                    "HellaSwag": 39.45,
-                    "PIQA": 44.35,
-                    "Quail": 11.69,
-                    "RARbCode": 84.0,
-                    "RARbMath": 82.35,
-                    "SIQA": 7.23,
-                    "SpartQA": 9.29,
-                    "TempReasonL1": 7.15,
-                    "TempReasonL2Fact": 58.38,
-                    "TempReasonL2Pure": 11.22,
-                    "TempReasonL3Fact": 44.29,
-                    "TempReasonL3Pure": 14.15,
-                    "WinoGrande": 53.74
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "GritLM-7B"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "GritLM-7B"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "GritLM-7B",
-                    "Core17InstructionRetrieval": 2.62,
-                    "News21InstructionRetrieval": -1.01,
-                    "Robust04InstructionRetrieval": -1.68
-                }
-            ]
-        }
-    },
-    "bge-m3": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "bge-m3",
-                    "Tatoeba (rus-Cyrl_eng-Latn)": 93.42
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "bge-m3",
-                    "GeoreviewClassification (rus-Cyrl)": 48.27,
-                    "HeadlineClassification (rus-Cyrl)": 70.32,
-                    "InappropriatenessClassification (rus-Cyrl)": 59.87,
-                    "KinopoiskClassification (rus-Cyrl)": 58.23,
-                    "MassiveIntentClassification (rus-Cyrl)": 68.75,
-                    "MassiveScenarioClassification (rus-Cyrl)": 73.42,
-                    "RuReviewsClassification (rus-Cyrl)": 66.91,
-                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 55.81,
-                    "RuSciBenchOECDClassification (rus-Cyrl)": 42.57
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "bge-m3",
-                    "GeoreviewClusteringP2P (rus-Cyrl)": 63.75,
-                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 50.57,
-                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 43.21
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "bge-m3",
-                    "OpusparcusPC (rus-Cyrl)": 89.64,
-                    "TERRa (rus-Cyrl)": 60.6
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "bge-m3",
-                    "RuBQReranking (rus-Cyrl)": 74.02
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "bge-m3",
-                    "ARCChallenge": 9.02,
-                    "AlphaNLI": 24.73,
-                    "HellaSwag": 25.67,
-                    "LEMBNarrativeQARetrieval": 45.76,
-                    "LEMBNeedleRetrieval": 40.25,
-                    "LEMBPasskeyRetrieval": 46.0,
-                    "LEMBQMSumRetrieval": 35.54,
-                    "LEMBSummScreenFDRetrieval": 94.09,
-                    "LEMBWikimQARetrieval": 77.73,
-                    "PIQA": 22.93,
-                    "Quail": 7.51,
-                    "RARbCode": 38.8,
-                    "RARbMath": 69.19,
-                    "RiaNewsRetrieval (rus-Cyrl)": 82.98,
-                    "RuBQRetrieval (rus-Cyrl)": 71.21,
-                    "SIQA": 4.89,
-                    "SpartQA": 7.49,
-                    "TempReasonL1": 0.99,
-                    "TempReasonL2Fact": 33.23,
-                    "TempReasonL2Pure": 0.68,
-                    "TempReasonL3Fact": 30.05,
-                    "TempReasonL3Pure": 5.28,
-                    "WinoGrande": 41.72
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "bge-m3",
-                    "RUParaPhraserSTS (rus-Cyrl)": 74.9,
-                    "RuSTSBenchmarkSTS (rus-Cyrl)": 79.87,
-                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 79.27
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "bge-m3"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "bge-m3"
-                }
-            ]
-        }
-    },
-    "sentence-bert-swedish-cased": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "sentence-bert-swedish-cased",
-                    "BornholmBitextMining": 14.08
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "sentence-bert-swedish-cased",
-                    "AngryTweetsClassification": 44.46,
-                    "DKHateClassification": 59.36,
-                    "DanishPoliticalCommentsClassification": 28.32,
-                    "LccSentimentClassification": 47.2,
-                    "MassiveIntentClassification (da)": 42.84,
-                    "MassiveIntentClassification (nb)": 42.74,
-                    "MassiveIntentClassification (sv)": 69.11,
-                    "MassiveScenarioClassification (da)": 49.64,
-                    "MassiveScenarioClassification (nb)": 49.49,
-                    "MassiveScenarioClassification (sv)": 75.96,
-                    "NoRecClassification": 43.53,
-                    "NordicLangClassification": 51.45,
-                    "NorwegianParliament": 55.74,
-                    "ScalaDaClassification": 50.12,
-                    "ScalaNbClassification": 50.34
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "sentence-bert-swedish-cased"
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "sentence-bert-swedish-cased"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "sentence-bert-swedish-cased"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "sentence-bert-swedish-cased"
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "sentence-bert-swedish-cased"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "sentence-bert-swedish-cased"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "sentence-bert-swedish-cased"
-                }
-            ]
-        }
-    },
-    "voyage-2": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "voyage-2"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "voyage-2",
-                    "AmazonReviewsClassification (fr)": 37.26,
-                    "MTOPDomainClassification (fr)": 79.79,
-                    "MTOPIntentClassification (fr)": 45.62,
-                    "MasakhaNEWSClassification (fra)": 80.19,
-                    "MassiveIntentClassification (fr)": 53.7,
-                    "MassiveScenarioClassification (fr)": 62.46
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "voyage-2",
-                    "AlloProfClusteringP2P": 57.96,
-                    "AlloProfClusteringS2S": 41.65,
-                    "HALClusteringS2S": 24.84,
-                    "MLSUMClusteringP2P": 45.08,
-                    "MLSUMClusteringS2S": 38.77,
-                    "MasakhaNEWSClusteringP2P (fra)": 48.54,
-                    "MasakhaNEWSClusteringS2S (fra)": 36.33
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "voyage-2",
-                    "OpusparcusPC (fr)": 89.76,
-                    "PawsXPairClassification (fr)": 58.96
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "voyage-2",
-                    "AlloprofReranking": 63.54,
-                    "SyntecReranking": 82.65
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "voyage-2",
-                    "AlloprofRetrieval": 45.5,
-                    "BSARDRetrieval": 0.15,
-                    "MintakaRetrieval (fr)": 15.51,
-                    "SyntecRetrieval": 75.83,
-                    "XPQARetrieval (fr)": 67.07
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "voyage-2",
-                    "SICKFr": 68.51,
-                    "STS22 (fr)": 70.51,
-                    "STSBenchmarkMultilingualSTS (fr)": 76.43
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "voyage-2",
-                    "SummEvalFr": 30.88
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "voyage-2"
-                }
-            ]
-        }
-    },
-    "instructor-base": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "instructor-base"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "instructor-base"
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "instructor-base"
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "instructor-base"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "instructor-base"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "instructor-base"
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "instructor-base"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "instructor-base"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "instructor-base",
-                    "Core17InstructionRetrieval": -1.09,
-                    "News21InstructionRetrieval": -1.78,
-                    "Robust04InstructionRetrieval": -10.42
-                }
-            ]
-        }
-    },
-    "dfm-encoder-large-v1": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "dfm-encoder-large-v1",
-                    "BornholmBitextMining": 11.65
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "dfm-encoder-large-v1",
-                    "AngryTweetsClassification": 53.8,
-                    "DKHateClassification": 60.09,
-                    "DanishPoliticalCommentsClassification": 36.6,
-                    "LccSentimentClassification": 57.33,
-                    "MassiveIntentClassification (da)": 60.55,
-                    "MassiveIntentClassification (nb)": 52.49,
-                    "MassiveIntentClassification (sv)": 49.74,
-                    "MassiveScenarioClassification (da)": 64.16,
-                    "MassiveScenarioClassification (nb)": 54.59,
-                    "MassiveScenarioClassification (sv)": 50.1,
-                    "NoRecClassification": 48.3,
-                    "NordicLangClassification": 77.68,
-                    "NorwegianParliament": 58.78,
-                    "ScalaDaClassification": 63.08,
-                    "ScalaNbClassification": 58.95
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "dfm-encoder-large-v1"
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "dfm-encoder-large-v1"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "dfm-encoder-large-v1"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "dfm-encoder-large-v1"
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "dfm-encoder-large-v1"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "dfm-encoder-large-v1"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "dfm-encoder-large-v1"
-                }
-            ]
-        }
-    },
-    "sentence-t5-base": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "sentence-t5-base"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "sentence-t5-base",
-                    "AmazonCounterfactualClassification (de)": 69.98,
-                    "AmazonCounterfactualClassification (en)": 75.82,
-                    "AmazonCounterfactualClassification (en-ext)": 76.81,
-                    "AmazonCounterfactualClassification (ja)": 46.05,
-                    "AmazonPolarityClassification": 85.12,
-                    "AmazonReviewsClassification (de)": 37.9,
-                    "AmazonReviewsClassification (en)": 44.94,
-                    "AmazonReviewsClassification (es)": 37.33,
-                    "AmazonReviewsClassification (fr)": 37.35,
-                    "AmazonReviewsClassification (ja)": 22.29,
-                    "AmazonReviewsClassification (zh)": 21.53,
-                    "Banking77Classification": 76.48,
-                    "EmotionClassification": 51.35,
-                    "ImdbClassification": 77.34,
-                    "MTOPDomainClassification (de)": 76.98,
-                    "MTOPDomainClassification (en)": 90.34,
-                    "MTOPDomainClassification (es)": 73.61,
-                    "MTOPDomainClassification (fr)": 75.03,
-                    "MTOPDomainClassification (hi)": 21.4,
-                    "MTOPDomainClassification (th)": 16.21,
-                    "MTOPIntentClassification (de)": 44.43,
-                    "MTOPIntentClassification (en)": 63.32,
-                    "MTOPIntentClassification (es)": 42.03,
-                    "MTOPIntentClassification (fr)": 43.85,
-                    "MTOPIntentClassification (hi)": 3.8,
-                    "MTOPIntentClassification (th)": 5.21,
-                    "MasakhaNEWSClassification (fra)": 81.21,
-                    "MassiveIntentClassification (af)": 34.32,
-                    "MassiveIntentClassification (am)": 2.38,
-                    "MassiveIntentClassification (ar)": 4.53,
-                    "MassiveIntentClassification (az)": 31.76,
-                    "MassiveIntentClassification (bn)": 2.58,
-                    "MassiveIntentClassification (cy)": 28.94,
-                    "MassiveIntentClassification (da)": 38.82,
-                    "MassiveIntentClassification (de)": 45.23,
-                    "MassiveIntentClassification (el)": 10.05,
-                    "MassiveIntentClassification (en)": 69.74,
-                    "MassiveIntentClassification (es)": 45.32,
-                    "MassiveIntentClassification (fa)": 3.58,
-                    "MassiveIntentClassification (fi)": 33.52,
-                    "MassiveIntentClassification (fr)": 51.13,
-                    "MassiveIntentClassification (he)": 2.63,
-                    "MassiveIntentClassification (hi)": 2.68,
-                    "MassiveIntentClassification (hu)": 32.31,
-                    "MassiveIntentClassification (hy)": 3.33,
-                    "MassiveIntentClassification (id)": 35.5,
-                    "MassiveIntentClassification (is)": 29.82,
-                    "MassiveIntentClassification (it)": 45.59,
-                    "MassiveIntentClassification (ja)": 3.67,
-                    "MassiveIntentClassification (jv)": 31.15,
-                    "MassiveIntentClassification (ka)": 2.77,
-                    "MassiveIntentClassification (km)": 5.66,
-                    "MassiveIntentClassification (kn)": 2.59,
-                    "MassiveIntentClassification (ko)": 2.34,
-                    "MassiveIntentClassification (lv)": 33.97,
-                    "MassiveIntentClassification (ml)": 2.55,
-                    "MassiveIntentClassification (mn)": 14.7,
-                    "MassiveIntentClassification (ms)": 33.12,
-                    "MassiveIntentClassification (my)": 4.42,
-                    "MassiveIntentClassification (nb)": 38.53,
-                    "MassiveIntentClassification (nl)": 37.96,
-                    "MassiveIntentClassification (pl)": 34.41,
-                    "MassiveIntentClassification (pt)": 43.35,
-                    "MassiveIntentClassification (ro)": 42.69,
-                    "MassiveIntentClassification (ru)": 14.82,
-                    "MassiveIntentClassification (sl)": 34.54,
-                    "MassiveIntentClassification (sq)": 38.54,
-                    "MassiveIntentClassification (sv)": 35.98,
-                    "MassiveIntentClassification (sw)": 32.14,
-                    "MassiveIntentClassification (ta)": 1.41,
-                    "MassiveIntentClassification (te)": 2.5,
-                    "MassiveIntentClassification (th)": 3.71,
-                    "MassiveIntentClassification (tl)": 36.04,
-                    "MassiveIntentClassification (tr)": 33.77,
-                    "MassiveIntentClassification (ur)": 2.99,
-                    "MassiveIntentClassification (vi)": 22.62,
-                    "MassiveIntentClassification (zh-CN)": 1.12,
-                    "MassiveIntentClassification (zh-TW)": 4.63,
-                    "MassiveScenarioClassification (af)": 44.45,
-                    "MassiveScenarioClassification (am)": 7.51,
-                    "MassiveScenarioClassification (ar)": 12.32,
-                    "MassiveScenarioClassification (az)": 38.41,
-                    "MassiveScenarioClassification (bn)": 8.45,
-                    "MassiveScenarioClassification (cy)": 35.04,
-                    "MassiveScenarioClassification (da)": 48.36,
-                    "MassiveScenarioClassification (de)": 59.12,
-                    "MassiveScenarioClassification (el)": 17.68,
-                    "MassiveScenarioClassification (en)": 72.32,
-                    "MassiveScenarioClassification (es)": 55.61,
-                    "MassiveScenarioClassification (fa)": 6.86,
-                    "MassiveScenarioClassification (fi)": 41.34,
-                    "MassiveScenarioClassification (fr)": 59.92,
-                    "MassiveScenarioClassification (he)": 7.86,
-                    "MassiveScenarioClassification (hi)": 7.63,
-                    "MassiveScenarioClassification (hu)": 41.31,
-                    "MassiveScenarioClassification (hy)": 9.23,
-                    "MassiveScenarioClassification (id)": 44.64,
-                    "MassiveScenarioClassification (is)": 39.63,
-                    "MassiveScenarioClassification (it)": 54.58,
-                    "MassiveScenarioClassification (ja)": 4.96,
-                    "MassiveScenarioClassification (jv)": 40.73,
-                    "MassiveScenarioClassification (ka)": 7.51,
-                    "MassiveScenarioClassification (km)": 8.73,
-                    "MassiveScenarioClassification (kn)": 7.99,
-                    "MassiveScenarioClassification (ko)": 6.03,
-                    "MassiveScenarioClassification (lv)": 36.42,
-                    "MassiveScenarioClassification (ml)": 6.96,
-                    "MassiveScenarioClassification (mn)": 19.85,
-                    "MassiveScenarioClassification (ms)": 43.18,
-                    "MassiveScenarioClassification (my)": 9.46,
-                    "MassiveScenarioClassification (nb)": 46.6,
-                    "MassiveScenarioClassification (nl)": 50.0,
-                    "MassiveScenarioClassification (pl)": 42.3,
-                    "MassiveScenarioClassification (pt)": 52.24,
-                    "MassiveScenarioClassification (ro)": 53.7,
-                    "MassiveScenarioClassification (ru)": 20.69,
-                    "MassiveScenarioClassification (sl)": 39.79,
-                    "MassiveScenarioClassification (sq)": 50.16,
-                    "MassiveScenarioClassification (sv)": 46.69,
-                    "MassiveScenarioClassification (sw)": 40.48,
-                    "MassiveScenarioClassification (ta)": 7.47,
-                    "MassiveScenarioClassification (te)": 6.87,
-                    "MassiveScenarioClassification (th)": 8.26,
-                    "MassiveScenarioClassification (tl)": 48.94,
-                    "MassiveScenarioClassification (tr)": 41.83,
-                    "MassiveScenarioClassification (ur)": 9.77,
-                    "MassiveScenarioClassification (vi)": 30.01,
-                    "MassiveScenarioClassification (zh-CN)": 4.17,
-                    "MassiveScenarioClassification (zh-TW)": 7.91,
-                    "ToxicConversationsClassification": 68.2,
-                    "TweetSentimentExtractionClassification": 62.71
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "sentence-t5-base",
-                    "AlloProfClusteringP2P": 58.44,
-                    "AlloProfClusteringS2S": 35.93,
-                    "ArxivClusteringP2P": 39.28,
-                    "ArxivClusteringS2S": 27.26,
-                    "BiorxivClusteringP2P": 33.99,
-                    "BiorxivClusteringS2S": 22.92,
-                    "BlurbsClusteringP2P": 30.59,
-                    "BlurbsClusteringS2S": 11.57,
-                    "HALClusteringS2S": 17.72,
-                    "MLSUMClusteringP2P": 40.77,
-                    "MLSUMClusteringS2S": 30.06,
-                    "MasakhaNEWSClusteringP2P (fra)": 61.9,
-                    "MasakhaNEWSClusteringS2S (fra)": 35.64,
-                    "MedrxivClusteringP2P": 33.2,
-                    "MedrxivClusteringS2S": 26.13,
-                    "RedditClustering": 52.93,
-                    "RedditClusteringP2P": 59.67,
-                    "StackExchangeClustering": 63.13,
-                    "StackExchangeClusteringP2P": 35.68,
-                    "TenKGnadClusteringP2P": 44.88,
-                    "TenKGnadClusteringS2S": 18.11,
-                    "TwentyNewsgroupsClustering": 48.1
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "sentence-t5-base",
-                    "OpusparcusPC (fr)": 89.4,
-                    "PawsXPairClassification (fr)": 55.35,
-                    "SprintDuplicateQuestions": 91.23,
-                    "TwitterSemEval2015": 78.25,
-                    "TwitterURLCorpus": 86.05
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "sentence-t5-base",
-                    "AlloprofReranking": 50.12,
-                    "AskUbuntuDupQuestions": 59.73,
-                    "MindSmallReranking": 30.2,
-                    "SciDocsRR": 73.96,
-                    "StackOverflowDupQuestions": 48.46,
-                    "SyntecReranking": 78.05
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "sentence-t5-base",
-                    "AlloprofRetrieval": 27.52,
-                    "ArguAna": 44.85,
-                    "BSARDRetrieval": 0.16,
-                    "CQADupstackRetrieval": 35.23,
-                    "ClimateFEVER": 10.37,
-                    "DBPedia": 27.77,
-                    "FEVER": 26.17,
-                    "FiQA2018": 34.83,
-                    "HotpotQA": 33.2,
-                    "MSMARCO": 20.7,
-                    "MintakaRetrieval (fr)": 21.04,
-                    "NFCorpus": 28.65,
-                    "NQ": 36.32,
-                    "QuoraRetrieval": 85.49,
-                    "SCIDOCS": 14.15,
-                    "SciFact": 45.76,
-                    "SyntecRetrieval": 67.0,
-                    "TRECCOVID": 40.7,
-                    "Touche2020": 20.3,
-                    "XPQARetrieval (fr)": 45.19
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "sentence-t5-base",
-                    "BIOSSES": 75.89,
-                    "SICK-R": 80.18,
-                    "SICKFr": 71.74,
-                    "STS12": 78.05,
-                    "STS13": 85.85,
-                    "STS14": 82.19,
-                    "STS15": 87.46,
-                    "STS16": 84.03,
-                    "STS17 (ar-ar)": 13.36,
-                    "STS17 (en-ar)": -5.65,
-                    "STS17 (en-de)": 67.11,
-                    "STS17 (en-en)": 89.57,
-                    "STS17 (en-tr)": -0.02,
-                    "STS17 (es-en)": 47.72,
-                    "STS17 (es-es)": 79.94,
-                    "STS17 (fr-en)": 56.61,
-                    "STS17 (it-en)": 30.46,
-                    "STS17 (ko-ko)": 10.06,
-                    "STS17 (nl-en)": 36.46,
-                    "STS22 (ar)": 31.2,
-                    "STS22 (de)": 42.08,
-                    "STS22 (de-en)": 46.9,
-                    "STS22 (de-fr)": 55.04,
-                    "STS22 (de-pl)": 33.94,
-                    "STS22 (en)": 62.66,
-                    "STS22 (es)": 53.81,
-                    "STS22 (es-en)": 65.19,
-                    "STS22 (es-it)": 55.29,
-                    "STS22 (fr)": 77.69,
-                    "STS22 (fr-pl)": 28.17,
-                    "STS22 (it)": 60.65,
-                    "STS22 (pl)": 24.42,
-                    "STS22 (pl-en)": 42.97,
-                    "STS22 (ru)": 12.13,
-                    "STS22 (tr)": 40.45,
-                    "STS22 (zh)": 32.9,
-                    "STS22 (zh-en)": 20.15,
-                    "STSBenchmark": 85.52,
-                    "STSBenchmarkMultilingualSTS (fr)": 74.04
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "sentence-t5-base",
-                    "SummEval": 31.39,
-                    "SummEvalFr": 30.01
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "sentence-t5-base"
-                }
-            ]
-        }
-    },
-    "sentence-t5-large": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "sentence-t5-large",
-                    "BUCC (de-en)": 87.0,
-                    "BUCC (fr-en)": 88.91,
-                    "BUCC (ru-en)": 0.44,
-                    "BUCC (zh-en)": 0.95,
-                    "Tatoeba (afr-eng)": 23.7,
-                    "Tatoeba (amh-eng)": 0.65,
-                    "Tatoeba (ang-eng)": 30.98,
-                    "Tatoeba (ara-eng)": 0.48,
-                    "Tatoeba (arq-eng)": 0.68,
-                    "Tatoeba (arz-eng)": 0.22,
-                    "Tatoeba (ast-eng)": 55.3,
-                    "Tatoeba (awa-eng)": 1.03,
-                    "Tatoeba (aze-eng)": 5.83,
-                    "Tatoeba (bel-eng)": 1.66,
-                    "Tatoeba (ben-eng)": 0.0,
-                    "Tatoeba (ber-eng)": 5.62,
-                    "Tatoeba (bos-eng)": 12.23,
-                    "Tatoeba (bre-eng)": 5.84,
-                    "Tatoeba (bul-eng)": 1.35,
-                    "Tatoeba (cat-eng)": 48.56,
-                    "Tatoeba (cbk-eng)": 46.97,
-                    "Tatoeba (ceb-eng)": 9.79,
-                    "Tatoeba (ces-eng)": 6.0,
-                    "Tatoeba (cha-eng)": 24.21,
-                    "Tatoeba (cmn-eng)": 2.26,
-                    "Tatoeba (cor-eng)": 4.03,
-                    "Tatoeba (csb-eng)": 9.53,
-                    "Tatoeba (cym-eng)": 9.17,
-                    "Tatoeba (dan-eng)": 34.63,
-                    "Tatoeba (deu-eng)": 89.31,
-                    "Tatoeba (dsb-eng)": 9.68,
-                    "Tatoeba (dtp-eng)": 4.66,
-                    "Tatoeba (ell-eng)": 0.77,
-                    "Tatoeba (epo-eng)": 26.88,
-                    "Tatoeba (est-eng)": 5.19,
-                    "Tatoeba (eus-eng)": 9.46,
-                    "Tatoeba (fao-eng)": 21.59,
-                    "Tatoeba (fin-eng)": 5.66,
-                    "Tatoeba (fra-eng)": 79.71,
-                    "Tatoeba (fry-eng)": 28.29,
-                    "Tatoeba (gla-eng)": 2.34,
-                    "Tatoeba (gle-eng)": 3.55,
-                    "Tatoeba (glg-eng)": 56.25,
-                    "Tatoeba (gsw-eng)": 24.25,
-                    "Tatoeba (heb-eng)": 0.57,
-                    "Tatoeba (hin-eng)": 0.12,
-                    "Tatoeba (hrv-eng)": 10.29,
-                    "Tatoeba (hsb-eng)": 9.52,
-                    "Tatoeba (hun-eng)": 6.22,
-                    "Tatoeba (hye-eng)": 0.81,
-                    "Tatoeba (ido-eng)": 41.11,
-                    "Tatoeba (ile-eng)": 54.0,
-                    "Tatoeba (ina-eng)": 75.47,
-                    "Tatoeba (ind-eng)": 13.02,
-                    "Tatoeba (isl-eng)": 8.98,
-                    "Tatoeba (ita-eng)": 67.23,
-                    "Tatoeba (jav-eng)": 8.54,
-                    "Tatoeba (jpn-eng)": 0.99,
-                    "Tatoeba (kab-eng)": 1.85,
-                    "Tatoeba (kat-eng)": 1.37,
-                    "Tatoeba (kaz-eng)": 0.67,
-                    "Tatoeba (khm-eng)": 0.56,
-                    "Tatoeba (kor-eng)": 1.73,
-                    "Tatoeba (kur-eng)": 9.23,
-                    "Tatoeba (kzj-eng)": 5.38,
-                    "Tatoeba (lat-eng)": 21.3,
-                    "Tatoeba (lfn-eng)": 40.48,
-                    "Tatoeba (lit-eng)": 5.38,
-                    "Tatoeba (lvs-eng)": 6.83,
-                    "Tatoeba (mal-eng)": 0.45,
-                    "Tatoeba (mar-eng)": 0.01,
-                    "Tatoeba (max-eng)": 16.44,
-                    "Tatoeba (mhr-eng)": 0.33,
-                    "Tatoeba (mkd-eng)": 0.4,
-                    "Tatoeba (mon-eng)": 2.48,
-                    "Tatoeba (nds-eng)": 34.66,
-                    "Tatoeba (nld-eng)": 42.72,
-                    "Tatoeba (nno-eng)": 24.08,
-                    "Tatoeba (nob-eng)": 34.17,
-                    "Tatoeba (nov-eng)": 55.01,
-                    "Tatoeba (oci-eng)": 29.15,
-                    "Tatoeba (orv-eng)": 0.2,
-                    "Tatoeba (pam-eng)": 6.99,
-                    "Tatoeba (pes-eng)": 0.9,
-                    "Tatoeba (pms-eng)": 30.8,
-                    "Tatoeba (pol-eng)": 12.81,
-                    "Tatoeba (por-eng)": 73.45,
-                    "Tatoeba (ron-eng)": 54.86,
-                    "Tatoeba (rus-eng)": 2.43,
-                    "Tatoeba (slk-eng)": 8.35,
-                    "Tatoeba (slv-eng)": 9.3,
-                    "Tatoeba (spa-eng)": 78.87,
-                    "Tatoeba (sqi-eng)": 11.74,
-                    "Tatoeba (srp-eng)": 5.83,
-                    "Tatoeba (swe-eng)": 35.41,
-                    "Tatoeba (swg-eng)": 28.18,
-                    "Tatoeba (swh-eng)": 7.53,
-                    "Tatoeba (tam-eng)": 0.36,
-                    "Tatoeba (tat-eng)": 1.01,
-                    "Tatoeba (tel-eng)": 1.1,
-                    "Tatoeba (tgl-eng)": 12.4,
-                    "Tatoeba (tha-eng)": 1.58,
-                    "Tatoeba (tuk-eng)": 4.95,
-                    "Tatoeba (tur-eng)": 6.45,
-                    "Tatoeba (tzl-eng)": 37.82,
-                    "Tatoeba (uig-eng)": 0.67,
-                    "Tatoeba (ukr-eng)": 1.88,
-                    "Tatoeba (urd-eng)": 0.0,
-                    "Tatoeba (uzb-eng)": 4.79,
-                    "Tatoeba (vie-eng)": 7.03,
-                    "Tatoeba (war-eng)": 9.68,
-                    "Tatoeba (wuu-eng)": 1.28,
-                    "Tatoeba (xho-eng)": 10.64,
-                    "Tatoeba (yid-eng)": 0.57,
-                    "Tatoeba (yue-eng)": 0.88,
-                    "Tatoeba (zsm-eng)": 14.67
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "sentence-t5-large",
-                    "AmazonCounterfactualClassification (de)": 67.97,
-                    "AmazonCounterfactualClassification (en)": 75.51,
-                    "AmazonCounterfactualClassification (en-ext)": 75.44,
-                    "AmazonCounterfactualClassification (ja)": 45.72,
-                    "AmazonPolarityClassification": 92.87,
-                    "AmazonReviewsClassification (de)": 43.16,
-                    "AmazonReviewsClassification (en)": 47.12,
-                    "AmazonReviewsClassification (es)": 42.89,
-                    "AmazonReviewsClassification (fr)": 41.48,
-                    "AmazonReviewsClassification (ja)": 22.49,
-                    "AmazonReviewsClassification (zh)": 22.12,
-                    "Banking77Classification": 78.46,
-                    "EmotionClassification": 51.74,
-                    "ImdbClassification": 87.01,
-                    "MTOPDomainClassification (de)": 80.56,
-                    "MTOPDomainClassification (en)": 90.99,
-                    "MTOPDomainClassification (es)": 80.78,
-                    "MTOPDomainClassification (fr)": 79.6,
-                    "MTOPDomainClassification (hi)": 21.22,
-                    "MTOPDomainClassification (th)": 15.82,
-                    "MTOPIntentClassification (de)": 52.5,
-                    "MTOPIntentClassification (en)": 64.98,
-                    "MTOPIntentClassification (es)": 52.07,
-                    "MTOPIntentClassification (fr)": 47.73,
-                    "MTOPIntentClassification (hi)": 3.74,
-                    "MTOPIntentClassification (th)": 4.96,
-                    "MasakhaNEWSClassification (fra)": 80.43,
-                    "MassiveIntentClassification (af)": 38.41,
-                    "MassiveIntentClassification (am)": 2.49,
-                    "MassiveIntentClassification (ar)": 4.7,
-                    "MassiveIntentClassification (az)": 31.77,
-                    "MassiveIntentClassification (bn)": 2.77,
-                    "MassiveIntentClassification (cy)": 31.69,
-                    "MassiveIntentClassification (da)": 41.76,
-                    "MassiveIntentClassification (de)": 52.01,
-                    "MassiveIntentClassification (el)": 9.74,
-                    "MassiveIntentClassification (en)": 71.78,
-                    "MassiveIntentClassification (es)": 54.1,
-                    "MassiveIntentClassification (fa)": 3.86,
-                    "MassiveIntentClassification (fi)": 34.07,
-                    "MassiveIntentClassification (fr)": 57.01,
-                    "MassiveIntentClassification (he)": 2.14,
-                    "MassiveIntentClassification (hi)": 2.97,
-                    "MassiveIntentClassification (hu)": 32.01,
-                    "MassiveIntentClassification (hy)": 3.17,
-                    "MassiveIntentClassification (id)": 34.55,
-                    "MassiveIntentClassification (is)": 32.0,
-                    "MassiveIntentClassification (it)": 52.94,
-                    "MassiveIntentClassification (ja)": 2.9,
-                    "MassiveIntentClassification (jv)": 32.42,
-                    "MassiveIntentClassification (ka)": 2.71,
-                    "MassiveIntentClassification (km)": 5.5,
-                    "MassiveIntentClassification (kn)": 2.41,
-                    "MassiveIntentClassification (ko)": 2.57,
-                    "MassiveIntentClassification (lv)": 35.09,
-                    "MassiveIntentClassification (ml)": 2.95,
-                    "MassiveIntentClassification (mn)": 18.33,
-                    "MassiveIntentClassification (ms)": 29.69,
-                    "MassiveIntentClassification (my)": 3.99,
-                    "MassiveIntentClassification (nb)": 41.29,
-                    "MassiveIntentClassification (nl)": 44.95,
-                    "MassiveIntentClassification (pl)": 37.67,
-                    "MassiveIntentClassification (pt)": 51.96,
-                    "MassiveIntentClassification (ro)": 43.83,
-                    "MassiveIntentClassification (ru)": 17.32,
-                    "MassiveIntentClassification (sl)": 33.71,
-                    "MassiveIntentClassification (sq)": 37.62,
-                    "MassiveIntentClassification (sv)": 40.67,
-                    "MassiveIntentClassification (sw)": 31.9,
-                    "MassiveIntentClassification (ta)": 1.91,
-                    "MassiveIntentClassification (te)": 2.54,
-                    "MassiveIntentClassification (th)": 3.85,
-                    "MassiveIntentClassification (tl)": 36.83,
-                    "MassiveIntentClassification (tr)": 33.0,
-                    "MassiveIntentClassification (ur)": 2.62,
-                    "MassiveIntentClassification (vi)": 22.81,
-                    "MassiveIntentClassification (zh-CN)": 1.09,
-                    "MassiveIntentClassification (zh-TW)": 3.49,
-                    "MassiveScenarioClassification (af)": 50.28,
-                    "MassiveScenarioClassification (am)": 7.15,
-                    "MassiveScenarioClassification (ar)": 12.12,
-                    "MassiveScenarioClassification (az)": 39.68,
-                    "MassiveScenarioClassification (bn)": 8.06,
-                    "MassiveScenarioClassification (cy)": 38.01,
-                    "MassiveScenarioClassification (da)": 51.44,
-                    "MassiveScenarioClassification (de)": 62.71,
-                    "MassiveScenarioClassification (el)": 17.19,
-                    "MassiveScenarioClassification (en)": 73.16,
-                    "MassiveScenarioClassification (es)": 59.56,
-                    "MassiveScenarioClassification (fa)": 6.5,
-                    "MassiveScenarioClassification (fi)": 41.72,
-                    "MassiveScenarioClassification (fr)": 63.6,
-                    "MassiveScenarioClassification (he)": 7.93,
-                    "MassiveScenarioClassification (hi)": 7.85,
-                    "MassiveScenarioClassification (hu)": 41.37,
-                    "MassiveScenarioClassification (hy)": 9.42,
-                    "MassiveScenarioClassification (id)": 44.88,
-                    "MassiveScenarioClassification (is)": 40.86,
-                    "MassiveScenarioClassification (it)": 60.09,
-                    "MassiveScenarioClassification (ja)": 6.56,
-                    "MassiveScenarioClassification (jv)": 40.18,
-                    "MassiveScenarioClassification (ka)": 7.37,
-                    "MassiveScenarioClassification (km)": 9.56,
-                    "MassiveScenarioClassification (kn)": 8.4,
-                    "MassiveScenarioClassification (ko)": 5.96,
-                    "MassiveScenarioClassification (lv)": 41.44,
-                    "MassiveScenarioClassification (ml)": 7.47,
-                    "MassiveScenarioClassification (mn)": 25.36,
-                    "MassiveScenarioClassification (ms)": 39.69,
-                    "MassiveScenarioClassification (my)": 9.68,
-                    "MassiveScenarioClassification (nb)": 49.92,
-                    "MassiveScenarioClassification (nl)": 56.09,
-                    "MassiveScenarioClassification (pl)": 45.2,
-                    "MassiveScenarioClassification (pt)": 57.99,
-                    "MassiveScenarioClassification (ro)": 56.0,
-                    "MassiveScenarioClassification (ru)": 27.47,
-                    "MassiveScenarioClassification (sl)": 41.04,
-                    "MassiveScenarioClassification (sq)": 49.38,
-                    "MassiveScenarioClassification (sv)": 50.97,
-                    "MassiveScenarioClassification (sw)": 40.62,
-                    "MassiveScenarioClassification (ta)": 7.59,
-                    "MassiveScenarioClassification (te)": 7.07,
-                    "MassiveScenarioClassification (th)": 8.52,
-                    "MassiveScenarioClassification (tl)": 49.89,
-                    "MassiveScenarioClassification (tr)": 43.08,
-                    "MassiveScenarioClassification (ur)": 9.31,
-                    "MassiveScenarioClassification (vi)": 27.46,
-                    "MassiveScenarioClassification (zh-CN)": 4.7,
-                    "MassiveScenarioClassification (zh-TW)": 7.24,
-                    "ToxicConversationsClassification": 71.73,
-                    "TweetSentimentExtractionClassification": 62.33
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "sentence-t5-large",
-                    "AlloProfClusteringP2P": 61.82,
-                    "AlloProfClusteringS2S": 39.78,
-                    "ArxivClusteringP2P": 41.62,
-                    "ArxivClusteringS2S": 29.44,
-                    "BiorxivClusteringP2P": 35.99,
-                    "BiorxivClusteringS2S": 24.02,
-                    "BlurbsClusteringP2P": 35.33,
-                    "BlurbsClusteringS2S": 13.27,
-                    "HALClusteringS2S": 18.73,
-                    "MLSUMClusteringP2P": 42.07,
-                    "MLSUMClusteringS2S": 31.87,
-                    "MasakhaNEWSClusteringP2P (fra)": 58.6,
-                    "MasakhaNEWSClusteringS2S (fra)": 31.33,
-                    "MedrxivClusteringP2P": 32.4,
-                    "MedrxivClusteringS2S": 26.33,
-                    "RedditClustering": 54.53,
-                    "RedditClusteringP2P": 62.5,
-                    "StackExchangeClustering": 65.11,
-                    "StackExchangeClusteringP2P": 36.86,
-                    "TenKGnadClusteringP2P": 44.11,
-                    "TenKGnadClusteringS2S": 17.26,
-                    "TwentyNewsgroupsClustering": 49.33
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "sentence-t5-large",
-                    "OpusparcusPC (fr)": 91.19,
-                    "PawsXPairClassification (fr)": 59.59,
-                    "SprintDuplicateQuestions": 89.01,
-                    "TwitterSemEval2015": 79.75,
-                    "TwitterURLCorpus": 86.14
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "sentence-t5-large",
-                    "AlloprofReranking": 57.99,
-                    "AskUbuntuDupQuestions": 61.51,
-                    "MindSmallReranking": 30.27,
-                    "SciDocsRR": 74.88,
-                    "StackOverflowDupQuestions": 49.34,
-                    "SyntecReranking": 79.77
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "sentence-t5-large",
-                    "AlloprofRetrieval": 34.52,
-                    "ArguAna": 39.27,
-                    "BSARDRetrieval": 0.0,
-                    "CQADupstackRetrieval": 38.96,
-                    "ClimateFEVER": 11.36,
-                    "DBPedia": 31.55,
-                    "FEVER": 36.21,
-                    "FiQA2018": 43.55,
-                    "HotpotQA": 33.95,
-                    "MSMARCO": 23.96,
-                    "MintakaRetrieval (fr)": 23.92,
-                    "NFCorpus": 31.1,
-                    "NQ": 42.02,
-                    "QuoraRetrieval": 85.73,
-                    "SCIDOCS": 15.38,
-                    "SciFact": 49.91,
-                    "SyntecRetrieval": 71.05,
-                    "TRECCOVID": 46.11,
-                    "Touche2020": 21.63,
-                    "XPQARetrieval (fr)": 48.79
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "sentence-t5-large",
-                    "BIOSSES": 78.93,
-                    "SICK-R": 80.34,
-                    "SICKFr": 72.83,
-                    "STS12": 79.11,
-                    "STS13": 87.33,
-                    "STS14": 83.17,
-                    "STS15": 88.28,
-                    "STS16": 84.36,
-                    "STS17 (ar-ar)": 10.75,
-                    "STS17 (en-ar)": -4.71,
-                    "STS17 (en-de)": 73.62,
-                    "STS17 (en-en)": 88.99,
-                    "STS17 (en-tr)": -0.42,
-                    "STS17 (es-en)": 62.62,
-                    "STS17 (es-es)": 82.74,
-                    "STS17 (fr-en)": 67.86,
-                    "STS17 (it-en)": 51.86,
-                    "STS17 (ko-ko)": 9.44,
-                    "STS17 (nl-en)": 45.95,
-                    "STS22 (ar)": 27.01,
-                    "STS22 (de)": 43.73,
-                    "STS22 (de-en)": 49.93,
-                    "STS22 (de-fr)": 61.58,
-                    "STS22 (de-pl)": 38.83,
-                    "STS22 (en)": 62.39,
-                    "STS22 (es)": 57.68,
-                    "STS22 (es-en)": 68.09,
-                    "STS22 (es-it)": 61.58,
-                    "STS22 (fr)": 75.01,
-                    "STS22 (fr-pl)": 5.63,
-                    "STS22 (it)": 62.01,
-                    "STS22 (pl)": 25.0,
-                    "STS22 (pl-en)": 51.72,
-                    "STS22 (ru)": 14.21,
-                    "STS22 (tr)": 47.3,
-                    "STS22 (zh)": 30.47,
-                    "STS22 (zh-en)": 23.1,
-                    "STSBenchmark": 85.36,
-                    "STSBenchmarkMultilingualSTS (fr)": 77.59
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "sentence-t5-large",
-                    "SummEval": 29.64,
-                    "SummEvalFr": 30.23
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "sentence-t5-large"
-                }
-            ]
-        }
-    },
-    "LLM2Vec-Mistral-supervised": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "LLM2Vec-Mistral-supervised"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "LLM2Vec-Mistral-supervised",
-                    "AmazonCounterfactualClassification (en)": 77.58,
-                    "AmazonPolarityClassification": 91.12,
-                    "AmazonReviewsClassification (en)": 49.97,
-                    "Banking77Classification": 88.31,
-                    "EmotionClassification": 52.04,
-                    "ImdbClassification": 87.42,
-                    "MTOPDomainClassification (en)": 96.04,
-                    "MTOPIntentClassification (en)": 84.77,
-                    "MassiveIntentClassification (en)": 79.29,
-                    "MassiveScenarioClassification (en)": 81.64,
-                    "ToxicConversationsClassification": 69.26,
-                    "TweetSentimentExtractionClassification": 62.14
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "LLM2Vec-Mistral-supervised",
-                    "ArxivClusteringP2P": 42.81,
-                    "ArxivClusteringS2S": 44.24,
-                    "BiorxivClusteringP2P": 34.27,
-                    "BiorxivClusteringS2S": 35.53,
-                    "MedrxivClusteringP2P": 31.07,
-                    "MedrxivClusteringS2S": 31.27,
-                    "RedditClustering": 60.24,
-                    "RedditClusteringP2P": 64.12,
-                    "StackExchangeClustering": 70.73,
-                    "StackExchangeClusteringP2P": 34.5,
-                    "TwentyNewsgroupsClustering": 52.18
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "LLM2Vec-Mistral-supervised",
-                    "SprintDuplicateQuestions": 96.82,
-                    "TwitterSemEval2015": 80.6,
-                    "TwitterURLCorpus": 86.56
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "LLM2Vec-Mistral-supervised",
-                    "AskUbuntuDupQuestions": 63.98,
-                    "MindSmallReranking": 31.5,
-                    "SciDocsRR": 83.8,
-                    "StackOverflowDupQuestions": 54.41
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "LLM2Vec-Mistral-supervised",
-                    "ArguAna": 57.48,
-                    "CQADupstackRetrieval": 48.84,
-                    "ClimateFEVER": 35.19,
-                    "DBPedia": 49.58,
-                    "FEVER": 89.4,
-                    "FiQA2018": 53.11,
-                    "HotpotQA": 74.07,
-                    "MSMARCO": 42.17,
-                    "NFCorpus": 39.33,
-                    "NQ": 61.7,
-                    "QuoraRetrieval": 87.75,
-                    "SCIDOCS": 22.5,
-                    "SciFact": 78.86,
-                    "TRECCOVID": 77.69,
-                    "Touche2020": 22.18
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "LLM2Vec-Mistral-supervised",
-                    "BIOSSES": 85.24,
-                    "SICK-R": 83.7,
-                    "STS12": 78.8,
-                    "STS13": 86.37,
-                    "STS14": 84.04,
-                    "STS15": 88.99,
-                    "STS16": 87.22,
-                    "STS17 (en-en)": 90.19,
-                    "STS22 (en)": 67.68,
-                    "STSBenchmark": 88.65
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "LLM2Vec-Mistral-supervised",
-                    "SummEval": 29.96
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "LLM2Vec-Mistral-supervised"
-                }
-            ]
-        }
-    },
-    "llama-2-7b-chat": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "llama-2-7b-chat"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "llama-2-7b-chat"
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "llama-2-7b-chat"
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "llama-2-7b-chat"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "llama-2-7b-chat"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "llama-2-7b-chat"
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "llama-2-7b-chat"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "llama-2-7b-chat"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "llama-2-7b-chat",
-                    "Core17InstructionRetrieval": 2.84,
-                    "News21InstructionRetrieval": 0.23,
-                    "Robust04InstructionRetrieval": 2.0
-                }
-            ]
-        }
-    },
-    "LLM2Vec-Sheared-Llama-supervised": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "LLM2Vec-Sheared-Llama-supervised"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "LLM2Vec-Sheared-Llama-supervised",
-                    "AmazonCounterfactualClassification (en)": 77.42,
-                    "AmazonPolarityClassification": 82.05,
-                    "AmazonReviewsClassification (en)": 40.81,
-                    "Banking77Classification": 86.01,
-                    "EmotionClassification": 48.38,
-                    "ImdbClassification": 75.33,
-                    "MTOPDomainClassification (en)": 94.09,
-                    "MTOPIntentClassification (en)": 77.05,
-                    "MassiveIntentClassification (en)": 75.58,
-                    "MassiveScenarioClassification (en)": 79.16,
-                    "ToxicConversationsClassification": 69.92,
-                    "TweetSentimentExtractionClassification": 60.76
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "LLM2Vec-Sheared-Llama-supervised",
-                    "ArxivClusteringP2P": 43.47,
-                    "ArxivClusteringS2S": 39.85,
-                    "BiorxivClusteringP2P": 37.1,
-                    "BiorxivClusteringS2S": 34.28,
-                    "MedrxivClusteringP2P": 33.55,
-                    "MedrxivClusteringS2S": 31.11,
-                    "RedditClustering": 53.02,
-                    "RedditClusteringP2P": 60.47,
-                    "StackExchangeClustering": 63.04,
-                    "StackExchangeClusteringP2P": 34.01,
-                    "TwentyNewsgroupsClustering": 49.37
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "LLM2Vec-Sheared-Llama-supervised",
-                    "SprintDuplicateQuestions": 96.25,
-                    "TwitterSemEval2015": 76.14,
-                    "TwitterURLCorpus": 86.23
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "LLM2Vec-Sheared-Llama-supervised",
-                    "AskUbuntuDupQuestions": 60.71,
-                    "MindSmallReranking": 31.96,
-                    "SciDocsRR": 79.23,
-                    "StackOverflowDupQuestions": 49.61
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "LLM2Vec-Sheared-Llama-supervised",
-                    "ArguAna": 51.66,
-                    "CQADupstackRetrieval": 41.73,
-                    "ClimateFEVER": 33.49,
-                    "DBPedia": 43.58,
-                    "FEVER": 86.81,
-                    "FiQA2018": 41.0,
-                    "HotpotQA": 63.85,
-                    "MSMARCO": 38.32,
-                    "NFCorpus": 37.12,
-                    "NQ": 53.89,
-                    "QuoraRetrieval": 87.37,
-                    "SCIDOCS": 17.96,
-                    "SciFact": 72.08,
-                    "TRECCOVID": 80.41,
-                    "Touche2020": 22.31
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "LLM2Vec-Sheared-Llama-supervised",
-                    "BIOSSES": 85.88,
-                    "SICK-R": 82.25,
-                    "STS12": 78.28,
-                    "STS13": 85.52,
-                    "STS14": 82.49,
-                    "STS15": 88.76,
-                    "STS16": 87.11,
-                    "STS17 (en-en)": 90.1,
-                    "STS22 (en)": 68.25,
-                    "STSBenchmark": 87.16
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "LLM2Vec-Sheared-Llama-supervised",
-                    "SummEval": 30.01
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "LLM2Vec-Sheared-Llama-supervised"
-                }
-            ]
-        }
-    },
-    "use-cmlm-multilingual": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "use-cmlm-multilingual"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "use-cmlm-multilingual"
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "use-cmlm-multilingual",
-                    "BlurbsClusteringP2P": 29.63,
-                    "BlurbsClusteringS2S": 15.24,
-                    "TenKGnadClusteringP2P": 37.1,
-                    "TenKGnadClusteringS2S": 25.64
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "use-cmlm-multilingual"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "use-cmlm-multilingual"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "use-cmlm-multilingual"
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "use-cmlm-multilingual"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "use-cmlm-multilingual"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "use-cmlm-multilingual"
-                }
-            ]
-        }
-    },
-    "rubert-tiny2": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "rubert-tiny2"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "rubert-tiny2",
-                    "GeoreviewClassification (rus-Cyrl)": 39.64,
-                    "HeadlineClassification (rus-Cyrl)": 74.19,
-                    "InappropriatenessClassification (rus-Cyrl)": 58.57,
-                    "KinopoiskClassification (rus-Cyrl)": 49.06,
-                    "MassiveIntentClassification (rus-Cyrl)": 50.83,
-                    "MassiveScenarioClassification (rus-Cyrl)": 59.15,
-                    "RuReviewsClassification (rus-Cyrl)": 56.99,
-                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 45.63,
-                    "RuSciBenchOECDClassification (rus-Cyrl)": 35.48
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "rubert-tiny2",
-                    "GeoreviewClusteringP2P (rus-Cyrl)": 44.18,
-                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 41.41,
-                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 38.09
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "rubert-tiny2",
-                    "TERRa (rus-Cyrl)": 51.87
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "rubert-tiny2",
-                    "RuBQReranking (rus-Cyrl)": 46.09
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "rubert-tiny2",
-                    "RiaNewsRetrieval (rus-Cyrl)": 13.92,
-                    "RuBQRetrieval (rus-Cyrl)": 10.87
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "rubert-tiny2",
-                    "RUParaPhraserSTS (rus-Cyrl)": 65.14,
-                    "RuSTSBenchmarkSTS (rus-Cyrl)": 69.43,
-                    "STS22 (rus-Cyrl)": 50.23
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "rubert-tiny2"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "rubert-tiny2"
-                }
-            ]
-        }
-    },
-    "gtr-t5-xxl": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "gtr-t5-xxl"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "gtr-t5-xxl",
-                    "AmazonCounterfactualClassification (en)": 67.3,
-                    "AmazonPolarityClassification": 75.05,
-                    "AmazonReviewsClassification (en)": 37.3,
-                    "Banking77Classification": 82.32,
-                    "EmotionClassification": 43.19,
-                    "ImdbClassification": 70.8,
-                    "MTOPDomainClassification (en)": 93.84,
-                    "MTOPIntentClassification (en)": 67.71,
-                    "MassiveIntentClassification (en)": 70.61,
-                    "MassiveScenarioClassification (en)": 77.77,
-                    "ToxicConversationsClassification": 68.48,
-                    "TweetSentimentExtractionClassification": 54.54
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "gtr-t5-xxl",
-                    "ArxivClusteringP2P": 37.9,
-                    "ArxivClusteringS2S": 32.39,
-                    "BiorxivClusteringP2P": 30.48,
-                    "BiorxivClusteringS2S": 27.5,
-                    "MedrxivClusteringP2P": 29.12,
-                    "MedrxivClusteringS2S": 27.56,
-                    "RedditClustering": 64.13,
-                    "RedditClusteringP2P": 62.84,
-                    "StackExchangeClustering": 71.43,
-                    "StackExchangeClusteringP2P": 32.85,
-                    "TwentyNewsgroupsClustering": 50.44
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "gtr-t5-xxl",
-                    "SprintDuplicateQuestions": 95.68,
-                    "TwitterSemEval2015": 77.54,
-                    "TwitterURLCorpus": 85.13
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "gtr-t5-xxl",
-                    "AskUbuntuDupQuestions": 63.23,
-                    "MindSmallReranking": 31.93,
-                    "SciDocsRR": 77.96,
-                    "StackOverflowDupQuestions": 53.5
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "gtr-t5-xxl",
-                    "ArguAna": 53.77,
-                    "CQADupstackRetrieval": 38.56,
-                    "ClimateFEVER": 27.21,
-                    "DBPedia": 41.28,
-                    "FEVER": 74.08,
-                    "FiQA2018": 46.78,
-                    "HotpotQA": 59.67,
-                    "MSMARCO": 44.05,
-                    "NFCorpus": 34.18,
-                    "NQ": 57.24,
-                    "QuoraRetrieval": 89.09,
-                    "SCIDOCS": 15.88,
-                    "SciFact": 66.77,
-                    "TRECCOVID": 51.9,
-                    "Touche2020": 26.76
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "gtr-t5-xxl",
-                    "BIOSSES": 81.91,
-                    "SICK-R": 74.29,
-                    "STS12": 70.12,
-                    "STS13": 82.72,
-                    "STS14": 78.24,
-                    "STS15": 86.26,
-                    "STS16": 81.61,
-                    "STS17 (en-en)": 85.18,
-                    "STS22 (en)": 65.76,
-                    "STSBenchmark": 77.73
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "gtr-t5-xxl",
-                    "SummEval": 30.64
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "gtr-t5-xxl"
-                }
-            ]
-        }
-    },
-    "voyage-lite-02-instruct": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "voyage-lite-02-instruct"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "voyage-lite-02-instruct",
-                    "AmazonCounterfactualClassification (en)": 88.31,
-                    "AmazonPolarityClassification": 96.32,
-                    "AmazonReviewsClassification (en)": 56.25,
-                    "Banking77Classification": 88.59,
-                    "EmotionClassification": 50.28,
-                    "ImdbClassification": 95.75,
-                    "MTOPDomainClassification (en)": 97.65,
-                    "MTOPIntentClassification (en)": 75.16,
-                    "MassiveIntentClassification (en)": 73.97,
-                    "MassiveScenarioClassification (en)": 83.99,
-                    "ToxicConversationsClassification": 81.75,
-                    "TweetSentimentExtractionClassification": 62.98
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "voyage-lite-02-instruct",
-                    "ArxivClusteringP2P": 51.95,
-                    "ArxivClusteringS2S": 42.48,
-                    "BiorxivClusteringP2P": 50.15,
-                    "BiorxivClusteringS2S": 42.84,
-                    "MedrxivClusteringP2P": 47.24,
-                    "MedrxivClusteringS2S": 43.48,
-                    "RedditClustering": 63.73,
-                    "RedditClusteringP2P": 64.09,
-                    "StackExchangeClustering": 70.71,
-                    "StackExchangeClusteringP2P": 40.34,
-                    "TwentyNewsgroupsClustering": 59.56
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "voyage-lite-02-instruct",
-                    "SprintDuplicateQuestions": 98.07,
-                    "TwitterSemEval2015": 74.44,
-                    "TwitterURLCorpus": 88.11
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "voyage-lite-02-instruct",
-                    "AskUbuntuDupQuestions": 63.24,
-                    "MindSmallReranking": 31.48,
-                    "SciDocsRR": 84.68,
-                    "StackOverflowDupQuestions": 53.56
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "voyage-lite-02-instruct",
-                    "ArguAna": 70.28,
-                    "CQADupstackRetrieval": 46.2,
-                    "ClimateFEVER": 31.95,
-                    "DBPedia": 39.79,
-                    "FEVER": 91.35,
-                    "FiQA2018": 52.51,
-                    "HotpotQA": 75.51,
-                    "MSMARCO": 37.93,
-                    "NFCorpus": 43.7,
-                    "NQ": 64.26,
-                    "QuoraRetrieval": 87.62,
-                    "SCIDOCS": 20.24,
-                    "SciFact": 79.91,
-                    "TRECCOVID": 81.02,
-                    "Touche2020": 26.8
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "voyage-lite-02-instruct",
-                    "BIOSSES": 89.7,
-                    "SICK-R": 78.44,
-                    "STS12": 86.46,
-                    "STS13": 87.76,
-                    "STS14": 86.6,
-                    "STS15": 90.1,
-                    "STS16": 86.39,
-                    "STS17 (en-en)": 86.98,
-                    "STS22 (en)": 76.89,
-                    "STSBenchmark": 88.56
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "voyage-lite-02-instruct",
-                    "SummEval": 31.01
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "voyage-lite-02-instruct"
-                }
-            ]
-        }
-    },
-    "gte-Qwen2-7B-instruct": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "gte-Qwen2-7B-instruct"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "gte-Qwen2-7B-instruct"
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "gte-Qwen2-7B-instruct"
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "gte-Qwen2-7B-instruct"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "gte-Qwen2-7B-instruct"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "gte-Qwen2-7B-instruct",
-                    "BrightRetrieval (earth_science)": 40.66,
-                    "BrightRetrieval (sustainable_living)": 20.82,
-                    "BrightRetrieval (theoremqa_theorems)": 28.15,
-                    "BrightRetrieval (aops)": 15.1,
-                    "BrightRetrieval (economics)": 16.18,
-                    "BrightRetrieval (pony)": 1.25,
-                    "BrightRetrieval (stackoverflow)": 13.95,
-                    "BrightRetrieval (leetcode)": 31.07,
-                    "BrightRetrieval (biology)": 32.09,
-                    "BrightRetrieval (theoremqa_questions)": 29.9,
-                    "BrightRetrieval (robotics)": 12.82,
-                    "BrightRetrieval (psychology)": 26.58
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "gte-Qwen2-7B-instruct"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "gte-Qwen2-7B-instruct"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "gte-Qwen2-7B-instruct"
-                }
-            ]
-        }
-    },
-    "instructor-xl": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "instructor-xl"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "instructor-xl"
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "instructor-xl"
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "instructor-xl"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "instructor-xl"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "instructor-xl",
-                    "BrightRetrieval (aops)": 8.26,
-                    "BrightRetrieval (robotics)": 17.39,
-                    "BrightRetrieval (economics)": 22.81,
-                    "BrightRetrieval (stackoverflow)": 19.06,
-                    "BrightRetrieval (leetcode)": 27.5,
-                    "BrightRetrieval (theoremqa_questions)": 14.59,
-                    "BrightRetrieval (psychology)": 27.43,
-                    "BrightRetrieval (biology)": 21.91,
-                    "BrightRetrieval (theoremqa_theorems)": 6.5,
-                    "BrightRetrieval (earth_science)": 34.35,
-                    "BrightRetrieval (sustainable_living)": 18.82,
-                    "BrightRetrieval (pony)": 5.02
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "instructor-xl"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "instructor-xl"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "instructor-xl",
-                    "Core17InstructionRetrieval": 0.69,
-                    "News21InstructionRetrieval": -0.9,
-                    "Robust04InstructionRetrieval": -8.08
-                }
-            ]
-        }
-    },
-    "mistral-embed": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "mistral-embed"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "mistral-embed",
-                    "AmazonReviewsClassification (fr)": 41.59,
-                    "MTOPDomainClassification (fr)": 90.05,
-                    "MTOPIntentClassification (fr)": 66.09,
-                    "MasakhaNEWSClassification (fra)": 81.4,
-                    "MassiveIntentClassification (fr)": 62.83,
-                    "MassiveScenarioClassification (fr)": 69.71
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "mistral-embed",
-                    "AlloProfClusteringP2P": 62.01,
-                    "AlloProfClusteringS2S": 49.2,
-                    "HALClusteringS2S": 26.17,
-                    "MLSUMClusteringP2P": 45.28,
-                    "MLSUMClusteringS2S": 42.74,
-                    "MasakhaNEWSClusteringP2P (fra)": 48.13,
-                    "MasakhaNEWSClusteringS2S (fra)": 39.62
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "mistral-embed",
-                    "OpusparcusPC (fr)": 92.61,
-                    "PawsXPairClassification (fr)": 62.02
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "mistral-embed",
-                    "AlloprofReranking": 72.36,
-                    "SyntecReranking": 88.57
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "mistral-embed",
-                    "AILACasedocs": 38.2,
-                    "AILAStatutes": 44.81,
-                    "AlloprofRetrieval": 56.84,
-                    "BSARDRetrieval": 2.48,
-                    "GerDaLIRSmall": 17.85,
-                    "LeCaRDv2": 61.12,
-                    "LegalBenchConsumerContractsQA": 80.8,
-                    "LegalBenchCorporateLobbying": 94.11,
-                    "LegalQuAD": 47.17,
-                    "LegalSummarization": 67.39,
-                    "MintakaRetrieval (fr)": 21.73,
-                    "SyntecRetrieval": 78.77,
-                    "XPQARetrieval (fr)": 74.24
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "mistral-embed",
-                    "SICKFr": 76.21,
-                    "STS22 (fr)": 82.74,
-                    "STSBenchmarkMultilingualSTS (fr)": 79.72
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "mistral-embed",
-                    "SummEvalFr": 31.47
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "mistral-embed"
-                }
-            ]
-        }
-    },
-    "bert-base-uncased": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "bert-base-uncased"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "bert-base-uncased",
-                    "AmazonCounterfactualClassification (en)": 74.25,
-                    "AmazonPolarityClassification": 71.33,
-                    "AmazonReviewsClassification (en)": 33.56,
-                    "Banking77Classification": 63.41,
-                    "EmotionClassification": 35.28,
-                    "ImdbClassification": 65.35,
-                    "MTOPDomainClassification (en)": 82.63,
-                    "MTOPIntentClassification (en)": 68.14,
-                    "MassiveIntentClassification (en)": 59.88,
-                    "MassiveScenarioClassification (en)": 64.28,
-                    "ToxicConversationsClassification": 70.0,
-                    "TweetSentimentExtractionClassification": 51.81
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "bert-base-uncased",
-                    "ArxivClusteringP2P": 35.19,
-                    "ArxivClusteringS2S": 27.51,
-                    "BiorxivClusteringP2P": 30.12,
-                    "BiorxivClusteringS2S": 24.77,
-                    "MedrxivClusteringP2P": 26.09,
-                    "MedrxivClusteringS2S": 23.6,
-                    "RedditClustering": 27.24,
-                    "RedditClusteringP2P": 43.32,
-                    "StackExchangeClustering": 43.58,
-                    "StackExchangeClusteringP2P": 26.55,
-                    "TwentyNewsgroupsClustering": 23.35
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "bert-base-uncased",
-                    "SprintDuplicateQuestions": 36.81,
-                    "TwitterSemEval2015": 55.9,
-                    "TwitterURLCorpus": 76.29
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "bert-base-uncased",
-                    "AskUbuntuDupQuestions": 45.84,
-                    "MindSmallReranking": 28.37,
-                    "SciDocsRR": 64.94,
-                    "StackOverflowDupQuestions": 34.62
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "bert-base-uncased",
-                    "ArguAna": 28.29,
-                    "CQADupstackRetrieval": 5.51,
-                    "ClimateFEVER": 5.41,
-                    "DBPedia": 4.13,
-                    "FEVER": 3.3,
-                    "FiQA2018": 2.19,
-                    "HotpotQA": 8.26,
-                    "MSMARCO": 1.91,
-                    "NFCorpus": 4.3,
-                    "NQ": 2.62,
-                    "QuoraRetrieval": 61.03,
-                    "SCIDOCS": 2.82,
-                    "SciFact": 13.34,
-                    "TRECCOVID": 14.74,
-                    "Touche2020": 0.97
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "bert-base-uncased",
-                    "BIOSSES": 54.7,
-                    "SICK-R": 58.65,
-                    "STS12": 30.87,
-                    "STS13": 59.89,
-                    "STS14": 47.73,
-                    "STS15": 60.29,
-                    "STS16": 63.73,
-                    "STS17 (en-en)": 64.1,
-                    "STS22 (en)": 56.37,
-                    "STSBenchmark": 47.29
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "bert-base-uncased",
-                    "SummEval": 29.82
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "bert-base-uncased"
-                }
-            ]
-        }
-    },
-    "text-embedding-3-large-256": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "text-embedding-3-large-256"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "text-embedding-3-large-256",
-                    "AmazonCounterfactualClassification (en)": 73.96,
-                    "AmazonPolarityClassification": 91.32,
-                    "AmazonReviewsClassification (en)": 46.03,
-                    "Banking77Classification": 83.19,
-                    "EmotionClassification": 45.8,
-                    "ImdbClassification": 85.93,
-                    "MTOPDomainClassification (en)": 92.76,
-                    "MTOPIntentClassification (en)": 70.45,
-                    "MassiveIntentClassification (en)": 71.12,
-                    "MassiveScenarioClassification (en)": 75.56,
-                    "ToxicConversationsClassification": 68.52,
-                    "TweetSentimentExtractionClassification": 58.98
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "text-embedding-3-large-256",
-                    "ArxivClusteringP2P": 47.05,
-                    "ArxivClusteringS2S": 42.59,
-                    "BiorxivClusteringP2P": 35.43,
-                    "BiorxivClusteringS2S": 33.86,
-                    "MedrxivClusteringP2P": 32.1,
-                    "MedrxivClusteringS2S": 31.15,
-                    "RedditClustering": 60.18,
-                    "RedditClusteringP2P": 64.71,
-                    "StackExchangeClustering": 71.23,
-                    "StackExchangeClusteringP2P": 35.95,
-                    "TwentyNewsgroupsClustering": 54.24
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "text-embedding-3-large-256",
-                    "SprintDuplicateQuestions": 89.02,
-                    "TwitterSemEval2015": 76.56,
-                    "TwitterURLCorpus": 87.09
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "text-embedding-3-large-256",
-                    "AskUbuntuDupQuestions": 64.61,
-                    "MindSmallReranking": 29.63,
-                    "SciDocsRR": 84.25,
-                    "StackOverflowDupQuestions": 53.46
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "text-embedding-3-large-256",
-                    "ArguAna": 55.6,
-                    "CQADupstackRetrieval": 42.28,
-                    "ClimateFEVER": 25.8,
-                    "DBPedia": 40.8,
-                    "FEVER": 84.57,
-                    "FiQA2018": 50.33,
-                    "HotpotQA": 62.69,
-                    "MSMARCO": 37.93,
-                    "NFCorpus": 37.94,
-                    "NQ": 56.64,
-                    "QuoraRetrieval": 88.22,
-                    "SCIDOCS": 20.44,
-                    "SciFact": 73.1,
-                    "TRECCOVID": 76.24,
-                    "Touche2020": 22.31
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "text-embedding-3-large-256",
-                    "BIOSSES": 84.87,
-                    "SICK-R": 79.18,
-                    "STS12": 71.98,
-                    "STS13": 85.52,
-                    "STS14": 80.5,
-                    "STS15": 87.51,
-                    "STS16": 84.48,
-                    "STS17 (en-en)": 88.11,
-                    "STS22 (en)": 65.92,
-                    "STSBenchmark": 82.34
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "text-embedding-3-large-256",
-                    "SummEval": 29.92
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "text-embedding-3-large-256"
-                }
-            ]
-        }
-    },
-    "google-gecko.text-embedding-preview-0409": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "google-gecko.text-embedding-preview-0409"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "google-gecko.text-embedding-preview-0409",
-                    "AmazonCounterfactualClassification (en)": 75.34,
-                    "AmazonPolarityClassification": 97.34,
-                    "AmazonReviewsClassification (en)": 51.17,
-                    "Banking77Classification": 88.62,
-                    "EmotionClassification": 52.51,
-                    "ImdbClassification": 95.65,
-                    "MTOPDomainClassification (en)": 98.35,
-                    "MTOPIntentClassification (en)": 83.43,
-                    "MassiveIntentClassification (en)": 80.22,
-                    "MassiveScenarioClassification (en)": 87.19,
-                    "ToxicConversationsClassification": 89.67,
-                    "TweetSentimentExtractionClassification": 74.52
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "google-gecko.text-embedding-preview-0409",
-                    "ArxivClusteringP2P": 46.27,
-                    "ArxivClusteringS2S": 38.36,
-                    "BiorxivClusteringP2P": 37.87,
-                    "BiorxivClusteringS2S": 35.67,
-                    "MedrxivClusteringP2P": 33.11,
-                    "MedrxivClusteringS2S": 31.54,
-                    "RedditClustering": 65.81,
-                    "RedditClusteringP2P": 66.62,
-                    "StackExchangeClustering": 74.52,
-                    "StackExchangeClusteringP2P": 37.63,
-                    "TwentyNewsgroupsClustering": 54.87
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "google-gecko.text-embedding-preview-0409",
-                    "SprintDuplicateQuestions": 96.26,
-                    "TwitterSemEval2015": 79.04,
-                    "TwitterURLCorpus": 87.53
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "google-gecko.text-embedding-preview-0409",
-                    "AskUbuntuDupQuestions": 64.4,
-                    "MindSmallReranking": 33.07,
-                    "SciDocsRR": 83.59,
-                    "StackOverflowDupQuestions": 54.56
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "google-gecko.text-embedding-preview-0409",
-                    "ArguAna": 62.18,
-                    "BrightRetrieval (earth_science)": 34.38,
-                    "BrightRetrieval (leetcode)": 29.64,
-                    "BrightRetrieval (theoremqa_questions)": 21.51,
-                    "BrightRetrieval (aops)": 9.33,
-                    "BrightRetrieval (sustainable_living)": 17.25,
-                    "BrightRetrieval (pony)": 3.59,
-                    "BrightRetrieval (theoremqa_theorems)": 16.77,
-                    "BrightRetrieval (stackoverflow)": 17.93,
-                    "BrightRetrieval (biology)": 22.98,
-                    "BrightRetrieval (robotics)": 15.98,
-                    "BrightRetrieval (economics)": 19.5,
-                    "BrightRetrieval (psychology)": 27.86,
-                    "CQADupstackRetrieval": 48.89,
-                    "ClimateFEVER": 33.21,
-                    "DBPedia": 47.12,
-                    "FEVER": 86.96,
-                    "FiQA2018": 59.24,
-                    "HotpotQA": 71.33,
-                    "MSMARCO": 32.58,
-                    "NFCorpus": 40.33,
-                    "NQ": 61.28,
-                    "QuoraRetrieval": 88.18,
-                    "SCIDOCS": 20.34,
-                    "SciFact": 75.42,
-                    "TRECCOVID": 82.62,
-                    "Touche2020": 25.86
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "google-gecko.text-embedding-preview-0409",
-                    "BIOSSES": 89.46,
-                    "SICK-R": 81.93,
-                    "STS12": 77.59,
-                    "STS13": 90.36,
-                    "STS14": 85.25,
-                    "STS15": 89.66,
-                    "STS16": 87.34,
-                    "STS17 (en-en)": 92.06,
-                    "STS22 (en)": 68.02,
-                    "STSBenchmark": 88.99
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "google-gecko.text-embedding-preview-0409",
-                    "SummEval": 32.63
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "google-gecko.text-embedding-preview-0409",
-                    "Core17InstructionRetrieval": 5.44,
-                    "News21InstructionRetrieval": 3.94,
-                    "Robust04InstructionRetrieval": -2.4
-                }
-            ]
-        }
-    },
-    "jina-embeddings-v2-base-en": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "jina-embeddings-v2-base-en"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "jina-embeddings-v2-base-en"
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "jina-embeddings-v2-base-en"
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "jina-embeddings-v2-base-en"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "jina-embeddings-v2-base-en"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "jina-embeddings-v2-base-en",
-                    "LEMBNarrativeQARetrieval": 37.89,
-                    "LEMBNeedleRetrieval": 54.25,
-                    "LEMBPasskeyRetrieval": 50.25,
-                    "LEMBQMSumRetrieval": 38.87,
-                    "LEMBSummScreenFDRetrieval": 93.48,
-                    "LEMBWikimQARetrieval": 73.99
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "jina-embeddings-v2-base-en"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "jina-embeddings-v2-base-en"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "jina-embeddings-v2-base-en"
-                }
-            ]
-        }
-    },
-    "nb-bert-base": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "nb-bert-base",
-                    "BornholmBitextMining": 9.88
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "nb-bert-base",
-                    "AngryTweetsClassification": 52.14,
-                    "DKHateClassification": 61.73,
-                    "DanishPoliticalCommentsClassification": 34.84,
-                    "LccSentimentClassification": 51.4,
-                    "MassiveIntentClassification (da)": 56.69,
-                    "MassiveIntentClassification (nb)": 60.67,
-                    "MassiveIntentClassification (sv)": 53.89,
-                    "MassiveScenarioClassification (da)": 61.93,
-                    "MassiveScenarioClassification (nb)": 67.31,
-                    "MassiveScenarioClassification (sv)": 55.37,
-                    "NoRecClassification": 51.32,
-                    "NordicLangClassification": 84.69,
-                    "NorwegianParliament": 57.41,
-                    "ScalaDaClassification": 57.99,
-                    "ScalaNbClassification": 62.25
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "nb-bert-base"
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "nb-bert-base"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "nb-bert-base"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "nb-bert-base"
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "nb-bert-base"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "nb-bert-base"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "nb-bert-base"
-                }
-            ]
-        }
-    },
-    "bge-base-en-v1.5-instruct": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "bge-base-en-v1.5-instruct"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "bge-base-en-v1.5-instruct"
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "bge-base-en-v1.5-instruct"
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "bge-base-en-v1.5-instruct"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "bge-base-en-v1.5-instruct"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "bge-base-en-v1.5-instruct",
-                    "ARCChallenge": 8.85,
-                    "AlphaNLI": 4.13,
-                    "HellaSwag": 24.03,
-                    "PIQA": 23.03,
-                    "Quail": 1.25,
-                    "RARbCode": 46.32,
-                    "RARbMath": 45.62,
-                    "SIQA": 0.24,
-                    "SpartQA": 2.67,
-                    "TempReasonL1": 0.8,
-                    "TempReasonL2Fact": 16.56,
-                    "TempReasonL2Pure": 1.33,
-                    "TempReasonL3Fact": 12.68,
-                    "TempReasonL3Pure": 5.08,
-                    "WinoGrande": 10.27
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "bge-base-en-v1.5-instruct"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "bge-base-en-v1.5-instruct"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "bge-base-en-v1.5-instruct"
-                }
-            ]
-        }
-    },
-    "GritLM-7B-noinstruct": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "GritLM-7B-noinstruct"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "GritLM-7B-noinstruct"
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "GritLM-7B-noinstruct"
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "GritLM-7B-noinstruct"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "GritLM-7B-noinstruct"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "GritLM-7B-noinstruct",
-                    "ARCChallenge": 16.57,
-                    "AlphaNLI": 29.56,
-                    "HellaSwag": 36.03,
-                    "PIQA": 35.8,
-                    "Quail": 8.68,
-                    "RARbCode": 83.14,
-                    "RARbMath": 83.01,
-                    "SIQA": 5.73,
-                    "SpartQA": 1.56,
-                    "TempReasonL1": 2.57,
-                    "TempReasonL2Fact": 48.25,
-                    "TempReasonL2Pure": 8.98,
-                    "TempReasonL3Fact": 34.11,
-                    "TempReasonL3Pure": 12.44,
-                    "WinoGrande": 52.12
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "GritLM-7B-noinstruct"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "GritLM-7B-noinstruct"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "GritLM-7B-noinstruct"
-                }
-            ]
-        }
-    },
-    "st-polish-paraphrase-from-mpnet": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "st-polish-paraphrase-from-mpnet"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "st-polish-paraphrase-from-mpnet",
-                    "AllegroReviews": 34.55,
-                    "CBD": 67.48,
-                    "MassiveIntentClassification (pl)": 65.93,
-                    "MassiveScenarioClassification (pl)": 71.85,
-                    "PAC": 63.25,
-                    "PolEmo2.0-IN": 68.37,
-                    "PolEmo2.0-OUT": 30.99
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "st-polish-paraphrase-from-mpnet",
-                    "8TagsClustering": 33.15
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "st-polish-paraphrase-from-mpnet",
-                    "CDSC-E": 75.06,
-                    "PPC": 93.49,
-                    "PSC": 99.05,
-                    "SICK-E-PL": 80.56
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "st-polish-paraphrase-from-mpnet"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "st-polish-paraphrase-from-mpnet",
-                    "ArguAna-PL": 51.87,
-                    "DBPedia-PL": 24.59,
-                    "FiQA-PL": 22.27,
-                    "HotpotQA-PL": 32.11,
-                    "MSMARCO-PL": 17.91,
-                    "NFCorpus-PL": 24.05,
-                    "NQ-PL": 23.54,
-                    "Quora-PL": 81.49,
-                    "SCIDOCS-PL": 13.23,
-                    "SciFact-PL": 52.51,
-                    "TRECCOVID-PL": 35.23
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "st-polish-paraphrase-from-mpnet",
-                    "CDSC-R": 88.55,
-                    "SICK-R-PL": 76.18,
-                    "STS22 (pl)": 37.34
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "st-polish-paraphrase-from-mpnet"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "st-polish-paraphrase-from-mpnet"
-                }
-            ]
-        }
-    },
-    "contriever": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "contriever"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "contriever"
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "contriever"
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "contriever"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "contriever"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "contriever",
-                    "ARCChallenge": 8.62,
-                    "AlphaNLI": 31.77,
-                    "HellaSwag": 17.73,
-                    "PIQA": 24.64,
-                    "Quail": 4.97,
-                    "RARbCode": 9.28,
-                    "RARbMath": 30.76,
-                    "SIQA": 1.27,
-                    "SpartQA": 10.94,
-                    "TempReasonL1": 1.93,
-                    "TempReasonL2Fact": 22.68,
-                    "TempReasonL2Pure": 1.12,
-                    "TempReasonL3Fact": 20.62,
-                    "TempReasonL3Pure": 7.8,
-                    "WinoGrande": 47.15
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "contriever"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "contriever"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "contriever"
-                }
-            ]
-        }
-    },
-    "e5-small": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "e5-small",
-                    "BornholmBitextMining": 40.27
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "e5-small",
-                    "AngryTweetsClassification": 43.6,
-                    "DKHateClassification": 57.57,
-                    "DanishPoliticalCommentsClassification": 28.37,
-                    "LccSentimentClassification": 40.27,
-                    "MassiveIntentClassification (da)": 41.89,
-                    "MassiveIntentClassification (nb)": 40.25,
-                    "MassiveIntentClassification (sv)": 40.07,
-                    "MassiveScenarioClassification (da)": 49.93,
-                    "MassiveScenarioClassification (nb)": 48.58,
-                    "MassiveScenarioClassification (sv)": 47.06,
-                    "NoRecClassification": 41.84,
-                    "NordicLangClassification": 53.47,
-                    "NorwegianParliament": 56.57,
-                    "ScalaDaClassification": 50.15,
-                    "ScalaNbClassification": 50.03
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "e5-small",
-                    "BiorxivClusteringP2P": 36.1,
-                    "BiorxivClusteringS2S": 31.51,
-                    "MedrxivClusteringP2P": 31.31,
-                    "MedrxivClusteringS2S": 28.32,
-                    "RedditClustering": 43.27,
-                    "RedditClusteringP2P": 57.22,
-                    "StackExchangeClustering": 59.6,
-                    "StackExchangeClusteringP2P": 30.82,
-                    "TwentyNewsgroupsClustering": 37.65
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "e5-small"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "e5-small"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "e5-small"
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "e5-small"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "e5-small"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "e5-small"
-                }
-            ]
-        }
-    },
-    "udever-bloom-560m": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "udever-bloom-560m"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "udever-bloom-560m",
-                    "AmazonReviewsClassification (fr)": 26.85,
-                    "MTOPDomainClassification (fr)": 34.99,
-                    "MTOPIntentClassification (fr)": 15.76,
-                    "MasakhaNEWSClassification (fra)": 67.94,
-                    "MassiveIntentClassification (fr)": 15.09,
-                    "MassiveScenarioClassification (fr)": 21.67
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "udever-bloom-560m",
-                    "AlloProfClusteringP2P": 53.57,
-                    "AlloProfClusteringS2S": 22.13,
-                    "HALClusteringS2S": 7.68,
-                    "MLSUMClusteringP2P": 36.43,
-                    "MLSUMClusteringS2S": 25.26,
-                    "MasakhaNEWSClusteringP2P (fra)": 37.57,
-                    "MasakhaNEWSClusteringS2S (fra)": 20.58
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "udever-bloom-560m",
-                    "OpusparcusPC (fr)": 82.1,
-                    "PawsXPairClassification (fr)": 59.69
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "udever-bloom-560m",
-                    "AlloprofReranking": 28.75,
-                    "SyntecReranking": 50.88
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "udever-bloom-560m",
-                    "AlloprofRetrieval": 1.98,
-                    "BSARDRetrieval": 0.0,
-                    "MintakaRetrieval (fr)": 0.48,
-                    "SyntecRetrieval": 24.45,
-                    "XPQARetrieval (fr)": 12.98
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "udever-bloom-560m",
-                    "SICKFr": 54.54,
-                    "STS22 (fr)": 61.35,
-                    "STSBenchmarkMultilingualSTS (fr)": 36.78
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "udever-bloom-560m",
-                    "SummEvalFr": 23.63
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "udever-bloom-560m"
-                }
-            ]
-        }
-    },
-    "google-gecko-256.text-embedding-preview-0409": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "google-gecko-256.text-embedding-preview-0409"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "google-gecko-256.text-embedding-preview-0409",
-                    "AmazonCounterfactualClassification (en)": 70.93,
-                    "AmazonPolarityClassification": 97.34,
-                    "AmazonReviewsClassification (en)": 48.47,
-                    "Banking77Classification": 86.01,
-                    "EmotionClassification": 51.53,
-                    "ImdbClassification": 95.7,
-                    "MTOPDomainClassification (en)": 98.02,
-                    "MTOPIntentClassification (en)": 77.82,
-                    "MassiveIntentClassification (en)": 75.67,
-                    "MassiveScenarioClassification (en)": 85.16,
-                    "ToxicConversationsClassification": 88.33,
-                    "TweetSentimentExtractionClassification": 72.97
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "google-gecko-256.text-embedding-preview-0409",
-                    "ArxivClusteringP2P": 44.12,
-                    "ArxivClusteringS2S": 36.54,
-                    "BiorxivClusteringP2P": 36.28,
-                    "BiorxivClusteringS2S": 33.09,
-                    "MedrxivClusteringP2P": 32.08,
-                    "MedrxivClusteringS2S": 30.84,
-                    "RedditClustering": 62.24,
-                    "RedditClusteringP2P": 63.7,
-                    "StackExchangeClustering": 70.19,
-                    "StackExchangeClusteringP2P": 36.1,
-                    "TwentyNewsgroupsClustering": 50.6
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "google-gecko-256.text-embedding-preview-0409",
-                    "SprintDuplicateQuestions": 96.49,
-                    "TwitterSemEval2015": 78.23,
-                    "TwitterURLCorpus": 87.04
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "google-gecko-256.text-embedding-preview-0409",
-                    "AskUbuntuDupQuestions": 63.84,
-                    "MindSmallReranking": 31.89,
-                    "SciDocsRR": 81.62,
-                    "StackOverflowDupQuestions": 53.76
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "google-gecko-256.text-embedding-preview-0409",
-                    "ArguAna": 56.27,
-                    "CQADupstackRetrieval": 45.41,
-                    "ClimateFEVER": 29.35,
-                    "DBPedia": 41.91,
-                    "FEVER": 82.61,
-                    "FiQA2018": 55.54,
-                    "HotpotQA": 64.65,
-                    "MSMARCO": 31.12,
-                    "NFCorpus": 37.81,
-                    "NQ": 57.37,
-                    "QuoraRetrieval": 87.89,
-                    "SCIDOCS": 18.21,
-                    "SciFact": 70.86,
-                    "TRECCOVID": 80.13,
-                    "Touche2020": 27.4
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "google-gecko-256.text-embedding-preview-0409",
-                    "BIOSSES": 89.42,
-                    "SICK-R": 81.67,
-                    "STS12": 78.02,
-                    "STS13": 90.1,
-                    "STS14": 85.44,
-                    "STS15": 89.64,
-                    "STS16": 87.24,
-                    "STS17 (en-en)": 90.46,
-                    "STS22 (en)": 67.99,
-                    "STSBenchmark": 89.33
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "google-gecko-256.text-embedding-preview-0409",
-                    "SummEval": 32.36
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "google-gecko-256.text-embedding-preview-0409"
-                }
-            ]
-        }
-    },
-    "text-similarity-davinci-001": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "text-similarity-davinci-001"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "text-similarity-davinci-001"
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "text-similarity-davinci-001",
-                    "RedditClustering": 31.78,
-                    "StackExchangeClustering": 36.86,
-                    "TwentyNewsgroupsClustering": 29.33
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "text-similarity-davinci-001",
-                    "SprintDuplicateQuestions": 69.52,
-                    "TwitterSemEval2015": 74.42,
-                    "TwitterURLCorpus": 83.75
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "text-similarity-davinci-001",
-                    "AskUbuntuDupQuestions": 53.56,
-                    "SciDocsRR": 68.7,
-                    "StackOverflowDupQuestions": 39.41
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "text-similarity-davinci-001"
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "text-similarity-davinci-001",
-                    "BIOSSES": 68.95,
-                    "SICK-R": 78.72,
-                    "STSBenchmark": 84.08
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "text-similarity-davinci-001"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "text-similarity-davinci-001"
-                }
-            ]
-        }
-    },
-    "e5-large": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "e5-large",
-                    "BornholmBitextMining": 40.15
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "e5-large",
-                    "AngryTweetsClassification": 46.14,
-                    "DKHateClassification": 58.72,
-                    "DanishPoliticalCommentsClassification": 28.67,
-                    "LccSentimentClassification": 42.13,
-                    "MassiveIntentClassification (da)": 42.29,
-                    "MassiveIntentClassification (nb)": 40.63,
-                    "MassiveIntentClassification (sv)": 40.69,
-                    "MassiveScenarioClassification (da)": 52.95,
-                    "MassiveScenarioClassification (nb)": 51.91,
-                    "MassiveScenarioClassification (sv)": 50.97,
-                    "NoRecClassification": 41.83,
-                    "NordicLangClassification": 58.3,
-                    "NorwegianParliament": 57.26,
-                    "ScalaDaClassification": 49.9,
-                    "ScalaNbClassification": 50.13
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "e5-large"
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "e5-large"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "e5-large"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "e5-large"
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "e5-large"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "e5-large"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "e5-large"
-                }
-            ]
-        }
-    },
-    "text-similarity-ada-001": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "text-similarity-ada-001"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "text-similarity-ada-001",
-                    "AmazonCounterfactualClassification (en)": 76.4,
-                    "AmazonPolarityClassification": 92.83,
-                    "AmazonReviewsClassification (en)": 47.45,
-                    "Banking77Classification": 68.04,
-                    "EmotionClassification": 50.33,
-                    "ImdbClassification": 89.38,
-                    "MTOPDomainClassification (en)": 89.89,
-                    "MTOPIntentClassification (en)": 64.8,
-                    "MassiveIntentClassification (en)": 65.17,
-                    "MassiveScenarioClassification (en)": 67.67,
-                    "ToxicConversationsClassification": 70.0,
-                    "TweetSentimentExtractionClassification": 63.35
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "text-similarity-ada-001",
-                    "ArxivClusteringP2P": 41.49,
-                    "ArxivClusteringS2S": 28.47,
-                    "BiorxivClusteringP2P": 36.86,
-                    "BiorxivClusteringS2S": 27.55,
-                    "MedrxivClusteringP2P": 31.09,
-                    "MedrxivClusteringS2S": 26.5,
-                    "RedditClustering": 42.47,
-                    "RedditClusteringP2P": 58.1,
-                    "StackExchangeClustering": 53.52,
-                    "StackExchangeClusteringP2P": 30.43,
-                    "TwentyNewsgroupsClustering": 36.26
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "text-similarity-ada-001",
-                    "SprintDuplicateQuestions": 77.85,
-                    "TwitterSemEval2015": 69.04,
-                    "TwitterURLCorpus": 83.69
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "text-similarity-ada-001",
-                    "AskUbuntuDupQuestions": 53.49,
-                    "MindSmallReranking": 30.71,
-                    "SciDocsRR": 71.04,
-                    "StackOverflowDupQuestions": 40.85
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "text-similarity-ada-001",
-                    "ArguAna": 39.65,
-                    "CQADupstackRetrieval": 10.17,
-                    "ClimateFEVER": 2.83,
-                    "DBPedia": 3.48,
-                    "FEVER": 4.45,
-                    "FiQA2018": 7.54,
-                    "HotpotQA": 12.6,
-                    "MSMARCO": 10.53,
-                    "NFCorpus": 20.59,
-                    "NQ": 2.02,
-                    "QuoraRetrieval": 82.18,
-                    "SCIDOCS": 6.28,
-                    "SciFact": 45.46,
-                    "TRECCOVID": 24.56,
-                    "Touche2020": 3.1
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "text-similarity-ada-001",
-                    "BIOSSES": 78.04,
-                    "SICK-R": 77.48,
-                    "STS12": 72.3,
-                    "STS13": 81.49,
-                    "STS14": 74.74,
-                    "STS15": 84.28,
-                    "STS16": 82.06,
-                    "STS17 (en-en)": 87.08,
-                    "STS22 (en)": 64.71,
-                    "STSBenchmark": 83.78
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "text-similarity-ada-001",
-                    "SummEval": 26.94
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "text-similarity-ada-001"
-                }
-            ]
-        }
-    },
-    "tart-full-flan-t5-xl": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "tart-full-flan-t5-xl"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "tart-full-flan-t5-xl"
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "tart-full-flan-t5-xl"
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "tart-full-flan-t5-xl"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "tart-full-flan-t5-xl"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "tart-full-flan-t5-xl"
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "tart-full-flan-t5-xl"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "tart-full-flan-t5-xl"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "tart-full-flan-t5-xl",
-                    "Core17InstructionRetrieval": 2.82,
-                    "News21InstructionRetrieval": 1.99,
-                    "Robust04InstructionRetrieval": -0.72
-                }
-            ]
-        }
-    },
-    "bge-small-en-v1.5": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "bge-small-en-v1.5"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "bge-small-en-v1.5"
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "bge-small-en-v1.5"
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "bge-small-en-v1.5"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "bge-small-en-v1.5"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "bge-small-en-v1.5",
-                    "ARCChallenge": 8.95,
-                    "AlphaNLI": 11.64,
-                    "HellaSwag": 25.44,
-                    "PIQA": 23.92,
-                    "Quail": 1.75,
-                    "RARbCode": 42.36,
-                    "RARbMath": 44.98,
-                    "SIQA": 0.77,
-                    "SpartQA": 3.55,
-                    "TempReasonL1": 1.41,
-                    "TempReasonL2Fact": 17.56,
-                    "TempReasonL2Pure": 1.05,
-                    "TempReasonL3Fact": 13.88,
-                    "TempReasonL3Pure": 4.76,
-                    "WinoGrande": 10.28
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "bge-small-en-v1.5"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "bge-small-en-v1.5"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "bge-small-en-v1.5"
-                }
-            ]
-        }
-    },
-    "multilingual-e5-small": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "multilingual-e5-small",
-                    "BornholmBitextMining (dan-Latn)": 37.15,
-                    "BornholmBitextMining": 43.89,
-                    "Tatoeba (swh-Latn_eng-Latn)": 65.43,
-                    "Tatoeba (jpn-Jpan_eng-Latn)": 77.43,
-                    "Tatoeba (tuk-Latn_eng-Latn)": 16.99,
-                    "Tatoeba (lat-Latn_eng-Latn)": 37.76,
-                    "Tatoeba (mal-Mlym_eng-Latn)": 94.78,
-                    "Tatoeba (ast-Latn_eng-Latn)": 62.81,
-                    "Tatoeba (est-Latn_eng-Latn)": 56.47,
-                    "Tatoeba (cym-Latn_eng-Latn)": 62.3,
-                    "Tatoeba (pol-Latn_eng-Latn)": 88.85,
-                    "Tatoeba (ukr-Cyrl_eng-Latn)": 82.98,
-                    "Tatoeba (ido-Latn_eng-Latn)": 70.07,
-                    "Tatoeba (zsm-Latn_eng-Latn)": 91.37,
-                    "Tatoeba (bul-Cyrl_eng-Latn)": 85.47,
-                    "Tatoeba (dsb-Latn_eng-Latn)": 29.87,
-                    "Tatoeba (tha-Thai_eng-Latn)": 90.88,
-                    "Tatoeba (arz-Arab_eng-Latn)": 53.35,
-                    "Tatoeba (cbk-Latn_eng-Latn)": 55.36,
-                    "Tatoeba (pms-Latn_eng-Latn)": 35.47,
-                    "Tatoeba (ber-Tfng_eng-Latn)": 18.22,
-                    "Tatoeba (slk-Latn_eng-Latn)": 79.86,
-                    "Tatoeba (ang-Latn_eng-Latn)": 30.3,
-                    "Tatoeba (ind-Latn_eng-Latn)": 88.28,
-                    "Tatoeba (cha-Latn_eng-Latn)": 24.88,
-                    "Tatoeba (slv-Latn_eng-Latn)": 73.93,
-                    "Tatoeba (kab-Latn_eng-Latn)": 18.06,
-                    "Tatoeba (ina-Latn_eng-Latn)": 86.39,
-                    "Tatoeba (lfn-Latn_eng-Latn)": 51.46,
-                    "Tatoeba (hye-Armn_eng-Latn)": 83.81,
-                    "Tatoeba (war-Latn_eng-Latn)": 39.14,
-                    "Tatoeba (dtp-Latn_eng-Latn)": 6.42,
-                    "Tatoeba (nds-Latn_eng-Latn)": 52.46,
-                    "Tatoeba (urd-Arab_eng-Latn)": 85.07,
-                    "Tatoeba (rus-Cyrl_eng-Latn)": 89.77,
-                    "Tatoeba (fao-Latn_eng-Latn)": 56.57,
-                    "Tatoeba (cat-Latn_eng-Latn)": 79.3,
-                    "Tatoeba (gla-Latn_eng-Latn)": 35.96,
-                    "Tatoeba (kur-Latn_eng-Latn)": 39.99,
-                    "Tatoeba (cor-Latn_eng-Latn)": 5.24,
-                    "Tatoeba (nov-Latn_eng-Latn)": 64.2,
-                    "Tatoeba (max-Deva_eng-Latn)": 48.29,
-                    "Tatoeba (nno-Latn_eng-Latn)": 70.29,
-                    "Tatoeba (kor-Hang_eng-Latn)": 73.74,
-                    "Tatoeba (vie-Latn_eng-Latn)": 89.03,
-                    "Tatoeba (tur-Latn_eng-Latn)": 88.42,
-                    "Tatoeba (spa-Latn_eng-Latn)": 93.01,
-                    "Tatoeba (gsw-Latn_eng-Latn)": 40.13,
-                    "Tatoeba (yid-Hebr_eng-Latn)": 65.9,
-                    "Tatoeba (orv-Cyrl_eng-Latn)": 14.89,
-                    "Tatoeba (wuu-Hans_eng-Latn)": 67.3,
-                    "Tatoeba (heb-Hebr_eng-Latn)": 73.68,
-                    "Tatoeba (arq-Arab_eng-Latn)": 23.62,
-                    "Tatoeba (nld-Latn_eng-Latn)": 91.87,
-                    "Tatoeba (kaz-Cyrl_eng-Latn)": 70.57,
-                    "Tatoeba (mon-Cyrl_eng-Latn)": 77.7,
-                    "Tatoeba (fin-Latn_eng-Latn)": 70.23,
-                    "Tatoeba (hrv-Latn_eng-Latn)": 84.42,
-                    "Tatoeba (fra-Latn_eng-Latn)": 90.51,
-                    "Tatoeba (khm-Khmr_eng-Latn)": 44.34,
-                    "Tatoeba (amh-Ethi_eng-Latn)": 74.11,
-                    "Tatoeba (eus-Latn_eng-Latn)": 50.9,
-                    "Tatoeba (lvs-Latn_eng-Latn)": 61.84,
-                    "Tatoeba (pes-Arab_eng-Latn)": 85.51,
-                    "Tatoeba (tzl-Latn_eng-Latn)": 34.83,
-                    "Tatoeba (oci-Latn_eng-Latn)": 38.27,
-                    "Tatoeba (ell-Grek_eng-Latn)": 86.81,
-                    "Tatoeba (tgl-Latn_eng-Latn)": 77.54,
-                    "Tatoeba (uig-Arab_eng-Latn)": 60.59,
-                    "Tatoeba (ben-Beng_eng-Latn)": 81.4,
-                    "Tatoeba (uzb-Latn_eng-Latn)": 59.11,
-                    "Tatoeba (epo-Latn_eng-Latn)": 88.96,
-                    "Tatoeba (sqi-Latn_eng-Latn)": 86.21,
-                    "Tatoeba (kzj-Latn_eng-Latn)": 6.56,
-                    "Tatoeba (mkd-Cyrl_eng-Latn)": 63.74,
-                    "Tatoeba (bre-Latn_eng-Latn)": 7.09,
-                    "Tatoeba (dan-Latn_eng-Latn)": 86.38,
-                    "Tatoeba (mhr-Cyrl_eng-Latn)": 5.58,
-                    "Tatoeba (csb-Latn_eng-Latn)": 26.23,
-                    "Tatoeba (xho-Latn_eng-Latn)": 63.2,
-                    "Tatoeba (swe-Latn_eng-Latn)": 87.46,
-                    "Tatoeba (tat-Cyrl_eng-Latn)": 66.8,
-                    "Tatoeba (srp-Cyrl_eng-Latn)": 83.06,
-                    "Tatoeba (cmn-Hans_eng-Latn)": 89.85,
-                    "Tatoeba (ces-Latn_eng-Latn)": 80.99,
-                    "Tatoeba (bel-Cyrl_eng-Latn)": 80.89,
-                    "Tatoeba (yue-Hant_eng-Latn)": 69.33,
-                    "Tatoeba (lit-Latn_eng-Latn)": 59.95,
-                    "Tatoeba (tel-Telu_eng-Latn)": 86.82,
-                    "Tatoeba (nob-Latn_eng-Latn)": 90.18,
-                    "Tatoeba (mar-Deva_eng-Latn)": 85.94,
-                    "Tatoeba (ara-Arab_eng-Latn)": 76.09,
-                    "Tatoeba (swg-Latn_eng-Latn)": 44.0,
-                    "Tatoeba (bos-Latn_eng-Latn)": 81.15,
-                    "Tatoeba (pam-Latn_eng-Latn)": 5.76,
-                    "Tatoeba (fry-Latn_eng-Latn)": 49.05,
-                    "Tatoeba (hun-Latn_eng-Latn)": 74.44,
-                    "Tatoeba (ron-Latn_eng-Latn)": 85.68,
-                    "Tatoeba (afr-Latn_eng-Latn)": 85.17,
-                    "Tatoeba (isl-Latn_eng-Latn)": 62.32,
-                    "Tatoeba (aze-Latn_eng-Latn)": 80.79,
-                    "Tatoeba (hsb-Latn_eng-Latn)": 36.49,
-                    "Tatoeba (tam-Taml_eng-Latn)": 82.82,
-                    "Tatoeba (ceb-Latn_eng-Latn)": 42.35,
-                    "Tatoeba (jav-Latn_eng-Latn)": 53.39,
-                    "Tatoeba (glg-Latn_eng-Latn)": 79.65,
-                    "Tatoeba (por-Latn_eng-Latn)": 89.63,
-                    "Tatoeba (awa-Deva_eng-Latn)": 74.55,
-                    "Tatoeba (hin-Deva_eng-Latn)": 92.36,
-                    "Tatoeba (ita-Latn_eng-Latn)": 88.54,
-                    "Tatoeba (deu-Latn_eng-Latn)": 97.22,
-                    "Tatoeba (gle-Latn_eng-Latn)": 56.32,
-                    "Tatoeba (kat-Geor_eng-Latn)": 77.6,
-                    "Tatoeba (ile-Latn_eng-Latn)": 70.31
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "multilingual-e5-small",
-                    "AllegroReviews (pol-Latn)": 37.33,
-                    "AllegroReviews": 37.42,
-                    "AmazonCounterfactualClassification (en-ext)": 73.07,
-                    "AmazonCounterfactualClassification (en)": 71.87,
-                    "AmazonCounterfactualClassification (deu-Latn)": 71.72,
-                    "AmazonCounterfactualClassification (jpn-Jpan)": 61.46,
-                    "AmazonPolarityClassification": 88.61,
-                    "AmazonReviewsClassification (en)": 45.75,
-                    "AmazonReviewsClassification (deu-Latn)": 41.07,
-                    "AmazonReviewsClassification (spa-Latn)": 41.37,
-                    "AmazonReviewsClassification (fra-Latn)": 39.47,
-                    "AmazonReviewsClassification (jpn-Jpan)": 38.55,
-                    "AmazonReviewsClassification (cmn-Hans)": 38.31,
-                    "AmazonReviewsClassification (fr)": 39.68,
-                    "AngryTweetsClassification (dan-Latn)": 56.27,
-                    "AngryTweetsClassification": 53.57,
-                    "Banking77Classification": 70.44,
-                    "CBD (pol-Latn)": 63.33,
-                    "CBD": 63.25,
-                    "DKHateClassification": 60.73,
-                    "DanishPoliticalCommentsClassification (dan-Latn)": 34.82,
-                    "DanishPoliticalCommentsClassification": 34.38,
-                    "EmotionClassification": 42.86,
-                    "GeoreviewClassification (rus-Cyrl)": 44.66,
-                    "HeadlineClassification (rus-Cyrl)": 73.94,
-                    "IFlyTek (cmn-Hans)": 40.74,
-                    "IFlyTek": 47.35,
-                    "ImdbClassification": 79.57,
-                    "InappropriatenessClassification (rus-Cyrl)": 59.16,
-                    "JDReview (cmn-Hans)": 78.37,
-                    "JDReview": 79.34,
-                    "KinopoiskClassification (rus-Cyrl)": 49.96,
-                    "LccSentimentClassification (dan-Latn)": 58.6,
-                    "LccSentimentClassification": 57.87,
-                    "MTOPDomainClassification (en)": 88.99,
-                    "MTOPDomainClassification (deu-Latn)": 86.15,
-                    "MTOPDomainClassification (spa-Latn)": 85.53,
-                    "MTOPDomainClassification (fra-Latn)": 81.5,
-                    "MTOPDomainClassification (hin-Deva)": 84.07,
-                    "MTOPDomainClassification (tha-Thai)": 83.16,
-                    "MTOPDomainClassification (fr)": 81.2,
-                    "MTOPIntentClassification (en)": 56.69,
-                    "MTOPIntentClassification (deu-Latn)": 55.88,
-                    "MTOPIntentClassification (spa-Latn)": 53.15,
-                    "MTOPIntentClassification (fra-Latn)": 44.35,
-                    "MTOPIntentClassification (hin-Deva)": 52.26,
-                    "MTOPIntentClassification (tha-Thai)": 54.61,
-                    "MTOPIntentClassification (fr)": 46.01,
-                    "MasakhaNEWSClassification (amh-Ethi)": 84.28,
-                    "MasakhaNEWSClassification (eng)": 75.61,
-                    "MasakhaNEWSClassification (fra-Latn)": 74.67,
-                    "MasakhaNEWSClassification (hau-Latn)": 73.08,
-                    "MasakhaNEWSClassification (ibo-Latn)": 63.9,
-                    "MasakhaNEWSClassification (lin-Latn)": 73.37,
-                    "MasakhaNEWSClassification (lug-Latn)": 67.89,
-                    "MasakhaNEWSClassification (orm-Ethi)": 68.77,
-                    "MasakhaNEWSClassification (pcm-Latn)": 90.79,
-                    "MasakhaNEWSClassification (run-Latn)": 75.4,
-                    "MasakhaNEWSClassification (sna-Latn)": 82.76,
-                    "MasakhaNEWSClassification (som-Latn)": 59.8,
-                    "MasakhaNEWSClassification (swa-Latn)": 69.85,
-                    "MasakhaNEWSClassification (tir-Ethi)": 68.01,
-                    "MasakhaNEWSClassification (xho-Latn)": 72.22,
-                    "MasakhaNEWSClassification (yor-Latn)": 73.84,
-                    "MasakhaNEWSClassification (fra)": 77.65,
-                    "MassiveIntentClassification (ben-Beng)": 50.68,
-                    "MassiveIntentClassification (tur-Latn)": 56.88,
-                    "MassiveIntentClassification (ind-Latn)": 56.2,
-                    "MassiveIntentClassification (khm-Khmr)": 33.45,
-                    "MassiveIntentClassification (en)": 63.87,
-                    "MassiveIntentClassification (mal-Mlym)": 52.81,
-                    "MassiveIntentClassification (pol-Latn)": 57.33,
-                    "MassiveIntentClassification (lav-Latn)": 44.93,
-                    "MassiveIntentClassification (isl-Latn)": 41.53,
-                    "MassiveIntentClassification (sqi-Latn)": 48.68,
-                    "MassiveIntentClassification (amh-Ethi)": 43.52,
-                    "MassiveIntentClassification (cmo-Hans)": 62.04,
-                    "MassiveIntentClassification (nld-Latn)": 59.27,
-                    "MassiveIntentClassification (deu-Latn)": 55.52,
-                    "MassiveIntentClassification (nob-Latn)": 55.36,
-                    "MassiveIntentClassification (cmo-Hant)": 53.75,
-                    "MassiveIntentClassification (urd-Arab)": 50.51,
-                    "MassiveIntentClassification (slv-Latn)": 47.71,
-                    "MassiveIntentClassification (hun-Latn)": 53.21,
-                    "MassiveIntentClassification (jpn-Jpan)": 61.58,
-                    "MassiveIntentClassification (swa-Latn)": 44.84,
-                    "MassiveIntentClassification (fra-Latn)": 57.9,
-                    "MassiveIntentClassification (spa-Latn)": 59.19,
-                    "MassiveIntentClassification (mon-Cyrl)": 47.38,
-                    "MassiveIntentClassification (dan-Latn)": 56.12,
-                    "MassiveIntentClassification (msa-Latn)": 50.8,
-                    "MassiveIntentClassification (aze-Latn)": 49.32,
-                    "MassiveIntentClassification (fas-Arab)": 57.73,
-                    "MassiveIntentClassification (kan-Knda)": 47.85,
-                    "MassiveIntentClassification (kor-Kore)": 57.12,
-                    "MassiveIntentClassification (tha-Thai)": 56.26,
-                    "MassiveIntentClassification (heb-Hebr)": 51.11,
-                    "MassiveIntentClassification (hin-Deva)": 55.69,
-                    "MassiveIntentClassification (ara-Arab)": 47.78,
-                    "MassiveIntentClassification (por-Latn)": 60.12,
-                    "MassiveIntentClassification (vie-Latn)": 56.19,
-                    "MassiveIntentClassification (hye-Armn)": 47.89,
-                    "MassiveIntentClassification (ita-Latn)": 58.8,
-                    "MassiveIntentClassification (ell-Grek)": 54.14,
-                    "MassiveIntentClassification (cym-Latn)": 36.62,
-                    "MassiveIntentClassification (tel-Telu)": 48.85,
-                    "MassiveIntentClassification (kat-Geor)": 39.52,
-                    "MassiveIntentClassification (swe-Latn)": 58.2,
-                    "MassiveIntentClassification (tam-Taml)": 47.65,
-                    "MassiveIntentClassification (fin-Latn)": 55.14,
-                    "MassiveIntentClassification (tgl-Latn)": 48.7,
-                    "MassiveIntentClassification (ron-Latn)": 52.82,
-                    "MassiveIntentClassification (jav-Latn)": 42.96,
-                    "MassiveIntentClassification (rus-Cyrl)": 58.43,
-                    "MassiveIntentClassification (afr-Latn)": 48.74,
-                    "MassiveIntentClassification (mya-Mymr)": 45.64,
-                    "MassiveIntentClassification (da)": 54.63,
-                    "MassiveIntentClassification (nb)": 53.96,
-                    "MassiveIntentClassification (sv)": 56.6,
-                    "MassiveIntentClassification (pl)": 57.4,
-                    "MassiveScenarioClassification (nld-Latn)": 67.01,
-                    "MassiveScenarioClassification (tur-Latn)": 62.14,
-                    "MassiveScenarioClassification (cym-Latn)": 44.63,
-                    "MassiveScenarioClassification (jav-Latn)": 51.39,
-                    "MassiveScenarioClassification (hin-Deva)": 62.22,
-                    "MassiveScenarioClassification (fra-Latn)": 63.9,
-                    "MassiveScenarioClassification (cmo-Hans)": 68.96,
-                    "MassiveScenarioClassification (kan-Knda)": 52.73,
-                    "MassiveScenarioClassification (isl-Latn)": 49.66,
-                    "MassiveScenarioClassification (jpn-Jpan)": 67.75,
-                    "MassiveScenarioClassification (mal-Mlym)": 60.31,
-                    "MassiveScenarioClassification (pol-Latn)": 64.27,
-                    "MassiveScenarioClassification (mya-Mymr)": 51.07,
-                    "MassiveScenarioClassification (slv-Latn)": 54.05,
-                    "MassiveScenarioClassification (rus-Cyrl)": 63.89,
-                    "MassiveScenarioClassification (urd-Arab)": 55.91,
-                    "MassiveScenarioClassification (fas-Arab)": 63.32,
-                    "MassiveScenarioClassification (fin-Latn)": 61.89,
-                    "MassiveScenarioClassification (kat-Geor)": 44.96,
-                    "MassiveScenarioClassification (sqi-Latn)": 56.15,
-                    "MassiveScenarioClassification (en)": 69.28,
-                    "MassiveScenarioClassification (hun-Latn)": 61.93,
-                    "MassiveScenarioClassification (aze-Latn)": 53.27,
-                    "MassiveScenarioClassification (heb-Hebr)": 59.22,
-                    "MassiveScenarioClassification (kor-Kore)": 65.7,
-                    "MassiveScenarioClassification (nob-Latn)": 61.96,
-                    "MassiveScenarioClassification (dan-Latn)": 64.03,
-                    "MassiveScenarioClassification (cmo-Hant)": 61.15,
-                    "MassiveScenarioClassification (ron-Latn)": 60.0,
-                    "MassiveScenarioClassification (amh-Ethi)": 50.53,
-                    "MassiveScenarioClassification (spa-Latn)": 64.43,
-                    "MassiveScenarioClassification (afr-Latn)": 58.0,
-                    "MassiveScenarioClassification (lav-Latn)": 51.0,
-                    "MassiveScenarioClassification (deu-Latn)": 65.88,
-                    "MassiveScenarioClassification (ita-Latn)": 64.03,
-                    "MassiveScenarioClassification (tha-Thai)": 65.72,
-                    "MassiveScenarioClassification (msa-Latn)": 59.18,
-                    "MassiveScenarioClassification (tam-Taml)": 52.74,
-                    "MassiveScenarioClassification (ara-Arab)": 54.56,
-                    "MassiveScenarioClassification (tgl-Latn)": 55.3,
-                    "MassiveScenarioClassification (por-Latn)": 62.75,
-                    "MassiveScenarioClassification (swe-Latn)": 67.33,
-                    "MassiveScenarioClassification (tel-Telu)": 54.86,
-                    "MassiveScenarioClassification (khm-Khmr)": 39.01,
-                    "MassiveScenarioClassification (swa-Latn)": 52.42,
-                    "MassiveScenarioClassification (vie-Latn)": 62.67,
-                    "MassiveScenarioClassification (ind-Latn)": 62.0,
-                    "MassiveScenarioClassification (hye-Armn)": 52.93,
-                    "MassiveScenarioClassification (ben-Beng)": 57.38,
-                    "MassiveScenarioClassification (mon-Cyrl)": 52.41,
-                    "MassiveScenarioClassification (ell-Grek)": 62.29,
-                    "MassiveScenarioClassification (da)": 62.34,
-                    "MassiveScenarioClassification (nb)": 59.9,
-                    "MassiveScenarioClassification (sv)": 65.54,
-                    "MassiveScenarioClassification (pl)": 64.25,
-                    "MultilingualSentiment (cmn-Hans)": 66.0,
-                    "MultilingualSentiment": 64.74,
-                    "NoRecClassification (nob-Latn)": 50.08,
-                    "NoRecClassification": 53.96,
-                    "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 72.15,
-                    "NordicLangClassification": 75.15,
-                    "NorwegianParliament": 60.15,
-                    "OnlineShopping (cmn-Hans)": 88.7,
-                    "OnlineShopping": 88.73,
-                    "PAC (pol-Latn)": 70.48,
-                    "PAC": 70.55,
-                    "PolEmo2.0-IN (pol-Latn)": 67.31,
-                    "PolEmo2.0-IN": 67.35,
-                    "PolEmo2.0-OUT (pol-Latn)": 39.17,
-                    "PolEmo2.0-OUT": 39.13,
-                    "RuReviewsClassification (rus-Cyrl)": 61.18,
-                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 54.99,
-                    "RuSciBenchOECDClassification (rus-Cyrl)": 41.72,
-                    "ScalaDaClassification": 50.3,
-                    "ScalaNbClassification": 50.06,
-                    "TNews (cmn-Hans)": 46.6,
-                    "TNews": 48.38,
-                    "ToxicConversationsClassification": 63.59,
-                    "TweetSentimentExtractionClassification": 62.79,
-                    "Waimai (cmn-Hans)": 84.15,
-                    "Waimai": 83.9
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "multilingual-e5-small",
-                    "8TagsClustering": 23.92,
-                    "AlloProfClusteringP2P": 60.89,
-                    "AlloProfClusteringS2S": 32.52,
-                    "BiorxivClusteringP2P": 35.84,
-                    "BiorxivClusteringS2S": 27.35,
-                    "CLSClusteringP2P": 39.14,
-                    "CLSClusteringS2S": 37.79,
-                    "GeoreviewClusteringP2P (rus-Cyrl)": 58.57,
-                    "HALClusteringS2S": 18.95,
-                    "MLSUMClusteringP2P (rus-Cyrl)": 39.69,
-                    "MLSUMClusteringP2P": 43.2,
-                    "MLSUMClusteringS2S (rus-Cyrl)": 39.9,
-                    "MLSUMClusteringS2S": 37.61,
-                    "MasakhaNEWSClusteringP2P (amh-Ethi)": 66.2,
-                    "MasakhaNEWSClusteringP2P (eng)": 50.08,
-                    "MasakhaNEWSClusteringP2P (fra-Latn)": 56.32,
-                    "MasakhaNEWSClusteringP2P (hau-Latn)": 53.63,
-                    "MasakhaNEWSClusteringP2P (ibo-Latn)": 49.19,
-                    "MasakhaNEWSClusteringP2P (lin-Latn)": 55.06,
-                    "MasakhaNEWSClusteringP2P (lug-Latn)": 59.97,
-                    "MasakhaNEWSClusteringP2P (orm-Ethi)": 32.72,
-                    "MasakhaNEWSClusteringP2P (pcm-Latn)": 62.22,
-                    "MasakhaNEWSClusteringP2P (run-Latn)": 57.52,
-                    "MasakhaNEWSClusteringP2P (sna-Latn)": 45.11,
-                    "MasakhaNEWSClusteringP2P (som-Latn)": 42.39,
-                    "MasakhaNEWSClusteringP2P (swa-Latn)": 23.77,
-                    "MasakhaNEWSClusteringP2P (tir-Ethi)": 57.68,
-                    "MasakhaNEWSClusteringP2P (xho-Latn)": 39.96,
-                    "MasakhaNEWSClusteringP2P (yor-Latn)": 26.56,
-                    "MasakhaNEWSClusteringP2P (fra)": 40.12,
-                    "MasakhaNEWSClusteringS2S (amh-Ethi)": 55.48,
-                    "MasakhaNEWSClusteringS2S (eng)": 37.79,
-                    "MasakhaNEWSClusteringS2S (fra-Latn)": 35.8,
-                    "MasakhaNEWSClusteringS2S (hau-Latn)": 20.22,
-                    "MasakhaNEWSClusteringS2S (ibo-Latn)": 35.67,
-                    "MasakhaNEWSClusteringS2S (lin-Latn)": 41.12,
-                    "MasakhaNEWSClusteringS2S (lug-Latn)": 48.63,
-                    "MasakhaNEWSClusteringS2S (orm-Ethi)": 29.16,
-                    "MasakhaNEWSClusteringS2S (pcm-Latn)": 65.36,
-                    "MasakhaNEWSClusteringS2S (run-Latn)": 45.5,
-                    "MasakhaNEWSClusteringS2S (sna-Latn)": 47.61,
-                    "MasakhaNEWSClusteringS2S (som-Latn)": 28.59,
-                    "MasakhaNEWSClusteringS2S (swa-Latn)": 13.91,
-                    "MasakhaNEWSClusteringS2S (tir-Ethi)": 50.51,
-                    "MasakhaNEWSClusteringS2S (xho-Latn)": 37.26,
-                    "MasakhaNEWSClusteringS2S (yor-Latn)": 23.38,
-                    "MasakhaNEWSClusteringS2S (fra)": 39.22,
-                    "MedrxivClusteringP2P": 30.72,
-                    "MedrxivClusteringS2S": 27.0,
-                    "RedditClustering": 40.12,
-                    "RedditClusteringP2P": 59.49,
-                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 51.1,
-                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 45.29,
-                    "StackExchangeClustering": 53.32,
-                    "StackExchangeClusteringP2P": 31.87,
-                    "ThuNewsClusteringP2P": 55.18,
-                    "ThuNewsClusteringS2S": 48.93,
-                    "TwentyNewsgroupsClustering": 33.67
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "multilingual-e5-small",
-                    "CDSC-E (pol-Latn)": 69.69,
-                    "CDSC-E": 69.7,
-                    "Cmnli": 72.12,
-                    "Ocnli": 60.77,
-                    "OpusparcusPC (deu-Latn)": 94.9,
-                    "OpusparcusPC (en)": 98.42,
-                    "OpusparcusPC (fin-Latn)": 88.29,
-                    "OpusparcusPC (fra-Latn)": 91.77,
-                    "OpusparcusPC (rus-Cyrl)": 84.79,
-                    "OpusparcusPC (swe-Latn)": 91.07,
-                    "OpusparcusPC (fr)": 92.52,
-                    "PPC": 86.72,
-                    "PSC (pol-Latn)": 99.23,
-                    "PSC": 99.24,
-                    "PawsXPairClassification (deu-Latn)": 52.13,
-                    "PawsXPairClassification (en)": 53.91,
-                    "PawsXPairClassification (spa-Latn)": 51.39,
-                    "PawsXPairClassification (fra-Latn)": 52.69,
-                    "PawsXPairClassification (jpn-Hira)": 48.24,
-                    "PawsXPairClassification (kor-Hang)": 49.95,
-                    "PawsXPairClassification (cmn-Hans)": 54.01,
-                    "PawsXPairClassification (fr)": 55.68,
-                    "SICK-E-PL (pol-Latn)": 66.35,
-                    "SICK-E-PL": 66.34,
-                    "SprintDuplicateQuestions": 92.18,
-                    "TERRa (rus-Cyrl)": 55.14,
-                    "TwitterSemEval2015": 70.75,
-                    "TwitterURLCorpus": 85.03
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "multilingual-e5-small",
-                    "AlloprofReranking (fra-Latn)": 64.41,
-                    "AlloprofReranking": 56.17,
-                    "AskUbuntuDupQuestions": 56.42,
-                    "CMedQAv1": 63.44,
-                    "CMedQAv2": 62.41,
-                    "MMarcoReranking (cmn-Hans)": 29.98,
-                    "MMarcoReranking": 24.33,
-                    "MindSmallReranking": 29.96,
-                    "RuBQReranking (rus-Cyrl)": 71.46,
-                    "SciDocsRR": 78.26,
-                    "StackOverflowDupQuestions": 46.97,
-                    "SyntecReranking (fra-Latn)": 81.22,
-                    "SyntecReranking": 86.7,
-                    "T2Reranking (cmn-Hans)": 65.72,
-                    "T2Reranking": 65.24
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "multilingual-e5-small",
-                    "AILACasedocs": 23.43,
-                    "AILAStatutes": 19.01,
-                    "ARCChallenge": 7.14,
-                    "AlloprofRetrieval (fra-Latn)": 27.38,
-                    "AlloprofRetrieval": 27.01,
-                    "AlphaNLI": 13.0,
-                    "ArguAna": 39.09,
-                    "ArguAna-PL (pol-Latn)": 37.49,
-                    "ArguAna-PL": 37.43,
-                    "BSARDRetrieval (fra-Latn)": 14.54,
-                    "BSARDRetrieval": 0.0,
-                    "CmedqaRetrieval (cmn-Hans)": 24.36,
-                    "CmedqaRetrieval": 24.38,
-                    "CovidRetrieval (cmn-Hans)": 72.82,
-                    "CovidRetrieval": 72.82,
-                    "DBPedia-PL": 29.27,
-                    "DuRetrieval (cmn-Hans)": 81.36,
-                    "DuRetrieval": 81.35,
-                    "EcomRetrieval (cmn-Hans)": 53.53,
-                    "EcomRetrieval": 53.56,
-                    "FiQA-PL (pol-Latn)": 22.02,
-                    "FiQA-PL": 22.03,
-                    "FiQA2018": 33.13,
-                    "GerDaLIRSmall (deu-Latn)": 14.81,
-                    "HellaSwag": 23.73,
-                    "HotpotQA-PL": 60.15,
-                    "LEMBNarrativeQARetrieval": 22.6,
-                    "LEMBNeedleRetrieval": 30.75,
-                    "LEMBPasskeyRetrieval": 38.25,
-                    "LEMBQMSumRetrieval": 21.51,
-                    "LEMBSummScreenFDRetrieval": 62.75,
-                    "LEMBWikimQARetrieval": 57.13,
-                    "LeCaRDv2 (zho-Hans)": 61.58,
-                    "LegalBenchConsumerContractsQA": 66.98,
-                    "LegalBenchCorporateLobbying": 89.47,
-                    "LegalQuAD (deu-Latn)": 47.8,
-                    "LegalSummarization": 55.76,
-                    "MMarcoRetrieval (cmn-Hans)": 73.17,
-                    "MMarcoRetrieval": 73.17,
-                    "MSMARCO-PL": 26.94,
-                    "MedicalRetrieval (cmn-Hans)": 44.84,
-                    "MedicalRetrieval": 44.84,
-                    "MintakaRetrieval (ara-Arab)": 21.22,
-                    "MintakaRetrieval (deu-Latn)": 25.6,
-                    "MintakaRetrieval (spa-Latn)": 26.4,
-                    "MintakaRetrieval (fra-Latn)": 25.0,
-                    "MintakaRetrieval (hin-Deva)": 21.1,
-                    "MintakaRetrieval (ita-Latn)": 26.25,
-                    "MintakaRetrieval (jpn-Hira)": 20.69,
-                    "MintakaRetrieval (por-Latn)": 24.44,
-                    "MintakaRetrieval (fr)": 22.53,
-                    "NFCorpus": 31.0,
-                    "NFCorpus-PL (pol-Latn)": 26.5,
-                    "NFCorpus-PL": 26.48,
-                    "NQ-PL": 40.46,
-                    "PIQA": 21.08,
-                    "Quail": 2.38,
-                    "Quora-PL": 78.7,
-                    "RARbCode": 46.96,
-                    "RARbMath": 63.91,
-                    "RiaNewsRetrieval (rus-Cyrl)": 70.01,
-                    "RuBQRetrieval (rus-Cyrl)": 68.53,
-                    "SCIDOCS": 13.9,
-                    "SCIDOCS-PL (pol-Latn)": 11.59,
-                    "SCIDOCS-PL": 11.6,
-                    "SIQA": 2.57,
-                    "SciFact": 67.7,
-                    "SciFact-PL (pol-Latn)": 62.76,
-                    "SciFact-PL": 62.76,
-                    "SpartQA": 5.43,
-                    "SyntecRetrieval (fra-Latn)": 73.46,
-                    "SyntecRetrieval": 75.76,
-                    "T2Retrieval (cmn-Hans)": 71.36,
-                    "T2Retrieval": 71.39,
-                    "TRECCOVID": 72.57,
-                    "TRECCOVID-PL (pol-Latn)": 70.92,
-                    "TRECCOVID-PL": 70.92,
-                    "TempReasonL1": 0.8,
-                    "TempReasonL2Fact": 36.76,
-                    "TempReasonL2Pure": 0.62,
-                    "TempReasonL3Fact": 32.42,
-                    "TempReasonL3Pure": 6.36,
-                    "Touche2020": 21.16,
-                    "VideoRetrieval (cmn-Hans)": 58.06,
-                    "VideoRetrieval": 58.09,
-                    "WinoGrande": 37.46,
-                    "XPQARetrieval (ara-Arab_ara-Arab)": 39.93,
-                    "XPQARetrieval (eng-Latn_ara-Arab)": 18.09,
-                    "XPQARetrieval (ara-Arab_eng-Latn)": 31.64,
-                    "XPQARetrieval (deu-Latn_deu-Latn)": 69.43,
-                    "XPQARetrieval (eng-Latn_deu-Latn)": 25.14,
-                    "XPQARetrieval (deu-Latn_eng-Latn)": 52.36,
-                    "XPQARetrieval (spa-Latn_spa-Latn)": 55.71,
-                    "XPQARetrieval (eng-Latn_spa-Latn)": 22.5,
-                    "XPQARetrieval (spa-Latn_eng-Latn)": 42.4,
-                    "XPQARetrieval (fra-Latn_fra-Latn)": 57.17,
-                    "XPQARetrieval (eng-Latn_fra-Latn)": 27.69,
-                    "XPQARetrieval (fra-Latn_eng-Latn)": 47.46,
-                    "XPQARetrieval (hin-Deva_hin-Deva)": 68.15,
-                    "XPQARetrieval (eng-Latn_hin-Deva)": 25.82,
-                    "XPQARetrieval (hin-Deva_eng-Latn)": 63.79,
-                    "XPQARetrieval (ita-Latn_ita-Latn)": 67.71,
-                    "XPQARetrieval (eng-Latn_ita-Latn)": 22.97,
-                    "XPQARetrieval (ita-Latn_eng-Latn)": 46.61,
-                    "XPQARetrieval (jpn-Hira_jpn-Hira)": 69.49,
-                    "XPQARetrieval (eng-Latn_jpn-Hira)": 25.08,
-                    "XPQARetrieval (jpn-Hira_eng-Latn)": 54.6,
-                    "XPQARetrieval (kor-Hang_kor-Hang)": 33.0,
-                    "XPQARetrieval (eng-Latn_kor-Hang)": 22.49,
-                    "XPQARetrieval (kor-Hang_eng-Latn)": 23.02,
-                    "XPQARetrieval (pol-Latn_pol-Latn)": 43.37,
-                    "XPQARetrieval (eng-Latn_pol-Latn)": 19.89,
-                    "XPQARetrieval (pol-Latn_eng-Latn)": 28.72,
-                    "XPQARetrieval (por-Latn_por-Latn)": 41.8,
-                    "XPQARetrieval (eng-Latn_por-Latn)": 15.79,
-                    "XPQARetrieval (por-Latn_eng-Latn)": 33.74,
-                    "XPQARetrieval (tam-Taml_tam-Taml)": 31.65,
-                    "XPQARetrieval (eng-Latn_tam-Taml)": 13.18,
-                    "XPQARetrieval (tam-Taml_eng-Latn)": 26.44,
-                    "XPQARetrieval (cmn-Hans_cmn-Hans)": 63.98,
-                    "XPQARetrieval (eng-Latn_cmn-Hans)": 16.52,
-                    "XPQARetrieval (cmn-Hans_eng-Latn)": 45.32,
-                    "XPQARetrieval (fr)": 57.47
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "multilingual-e5-small",
-                    "AFQMC (cmn-Hans)": 25.21,
-                    "AFQMC": 25.21,
-                    "ATEC (cmn-Hans)": 35.14,
-                    "ATEC": 35.14,
-                    "BIOSSES": 82.46,
-                    "BQ (cmn-Hans)": 43.27,
-                    "BQ": 43.27,
-                    "CDSC-R (pol-Latn)": 90.27,
-                    "CDSC-R": 90.27,
-                    "LCQMC (cmn-Hans)": 72.7,
-                    "LCQMC": 72.7,
-                    "PAWSX (cmn-Hans)": 11.0,
-                    "PAWSX": 11.01,
-                    "QBQTC": 30.25,
-                    "RUParaPhraserSTS (rus-Cyrl)": 70.46,
-                    "RuSTSBenchmarkSTS (rus-Cyrl)": 78.08,
-                    "SICK-R": 77.51,
-                    "SICK-R-PL (pol-Latn)": 69.45,
-                    "SICK-R-PL": 69.46,
-                    "SICKFr (fra-Latn)": 74.67,
-                    "SICKFr": 75.62,
-                    "STS12": 76.56,
-                    "STS13": 76.97,
-                    "STS14": 75.52,
-                    "STS15": 87.12,
-                    "STS16": 83.63,
-                    "STS17 (ita-Latn_eng-Latn)": 77.31,
-                    "STS17 (en-en)": 86.42,
-                    "STS17 (eng-Latn_ara-Arab)": 57.39,
-                    "STS17 (eng-Latn_tur-Latn)": 55.93,
-                    "STS17 (spa-Latn_eng-Latn)": 72.43,
-                    "STS17 (kor-Hang)": 78.87,
-                    "STS17 (spa-Latn)": 84.83,
-                    "STS17 (eng-Latn_deu-Latn)": 76.82,
-                    "STS17 (fra-Latn_eng-Latn)": 72.28,
-                    "STS17 (nld-Latn_eng-Latn)": 75.43,
-                    "STS17 (ara-Arab)": 73.0,
-                    "STS22 (ita-Latn)": 76.53,
-                    "STS22 (en)": 61.25,
-                    "STS22 (pol-Latn_eng-Latn)": 72.69,
-                    "STS22 (cmn-Hans)": 66.85,
-                    "STS22 (fra-Latn)": 76.58,
-                    "STS22 (deu-Latn)": 53.45,
-                    "STS22 (fra-Latn_pol-Latn)": 84.52,
-                    "STS22 (deu-Latn_pol-Latn)": 28.24,
-                    "STS22 (spa-Latn_eng-Latn)": 74.2,
-                    "STS22 (spa-Latn)": 66.86,
-                    "STS22 (rus-Cyrl)": 59.9,
-                    "STS22 (spa-Latn_ita-Latn)": 71.74,
-                    "STS22 (pol-Latn)": 35.78,
-                    "STS22 (tur-Latn)": 63.69,
-                    "STS22 (ara-Arab)": 56.65,
-                    "STS22 (cmn-Hans_eng-Latn)": 65.32,
-                    "STS22 (deu-Latn_eng-Latn)": 56.07,
-                    "STS22 (deu-Latn_fra-Latn)": 60.62,
-                    "STS22 (pl)": 35.8,
-                    "STSB (cmn-Hans)": 77.73,
-                    "STSB": 77.73,
-                    "STSBenchmark": 84.11,
-                    "STSBenchmarkMultilingualSTS (en)": 84.11,
-                    "STSBenchmarkMultilingualSTS (cmn-Hans)": 78.49,
-                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 78.24,
-                    "STSBenchmarkMultilingualSTS (spa-Latn)": 80.31,
-                    "STSBenchmarkMultilingualSTS (deu-Latn)": 79.17,
-                    "STSBenchmarkMultilingualSTS (fra-Latn)": 79.2,
-                    "STSBenchmarkMultilingualSTS (nld-Latn)": 76.04,
-                    "STSBenchmarkMultilingualSTS (pol-Latn)": 72.61,
-                    "STSBenchmarkMultilingualSTS (por-Latn)": 77.39,
-                    "STSBenchmarkMultilingualSTS (ita-Latn)": 78.21,
-                    "STSBenchmarkMultilingualSTS (fr)": 79.32
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "multilingual-e5-small",
-                    "SummEval": 30.04,
-                    "SummEvalFr (fra-Latn)": 31.14,
-                    "SummEvalFr": 31.85
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "multilingual-e5-small"
-                }
-            ]
-        }
-    },
-    "Cohere-embed-multilingual-light-v3.0": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "Cohere-embed-multilingual-light-v3.0"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "Cohere-embed-multilingual-light-v3.0",
-                    "AmazonReviewsClassification (fr)": 38.6,
-                    "MTOPDomainClassification (fr)": 80.79,
-                    "MTOPIntentClassification (fr)": 50.01,
-                    "MasakhaNEWSClassification (fra)": 82.58,
-                    "MassiveIntentClassification (fr)": 56.31,
-                    "MassiveScenarioClassification (fr)": 59.5
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "Cohere-embed-multilingual-light-v3.0",
-                    "AlloProfClusteringP2P": 61.96,
-                    "AlloProfClusteringS2S": 31.36,
-                    "HALClusteringS2S": 17.31,
-                    "MLSUMClusteringP2P": 42.8,
-                    "MLSUMClusteringS2S": 32.72,
-                    "MasakhaNEWSClusteringP2P (fra)": 56.81,
-                    "MasakhaNEWSClusteringS2S (fra)": 29.41
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "Cohere-embed-multilingual-light-v3.0",
-                    "OpusparcusPC (fr)": 90.92,
-                    "PawsXPairClassification (fr)": 57.32
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "Cohere-embed-multilingual-light-v3.0",
-                    "AlloprofReranking": 51.6,
-                    "SyntecReranking": 88.03
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "Cohere-embed-multilingual-light-v3.0",
-                    "AlloprofRetrieval": 35.39,
-                    "BSARDRetrieval": 0.0,
-                    "MintakaRetrieval (fr)": 23.0,
-                    "SyntecRetrieval": 76.88,
-                    "XPQARetrieval (fr)": 45.23
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "Cohere-embed-multilingual-light-v3.0",
-                    "SICKFr": 75.5,
-                    "STS22 (fr)": 82.8,
-                    "STSBenchmarkMultilingualSTS (fr)": 76.48
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "Cohere-embed-multilingual-light-v3.0",
-                    "SummEvalFr": 31.4
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "Cohere-embed-multilingual-light-v3.0"
-                }
-            ]
-        }
-    },
-    "e5-mistral-7b-instruct": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "e5-mistral-7b-instruct",
-                    "Tatoeba (rus-Cyrl_eng-Latn)": 93.75
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "e5-mistral-7b-instruct",
-                    "AmazonReviewsClassification (fr)": 36.71,
-                    "GeoreviewClassification (rus-Cyrl)": 50.25,
-                    "HeadlineClassification (rus-Cyrl)": 85.68,
-                    "InappropriatenessClassification (rus-Cyrl)": 67.19,
-                    "KinopoiskClassification (rus-Cyrl)": 65.49,
-                    "MTOPDomainClassification (fr)": 74.8,
-                    "MTOPIntentClassification (fr)": 53.97,
-                    "MasakhaNEWSClassification (fra)": 80.59,
-                    "MassiveIntentClassification (rus-Cyrl)": 76.08,
-                    "MassiveIntentClassification (fr)": 46.39,
-                    "MassiveScenarioClassification (rus-Cyrl)": 79.61,
-                    "MassiveScenarioClassification (fr)": 53.86,
-                    "RuReviewsClassification (rus-Cyrl)": 67.68,
-                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 64.59,
-                    "RuSciBenchOECDClassification (rus-Cyrl)": 51.13
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "e5-mistral-7b-instruct",
-                    "AlloProfClusteringP2P": 61.06,
-                    "AlloProfClusteringS2S": 28.12,
-                    "GeoreviewClusteringP2P (rus-Cyrl)": 65.68,
-                    "HALClusteringS2S": 19.69,
-                    "MLSUMClusteringP2P": 45.59,
-                    "MLSUMClusteringS2S": 32.0,
-                    "MasakhaNEWSClusteringP2P (fra)": 52.47,
-                    "MasakhaNEWSClusteringS2S (fra)": 49.2,
-                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 61.55,
-                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 52.72
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "e5-mistral-7b-instruct",
-                    "OpusparcusPC (rus-Cyrl)": 91.44,
-                    "OpusparcusPC (fr)": 88.5,
-                    "PawsXPairClassification (fr)": 63.65,
-                    "TERRa (rus-Cyrl)": 59.38
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "e5-mistral-7b-instruct",
-                    "AlloprofReranking": 47.36,
-                    "RuBQReranking (rus-Cyrl)": 74.61,
-                    "SyntecReranking": 77.05
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "e5-mistral-7b-instruct",
-                    "AILACasedocs": 38.76,
-                    "AILAStatutes": 38.07,
-                    "ARCChallenge": 17.81,
-                    "AlloprofRetrieval": 16.46,
-                    "AlphaNLI": 26.12,
-                    "BSARDRetrieval": 0.0,
-                    "BrightRetrieval (sustainable_living)": 18.51,
-                    "BrightRetrieval (economics)": 15.49,
-                    "BrightRetrieval (theoremqa_theorems)": 23.78,
-                    "BrightRetrieval (aops)": 7.1,
-                    "BrightRetrieval (theoremqa_questions)": 23.94,
-                    "BrightRetrieval (stackoverflow)": 9.83,
-                    "BrightRetrieval (psychology)": 15.79,
-                    "BrightRetrieval (pony)": 4.81,
-                    "BrightRetrieval (leetcode)": 28.72,
-                    "BrightRetrieval (biology)": 18.84,
-                    "BrightRetrieval (earth_science)": 25.96,
-                    "BrightRetrieval (robotics)": 16.37,
-                    "GerDaLIRSmall": 37.18,
-                    "HellaSwag": 34.85,
-                    "LEMBNarrativeQARetrieval": 44.62,
-                    "LEMBNeedleRetrieval": 48.25,
-                    "LEMBPasskeyRetrieval": 71.0,
-                    "LEMBQMSumRetrieval": 43.63,
-                    "LEMBSummScreenFDRetrieval": 96.82,
-                    "LEMBWikimQARetrieval": 82.11,
-                    "LeCaRDv2": 68.56,
-                    "LegalBenchConsumerContractsQA": 75.46,
-                    "LegalBenchCorporateLobbying": 94.01,
-                    "LegalQuAD": 59.64,
-                    "LegalSummarization": 66.51,
-                    "MintakaRetrieval (fr)": 3.57,
-                    "PIQA": 39.37,
-                    "Quail": 7.01,
-                    "RARbCode": 78.46,
-                    "RARbMath": 72.16,
-                    "RiaNewsRetrieval (rus-Cyrl)": 81.94,
-                    "RuBQRetrieval (rus-Cyrl)": 73.98,
-                    "SIQA": 5.42,
-                    "SpartQA": 9.92,
-                    "SyntecRetrieval": 55.9,
-                    "TempReasonL1": 3.31,
-                    "TempReasonL2Fact": 36.9,
-                    "TempReasonL2Pure": 9.18,
-                    "TempReasonL3Fact": 30.18,
-                    "TempReasonL3Pure": 14.31,
-                    "WinoGrande": 41.21,
-                    "XPQARetrieval (fr)": 41.29
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "e5-mistral-7b-instruct",
-                    "RUParaPhraserSTS (rus-Cyrl)": 76.17,
-                    "RuSTSBenchmarkSTS (rus-Cyrl)": 84.13,
-                    "SICKFr": 64.39,
-                    "STS22 (fr)": 69.82,
-                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 84.25,
-                    "STSBenchmarkMultilingualSTS (fr)": 61.87
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "e5-mistral-7b-instruct",
-                    "SummEvalFr": 32.22
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "e5-mistral-7b-instruct",
-                    "Core17InstructionRetrieval": 0.09,
-                    "News21InstructionRetrieval": -0.86,
-                    "Robust04InstructionRetrieval": -9.59
-                }
-            ]
-        }
-    },
-    "gbert-large": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "gbert-large"
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "gbert-large"
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "gbert-large",
-                    "BlurbsClusteringP2P": 39.3,
-                    "BlurbsClusteringS2S": 13.38,
-                    "TenKGnadClusteringP2P": 41.69,
-                    "TenKGnadClusteringS2S": 34.97
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "gbert-large"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "gbert-large"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "gbert-large"
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "gbert-large"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "gbert-large"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "gbert-large"
-                }
-            ]
-        }
-    },
-    "norbert3-base": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "norbert3-base",
-                    "BornholmBitextMining": 6.08
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "norbert3-base",
-                    "AngryTweetsClassification": 52.48,
-                    "DKHateClassification": 58.78,
-                    "DanishPoliticalCommentsClassification": 34.14,
-                    "LccSentimentClassification": 54.07,
-                    "MassiveIntentClassification (da)": 53.16,
-                    "MassiveIntentClassification (nb)": 54.2,
-                    "MassiveIntentClassification (sv)": 52.08,
-                    "MassiveScenarioClassification (da)": 57.17,
-                    "MassiveScenarioClassification (nb)": 60.69,
-                    "MassiveScenarioClassification (sv)": 53.53,
-                    "NoRecClassification": 53.4,
-                    "NordicLangClassification": 82.67,
-                    "NorwegianParliament": 59.33,
-                    "ScalaDaClassification": 58.25,
-                    "ScalaNbClassification": 60.19
-                }
-            ]
-        },
-        "Clustering": {
-            "v_measure": [
-                {
-                    "Model": "norbert3-base"
-                }
-            ]
-        },
-        "PairClassification": {
-            "ap": [
-                {
-                    "Model": "norbert3-base"
-                }
-            ]
-        },
-        "Reranking": {
-            "map": [
-                {
-                    "Model": "norbert3-base"
-                }
-            ]
-        },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "norbert3-base"
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "norbert3-base"
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "norbert3-base"
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "norbert3-base"
-                }
-            ]
-        }
-    },
-    "LaBSE": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "LaBSE",
-                    "BUCC (de-en)": 99.35,
-                    "BUCC (fr-en)": 98.72,
-                    "BUCC (ru-en)": 97.78,
-                    "BUCC (zh-en)": 99.16,
-                    "BornholmBitextMining (dan-Latn)": 45.63,
-                    "Tatoeba (ber-Tfng_eng-Latn)": 8.4,
-                    "Tatoeba (kab-Latn_eng-Latn)": 4.31,
-                    "Tatoeba (tur-Latn_eng-Latn)": 98.0,
-                    "Tatoeba (gle-Latn_eng-Latn)": 93.8,
-                    "Tatoeba (awa-Deva_eng-Latn)": 71.7,
-                    "Tatoeba (yue-Hant_eng-Latn)": 89.58,
-                    "Tatoeba (tzl-Latn_eng-Latn)": 58.88,
-                    "Tatoeba (tat-Cyrl_eng-Latn)": 85.92,
-                    "Tatoeba (fin-Latn_eng-Latn)": 96.37,
-                    "Tatoeba (cor-Latn_eng-Latn)": 10.11,
-                    "Tatoeba (hye-Armn_eng-Latn)": 94.09,
-                    "Tatoeba (ben-Beng_eng-Latn)": 88.55,
-                    "Tatoeba (epo-Latn_eng-Latn)": 98.2,
-                    "Tatoeba (ile-Latn_eng-Latn)": 85.58,
-                    "Tatoeba (nld-Latn_eng-Latn)": 96.07,
-                    "Tatoeba (mar-Deva_eng-Latn)": 92.65,
-                    "Tatoeba (cmn-Hans_eng-Latn)": 95.1,
-                    "Tatoeba (hin-Deva_eng-Latn)": 96.87,
-                    "Tatoeba (tgl-Latn_eng-Latn)": 96.02,
-                    "Tatoeba (mon-Cyrl_eng-Latn)": 95.91,
-                    "Tatoeba (oci-Latn_eng-Latn)": 65.81,
-                    "Tatoeba (dan-Latn_eng-Latn)": 95.71,
-                    "Tatoeba (mkd-Cyrl_eng-Latn)": 93.6,
-                    "Tatoeba (ces-Latn_eng-Latn)": 96.68,
-                    "Tatoeba (fra-Latn_eng-Latn)": 94.86,
-                    "Tatoeba (yid-Hebr_eng-Latn)": 88.79,
-                    "Tatoeba (est-Latn_eng-Latn)": 96.55,
-                    "Tatoeba (ast-Latn_eng-Latn)": 90.68,
-                    "Tatoeba (ind-Latn_eng-Latn)": 93.66,
-                    "Tatoeba (bre-Latn_eng-Latn)": 15.07,
-                    "Tatoeba (eus-Latn_eng-Latn)": 95.01,
-                    "Tatoeba (heb-Hebr_eng-Latn)": 91.53,
-                    "Tatoeba (rus-Cyrl_eng-Latn)": 93.75,
-                    "Tatoeba (lfn-Latn_eng-Latn)": 67.54,
-                    "Tatoeba (jav-Latn_eng-Latn)": 79.77,
-                    "Tatoeba (ukr-Cyrl_eng-Latn)": 93.97,
-                    "Tatoeba (ell-Grek_eng-Latn)": 95.35,
-                    "Tatoeba (nds-Latn_eng-Latn)": 79.42,
-                    "Tatoeba (arz-Arab_eng-Latn)": 76.0,
-                    "Tatoeba (gla-Latn_eng-Latn)": 85.66,
-                    "Tatoeba (cbk-Latn_eng-Latn)": 79.44,
-                    "Tatoeba (max-Deva_eng-Latn)": 63.26,
-                    "Tatoeba (ron-Latn_eng-Latn)": 96.92,
-                    "Tatoeba (ido-Latn_eng-Latn)": 89.42,
-                    "Tatoeba (lvs-Latn_eng-Latn)": 95.88,
-                    "Tatoeba (khm-Khmr_eng-Latn)": 78.37,
-                    "Tatoeba (urd-Arab_eng-Latn)": 93.22,
-                    "Tatoeba (glg-Latn_eng-Latn)": 96.82,
-                    "Tatoeba (gsw-Latn_eng-Latn)": 46.5,
-                    "Tatoeba (swe-Latn_eng-Latn)": 95.63,
-                    "Tatoeba (swh-Latn_eng-Latn)": 84.5,
-                    "Tatoeba (tha-Thai_eng-Latn)": 96.14,
-                    "Tatoeba (tam-Taml_eng-Latn)": 89.0,
-                    "Tatoeba (uzb-Latn_eng-Latn)": 84.23,
-                    "Tatoeba (bul-Cyrl_eng-Latn)": 94.58,
-                    "Tatoeba (kur-Latn_eng-Latn)": 83.59,
-                    "Tatoeba (ina-Latn_eng-Latn)": 95.37,
-                    "Tatoeba (nov-Latn_eng-Latn)": 74.38,
-                    "Tatoeba (afr-Latn_eng-Latn)": 96.18,
-                    "Tatoeba (csb-Latn_eng-Latn)": 52.57,
-                    "Tatoeba (war-Latn_eng-Latn)": 60.29,
-                    "Tatoeba (cha-Latn_eng-Latn)": 31.77,
-                    "Tatoeba (pes-Arab_eng-Latn)": 94.7,
-                    "Tatoeba (kat-Geor_eng-Latn)": 95.02,
-                    "Tatoeba (bos-Latn_eng-Latn)": 94.92,
-                    "Tatoeba (kor-Hang_eng-Latn)": 90.95,
-                    "Tatoeba (slk-Latn_eng-Latn)": 96.5,
-                    "Tatoeba (fry-Latn_eng-Latn)": 89.31,
-                    "Tatoeba (ara-Arab_eng-Latn)": 88.8,
-                    "Tatoeba (sqi-Latn_eng-Latn)": 96.76,
-                    "Tatoeba (ita-Latn_eng-Latn)": 92.72,
-                    "Tatoeba (lat-Latn_eng-Latn)": 80.07,
-                    "Tatoeba (hsb-Latn_eng-Latn)": 67.11,
-                    "Tatoeba (swg-Latn_eng-Latn)": 59.36,
-                    "Tatoeba (srp-Cyrl_eng-Latn)": 94.43,
-                    "Tatoeba (isl-Latn_eng-Latn)": 94.75,
-                    "Tatoeba (hrv-Latn_eng-Latn)": 96.95,
-                    "Tatoeba (wuu-Hans_eng-Latn)": 90.18,
-                    "Tatoeba (mhr-Cyrl_eng-Latn)": 15.74,
-                    "Tatoeba (vie-Latn_eng-Latn)": 97.2,
-                    "Tatoeba (cym-Latn_eng-Latn)": 92.0,
-                    "Tatoeba (dsb-Latn_eng-Latn)": 64.81,
-                    "Tatoeba (hun-Latn_eng-Latn)": 96.55,
-                    "Tatoeba (slv-Latn_eng-Latn)": 96.03,
-                    "Tatoeba (orv-Cyrl_eng-Latn)": 38.93,
-                    "Tatoeba (cat-Latn_eng-Latn)": 95.38,
-                    "Tatoeba (dtp-Latn_eng-Latn)": 10.85,
-                    "Tatoeba (por-Latn_eng-Latn)": 94.14,
-                    "Tatoeba (jpn-Jpan_eng-Latn)": 95.38,
-                    "Tatoeba (ang-Latn_eng-Latn)": 59.28,
-                    "Tatoeba (aze-Latn_eng-Latn)": 94.93,
-                    "Tatoeba (kzj-Latn_eng-Latn)": 11.33,
-                    "Tatoeba (deu-Latn_eng-Latn)": 99.2,
-                    "Tatoeba (uig-Arab_eng-Latn)": 92.4,
-                    "Tatoeba (tel-Telu_eng-Latn)": 97.86,
-                    "Tatoeba (tuk-Latn_eng-Latn)": 75.27,
-                    "Tatoeba (nob-Latn_eng-Latn)": 98.4,
-                    "Tatoeba (nno-Latn_eng-Latn)": 94.48,
-                    "Tatoeba (spa-Latn_eng-Latn)": 98.4,
-                    "Tatoeba (mal-Mlym_eng-Latn)": 98.45,
-                    "Tatoeba (pam-Latn_eng-Latn)": 10.73,
-                    "Tatoeba (xho-Latn_eng-Latn)": 91.55,
-                    "Tatoeba (arq-Arab_eng-Latn)": 42.69,
-                    "Tatoeba (kaz-Cyrl_eng-Latn)": 87.49,
-                    "Tatoeba (bel-Cyrl_eng-Latn)": 95.0,
-                    "Tatoeba (pol-Latn_eng-Latn)": 97.22,
-                    "Tatoeba (fao-Latn_eng-Latn)": 87.4,
-                    "Tatoeba (zsm-Latn_eng-Latn)": 95.62,
-                    "Tatoeba (lit-Latn_eng-Latn)": 96.47,
-                    "Tatoeba (ceb-Latn_eng-Latn)": 64.42,
-                    "Tatoeba (pms-Latn_eng-Latn)": 64.57,
-                    "Tatoeba (amh-Ethi_eng-Latn)": 91.47,
-                    "Tatoeba (afr-eng)": 96.18,
-                    "Tatoeba (amh-eng)": 91.47,
-                    "Tatoeba (ang-eng)": 59.28,
-                    "Tatoeba (ara-eng)": 88.8,
-                    "Tatoeba (arq-eng)": 42.69,
-                    "Tatoeba (arz-eng)": 76.0,
-                    "Tatoeba (ast-eng)": 90.68,
-                    "Tatoeba (awa-eng)": 71.7,
-                    "Tatoeba (aze-eng)": 94.93,
-                    "Tatoeba (bel-eng)": 95.0,
-                    "Tatoeba (ben-eng)": 88.55,
-                    "Tatoeba (ber-eng)": 8.4,
-                    "Tatoeba (bos-eng)": 94.92,
-                    "Tatoeba (bre-eng)": 15.07,
-                    "Tatoeba (bul-eng)": 94.58,
-                    "Tatoeba (cat-eng)": 95.38,
-                    "Tatoeba (cbk-eng)": 79.44,
-                    "Tatoeba (ceb-eng)": 64.42,
-                    "Tatoeba (ces-eng)": 96.68,
-                    "Tatoeba (cha-eng)": 31.77,
-                    "Tatoeba (cmn-eng)": 95.1,
-                    "Tatoeba (cor-eng)": 10.11,
-                    "Tatoeba (csb-eng)": 52.57,
-                    "Tatoeba (cym-eng)": 92.0,
-                    "Tatoeba (dan-eng)": 95.71,
-                    "Tatoeba (deu-eng)": 99.2,
-                    "Tatoeba (dsb-eng)": 64.81,
-                    "Tatoeba (dtp-eng)": 10.85,
-                    "Tatoeba (ell-eng)": 95.35,
-                    "Tatoeba (epo-eng)": 98.2,
-                    "Tatoeba (est-eng)": 96.55,
-                    "Tatoeba (eus-eng)": 95.01,
-                    "Tatoeba (fao-eng)": 87.4,
-                    "Tatoeba (fin-eng)": 96.37,
-                    "Tatoeba (fra-eng)": 94.86,
-                    "Tatoeba (fry-eng)": 89.31,
-                    "Tatoeba (gla-eng)": 85.66,
-                    "Tatoeba (gle-eng)": 93.8,
-                    "Tatoeba (glg-eng)": 96.82,
-                    "Tatoeba (gsw-eng)": 46.5,
-                    "Tatoeba (heb-eng)": 91.53,
-                    "Tatoeba (hin-eng)": 96.87,
-                    "Tatoeba (hrv-eng)": 96.95,
-                    "Tatoeba (hsb-eng)": 67.11,
-                    "Tatoeba (hun-eng)": 96.55,
-                    "Tatoeba (hye-eng)": 94.09,
-                    "Tatoeba (ido-eng)": 89.42,
-                    "Tatoeba (ile-eng)": 85.58,
-                    "Tatoeba (ina-eng)": 95.37,
-                    "Tatoeba (ind-eng)": 93.66,
-                    "Tatoeba (isl-eng)": 94.75,
-                    "Tatoeba (ita-eng)": 92.72,
-                    "Tatoeba (jav-eng)": 79.77,
-                    "Tatoeba (jpn-eng)": 95.38,
-                    "Tatoeba (kab-eng)": 4.31,
-                    "Tatoeba (kat-eng)": 95.02,
-                    "Tatoeba (kaz-eng)": 87.49,
-                    "Tatoeba (khm-eng)": 78.37,
-                    "Tatoeba (kor-eng)": 90.95,
-                    "Tatoeba (kur-eng)": 83.59,
-                    "Tatoeba (kzj-eng)": 11.33,
-                    "Tatoeba (lat-eng)": 80.07,
-                    "Tatoeba (lfn-eng)": 67.54,
-                    "Tatoeba (lit-eng)": 96.47,
-                    "Tatoeba (lvs-eng)": 95.88,
-                    "Tatoeba (mal-eng)": 98.45,
-                    "Tatoeba (mar-eng)": 92.65,
-                    "Tatoeba (max-eng)": 63.26,
-                    "Tatoeba (mhr-eng)": 15.74,
-                    "Tatoeba (mkd-eng)": 93.6,
-                    "Tatoeba (mon-eng)": 95.91,
-                    "Tatoeba (nds-eng)": 79.42,
-                    "Tatoeba (nld-eng)": 96.07,
-                    "Tatoeba (nno-eng)": 94.48,
-                    "Tatoeba (nob-eng)": 98.4,
-                    "Tatoeba (nov-eng)": 74.38,
-                    "Tatoeba (oci-eng)": 65.81,
-                    "Tatoeba (orv-eng)": 38.93,
-                    "Tatoeba (pam-eng)": 10.73,
-                    "Tatoeba (pes-eng)": 94.7,
-                    "Tatoeba (pms-eng)": 64.57,
-                    "Tatoeba (pol-eng)": 97.22,
-                    "Tatoeba (por-eng)": 94.14,
-                    "Tatoeba (ron-eng)": 96.92,
-                    "Tatoeba (rus-eng)": 93.75,
-                    "Tatoeba (slk-eng)": 96.5,
-                    "Tatoeba (slv-eng)": 96.03,
-                    "Tatoeba (spa-eng)": 98.4,
-                    "Tatoeba (sqi-eng)": 96.76,
-                    "Tatoeba (srp-eng)": 94.43,
-                    "Tatoeba (swe-eng)": 95.63,
-                    "Tatoeba (swg-eng)": 59.36,
-                    "Tatoeba (swh-eng)": 84.5,
-                    "Tatoeba (tam-eng)": 89.0,
-                    "Tatoeba (tat-eng)": 85.92,
-                    "Tatoeba (tel-eng)": 97.86,
-                    "Tatoeba (tgl-eng)": 96.02,
-                    "Tatoeba (tha-eng)": 96.14,
-                    "Tatoeba (tuk-eng)": 75.27,
-                    "Tatoeba (tur-eng)": 98.0,
-                    "Tatoeba (tzl-eng)": 58.88,
-                    "Tatoeba (uig-eng)": 92.4,
-                    "Tatoeba (ukr-eng)": 93.97,
-                    "Tatoeba (urd-eng)": 93.22,
-                    "Tatoeba (uzb-eng)": 84.23,
-                    "Tatoeba (vie-eng)": 97.2,
-                    "Tatoeba (war-eng)": 60.29,
-                    "Tatoeba (wuu-eng)": 90.18,
-                    "Tatoeba (xho-eng)": 91.55,
-                    "Tatoeba (yid-eng)": 88.79,
-                    "Tatoeba (yue-eng)": 89.58,
-                    "Tatoeba (zsm-eng)": 95.62
-                }
-            ]
-        },
-        "Classification": {
-            "accuracy": [
+        "Classification": {
+            "accuracy": [
                 {
                     "Model": "LaBSE",
                     "AllegroReviews (pol-Latn)": 34.86,
@@ -16695,4754 +5565,16581 @@
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "LaBSE",
-                    "8TagsClustering": 12.96,
-                    "AlloProfClusteringP2P": 54.78,
-                    "AlloProfClusteringS2S": 31.6,
-                    "ArxivClusteringP2P": 32.13,
-                    "ArxivClusteringS2S": 22.05,
-                    "BiorxivClusteringP2P": 29.84,
-                    "BiorxivClusteringS2S": 20.57,
-                    "GeoreviewClusteringP2P (rus-Cyrl)": 52.19,
-                    "HALClusteringS2S": 20.62,
-                    "MLSUMClusteringP2P (rus-Cyrl)": 39.45,
-                    "MLSUMClusteringP2P": 42.09,
-                    "MLSUMClusteringS2S (rus-Cyrl)": 35.77,
-                    "MLSUMClusteringS2S": 34.84,
-                    "MasakhaNEWSClusteringP2P (amh-Ethi)": 67.78,
-                    "MasakhaNEWSClusteringP2P (eng)": 48.16,
-                    "MasakhaNEWSClusteringP2P (fra-Latn)": 46.16,
-                    "MasakhaNEWSClusteringP2P (hau-Latn)": 39.77,
-                    "MasakhaNEWSClusteringP2P (ibo-Latn)": 62.67,
-                    "MasakhaNEWSClusteringP2P (lin-Latn)": 62.98,
-                    "MasakhaNEWSClusteringP2P (lug-Latn)": 47.76,
-                    "MasakhaNEWSClusteringP2P (orm-Ethi)": 28.76,
-                    "MasakhaNEWSClusteringP2P (pcm-Latn)": 77.16,
-                    "MasakhaNEWSClusteringP2P (run-Latn)": 60.36,
-                    "MasakhaNEWSClusteringP2P (sna-Latn)": 63.57,
-                    "MasakhaNEWSClusteringP2P (som-Latn)": 34.94,
-                    "MasakhaNEWSClusteringP2P (swa-Latn)": 27.26,
-                    "MasakhaNEWSClusteringP2P (tir-Ethi)": 51.59,
-                    "MasakhaNEWSClusteringP2P (xho-Latn)": 45.32,
-                    "MasakhaNEWSClusteringP2P (yor-Latn)": 48.73,
-                    "MasakhaNEWSClusteringP2P (fra)": 46.16,
-                    "MasakhaNEWSClusteringS2S (amh-Ethi)": 52.73,
-                    "MasakhaNEWSClusteringS2S (eng)": 32.6,
-                    "MasakhaNEWSClusteringS2S (fra-Latn)": 38.13,
-                    "MasakhaNEWSClusteringS2S (hau-Latn)": 31.62,
-                    "MasakhaNEWSClusteringS2S (ibo-Latn)": 32.27,
-                    "MasakhaNEWSClusteringS2S (lin-Latn)": 49.38,
-                    "MasakhaNEWSClusteringS2S (lug-Latn)": 47.63,
-                    "MasakhaNEWSClusteringS2S (orm-Ethi)": 25.05,
-                    "MasakhaNEWSClusteringS2S (pcm-Latn)": 68.18,
-                    "MasakhaNEWSClusteringS2S (run-Latn)": 52.39,
-                    "MasakhaNEWSClusteringS2S (sna-Latn)": 46.9,
-                    "MasakhaNEWSClusteringS2S (som-Latn)": 24.08,
-                    "MasakhaNEWSClusteringS2S (swa-Latn)": 15.83,
-                    "MasakhaNEWSClusteringS2S (tir-Ethi)": 49.07,
-                    "MasakhaNEWSClusteringS2S (xho-Latn)": 28.52,
-                    "MasakhaNEWSClusteringS2S (yor-Latn)": 32.26,
-                    "MasakhaNEWSClusteringS2S (fra)": 38.13,
-                    "MedrxivClusteringP2P": 30.13,
-                    "MedrxivClusteringS2S": 24.82,
-                    "RedditClustering": 28.79,
-                    "RedditClusteringP2P": 49.14,
-                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 49.09,
-                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 41.97,
-                    "StackExchangeClustering": 35.43,
-                    "StackExchangeClusteringP2P": 28.83,
-                    "TwentyNewsgroupsClustering": 23.28
+                    "Model": "LaBSE",
+                    "8TagsClustering": 12.96,
+                    "AlloProfClusteringP2P": 54.78,
+                    "AlloProfClusteringS2S": 31.6,
+                    "ArxivClusteringP2P": 32.13,
+                    "ArxivClusteringS2S": 22.05,
+                    "BiorxivClusteringP2P": 29.84,
+                    "BiorxivClusteringS2S": 20.57,
+                    "GeoreviewClusteringP2P (rus-Cyrl)": 52.19,
+                    "HALClusteringS2S": 20.62,
+                    "MLSUMClusteringP2P (rus-Cyrl)": 39.45,
+                    "MLSUMClusteringP2P": 42.09,
+                    "MLSUMClusteringS2S (rus-Cyrl)": 35.77,
+                    "MLSUMClusteringS2S": 34.84,
+                    "MasakhaNEWSClusteringP2P (amh-Ethi)": 67.78,
+                    "MasakhaNEWSClusteringP2P (eng)": 48.16,
+                    "MasakhaNEWSClusteringP2P (fra-Latn)": 46.16,
+                    "MasakhaNEWSClusteringP2P (hau-Latn)": 39.77,
+                    "MasakhaNEWSClusteringP2P (ibo-Latn)": 62.67,
+                    "MasakhaNEWSClusteringP2P (lin-Latn)": 62.98,
+                    "MasakhaNEWSClusteringP2P (lug-Latn)": 47.76,
+                    "MasakhaNEWSClusteringP2P (orm-Ethi)": 28.76,
+                    "MasakhaNEWSClusteringP2P (pcm-Latn)": 77.16,
+                    "MasakhaNEWSClusteringP2P (run-Latn)": 60.36,
+                    "MasakhaNEWSClusteringP2P (sna-Latn)": 63.57,
+                    "MasakhaNEWSClusteringP2P (som-Latn)": 34.94,
+                    "MasakhaNEWSClusteringP2P (swa-Latn)": 27.26,
+                    "MasakhaNEWSClusteringP2P (tir-Ethi)": 51.59,
+                    "MasakhaNEWSClusteringP2P (xho-Latn)": 45.32,
+                    "MasakhaNEWSClusteringP2P (yor-Latn)": 48.73,
+                    "MasakhaNEWSClusteringP2P (fra)": 46.16,
+                    "MasakhaNEWSClusteringS2S (amh-Ethi)": 52.73,
+                    "MasakhaNEWSClusteringS2S (eng)": 32.6,
+                    "MasakhaNEWSClusteringS2S (fra-Latn)": 38.13,
+                    "MasakhaNEWSClusteringS2S (hau-Latn)": 31.62,
+                    "MasakhaNEWSClusteringS2S (ibo-Latn)": 32.27,
+                    "MasakhaNEWSClusteringS2S (lin-Latn)": 49.38,
+                    "MasakhaNEWSClusteringS2S (lug-Latn)": 47.63,
+                    "MasakhaNEWSClusteringS2S (orm-Ethi)": 25.05,
+                    "MasakhaNEWSClusteringS2S (pcm-Latn)": 68.18,
+                    "MasakhaNEWSClusteringS2S (run-Latn)": 52.39,
+                    "MasakhaNEWSClusteringS2S (sna-Latn)": 46.9,
+                    "MasakhaNEWSClusteringS2S (som-Latn)": 24.08,
+                    "MasakhaNEWSClusteringS2S (swa-Latn)": 15.83,
+                    "MasakhaNEWSClusteringS2S (tir-Ethi)": 49.07,
+                    "MasakhaNEWSClusteringS2S (xho-Latn)": 28.52,
+                    "MasakhaNEWSClusteringS2S (yor-Latn)": 32.26,
+                    "MasakhaNEWSClusteringS2S (fra)": 38.13,
+                    "MedrxivClusteringP2P": 30.13,
+                    "MedrxivClusteringS2S": 24.82,
+                    "RedditClustering": 28.79,
+                    "RedditClusteringP2P": 49.14,
+                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 49.09,
+                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 41.97,
+                    "StackExchangeClustering": 35.43,
+                    "StackExchangeClusteringP2P": 28.83,
+                    "TwentyNewsgroupsClustering": 23.28
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "LaBSE",
+                    "CDSC-E (pol-Latn)": 68.92,
+                    "CDSC-E": 68.91,
+                    "OpusparcusPC (deu-Latn)": 96.58,
+                    "OpusparcusPC (en)": 98.12,
+                    "OpusparcusPC (fin-Latn)": 94.44,
+                    "OpusparcusPC (fra-Latn)": 93.96,
+                    "OpusparcusPC (rus-Cyrl)": 87.3,
+                    "OpusparcusPC (swe-Latn)": 93.69,
+                    "OpusparcusPC (fr)": 93.96,
+                    "PPC": 86.97,
+                    "PSC (pol-Latn)": 97.42,
+                    "PSC": 97.42,
+                    "PawsXPairClassification (deu-Latn)": 51.07,
+                    "PawsXPairClassification (en)": 54.07,
+                    "PawsXPairClassification (spa-Latn)": 52.19,
+                    "PawsXPairClassification (fra-Latn)": 54.63,
+                    "PawsXPairClassification (jpn-Hira)": 47.56,
+                    "PawsXPairClassification (kor-Hang)": 49.39,
+                    "PawsXPairClassification (cmn-Hans)": 54.26,
+                    "PawsXPairClassification (fr)": 54.63,
+                    "SICK-E-PL (pol-Latn)": 63.77,
+                    "SICK-E-PL": 63.77,
+                    "SprintDuplicateQuestions": 89.26,
+                    "TERRa (rus-Cyrl)": 55.71,
+                    "TwitterSemEval2015": 62.78,
+                    "TwitterURLCorpus": 84.58
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "LaBSE",
+                    "AlloprofReranking (fra-Latn)": 55.37,
+                    "AlloprofReranking": 49.51,
+                    "AskUbuntuDupQuestions": 52.75,
+                    "MMarcoReranking (cmn-Hans)": 14.83,
+                    "MindSmallReranking": 29.81,
+                    "RuBQReranking (rus-Cyrl)": 55.13,
+                    "SciDocsRR": 68.72,
+                    "StackOverflowDupQuestions": 42.42,
+                    "SyntecReranking (fra-Latn)": 67.62,
+                    "SyntecReranking": 73.28,
+                    "T2Reranking (cmn-Hans)": 63.29
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "LaBSE",
+                    "AILACasedocs": 17.67,
+                    "AILAStatutes": 16.72,
+                    "ARCChallenge": 3.78,
+                    "AlloprofRetrieval (fra-Latn)": 19.77,
+                    "AlloprofRetrieval": 19.77,
+                    "AlphaNLI": 13.11,
+                    "ArguAna": 34.18,
+                    "ArguAna-PL (pol-Latn)": 38.56,
+                    "ArguAna-PL": 38.52,
+                    "BSARDRetrieval (fra-Latn)": 4.44,
+                    "BSARDRetrieval": 0.0,
+                    "CQADupstackRetrieval": 18.75,
+                    "ClimateFEVER": 3.83,
+                    "CmedqaRetrieval (cmn-Hans)": 5.49,
+                    "CovidRetrieval (cmn-Hans)": 28.6,
+                    "DBPedia": 15.57,
+                    "DBPedia-PL": 16.1,
+                    "DuRetrieval (cmn-Hans)": 26.34,
+                    "EcomRetrieval (cmn-Hans)": 25.42,
+                    "FEVER": 12.18,
+                    "FiQA-PL (pol-Latn)": 7.66,
+                    "FiQA-PL": 7.63,
+                    "FiQA2018": 7.0,
+                    "GerDaLIRSmall (deu-Latn)": 4.59,
+                    "HellaSwag": 5.59,
+                    "HotpotQA": 18.75,
+                    "HotpotQA-PL": 19.72,
+                    "LEMBNarrativeQARetrieval": 11.45,
+                    "LEMBNeedleRetrieval": 17.5,
+                    "LEMBPasskeyRetrieval": 20.25,
+                    "LEMBQMSumRetrieval": 14.07,
+                    "LEMBSummScreenFDRetrieval": 40.52,
+                    "LEMBWikimQARetrieval": 28.1,
+                    "LeCaRDv2 (zho-Hans)": 24.68,
+                    "LegalBenchConsumerContractsQA": 54.66,
+                    "LegalBenchCorporateLobbying": 69.39,
+                    "LegalQuAD (deu-Latn)": 16.64,
+                    "LegalSummarization": 53.89,
+                    "MMarcoRetrieval (cmn-Hans)": 34.78,
+                    "MSMARCO": 7.6,
+                    "MSMARCO-PL": 7.22,
+                    "MedicalRetrieval (cmn-Hans)": 6.68,
+                    "MintakaRetrieval (ara-Arab)": 14.06,
+                    "MintakaRetrieval (deu-Latn)": 15.26,
+                    "MintakaRetrieval (spa-Latn)": 15.65,
+                    "MintakaRetrieval (fra-Latn)": 15.53,
+                    "MintakaRetrieval (hin-Deva)": 13.67,
+                    "MintakaRetrieval (ita-Latn)": 15.94,
+                    "MintakaRetrieval (jpn-Hira)": 12.8,
+                    "MintakaRetrieval (por-Latn)": 15.03,
+                    "MintakaRetrieval (fr)": 15.53,
+                    "NFCorpus": 16.54,
+                    "NFCorpus-PL (pol-Latn)": 17.45,
+                    "NFCorpus-PL": 17.45,
+                    "NQ": 8.42,
+                    "NQ-PL": 9.65,
+                    "PIQA": 6.53,
+                    "Quail": 1.91,
+                    "Quora-PL": 74.96,
+                    "QuoraRetrieval": 77.03,
+                    "RARbCode": 2.31,
+                    "RARbMath": 27.19,
+                    "RiaNewsRetrieval (rus-Cyrl)": 42.75,
+                    "RuBQRetrieval (rus-Cyrl)": 30.02,
+                    "SCIDOCS": 5.63,
+                    "SCIDOCS-PL (pol-Latn)": 7.47,
+                    "SCIDOCS-PL": 7.48,
+                    "SIQA": 1.07,
+                    "SciFact": 38.2,
+                    "SciFact-PL (pol-Latn)": 39.79,
+                    "SciFact-PL": 39.79,
+                    "SpartQA": 1.56,
+                    "SyntecRetrieval (fra-Latn)": 55.31,
+                    "SyntecRetrieval": 55.31,
+                    "T2Retrieval (cmn-Hans)": 25.32,
+                    "TRECCOVID": 16.34,
+                    "TRECCOVID-PL (pol-Latn)": 18.51,
+                    "TRECCOVID-PL": 18.45,
+                    "TempReasonL1": 1.56,
+                    "TempReasonL2Fact": 7.06,
+                    "TempReasonL2Pure": 0.14,
+                    "TempReasonL3Fact": 8.74,
+                    "TempReasonL3Pure": 4.73,
+                    "Touche2020": 4.88,
+                    "VideoRetrieval (cmn-Hans)": 22.04,
+                    "WinoGrande": 54.3,
+                    "XPQARetrieval (ara-Arab_ara-Arab)": 35.19,
+                    "XPQARetrieval (eng-Latn_ara-Arab)": 20.64,
+                    "XPQARetrieval (ara-Arab_eng-Latn)": 32.47,
+                    "XPQARetrieval (deu-Latn_deu-Latn)": 53.56,
+                    "XPQARetrieval (eng-Latn_deu-Latn)": 24.31,
+                    "XPQARetrieval (deu-Latn_eng-Latn)": 54.87,
+                    "XPQARetrieval (spa-Latn_spa-Latn)": 44.49,
+                    "XPQARetrieval (eng-Latn_spa-Latn)": 25.31,
+                    "XPQARetrieval (spa-Latn_eng-Latn)": 43.4,
+                    "XPQARetrieval (fra-Latn_fra-Latn)": 51.74,
+                    "XPQARetrieval (eng-Latn_fra-Latn)": 21.29,
+                    "XPQARetrieval (fra-Latn_eng-Latn)": 49.4,
+                    "XPQARetrieval (hin-Deva_hin-Deva)": 66.64,
+                    "XPQARetrieval (eng-Latn_hin-Deva)": 23.25,
+                    "XPQARetrieval (hin-Deva_eng-Latn)": 64.54,
+                    "XPQARetrieval (ita-Latn_ita-Latn)": 56.27,
+                    "XPQARetrieval (eng-Latn_ita-Latn)": 25.8,
+                    "XPQARetrieval (ita-Latn_eng-Latn)": 52.69,
+                    "XPQARetrieval (jpn-Hira_jpn-Hira)": 58.6,
+                    "XPQARetrieval (eng-Latn_jpn-Hira)": 21.49,
+                    "XPQARetrieval (jpn-Hira_eng-Latn)": 52.41,
+                    "XPQARetrieval (kor-Hang_kor-Hang)": 27.66,
+                    "XPQARetrieval (eng-Latn_kor-Hang)": 23.33,
+                    "XPQARetrieval (kor-Hang_eng-Latn)": 23.96,
+                    "XPQARetrieval (pol-Latn_pol-Latn)": 37.33,
+                    "XPQARetrieval (eng-Latn_pol-Latn)": 16.19,
+                    "XPQARetrieval (pol-Latn_eng-Latn)": 37.7,
+                    "XPQARetrieval (por-Latn_por-Latn)": 38.49,
+                    "XPQARetrieval (eng-Latn_por-Latn)": 19.41,
+                    "XPQARetrieval (por-Latn_eng-Latn)": 37.33,
+                    "XPQARetrieval (tam-Taml_tam-Taml)": 37.32,
+                    "XPQARetrieval (eng-Latn_tam-Taml)": 20.53,
+                    "XPQARetrieval (tam-Taml_eng-Latn)": 30.14,
+                    "XPQARetrieval (cmn-Hans_cmn-Hans)": 50.7,
+                    "XPQARetrieval (eng-Latn_cmn-Hans)": 20.59,
+                    "XPQARetrieval (cmn-Hans_eng-Latn)": 48.23,
+                    "XPQARetrieval (fr)": 51.74
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "LaBSE",
+                    "AFQMC (cmn-Hans)": 21.02,
+                    "ATEC (cmn-Hans)": 26.61,
+                    "BIOSSES": 78.7,
+                    "BQ (cmn-Hans)": 42.6,
+                    "CDSC-R (pol-Latn)": 85.53,
+                    "CDSC-R": 85.53,
+                    "LCQMC (cmn-Hans)": 52.19,
+                    "PAWSX (cmn-Hans)": 10.23,
+                    "RUParaPhraserSTS (rus-Cyrl)": 65.74,
+                    "RuSTSBenchmarkSTS (rus-Cyrl)": 73.34,
+                    "SICK-R": 69.99,
+                    "SICK-R-PL (pol-Latn)": 65.9,
+                    "SICK-R-PL": 65.9,
+                    "SICKFr (fra-Latn)": 69.94,
+                    "SICKFr": 69.94,
+                    "STS12": 65.08,
+                    "STS13": 67.98,
+                    "STS14": 64.03,
+                    "STS15": 76.59,
+                    "STS16": 72.98,
+                    "STS17 (nld-Latn_eng-Latn)": 75.22,
+                    "STS17 (eng-Latn_tur-Latn)": 72.07,
+                    "STS17 (spa-Latn)": 80.83,
+                    "STS17 (kor-Hang)": 71.32,
+                    "STS17 (eng-Latn_deu-Latn)": 73.85,
+                    "STS17 (ita-Latn_eng-Latn)": 76.99,
+                    "STS17 (eng-Latn_ara-Arab)": 74.51,
+                    "STS17 (ara-Arab)": 69.07,
+                    "STS17 (fra-Latn_eng-Latn)": 76.98,
+                    "STS17 (spa-Latn_eng-Latn)": 65.71,
+                    "STS17 (en-en)": 79.45,
+                    "STS17 (ar-ar)": 69.07,
+                    "STS17 (en-ar)": 74.51,
+                    "STS17 (en-de)": 73.85,
+                    "STS17 (en-tr)": 72.07,
+                    "STS17 (es-en)": 65.71,
+                    "STS17 (es-es)": 80.83,
+                    "STS17 (fr-en)": 76.98,
+                    "STS17 (it-en)": 76.99,
+                    "STS17 (ko-ko)": 71.32,
+                    "STS17 (nl-en)": 75.22,
+                    "STS22 (pol-Latn_eng-Latn)": 69.41,
+                    "STS22 (deu-Latn_eng-Latn)": 50.14,
+                    "STS22 (spa-Latn)": 63.18,
+                    "STS22 (deu-Latn_pol-Latn)": 58.69,
+                    "STS22 (fra-Latn)": 77.95,
+                    "STS22 (fra-Latn_pol-Latn)": 61.98,
+                    "STS22 (deu-Latn)": 48.58,
+                    "STS22 (pol-Latn)": 39.3,
+                    "STS22 (en)": 60.97,
+                    "STS22 (spa-Latn_ita-Latn)": 69.69,
+                    "STS22 (cmn-Hans_eng-Latn)": 64.02,
+                    "STS22 (deu-Latn_fra-Latn)": 53.28,
+                    "STS22 (tur-Latn)": 58.15,
+                    "STS22 (ita-Latn)": 72.22,
+                    "STS22 (rus-Cyrl)": 57.49,
+                    "STS22 (spa-Latn_eng-Latn)": 71.86,
+                    "STS22 (ara-Arab)": 57.67,
+                    "STS22 (cmn-Hans)": 63.02,
+                    "STS22 (ar)": 57.67,
+                    "STS22 (de)": 48.58,
+                    "STS22 (de-en)": 50.14,
+                    "STS22 (de-fr)": 53.28,
+                    "STS22 (de-pl)": 58.69,
+                    "STS22 (es)": 63.18,
+                    "STS22 (es-en)": 71.86,
+                    "STS22 (es-it)": 69.69,
+                    "STS22 (fr)": 77.95,
+                    "STS22 (fr-pl)": 61.98,
+                    "STS22 (it)": 72.22,
+                    "STS22 (pl)": 39.28,
+                    "STS22 (pl-en)": 69.41,
+                    "STS22 (ru)": 57.49,
+                    "STS22 (tr)": 58.15,
+                    "STS22 (zh)": 63.02,
+                    "STS22 (zh-en)": 64.02,
+                    "STSB (cmn-Hans)": 68.38,
+                    "STSBenchmark": 72.25,
+                    "STSBenchmarkMultilingualSTS (en)": 72.25,
+                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 73.06,
+                    "STSBenchmarkMultilingualSTS (fra-Latn)": 75.1,
+                    "STSBenchmarkMultilingualSTS (spa-Latn)": 72.92,
+                    "STSBenchmarkMultilingualSTS (nld-Latn)": 70.22,
+                    "STSBenchmarkMultilingualSTS (cmn-Hans)": 69.5,
+                    "STSBenchmarkMultilingualSTS (ita-Latn)": 72.97,
+                    "STSBenchmarkMultilingualSTS (por-Latn)": 71.65,
+                    "STSBenchmarkMultilingualSTS (deu-Latn)": 72.43,
+                    "STSBenchmarkMultilingualSTS (pol-Latn)": 72.58,
+                    "STSBenchmarkMultilingualSTS (fr)": 75.1
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "LaBSE",
+                    "SummEval": 31.05,
+                    "SummEvalFr (fra-Latn)": 30.16,
+                    "SummEvalFr": 30.16
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "LaBSE",
+                    "CEDRClassification (rus-Cyrl)": 40.61,
+                    "SensitiveTopicsClassification (rus-Cyrl)": 22.23
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "LaBSE"
+                }
+            ]
+        }
+    },
+    "sentence-t5-large": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "sentence-t5-large",
+                    "BUCC (de-en)": 87.0,
+                    "BUCC (fr-en)": 88.91,
+                    "BUCC (ru-en)": 0.44,
+                    "BUCC (zh-en)": 0.95,
+                    "Tatoeba (afr-eng)": 23.7,
+                    "Tatoeba (amh-eng)": 0.65,
+                    "Tatoeba (ang-eng)": 30.98,
+                    "Tatoeba (ara-eng)": 0.48,
+                    "Tatoeba (arq-eng)": 0.68,
+                    "Tatoeba (arz-eng)": 0.22,
+                    "Tatoeba (ast-eng)": 55.3,
+                    "Tatoeba (awa-eng)": 1.03,
+                    "Tatoeba (aze-eng)": 5.83,
+                    "Tatoeba (bel-eng)": 1.66,
+                    "Tatoeba (ben-eng)": 0.0,
+                    "Tatoeba (ber-eng)": 5.62,
+                    "Tatoeba (bos-eng)": 12.23,
+                    "Tatoeba (bre-eng)": 5.84,
+                    "Tatoeba (bul-eng)": 1.35,
+                    "Tatoeba (cat-eng)": 48.56,
+                    "Tatoeba (cbk-eng)": 46.97,
+                    "Tatoeba (ceb-eng)": 9.79,
+                    "Tatoeba (ces-eng)": 6.0,
+                    "Tatoeba (cha-eng)": 24.21,
+                    "Tatoeba (cmn-eng)": 2.26,
+                    "Tatoeba (cor-eng)": 4.03,
+                    "Tatoeba (csb-eng)": 9.53,
+                    "Tatoeba (cym-eng)": 9.17,
+                    "Tatoeba (dan-eng)": 34.63,
+                    "Tatoeba (deu-eng)": 89.31,
+                    "Tatoeba (dsb-eng)": 9.68,
+                    "Tatoeba (dtp-eng)": 4.66,
+                    "Tatoeba (ell-eng)": 0.77,
+                    "Tatoeba (epo-eng)": 26.88,
+                    "Tatoeba (est-eng)": 5.19,
+                    "Tatoeba (eus-eng)": 9.46,
+                    "Tatoeba (fao-eng)": 21.59,
+                    "Tatoeba (fin-eng)": 5.66,
+                    "Tatoeba (fra-eng)": 79.71,
+                    "Tatoeba (fry-eng)": 28.29,
+                    "Tatoeba (gla-eng)": 2.34,
+                    "Tatoeba (gle-eng)": 3.55,
+                    "Tatoeba (glg-eng)": 56.25,
+                    "Tatoeba (gsw-eng)": 24.25,
+                    "Tatoeba (heb-eng)": 0.57,
+                    "Tatoeba (hin-eng)": 0.12,
+                    "Tatoeba (hrv-eng)": 10.29,
+                    "Tatoeba (hsb-eng)": 9.52,
+                    "Tatoeba (hun-eng)": 6.22,
+                    "Tatoeba (hye-eng)": 0.81,
+                    "Tatoeba (ido-eng)": 41.11,
+                    "Tatoeba (ile-eng)": 54.0,
+                    "Tatoeba (ina-eng)": 75.47,
+                    "Tatoeba (ind-eng)": 13.02,
+                    "Tatoeba (isl-eng)": 8.98,
+                    "Tatoeba (ita-eng)": 67.23,
+                    "Tatoeba (jav-eng)": 8.54,
+                    "Tatoeba (jpn-eng)": 0.99,
+                    "Tatoeba (kab-eng)": 1.85,
+                    "Tatoeba (kat-eng)": 1.37,
+                    "Tatoeba (kaz-eng)": 0.67,
+                    "Tatoeba (khm-eng)": 0.56,
+                    "Tatoeba (kor-eng)": 1.73,
+                    "Tatoeba (kur-eng)": 9.23,
+                    "Tatoeba (kzj-eng)": 5.38,
+                    "Tatoeba (lat-eng)": 21.3,
+                    "Tatoeba (lfn-eng)": 40.48,
+                    "Tatoeba (lit-eng)": 5.38,
+                    "Tatoeba (lvs-eng)": 6.83,
+                    "Tatoeba (mal-eng)": 0.45,
+                    "Tatoeba (mar-eng)": 0.01,
+                    "Tatoeba (max-eng)": 16.44,
+                    "Tatoeba (mhr-eng)": 0.33,
+                    "Tatoeba (mkd-eng)": 0.4,
+                    "Tatoeba (mon-eng)": 2.48,
+                    "Tatoeba (nds-eng)": 34.66,
+                    "Tatoeba (nld-eng)": 42.72,
+                    "Tatoeba (nno-eng)": 24.08,
+                    "Tatoeba (nob-eng)": 34.17,
+                    "Tatoeba (nov-eng)": 55.01,
+                    "Tatoeba (oci-eng)": 29.15,
+                    "Tatoeba (orv-eng)": 0.2,
+                    "Tatoeba (pam-eng)": 6.99,
+                    "Tatoeba (pes-eng)": 0.9,
+                    "Tatoeba (pms-eng)": 30.8,
+                    "Tatoeba (pol-eng)": 12.81,
+                    "Tatoeba (por-eng)": 73.45,
+                    "Tatoeba (ron-eng)": 54.86,
+                    "Tatoeba (rus-eng)": 2.43,
+                    "Tatoeba (slk-eng)": 8.35,
+                    "Tatoeba (slv-eng)": 9.3,
+                    "Tatoeba (spa-eng)": 78.87,
+                    "Tatoeba (sqi-eng)": 11.74,
+                    "Tatoeba (srp-eng)": 5.83,
+                    "Tatoeba (swe-eng)": 35.41,
+                    "Tatoeba (swg-eng)": 28.18,
+                    "Tatoeba (swh-eng)": 7.53,
+                    "Tatoeba (tam-eng)": 0.36,
+                    "Tatoeba (tat-eng)": 1.01,
+                    "Tatoeba (tel-eng)": 1.1,
+                    "Tatoeba (tgl-eng)": 12.4,
+                    "Tatoeba (tha-eng)": 1.58,
+                    "Tatoeba (tuk-eng)": 4.95,
+                    "Tatoeba (tur-eng)": 6.45,
+                    "Tatoeba (tzl-eng)": 37.82,
+                    "Tatoeba (uig-eng)": 0.67,
+                    "Tatoeba (ukr-eng)": 1.88,
+                    "Tatoeba (urd-eng)": 0.0,
+                    "Tatoeba (uzb-eng)": 4.79,
+                    "Tatoeba (vie-eng)": 7.03,
+                    "Tatoeba (war-eng)": 9.68,
+                    "Tatoeba (wuu-eng)": 1.28,
+                    "Tatoeba (xho-eng)": 10.64,
+                    "Tatoeba (yid-eng)": 0.57,
+                    "Tatoeba (yue-eng)": 0.88,
+                    "Tatoeba (zsm-eng)": 14.67
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "sentence-t5-large",
+                    "AmazonCounterfactualClassification (de)": 67.97,
+                    "AmazonCounterfactualClassification (en)": 75.51,
+                    "AmazonCounterfactualClassification (en-ext)": 75.44,
+                    "AmazonCounterfactualClassification (ja)": 45.72,
+                    "AmazonPolarityClassification": 92.87,
+                    "AmazonReviewsClassification (de)": 43.16,
+                    "AmazonReviewsClassification (en)": 47.12,
+                    "AmazonReviewsClassification (es)": 42.89,
+                    "AmazonReviewsClassification (fr)": 41.48,
+                    "AmazonReviewsClassification (ja)": 22.49,
+                    "AmazonReviewsClassification (zh)": 22.12,
+                    "Banking77Classification": 78.46,
+                    "EmotionClassification": 51.74,
+                    "ImdbClassification": 87.01,
+                    "MTOPDomainClassification (de)": 80.56,
+                    "MTOPDomainClassification (en)": 90.99,
+                    "MTOPDomainClassification (es)": 80.78,
+                    "MTOPDomainClassification (fr)": 79.6,
+                    "MTOPDomainClassification (hi)": 21.22,
+                    "MTOPDomainClassification (th)": 15.82,
+                    "MTOPIntentClassification (de)": 52.5,
+                    "MTOPIntentClassification (en)": 64.98,
+                    "MTOPIntentClassification (es)": 52.07,
+                    "MTOPIntentClassification (fr)": 47.73,
+                    "MTOPIntentClassification (hi)": 3.74,
+                    "MTOPIntentClassification (th)": 4.96,
+                    "MasakhaNEWSClassification (fra)": 80.43,
+                    "MassiveIntentClassification (af)": 38.41,
+                    "MassiveIntentClassification (am)": 2.49,
+                    "MassiveIntentClassification (ar)": 4.7,
+                    "MassiveIntentClassification (az)": 31.77,
+                    "MassiveIntentClassification (bn)": 2.77,
+                    "MassiveIntentClassification (cy)": 31.69,
+                    "MassiveIntentClassification (da)": 41.76,
+                    "MassiveIntentClassification (de)": 52.01,
+                    "MassiveIntentClassification (el)": 9.74,
+                    "MassiveIntentClassification (en)": 71.78,
+                    "MassiveIntentClassification (es)": 54.1,
+                    "MassiveIntentClassification (fa)": 3.86,
+                    "MassiveIntentClassification (fi)": 34.07,
+                    "MassiveIntentClassification (fr)": 57.01,
+                    "MassiveIntentClassification (he)": 2.14,
+                    "MassiveIntentClassification (hi)": 2.97,
+                    "MassiveIntentClassification (hu)": 32.01,
+                    "MassiveIntentClassification (hy)": 3.17,
+                    "MassiveIntentClassification (id)": 34.55,
+                    "MassiveIntentClassification (is)": 32.0,
+                    "MassiveIntentClassification (it)": 52.94,
+                    "MassiveIntentClassification (ja)": 2.9,
+                    "MassiveIntentClassification (jv)": 32.42,
+                    "MassiveIntentClassification (ka)": 2.71,
+                    "MassiveIntentClassification (km)": 5.5,
+                    "MassiveIntentClassification (kn)": 2.41,
+                    "MassiveIntentClassification (ko)": 2.57,
+                    "MassiveIntentClassification (lv)": 35.09,
+                    "MassiveIntentClassification (ml)": 2.95,
+                    "MassiveIntentClassification (mn)": 18.33,
+                    "MassiveIntentClassification (ms)": 29.69,
+                    "MassiveIntentClassification (my)": 3.99,
+                    "MassiveIntentClassification (nb)": 41.29,
+                    "MassiveIntentClassification (nl)": 44.95,
+                    "MassiveIntentClassification (pl)": 37.67,
+                    "MassiveIntentClassification (pt)": 51.96,
+                    "MassiveIntentClassification (ro)": 43.83,
+                    "MassiveIntentClassification (ru)": 17.32,
+                    "MassiveIntentClassification (sl)": 33.71,
+                    "MassiveIntentClassification (sq)": 37.62,
+                    "MassiveIntentClassification (sv)": 40.67,
+                    "MassiveIntentClassification (sw)": 31.9,
+                    "MassiveIntentClassification (ta)": 1.91,
+                    "MassiveIntentClassification (te)": 2.54,
+                    "MassiveIntentClassification (th)": 3.85,
+                    "MassiveIntentClassification (tl)": 36.83,
+                    "MassiveIntentClassification (tr)": 33.0,
+                    "MassiveIntentClassification (ur)": 2.62,
+                    "MassiveIntentClassification (vi)": 22.81,
+                    "MassiveIntentClassification (zh-CN)": 1.09,
+                    "MassiveIntentClassification (zh-TW)": 3.49,
+                    "MassiveScenarioClassification (af)": 50.28,
+                    "MassiveScenarioClassification (am)": 7.15,
+                    "MassiveScenarioClassification (ar)": 12.12,
+                    "MassiveScenarioClassification (az)": 39.68,
+                    "MassiveScenarioClassification (bn)": 8.06,
+                    "MassiveScenarioClassification (cy)": 38.01,
+                    "MassiveScenarioClassification (da)": 51.44,
+                    "MassiveScenarioClassification (de)": 62.71,
+                    "MassiveScenarioClassification (el)": 17.19,
+                    "MassiveScenarioClassification (en)": 73.16,
+                    "MassiveScenarioClassification (es)": 59.56,
+                    "MassiveScenarioClassification (fa)": 6.5,
+                    "MassiveScenarioClassification (fi)": 41.72,
+                    "MassiveScenarioClassification (fr)": 63.6,
+                    "MassiveScenarioClassification (he)": 7.93,
+                    "MassiveScenarioClassification (hi)": 7.85,
+                    "MassiveScenarioClassification (hu)": 41.37,
+                    "MassiveScenarioClassification (hy)": 9.42,
+                    "MassiveScenarioClassification (id)": 44.88,
+                    "MassiveScenarioClassification (is)": 40.86,
+                    "MassiveScenarioClassification (it)": 60.09,
+                    "MassiveScenarioClassification (ja)": 6.56,
+                    "MassiveScenarioClassification (jv)": 40.18,
+                    "MassiveScenarioClassification (ka)": 7.37,
+                    "MassiveScenarioClassification (km)": 9.56,
+                    "MassiveScenarioClassification (kn)": 8.4,
+                    "MassiveScenarioClassification (ko)": 5.96,
+                    "MassiveScenarioClassification (lv)": 41.44,
+                    "MassiveScenarioClassification (ml)": 7.47,
+                    "MassiveScenarioClassification (mn)": 25.36,
+                    "MassiveScenarioClassification (ms)": 39.69,
+                    "MassiveScenarioClassification (my)": 9.68,
+                    "MassiveScenarioClassification (nb)": 49.92,
+                    "MassiveScenarioClassification (nl)": 56.09,
+                    "MassiveScenarioClassification (pl)": 45.2,
+                    "MassiveScenarioClassification (pt)": 57.99,
+                    "MassiveScenarioClassification (ro)": 56.0,
+                    "MassiveScenarioClassification (ru)": 27.47,
+                    "MassiveScenarioClassification (sl)": 41.04,
+                    "MassiveScenarioClassification (sq)": 49.38,
+                    "MassiveScenarioClassification (sv)": 50.97,
+                    "MassiveScenarioClassification (sw)": 40.62,
+                    "MassiveScenarioClassification (ta)": 7.59,
+                    "MassiveScenarioClassification (te)": 7.07,
+                    "MassiveScenarioClassification (th)": 8.52,
+                    "MassiveScenarioClassification (tl)": 49.89,
+                    "MassiveScenarioClassification (tr)": 43.08,
+                    "MassiveScenarioClassification (ur)": 9.31,
+                    "MassiveScenarioClassification (vi)": 27.46,
+                    "MassiveScenarioClassification (zh-CN)": 4.7,
+                    "MassiveScenarioClassification (zh-TW)": 7.24,
+                    "ToxicConversationsClassification": 71.73,
+                    "TweetSentimentExtractionClassification": 62.33
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "sentence-t5-large",
+                    "AlloProfClusteringP2P": 61.82,
+                    "AlloProfClusteringS2S": 39.78,
+                    "ArxivClusteringP2P": 41.62,
+                    "ArxivClusteringS2S": 29.44,
+                    "BiorxivClusteringP2P": 35.99,
+                    "BiorxivClusteringS2S": 24.02,
+                    "BlurbsClusteringP2P": 35.33,
+                    "BlurbsClusteringS2S": 13.27,
+                    "HALClusteringS2S": 18.73,
+                    "MLSUMClusteringP2P": 42.07,
+                    "MLSUMClusteringS2S": 31.87,
+                    "MasakhaNEWSClusteringP2P (fra)": 58.6,
+                    "MasakhaNEWSClusteringS2S (fra)": 31.33,
+                    "MedrxivClusteringP2P": 32.4,
+                    "MedrxivClusteringS2S": 26.33,
+                    "RedditClustering": 54.53,
+                    "RedditClusteringP2P": 62.5,
+                    "StackExchangeClustering": 65.11,
+                    "StackExchangeClusteringP2P": 36.86,
+                    "TenKGnadClusteringP2P": 44.11,
+                    "TenKGnadClusteringS2S": 17.26,
+                    "TwentyNewsgroupsClustering": 49.33
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "sentence-t5-large",
+                    "OpusparcusPC (fr)": 91.19,
+                    "PawsXPairClassification (fr)": 59.59,
+                    "SprintDuplicateQuestions": 89.01,
+                    "TwitterSemEval2015": 79.75,
+                    "TwitterURLCorpus": 86.14
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "sentence-t5-large",
+                    "AlloprofReranking": 57.99,
+                    "AskUbuntuDupQuestions": 61.51,
+                    "MindSmallReranking": 30.27,
+                    "SciDocsRR": 74.88,
+                    "StackOverflowDupQuestions": 49.34,
+                    "SyntecReranking": 79.77
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "sentence-t5-large",
+                    "AlloprofRetrieval": 34.52,
+                    "ArguAna": 39.27,
+                    "BSARDRetrieval": 0.0,
+                    "CQADupstackRetrieval": 38.96,
+                    "ClimateFEVER": 11.36,
+                    "DBPedia": 31.55,
+                    "FEVER": 36.21,
+                    "FiQA2018": 43.55,
+                    "HotpotQA": 33.95,
+                    "MSMARCO": 23.96,
+                    "MintakaRetrieval (fr)": 23.92,
+                    "NFCorpus": 31.1,
+                    "NQ": 42.02,
+                    "QuoraRetrieval": 85.73,
+                    "SCIDOCS": 15.38,
+                    "SciFact": 49.91,
+                    "SyntecRetrieval": 71.05,
+                    "TRECCOVID": 46.11,
+                    "Touche2020": 21.63,
+                    "XPQARetrieval (fr)": 48.79
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "sentence-t5-large",
+                    "BIOSSES": 78.93,
+                    "SICK-R": 80.34,
+                    "SICKFr": 72.83,
+                    "STS12": 79.11,
+                    "STS13": 87.33,
+                    "STS14": 83.17,
+                    "STS15": 88.28,
+                    "STS16": 84.36,
+                    "STS17 (ar-ar)": 10.75,
+                    "STS17 (en-ar)": -4.71,
+                    "STS17 (en-de)": 73.62,
+                    "STS17 (en-en)": 88.99,
+                    "STS17 (en-tr)": -0.42,
+                    "STS17 (es-en)": 62.62,
+                    "STS17 (es-es)": 82.74,
+                    "STS17 (fr-en)": 67.86,
+                    "STS17 (it-en)": 51.86,
+                    "STS17 (ko-ko)": 9.44,
+                    "STS17 (nl-en)": 45.95,
+                    "STS22 (ar)": 27.01,
+                    "STS22 (de)": 43.73,
+                    "STS22 (de-en)": 49.93,
+                    "STS22 (de-fr)": 61.58,
+                    "STS22 (de-pl)": 38.83,
+                    "STS22 (en)": 62.39,
+                    "STS22 (es)": 57.68,
+                    "STS22 (es-en)": 68.09,
+                    "STS22 (es-it)": 61.58,
+                    "STS22 (fr)": 75.01,
+                    "STS22 (fr-pl)": 5.63,
+                    "STS22 (it)": 62.01,
+                    "STS22 (pl)": 25.0,
+                    "STS22 (pl-en)": 51.72,
+                    "STS22 (ru)": 14.21,
+                    "STS22 (tr)": 47.3,
+                    "STS22 (zh)": 30.47,
+                    "STS22 (zh-en)": 23.1,
+                    "STSBenchmark": 85.36,
+                    "STSBenchmarkMultilingualSTS (fr)": 77.59
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "sentence-t5-large",
+                    "SummEval": 29.64,
+                    "SummEvalFr": 30.23
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "sentence-t5-large"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "sentence-t5-large"
+                }
+            ]
+        }
+    },
+    "Cohere-embed-english-v3.0": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "Cohere-embed-english-v3.0"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "Cohere-embed-english-v3.0"
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "Cohere-embed-english-v3.0"
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "Cohere-embed-english-v3.0"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "Cohere-embed-english-v3.0"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "Cohere-embed-english-v3.0",
+                    "AILACasedocs": 31.54,
+                    "AILAStatutes": 27.15,
+                    "ARCChallenge": 9.89,
+                    "AlphaNLI": 15.1,
+                    "BrightRetrieval (psychology)": 21.82,
+                    "BrightRetrieval (economics)": 20.18,
+                    "BrightRetrieval (robotics)": 16.21,
+                    "BrightRetrieval (biology)": 18.98,
+                    "BrightRetrieval (stackoverflow)": 16.47,
+                    "BrightRetrieval (theoremqa_theorems)": 6.04,
+                    "BrightRetrieval (pony)": 1.77,
+                    "BrightRetrieval (sustainable_living)": 17.69,
+                    "BrightRetrieval (aops)": 6.46,
+                    "BrightRetrieval (theoremqa_questions)": 15.07,
+                    "BrightRetrieval (leetcode)": 26.78,
+                    "BrightRetrieval (earth_science)": 27.45,
+                    "GerDaLIRSmall": 6.05,
+                    "HellaSwag": 26.35,
+                    "LeCaRDv2": 21.02,
+                    "LegalBenchConsumerContractsQA": 77.12,
+                    "LegalBenchCorporateLobbying": 93.68,
+                    "LegalQuAD": 26.08,
+                    "LegalSummarization": 61.7,
+                    "PIQA": 28.49,
+                    "Quail": 4.1,
+                    "RARbCode": 57.19,
+                    "RARbMath": 72.26,
+                    "SIQA": 4.26,
+                    "SpartQA": 3.75,
+                    "TempReasonL1": 1.5,
+                    "TempReasonL2Fact": 35.91,
+                    "TempReasonL2Pure": 1.89,
+                    "TempReasonL3Fact": 27.51,
+                    "TempReasonL3Pure": 8.53,
+                    "WinoGrande": 58.01
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "Cohere-embed-english-v3.0"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "Cohere-embed-english-v3.0"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "Cohere-embed-english-v3.0"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "Cohere-embed-english-v3.0",
+                    "Core17InstructionRetrieval": 2.8,
+                    "News21InstructionRetrieval": 0.2,
+                    "Robust04InstructionRetrieval": -3.63
+                }
+            ]
+        }
+    },
+    "gtr-t5-large": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "gtr-t5-large"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "gtr-t5-large",
+                    "AmazonCounterfactualClassification (de)": 59.38,
+                    "AmazonCounterfactualClassification (en)": 70.03,
+                    "AmazonCounterfactualClassification (en-ext)": 69.86,
+                    "AmazonCounterfactualClassification (ja)": 45.87,
+                    "AmazonPolarityClassification": 73.92,
+                    "AmazonReviewsClassification (de)": 33.06,
+                    "AmazonReviewsClassification (en)": 37.21,
+                    "AmazonReviewsClassification (es)": 34.0,
+                    "AmazonReviewsClassification (fr)": 33.48,
+                    "AmazonReviewsClassification (ja)": 21.78,
+                    "AmazonReviewsClassification (zh)": 21.83,
+                    "Banking77Classification": 81.21,
+                    "EmotionClassification": 46.33,
+                    "ImdbClassification": 70.86,
+                    "MTOPDomainClassification (de)": 81.91,
+                    "MTOPDomainClassification (en)": 94.01,
+                    "MTOPDomainClassification (es)": 84.7,
+                    "MTOPDomainClassification (fr)": 82.48,
+                    "MTOPDomainClassification (hi)": 22.11,
+                    "MTOPDomainClassification (th)": 16.36,
+                    "MTOPIntentClassification (de)": 52.13,
+                    "MTOPIntentClassification (en)": 63.86,
+                    "MTOPIntentClassification (es)": 52.62,
+                    "MTOPIntentClassification (fr)": 46.39,
+                    "MTOPIntentClassification (hi)": 3.9,
+                    "MTOPIntentClassification (th)": 5.38,
+                    "MassiveIntentClassification (af)": 41.02,
+                    "MassiveIntentClassification (am)": 2.34,
+                    "MassiveIntentClassification (ar)": 4.87,
+                    "MassiveIntentClassification (az)": 34.92,
+                    "MassiveIntentClassification (bn)": 2.52,
+                    "MassiveIntentClassification (cy)": 35.87,
+                    "MassiveIntentClassification (da)": 45.3,
+                    "MassiveIntentClassification (de)": 51.48,
+                    "MassiveIntentClassification (el)": 10.0,
+                    "MassiveIntentClassification (en)": 70.06,
+                    "MassiveIntentClassification (es)": 53.3,
+                    "MassiveIntentClassification (fa)": 3.59,
+                    "MassiveIntentClassification (fi)": 37.35,
+                    "MassiveIntentClassification (fr)": 54.83,
+                    "MassiveIntentClassification (he)": 2.52,
+                    "MassiveIntentClassification (hi)": 2.88,
+                    "MassiveIntentClassification (hu)": 33.52,
+                    "MassiveIntentClassification (hy)": 3.13,
+                    "MassiveIntentClassification (id)": 40.11,
+                    "MassiveIntentClassification (is)": 34.77,
+                    "MassiveIntentClassification (it)": 51.21,
+                    "MassiveIntentClassification (ja)": 4.75,
+                    "MassiveIntentClassification (jv)": 35.6,
+                    "MassiveIntentClassification (ka)": 2.71,
+                    "MassiveIntentClassification (km)": 5.48,
+                    "MassiveIntentClassification (kn)": 2.44,
+                    "MassiveIntentClassification (ko)": 2.59,
+                    "MassiveIntentClassification (lv)": 38.15,
+                    "MassiveIntentClassification (ml)": 2.67,
+                    "MassiveIntentClassification (mn)": 18.47,
+                    "MassiveIntentClassification (ms)": 35.58,
+                    "MassiveIntentClassification (my)": 4.35,
+                    "MassiveIntentClassification (nb)": 43.78,
+                    "MassiveIntentClassification (nl)": 45.96,
+                    "MassiveIntentClassification (pl)": 39.08,
+                    "MassiveIntentClassification (pt)": 52.27,
+                    "MassiveIntentClassification (ro)": 46.39,
+                    "MassiveIntentClassification (ru)": 16.82,
+                    "MassiveIntentClassification (sl)": 37.3,
+                    "MassiveIntentClassification (sq)": 41.73,
+                    "MassiveIntentClassification (sv)": 43.51,
+                    "MassiveIntentClassification (sw)": 35.97,
+                    "MassiveIntentClassification (ta)": 1.52,
+                    "MassiveIntentClassification (te)": 2.57,
+                    "MassiveIntentClassification (th)": 3.94,
+                    "MassiveIntentClassification (tl)": 41.03,
+                    "MassiveIntentClassification (tr)": 33.75,
+                    "MassiveIntentClassification (ur)": 2.57,
+                    "MassiveIntentClassification (vi)": 25.23,
+                    "MassiveIntentClassification (zh-CN)": 2.41,
+                    "MassiveIntentClassification (zh-TW)": 4.64,
+                    "MassiveScenarioClassification (af)": 51.48,
+                    "MassiveScenarioClassification (am)": 7.74,
+                    "MassiveScenarioClassification (ar)": 12.03,
+                    "MassiveScenarioClassification (az)": 41.77,
+                    "MassiveScenarioClassification (bn)": 8.07,
+                    "MassiveScenarioClassification (cy)": 43.67,
+                    "MassiveScenarioClassification (da)": 54.88,
+                    "MassiveScenarioClassification (de)": 63.63,
+                    "MassiveScenarioClassification (el)": 16.83,
+                    "MassiveScenarioClassification (en)": 75.49,
+                    "MassiveScenarioClassification (es)": 61.48,
+                    "MassiveScenarioClassification (fa)": 6.48,
+                    "MassiveScenarioClassification (fi)": 43.54,
+                    "MassiveScenarioClassification (fr)": 64.06,
+                    "MassiveScenarioClassification (he)": 8.03,
+                    "MassiveScenarioClassification (hi)": 7.5,
+                    "MassiveScenarioClassification (hu)": 42.59,
+                    "MassiveScenarioClassification (hy)": 9.22,
+                    "MassiveScenarioClassification (id)": 48.67,
+                    "MassiveScenarioClassification (is)": 43.87,
+                    "MassiveScenarioClassification (it)": 59.83,
+                    "MassiveScenarioClassification (ja)": 5.62,
+                    "MassiveScenarioClassification (jv)": 42.18,
+                    "MassiveScenarioClassification (ka)": 7.52,
+                    "MassiveScenarioClassification (km)": 9.55,
+                    "MassiveScenarioClassification (kn)": 8.34,
+                    "MassiveScenarioClassification (ko)": 6.11,
+                    "MassiveScenarioClassification (lv)": 43.35,
+                    "MassiveScenarioClassification (ml)": 7.28,
+                    "MassiveScenarioClassification (mn)": 23.94,
+                    "MassiveScenarioClassification (ms)": 45.18,
+                    "MassiveScenarioClassification (my)": 9.33,
+                    "MassiveScenarioClassification (nb)": 52.71,
+                    "MassiveScenarioClassification (nl)": 57.02,
+                    "MassiveScenarioClassification (pl)": 46.79,
+                    "MassiveScenarioClassification (pt)": 59.45,
+                    "MassiveScenarioClassification (ro)": 56.8,
+                    "MassiveScenarioClassification (ru)": 25.85,
+                    "MassiveScenarioClassification (sl)": 42.51,
+                    "MassiveScenarioClassification (sq)": 50.41,
+                    "MassiveScenarioClassification (sv)": 54.16,
+                    "MassiveScenarioClassification (sw)": 43.02,
+                    "MassiveScenarioClassification (ta)": 7.21,
+                    "MassiveScenarioClassification (te)": 6.9,
+                    "MassiveScenarioClassification (th)": 8.7,
+                    "MassiveScenarioClassification (tl)": 51.76,
+                    "MassiveScenarioClassification (tr)": 42.54,
+                    "MassiveScenarioClassification (ur)": 9.32,
+                    "MassiveScenarioClassification (vi)": 31.51,
+                    "MassiveScenarioClassification (zh-CN)": 3.84,
+                    "MassiveScenarioClassification (zh-TW)": 8.16,
+                    "ToxicConversationsClassification": 68.65,
+                    "TweetSentimentExtractionClassification": 54.09
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "gtr-t5-large",
+                    "ArxivClusteringP2P": 37.5,
+                    "ArxivClusteringS2S": 30.55,
+                    "BiorxivClusteringP2P": 29.59,
+                    "BiorxivClusteringS2S": 25.72,
+                    "MedrxivClusteringP2P": 28.72,
+                    "MedrxivClusteringS2S": 27.39,
+                    "RedditClustering": 61.69,
+                    "RedditClusteringP2P": 61.67,
+                    "StackExchangeClustering": 69.93,
+                    "StackExchangeClusteringP2P": 33.21,
+                    "TwentyNewsgroupsClustering": 51.64
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "gtr-t5-large",
+                    "SprintDuplicateQuestions": 95.05,
+                    "TwitterSemEval2015": 76.03,
+                    "TwitterURLCorpus": 84.89
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "gtr-t5-large",
+                    "AskUbuntuDupQuestions": 61.64,
+                    "MindSmallReranking": 31.84,
+                    "SciDocsRR": 76.39,
+                    "StackOverflowDupQuestions": 51.58
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "gtr-t5-large",
+                    "ArguAna": 52.09,
+                    "CQADupstackRetrieval": 36.62,
+                    "ClimateFEVER": 26.9,
+                    "DBPedia": 39.55,
+                    "FEVER": 72.66,
+                    "FiQA2018": 42.79,
+                    "HotpotQA": 57.85,
+                    "MSMARCO": 42.73,
+                    "NFCorpus": 32.63,
+                    "NQ": 55.09,
+                    "QuoraRetrieval": 88.47,
+                    "SCIDOCS": 15.51,
+                    "SciFact": 63.42,
+                    "TRECCOVID": 56.68,
+                    "Touche2020": 28.29
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "gtr-t5-large",
+                    "BIOSSES": 84.86,
+                    "SICK-R": 73.39,
+                    "STS12": 70.33,
+                    "STS13": 82.19,
+                    "STS14": 77.16,
+                    "STS15": 86.31,
+                    "STS16": 81.85,
+                    "STS17 (ar-ar)": 10.19,
+                    "STS17 (en-ar)": -5.77,
+                    "STS17 (en-de)": 67.43,
+                    "STS17 (en-en)": 83.93,
+                    "STS17 (en-tr)": 8.75,
+                    "STS17 (es-en)": 54.96,
+                    "STS17 (es-es)": 82.74,
+                    "STS17 (fr-en)": 60.5,
+                    "STS17 (it-en)": 46.26,
+                    "STS17 (ko-ko)": 8.96,
+                    "STS17 (nl-en)": 47.48,
+                    "STS22 (ar)": 34.97,
+                    "STS22 (de)": 51.7,
+                    "STS22 (de-en)": 48.76,
+                    "STS22 (de-fr)": 57.5,
+                    "STS22 (de-pl)": 32.76,
+                    "STS22 (en)": 64.3,
+                    "STS22 (es)": 57.49,
+                    "STS22 (es-en)": 67.76,
+                    "STS22 (es-it)": 57.18,
+                    "STS22 (fr)": 78.7,
+                    "STS22 (fr-pl)": 61.98,
+                    "STS22 (it)": 67.67,
+                    "STS22 (pl)": 30.68,
+                    "STS22 (pl-en)": 54.17,
+                    "STS22 (ru)": 15.36,
+                    "STS22 (tr)": 58.12,
+                    "STS22 (zh)": 27.32,
+                    "STS22 (zh-en)": 29.42,
+                    "STSBenchmark": 77.6
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "gtr-t5-large",
+                    "SummEval": 29.5
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "gtr-t5-large"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "gtr-t5-large"
+                }
+            ]
+        }
+    },
+    "e5-small": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "e5-small",
+                    "BornholmBitextMining": 40.27
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "e5-small",
+                    "AngryTweetsClassification": 43.6,
+                    "DKHateClassification": 57.57,
+                    "DanishPoliticalCommentsClassification": 28.37,
+                    "LccSentimentClassification": 40.27,
+                    "MassiveIntentClassification (da)": 41.89,
+                    "MassiveIntentClassification (nb)": 40.25,
+                    "MassiveIntentClassification (sv)": 40.07,
+                    "MassiveScenarioClassification (da)": 49.93,
+                    "MassiveScenarioClassification (nb)": 48.58,
+                    "MassiveScenarioClassification (sv)": 47.06,
+                    "NoRecClassification": 41.84,
+                    "NordicLangClassification": 53.47,
+                    "NorwegianParliament": 56.57,
+                    "ScalaDaClassification": 50.15,
+                    "ScalaNbClassification": 50.03
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "e5-small",
+                    "BiorxivClusteringP2P": 36.1,
+                    "BiorxivClusteringS2S": 31.51,
+                    "MedrxivClusteringP2P": 31.31,
+                    "MedrxivClusteringS2S": 28.32,
+                    "RedditClustering": 43.27,
+                    "RedditClusteringP2P": 57.22,
+                    "StackExchangeClustering": 59.6,
+                    "StackExchangeClusteringP2P": 30.82,
+                    "TwentyNewsgroupsClustering": 37.65
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "e5-small"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "e5-small"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "e5-small"
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "e5-small"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "e5-small"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "e5-small"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "e5-small"
+                }
+            ]
+        }
+    },
+    "all-MiniLM-L6-v2": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "all-MiniLM-L6-v2",
+                    "BornholmBitextMining": 29.68,
+                    "BornholmBitextMining (dan-Latn)": 29.68,
+                    "Tatoeba (kab-Latn_eng-Latn)": 0.96,
+                    "Tatoeba (aze-Latn_eng-Latn)": 1.04,
+                    "Tatoeba (wuu-Hans_eng-Latn)": 0.6,
+                    "Tatoeba (fra-Latn_eng-Latn)": 8.17,
+                    "Tatoeba (nov-Latn_eng-Latn)": 13.97,
+                    "Tatoeba (slk-Latn_eng-Latn)": 3.27,
+                    "Tatoeba (mkd-Cyrl_eng-Latn)": 0.0,
+                    "Tatoeba (ukr-Cyrl_eng-Latn)": 0.3,
+                    "Tatoeba (kur-Latn_eng-Latn)": 5.21,
+                    "Tatoeba (hin-Deva_eng-Latn)": 0.0,
+                    "Tatoeba (tgl-Latn_eng-Latn)": 2.69,
+                    "Tatoeba (jav-Latn_eng-Latn)": 3.37,
+                    "Tatoeba (nob-Latn_eng-Latn)": 4.34,
+                    "Tatoeba (tam-Taml_eng-Latn)": 0.33,
+                    "Tatoeba (hsb-Latn_eng-Latn)": 2.65,
+                    "Tatoeba (srp-Cyrl_eng-Latn)": 1.28,
+                    "Tatoeba (cat-Latn_eng-Latn)": 6.93,
+                    "Tatoeba (jpn-Jpan_eng-Latn)": 0.97,
+                    "Tatoeba (kzj-Latn_eng-Latn)": 2.78,
+                    "Tatoeba (uig-Arab_eng-Latn)": 0.2,
+                    "Tatoeba (max-Deva_eng-Latn)": 6.93,
+                    "Tatoeba (dtp-Latn_eng-Latn)": 1.88,
+                    "Tatoeba (cbk-Latn_eng-Latn)": 7.04,
+                    "Tatoeba (bre-Latn_eng-Latn)": 3.22,
+                    "Tatoeba (arz-Arab_eng-Latn)": 0.0,
+                    "Tatoeba (heb-Hebr_eng-Latn)": 0.22,
+                    "Tatoeba (kat-Geor_eng-Latn)": 0.3,
+                    "Tatoeba (yid-Hebr_eng-Latn)": 0.14,
+                    "Tatoeba (lit-Latn_eng-Latn)": 0.92,
+                    "Tatoeba (ber-Tfng_eng-Latn)": 4.69,
+                    "Tatoeba (hun-Latn_eng-Latn)": 3.56,
+                    "Tatoeba (mhr-Cyrl_eng-Latn)": 0.0,
+                    "Tatoeba (isl-Latn_eng-Latn)": 2.37,
+                    "Tatoeba (ind-Latn_eng-Latn)": 3.86,
+                    "Tatoeba (tuk-Latn_eng-Latn)": 3.52,
+                    "Tatoeba (kor-Hang_eng-Latn)": 0.45,
+                    "Tatoeba (ara-Arab_eng-Latn)": 0.0,
+                    "Tatoeba (tzl-Latn_eng-Latn)": 4.58,
+                    "Tatoeba (swe-Latn_eng-Latn)": 6.06,
+                    "Tatoeba (ang-Latn_eng-Latn)": 15.64,
+                    "Tatoeba (mon-Cyrl_eng-Latn)": 0.38,
+                    "Tatoeba (urd-Arab_eng-Latn)": 0.1,
+                    "Tatoeba (vie-Latn_eng-Latn)": 3.07,
+                    "Tatoeba (ina-Latn_eng-Latn)": 17.63,
+                    "Tatoeba (hrv-Latn_eng-Latn)": 3.83,
+                    "Tatoeba (war-Latn_eng-Latn)": 4.94,
+                    "Tatoeba (cor-Latn_eng-Latn)": 2.41,
+                    "Tatoeba (tur-Latn_eng-Latn)": 3.59,
+                    "Tatoeba (bul-Cyrl_eng-Latn)": 0.21,
+                    "Tatoeba (spa-Latn_eng-Latn)": 5.63,
+                    "Tatoeba (tel-Telu_eng-Latn)": 0.46,
+                    "Tatoeba (nds-Latn_eng-Latn)": 9.56,
+                    "Tatoeba (lvs-Latn_eng-Latn)": 2.61,
+                    "Tatoeba (amh-Ethi_eng-Latn)": 0.25,
+                    "Tatoeba (pms-Latn_eng-Latn)": 7.62,
+                    "Tatoeba (xho-Latn_eng-Latn)": 4.01,
+                    "Tatoeba (epo-Latn_eng-Latn)": 5.46,
+                    "Tatoeba (por-Latn_eng-Latn)": 8.29,
+                    "Tatoeba (ile-Latn_eng-Latn)": 13.54,
+                    "Tatoeba (ell-Grek_eng-Latn)": 0.1,
+                    "Tatoeba (oci-Latn_eng-Latn)": 6.55,
+                    "Tatoeba (pes-Arab_eng-Latn)": 0.0,
+                    "Tatoeba (tat-Cyrl_eng-Latn)": 0.44,
+                    "Tatoeba (awa-Deva_eng-Latn)": 0.51,
+                    "Tatoeba (fao-Latn_eng-Latn)": 5.33,
+                    "Tatoeba (swg-Latn_eng-Latn)": 8.92,
+                    "Tatoeba (uzb-Latn_eng-Latn)": 2.34,
+                    "Tatoeba (cym-Latn_eng-Latn)": 6.09,
+                    "Tatoeba (mar-Deva_eng-Latn)": 0.0,
+                    "Tatoeba (fry-Latn_eng-Latn)": 11.22,
+                    "Tatoeba (ces-Latn_eng-Latn)": 3.04,
+                    "Tatoeba (afr-Latn_eng-Latn)": 5.89,
+                    "Tatoeba (csb-Latn_eng-Latn)": 3.78,
+                    "Tatoeba (pol-Latn_eng-Latn)": 2.58,
+                    "Tatoeba (gla-Latn_eng-Latn)": 2.7,
+                    "Tatoeba (deu-Latn_eng-Latn)": 7.89,
+                    "Tatoeba (cmn-Hans_eng-Latn)": 1.92,
+                    "Tatoeba (ita-Latn_eng-Latn)": 9.9,
+                    "Tatoeba (ben-Beng_eng-Latn)": 0.0,
+                    "Tatoeba (glg-Latn_eng-Latn)": 9.31,
+                    "Tatoeba (dsb-Latn_eng-Latn)": 2.9,
+                    "Tatoeba (pam-Latn_eng-Latn)": 3.54,
+                    "Tatoeba (ast-Latn_eng-Latn)": 6.84,
+                    "Tatoeba (bos-Latn_eng-Latn)": 5.58,
+                    "Tatoeba (nld-Latn_eng-Latn)": 10.16,
+                    "Tatoeba (bel-Cyrl_eng-Latn)": 0.5,
+                    "Tatoeba (orv-Cyrl_eng-Latn)": 0.0,
+                    "Tatoeba (gsw-Latn_eng-Latn)": 11.33,
+                    "Tatoeba (dan-Latn_eng-Latn)": 7.84,
+                    "Tatoeba (hye-Armn_eng-Latn)": 0.41,
+                    "Tatoeba (mal-Mlym_eng-Latn)": 0.15,
+                    "Tatoeba (arq-Arab_eng-Latn)": 0.11,
+                    "Tatoeba (kaz-Cyrl_eng-Latn)": 0.42,
+                    "Tatoeba (khm-Khmr_eng-Latn)": 0.42,
+                    "Tatoeba (tha-Thai_eng-Latn)": 0.3,
+                    "Tatoeba (swh-Latn_eng-Latn)": 5.8,
+                    "Tatoeba (gle-Latn_eng-Latn)": 2.75,
+                    "Tatoeba (ceb-Latn_eng-Latn)": 3.39,
+                    "Tatoeba (sqi-Latn_eng-Latn)": 3.58,
+                    "Tatoeba (slv-Latn_eng-Latn)": 3.25,
+                    "Tatoeba (ido-Latn_eng-Latn)": 7.48,
+                    "Tatoeba (yue-Hant_eng-Latn)": 0.86,
+                    "Tatoeba (nno-Latn_eng-Latn)": 5.38,
+                    "Tatoeba (est-Latn_eng-Latn)": 2.36,
+                    "Tatoeba (lfn-Latn_eng-Latn)": 4.55,
+                    "Tatoeba (lat-Latn_eng-Latn)": 5.04,
+                    "Tatoeba (cha-Latn_eng-Latn)": 13.29,
+                    "Tatoeba (eus-Latn_eng-Latn)": 5.54,
+                    "Tatoeba (fin-Latn_eng-Latn)": 2.79,
+                    "Tatoeba (rus-Cyrl_eng-Latn)": 0.07,
+                    "Tatoeba (ron-Latn_eng-Latn)": 6.82,
+                    "Tatoeba (zsm-Latn_eng-Latn)": 4.24
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "all-MiniLM-L6-v2",
+                    "AllegroReviews (pol-Latn)": 24.64,
+                    "AmazonCounterfactualClassification (en)": 63.64,
+                    "AmazonCounterfactualClassification (en-ext)": 65.59,
+                    "AmazonCounterfactualClassification (deu-Latn)": 57.82,
+                    "AmazonCounterfactualClassification (jpn-Jpan)": 60.9,
+                    "AmazonPolarityClassification": 64.26,
+                    "AmazonReviewsClassification (en)": 30.85,
+                    "AmazonReviewsClassification (deu-Latn)": 26.44,
+                    "AmazonReviewsClassification (spa-Latn)": 27.35,
+                    "AmazonReviewsClassification (fra-Latn)": 26.88,
+                    "AmazonReviewsClassification (jpn-Jpan)": 23.78,
+                    "AmazonReviewsClassification (cmn-Hans)": 23.67,
+                    "AngryTweetsClassification": 42.49,
+                    "AngryTweetsClassification (dan-Latn)": 42.48,
+                    "Banking77Classification": 80.04,
+                    "CBD (pol-Latn)": 50.9,
+                    "DKHateClassification": 55.05,
+                    "DanishPoliticalCommentsClassification": 26.96,
+                    "DanishPoliticalCommentsClassification (dan-Latn)": 26.7,
+                    "EmotionClassification": 40.83,
+                    "GeoreviewClassification (rus-Cyrl)": 27.08,
+                    "HeadlineClassification (rus-Cyrl)": 27.77,
+                    "IFlyTek (cmn-Hans)": 16.09,
+                    "ImdbClassification": 61.76,
+                    "InappropriatenessClassification (rus-Cyrl)": 51.73,
+                    "JDReview (cmn-Hans)": 59.98,
+                    "KinopoiskClassification (rus-Cyrl)": 33.93,
+                    "LccSentimentClassification": 38.47,
+                    "LccSentimentClassification (dan-Latn)": 38.53,
+                    "MTOPDomainClassification (en)": 91.68,
+                    "MTOPDomainClassification (deu-Latn)": 70.47,
+                    "MTOPDomainClassification (spa-Latn)": 72.99,
+                    "MTOPDomainClassification (fra-Latn)": 75.1,
+                    "MTOPDomainClassification (hin-Deva)": 40.74,
+                    "MTOPDomainClassification (tha-Thai)": 15.66,
+                    "MTOPIntentClassification (en)": 61.55,
+                    "MTOPIntentClassification (deu-Latn)": 45.7,
+                    "MTOPIntentClassification (spa-Latn)": 44.19,
+                    "MTOPIntentClassification (fra-Latn)": 39.67,
+                    "MTOPIntentClassification (hin-Deva)": 18.69,
+                    "MTOPIntentClassification (tha-Thai)": 5.78,
+                    "MasakhaNEWSClassification (fra)": 74.05,
+                    "MasakhaNEWSClassification (amh-Ethi)": 33.03,
+                    "MasakhaNEWSClassification (eng)": 77.11,
+                    "MasakhaNEWSClassification (fra-Latn)": 68.84,
+                    "MasakhaNEWSClassification (hau-Latn)": 50.49,
+                    "MasakhaNEWSClassification (ibo-Latn)": 52.15,
+                    "MasakhaNEWSClassification (lin-Latn)": 68.29,
+                    "MasakhaNEWSClassification (lug-Latn)": 47.58,
+                    "MasakhaNEWSClassification (orm-Ethi)": 50.68,
+                    "MasakhaNEWSClassification (pcm-Latn)": 92.56,
+                    "MasakhaNEWSClassification (run-Latn)": 54.81,
+                    "MasakhaNEWSClassification (sna-Latn)": 65.58,
+                    "MasakhaNEWSClassification (som-Latn)": 39.8,
+                    "MasakhaNEWSClassification (swa-Latn)": 47.25,
+                    "MasakhaNEWSClassification (tir-Ethi)": 28.97,
+                    "MasakhaNEWSClassification (xho-Latn)": 54.14,
+                    "MasakhaNEWSClassification (yor-Latn)": 55.01,
+                    "MassiveIntentClassification (en)": 66.94,
+                    "MassiveIntentClassification (da)": 40.99,
+                    "MassiveIntentClassification (nb)": 39.34,
+                    "MassiveIntentClassification (sv)": 38.1,
+                    "MassiveIntentClassification (aze-Latn)": 30.63,
+                    "MassiveIntentClassification (spa-Latn)": 39.88,
+                    "MassiveIntentClassification (tam-Taml)": 11.31,
+                    "MassiveIntentClassification (swe-Latn)": 38.09,
+                    "MassiveIntentClassification (fas-Arab)": 19.1,
+                    "MassiveIntentClassification (khm-Khmr)": 4.89,
+                    "MassiveIntentClassification (mon-Cyrl)": 20.35,
+                    "MassiveIntentClassification (hye-Armn)": 7.62,
+                    "MassiveIntentClassification (kan-Knda)": 3.14,
+                    "MassiveIntentClassification (cmo-Hans)": 24.4,
+                    "MassiveIntentClassification (rus-Cyrl)": 27.58,
+                    "MassiveIntentClassification (jpn-Jpan)": 31.87,
+                    "MassiveIntentClassification (deu-Latn)": 43.44,
+                    "MassiveIntentClassification (ind-Latn)": 39.02,
+                    "MassiveIntentClassification (cym-Latn)": 34.54,
+                    "MassiveIntentClassification (nld-Latn)": 40.2,
+                    "MassiveIntentClassification (hin-Deva)": 17.7,
+                    "MassiveIntentClassification (afr-Latn)": 37.45,
+                    "MassiveIntentClassification (ell-Grek)": 24.19,
+                    "MassiveIntentClassification (mal-Mlym)": 2.87,
+                    "MassiveIntentClassification (por-Latn)": 43.76,
+                    "MassiveIntentClassification (sqi-Latn)": 40.7,
+                    "MassiveIntentClassification (urd-Arab)": 14.42,
+                    "MassiveIntentClassification (vie-Latn)": 37.09,
+                    "MassiveIntentClassification (hun-Latn)": 35.69,
+                    "MassiveIntentClassification (ron-Latn)": 40.54,
+                    "MassiveIntentClassification (ara-Arab)": 19.05,
+                    "MassiveIntentClassification (nob-Latn)": 39.36,
+                    "MassiveIntentClassification (slv-Latn)": 36.7,
+                    "MassiveIntentClassification (lav-Latn)": 36.97,
+                    "MassiveIntentClassification (heb-Hebr)": 22.48,
+                    "MassiveIntentClassification (pol-Latn)": 36.07,
+                    "MassiveIntentClassification (ita-Latn)": 41.59,
+                    "MassiveIntentClassification (msa-Latn)": 35.07,
+                    "MassiveIntentClassification (mya-Mymr)": 4.24,
+                    "MassiveIntentClassification (isl-Latn)": 29.95,
+                    "MassiveIntentClassification (tel-Telu)": 2.46,
+                    "MassiveIntentClassification (swa-Latn)": 34.98,
+                    "MassiveIntentClassification (amh-Ethi)": 2.62,
+                    "MassiveIntentClassification (cmo-Hant)": 22.56,
+                    "MassiveIntentClassification (tha-Thai)": 11.26,
+                    "MassiveIntentClassification (ben-Beng)": 13.1,
+                    "MassiveIntentClassification (fin-Latn)": 38.37,
+                    "MassiveIntentClassification (fra-Latn)": 42.55,
+                    "MassiveIntentClassification (kor-Kore)": 16.05,
+                    "MassiveIntentClassification (kat-Geor)": 9.07,
+                    "MassiveIntentClassification (dan-Latn)": 41.0,
+                    "MassiveIntentClassification (tur-Latn)": 33.76,
+                    "MassiveIntentClassification (tgl-Latn)": 37.92,
+                    "MassiveIntentClassification (jav-Latn)": 35.91,
+                    "MassiveScenarioClassification (en)": 73.81,
+                    "MassiveScenarioClassification (da)": 47.01,
+                    "MassiveScenarioClassification (nb)": 44.67,
+                    "MassiveScenarioClassification (sv)": 42.93,
+                    "MassiveScenarioClassification (mal-Mlym)": 7.67,
+                    "MassiveScenarioClassification (khm-Khmr)": 9.25,
+                    "MassiveScenarioClassification (deu-Latn)": 51.47,
+                    "MassiveScenarioClassification (msa-Latn)": 43.67,
+                    "MassiveScenarioClassification (heb-Hebr)": 24.01,
+                    "MassiveScenarioClassification (mon-Cyrl)": 25.47,
+                    "MassiveScenarioClassification (mya-Mymr)": 10.61,
+                    "MassiveScenarioClassification (ind-Latn)": 43.46,
+                    "MassiveScenarioClassification (nob-Latn)": 44.67,
+                    "MassiveScenarioClassification (fra-Latn)": 51.14,
+                    "MassiveScenarioClassification (tgl-Latn)": 45.69,
+                    "MassiveScenarioClassification (amh-Ethi)": 7.57,
+                    "MassiveScenarioClassification (fas-Arab)": 23.97,
+                    "MassiveScenarioClassification (vie-Latn)": 40.47,
+                    "MassiveScenarioClassification (sqi-Latn)": 47.21,
+                    "MassiveScenarioClassification (dan-Latn)": 47.02,
+                    "MassiveScenarioClassification (spa-Latn)": 49.0,
+                    "MassiveScenarioClassification (pol-Latn)": 43.82,
+                    "MassiveScenarioClassification (tel-Telu)": 7.95,
+                    "MassiveScenarioClassification (tha-Thai)": 19.5,
+                    "MassiveScenarioClassification (kor-Kore)": 20.3,
+                    "MassiveScenarioClassification (cmo-Hans)": 33.65,
+                    "MassiveScenarioClassification (urd-Arab)": 23.73,
+                    "MassiveScenarioClassification (aze-Latn)": 35.59,
+                    "MassiveScenarioClassification (ron-Latn)": 48.23,
+                    "MassiveScenarioClassification (jav-Latn)": 43.59,
+                    "MassiveScenarioClassification (slv-Latn)": 41.9,
+                    "MassiveScenarioClassification (kat-Geor)": 14.92,
+                    "MassiveScenarioClassification (lav-Latn)": 40.43,
+                    "MassiveScenarioClassification (cym-Latn)": 39.0,
+                    "MassiveScenarioClassification (swe-Latn)": 42.95,
+                    "MassiveScenarioClassification (rus-Cyrl)": 30.46,
+                    "MassiveScenarioClassification (ben-Beng)": 20.56,
+                    "MassiveScenarioClassification (por-Latn)": 50.72,
+                    "MassiveScenarioClassification (hye-Armn)": 13.03,
+                    "MassiveScenarioClassification (jpn-Jpan)": 37.3,
+                    "MassiveScenarioClassification (nld-Latn)": 48.43,
+                    "MassiveScenarioClassification (swa-Latn)": 43.32,
+                    "MassiveScenarioClassification (tam-Taml)": 17.37,
+                    "MassiveScenarioClassification (isl-Latn)": 36.12,
+                    "MassiveScenarioClassification (kan-Knda)": 7.85,
+                    "MassiveScenarioClassification (ell-Grek)": 31.3,
+                    "MassiveScenarioClassification (tur-Latn)": 38.85,
+                    "MassiveScenarioClassification (cmo-Hant)": 31.18,
+                    "MassiveScenarioClassification (fin-Latn)": 42.38,
+                    "MassiveScenarioClassification (hin-Deva)": 23.71,
+                    "MassiveScenarioClassification (ara-Arab)": 25.99,
+                    "MassiveScenarioClassification (hun-Latn)": 41.61,
+                    "MassiveScenarioClassification (afr-Latn)": 43.87,
+                    "MassiveScenarioClassification (ita-Latn)": 49.8,
+                    "MultilingualSentiment (cmn-Hans)": 41.28,
+                    "NoRecClassification": 40.02,
+                    "NoRecClassification (nob-Latn)": 37.93,
+                    "NordicLangClassification": 54.71,
+                    "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 54.7,
+                    "NorwegianParliament": 54.8,
+                    "OnlineShopping (cmn-Hans)": 57.74,
+                    "PAC (pol-Latn)": 59.78,
+                    "PolEmo2.0-IN (pol-Latn)": 40.29,
+                    "PolEmo2.0-OUT (pol-Latn)": 25.0,
+                    "RuReviewsClassification (rus-Cyrl)": 41.79,
+                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 10.08,
+                    "RuSciBenchOECDClassification (rus-Cyrl)": 8.3,
+                    "ScalaDaClassification": 50.03,
+                    "ScalaNbClassification": 50.17,
+                    "TNews (cmn-Hans)": 20.12,
+                    "ToxicConversationsClassification": 62.09,
+                    "TweetSentimentExtractionClassification": 54.04,
+                    "Waimai (cmn-Hans)": 62.72
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "all-MiniLM-L6-v2",
+                    "AlloProfClusteringP2P": 51.83,
+                    "AlloProfClusteringS2S": 32.07,
+                    "ArxivClusteringP2P": 46.55,
+                    "ArxivClusteringS2S": 37.86,
+                    "BiorxivClusteringP2P": 38.37,
+                    "BiorxivClusteringS2S": 32.88,
+                    "GeoreviewClusteringP2P (rus-Cyrl)": 20.25,
+                    "HALClusteringS2S": 18.84,
+                    "MLSUMClusteringP2P": 36.74,
+                    "MLSUMClusteringP2P (rus-Cyrl)": 23.91,
+                    "MLSUMClusteringS2S": 28.12,
+                    "MLSUMClusteringS2S (rus-Cyrl)": 19.07,
+                    "MasakhaNEWSClusteringP2P (fra)": 34.92,
+                    "MasakhaNEWSClusteringP2P (amh-Ethi)": 43.85,
+                    "MasakhaNEWSClusteringP2P (eng)": 48.88,
+                    "MasakhaNEWSClusteringP2P (fra-Latn)": 34.92,
+                    "MasakhaNEWSClusteringP2P (hau-Latn)": 24.77,
+                    "MasakhaNEWSClusteringP2P (ibo-Latn)": 45.94,
+                    "MasakhaNEWSClusteringP2P (lin-Latn)": 69.56,
+                    "MasakhaNEWSClusteringP2P (lug-Latn)": 49.4,
+                    "MasakhaNEWSClusteringP2P (orm-Ethi)": 25.34,
+                    "MasakhaNEWSClusteringP2P (pcm-Latn)": 85.57,
+                    "MasakhaNEWSClusteringP2P (run-Latn)": 50.75,
+                    "MasakhaNEWSClusteringP2P (sna-Latn)": 41.68,
+                    "MasakhaNEWSClusteringP2P (som-Latn)": 29.02,
+                    "MasakhaNEWSClusteringP2P (swa-Latn)": 21.87,
+                    "MasakhaNEWSClusteringP2P (tir-Ethi)": 42.93,
+                    "MasakhaNEWSClusteringP2P (xho-Latn)": 28.58,
+                    "MasakhaNEWSClusteringP2P (yor-Latn)": 31.45,
+                    "MasakhaNEWSClusteringS2S (fra)": 40.58,
+                    "MasakhaNEWSClusteringS2S (amh-Ethi)": 45.44,
+                    "MasakhaNEWSClusteringS2S (eng)": 41.09,
+                    "MasakhaNEWSClusteringS2S (fra-Latn)": 40.58,
+                    "MasakhaNEWSClusteringS2S (hau-Latn)": 15.42,
+                    "MasakhaNEWSClusteringS2S (ibo-Latn)": 37.02,
+                    "MasakhaNEWSClusteringS2S (lin-Latn)": 65.14,
+                    "MasakhaNEWSClusteringS2S (lug-Latn)": 44.21,
+                    "MasakhaNEWSClusteringS2S (orm-Ethi)": 24.79,
+                    "MasakhaNEWSClusteringS2S (pcm-Latn)": 61.48,
+                    "MasakhaNEWSClusteringS2S (run-Latn)": 51.25,
+                    "MasakhaNEWSClusteringS2S (sna-Latn)": 42.74,
+                    "MasakhaNEWSClusteringS2S (som-Latn)": 30.08,
+                    "MasakhaNEWSClusteringS2S (swa-Latn)": 9.55,
+                    "MasakhaNEWSClusteringS2S (tir-Ethi)": 46.04,
+                    "MasakhaNEWSClusteringS2S (xho-Latn)": 27.08,
+                    "MasakhaNEWSClusteringS2S (yor-Latn)": 31.04,
+                    "MedrxivClusteringP2P": 34.39,
+                    "MedrxivClusteringS2S": 31.86,
+                    "RedditClustering": 50.7,
+                    "RedditClusteringP2P": 54.8,
+                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 10.21,
+                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 9.43,
+                    "StackExchangeClustering": 53.14,
+                    "StackExchangeClusteringP2P": 34.26,
+                    "TwentyNewsgroupsClustering": 46.49
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "all-MiniLM-L6-v2",
+                    "CDSC-E (pol-Latn)": 47.27,
+                    "OpusparcusPC (fr)": 86.53,
+                    "OpusparcusPC (deu-Latn)": 89.91,
+                    "OpusparcusPC (en)": 97.46,
+                    "OpusparcusPC (fin-Latn)": 85.44,
+                    "OpusparcusPC (fra-Latn)": 86.53,
+                    "OpusparcusPC (rus-Cyrl)": 79.28,
+                    "OpusparcusPC (swe-Latn)": 83.78,
+                    "PSC (pol-Latn)": 81.87,
+                    "PawsXPairClassification (fr)": 55.4,
+                    "PawsXPairClassification (deu-Latn)": 51.22,
+                    "PawsXPairClassification (en)": 59.1,
+                    "PawsXPairClassification (spa-Latn)": 52.21,
+                    "PawsXPairClassification (fra-Latn)": 55.41,
+                    "PawsXPairClassification (jpn-Hira)": 48.97,
+                    "PawsXPairClassification (kor-Hang)": 50.53,
+                    "PawsXPairClassification (cmn-Hans)": 53.11,
+                    "SICK-E-PL (pol-Latn)": 47.32,
+                    "SprintDuplicateQuestions": 94.55,
+                    "TERRa (rus-Cyrl)": 45.03,
+                    "TwitterSemEval2015": 67.86,
+                    "TwitterURLCorpus": 84.7
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "all-MiniLM-L6-v2",
+                    "AlloprofReranking": 31.69,
+                    "AlloprofReranking (fra-Latn)": 62.62,
+                    "AskUbuntuDupQuestions": 63.48,
+                    "MMarcoReranking (cmn-Hans)": 4.74,
+                    "MindSmallReranking": 30.8,
+                    "RuBQReranking (rus-Cyrl)": 27.05,
+                    "SciDocsRR": 87.12,
+                    "StackOverflowDupQuestions": 50.76,
+                    "SyntecReranking": 59.57,
+                    "SyntecReranking (fra-Latn)": 67.31,
+                    "T2Reranking (cmn-Hans)": 56.26
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "all-MiniLM-L6-v2",
+                    "AILACasedocs": 19.72,
+                    "AILAStatutes": 20.52,
+                    "ARCChallenge": 9.48,
+                    "AlloprofRetrieval": 28.41,
+                    "AlloprofRetrieval (fra-Latn)": 28.41,
+                    "AlphaNLI": 28.19,
+                    "ArguAna": 50.17,
+                    "ArguAna-PL (pol-Latn)": 11.5,
+                    "BSARDRetrieval": 0.0,
+                    "BSARDRetrieval (fra-Latn)": 4.8,
+                    "CQADupstackRetrieval": 41.32,
+                    "ClimateFEVER": 20.27,
+                    "CmedqaRetrieval (cmn-Hans)": 2.03,
+                    "CovidRetrieval (cmn-Hans)": 0.8,
+                    "DBPedia": 32.33,
+                    "DuRetrieval (cmn-Hans)": 3.03,
+                    "EcomRetrieval (cmn-Hans)": 3.7,
+                    "FEVER": 51.93,
+                    "FiQA-PL (pol-Latn)": 2.29,
+                    "FiQA2018": 36.87,
+                    "GerDaLIRSmall (deu-Latn)": 2.41,
+                    "HellaSwag": 24.21,
+                    "HotpotQA": 46.51,
+                    "LEMBNarrativeQARetrieval": 18.27,
+                    "LEMBNeedleRetrieval": 20.0,
+                    "LEMBPasskeyRetrieval": 23.25,
+                    "LEMBQMSumRetrieval": 16.32,
+                    "LEMBSummScreenFDRetrieval": 54.8,
+                    "LEMBWikimQARetrieval": 46.23,
+                    "LeCaRDv2 (zho-Hans)": 17.5,
+                    "LegalBenchConsumerContractsQA": 65.6,
+                    "LegalBenchCorporateLobbying": 86.41,
+                    "LegalQuAD (deu-Latn)": 11.81,
+                    "LegalSummarization": 59.0,
+                    "MIRACLRetrieval (rus-Cyrl)": 0.39,
+                    "MMarcoRetrieval (cmn-Hans)": 6.21,
+                    "MSMARCO": 36.54,
+                    "MedicalRetrieval (cmn-Hans)": 1.76,
+                    "MintakaRetrieval (fr)": 9.19,
+                    "MintakaRetrieval (ara-Arab)": 2.22,
+                    "MintakaRetrieval (deu-Latn)": 15.43,
+                    "MintakaRetrieval (spa-Latn)": 7.72,
+                    "MintakaRetrieval (fra-Latn)": 9.19,
+                    "MintakaRetrieval (hin-Deva)": 2.65,
+                    "MintakaRetrieval (ita-Latn)": 8.48,
+                    "MintakaRetrieval (jpn-Hira)": 6.7,
+                    "MintakaRetrieval (por-Latn)": 9.76,
+                    "NFCorpus": 31.59,
+                    "NFCorpus-PL (pol-Latn)": 10.62,
+                    "NQ": 43.87,
+                    "PIQA": 25.28,
+                    "Quail": 3.92,
+                    "QuoraRetrieval": 87.56,
+                    "RARbCode": 44.27,
+                    "RARbMath": 68.19,
+                    "RiaNewsRetrieval (rus-Cyrl)": 0.67,
+                    "RuBQRetrieval (rus-Cyrl)": 2.64,
+                    "SCIDOCS": 21.64,
+                    "SCIDOCS-PL (pol-Latn)": 3.75,
+                    "SIQA": 1.56,
+                    "SciFact": 64.51,
+                    "SciFact-PL (pol-Latn)": 16.14,
+                    "SpartQA": 1.65,
+                    "SyntecRetrieval": 60.15,
+                    "SyntecRetrieval (fra-Latn)": 60.15,
+                    "T2Retrieval (cmn-Hans)": 1.6,
+                    "TRECCOVID": 47.25,
+                    "TRECCOVID-PL (pol-Latn)": 8.66,
+                    "TempReasonL1": 1.53,
+                    "TempReasonL2Fact": 17.65,
+                    "TempReasonL2Pure": 0.46,
+                    "TempReasonL3Fact": 14.16,
+                    "TempReasonL3Pure": 6.33,
+                    "Touche2020": 16.9,
+                    "VideoRetrieval (cmn-Hans)": 9.79,
+                    "WinoGrande": 47.33,
+                    "XPQARetrieval (fr)": 51.79,
+                    "XPQARetrieval (ara-Arab_ara-Arab)": 8.03,
+                    "XPQARetrieval (eng-Latn_ara-Arab)": 1.86,
+                    "XPQARetrieval (ara-Arab_eng-Latn)": 6.87,
+                    "XPQARetrieval (deu-Latn_deu-Latn)": 53.25,
+                    "XPQARetrieval (eng-Latn_deu-Latn)": 10.99,
+                    "XPQARetrieval (deu-Latn_eng-Latn)": 27.59,
+                    "XPQARetrieval (spa-Latn_spa-Latn)": 38.87,
+                    "XPQARetrieval (eng-Latn_spa-Latn)": 5.46,
+                    "XPQARetrieval (spa-Latn_eng-Latn)": 22.2,
+                    "XPQARetrieval (fra-Latn_fra-Latn)": 51.79,
+                    "XPQARetrieval (eng-Latn_fra-Latn)": 8.57,
+                    "XPQARetrieval (fra-Latn_eng-Latn)": 31.36,
+                    "XPQARetrieval (hin-Deva_hin-Deva)": 35.3,
+                    "XPQARetrieval (eng-Latn_hin-Deva)": 6.28,
+                    "XPQARetrieval (hin-Deva_eng-Latn)": 6.0,
+                    "XPQARetrieval (ita-Latn_ita-Latn)": 54.57,
+                    "XPQARetrieval (eng-Latn_ita-Latn)": 6.79,
+                    "XPQARetrieval (ita-Latn_eng-Latn)": 24.13,
+                    "XPQARetrieval (jpn-Hira_jpn-Hira)": 39.23,
+                    "XPQARetrieval (eng-Latn_jpn-Hira)": 4.1,
+                    "XPQARetrieval (jpn-Hira_eng-Latn)": 13.05,
+                    "XPQARetrieval (kor-Hang_kor-Hang)": 10.24,
+                    "XPQARetrieval (eng-Latn_kor-Hang)": 5.72,
+                    "XPQARetrieval (kor-Hang_eng-Latn)": 6.37,
+                    "XPQARetrieval (pol-Latn_pol-Latn)": 22.33,
+                    "XPQARetrieval (eng-Latn_pol-Latn)": 7.58,
+                    "XPQARetrieval (pol-Latn_eng-Latn)": 14.43,
+                    "XPQARetrieval (por-Latn_por-Latn)": 31.93,
+                    "XPQARetrieval (eng-Latn_por-Latn)": 5.9,
+                    "XPQARetrieval (por-Latn_eng-Latn)": 20.74,
+                    "XPQARetrieval (tam-Taml_tam-Taml)": 7.43,
+                    "XPQARetrieval (eng-Latn_tam-Taml)": 3.42,
+                    "XPQARetrieval (tam-Taml_eng-Latn)": 2.91,
+                    "XPQARetrieval (cmn-Hans_cmn-Hans)": 19.39,
+                    "XPQARetrieval (eng-Latn_cmn-Hans)": 5.05,
+                    "XPQARetrieval (cmn-Hans_eng-Latn)": 8.77
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "all-MiniLM-L6-v2",
+                    "AFQMC (cmn-Hans)": 8.59,
+                    "ATEC (cmn-Hans)": 13.52,
+                    "BIOSSES": 81.64,
+                    "BQ (cmn-Hans)": 23.84,
+                    "CDSC-R (pol-Latn)": 79.45,
+                    "LCQMC (cmn-Hans)": 23.85,
+                    "PAWSX (cmn-Hans)": 7.21,
+                    "RUParaPhraserSTS (rus-Cyrl)": 43.93,
+                    "RuSTSBenchmarkSTS (rus-Cyrl)": 55.56,
+                    "SICK-R": 77.58,
+                    "SICK-R-PL (pol-Latn)": 52.43,
+                    "SICKFr": 62.48,
+                    "SICKFr (fra-Latn)": 62.48,
+                    "STS12": 72.37,
+                    "STS13": 80.6,
+                    "STS14": 75.59,
+                    "STS15": 85.39,
+                    "STS16": 78.99,
+                    "STS17 (ar-ar)": 50.89,
+                    "STS17 (en-ar)": -4.28,
+                    "STS17 (en-de)": 35.82,
+                    "STS17 (en-en)": 87.59,
+                    "STS17 (en-tr)": 4.5,
+                    "STS17 (es-en)": 16.31,
+                    "STS17 (es-es)": 76.12,
+                    "STS17 (fr-en)": 37.09,
+                    "STS17 (it-en)": 24.45,
+                    "STS17 (ko-ko)": 43.39,
+                    "STS17 (nl-en)": 29.0,
+                    "STS17 (ara-Arab)": 50.89,
+                    "STS17 (spa-Latn_eng-Latn)": 16.31,
+                    "STS17 (kor-Hang)": 43.39,
+                    "STS17 (eng-Latn_tur-Latn)": 4.5,
+                    "STS17 (fra-Latn_eng-Latn)": 37.09,
+                    "STS17 (nld-Latn_eng-Latn)": 29.0,
+                    "STS17 (eng-Latn_ara-Arab)": -4.28,
+                    "STS17 (spa-Latn)": 76.12,
+                    "STS17 (eng-Latn_deu-Latn)": 35.82,
+                    "STS17 (ita-Latn_eng-Latn)": 24.45,
+                    "STS22 (ar)": 22.64,
+                    "STS22 (de)": 31.04,
+                    "STS22 (de-en)": 44.04,
+                    "STS22 (de-fr)": 30.07,
+                    "STS22 (de-pl)": 4.93,
+                    "STS22 (en)": 67.71,
+                    "STS22 (es)": 54.78,
+                    "STS22 (es-en)": 53.42,
+                    "STS22 (es-it)": 44.27,
+                    "STS22 (fr)": 77.0,
+                    "STS22 (fr-pl)": 50.71,
+                    "STS22 (it)": 60.4,
+                    "STS22 (pl)": 26.77,
+                    "STS22 (pl-en)": 32.8,
+                    "STS22 (ru)": 14.72,
+                    "STS22 (tr)": 33.69,
+                    "STS22 (zh)": 44.93,
+                    "STS22 (zh-en)": 41.64,
+                    "STS22 (ara-Arab)": 22.64,
+                    "STS22 (rus-Cyrl)": 14.72,
+                    "STS22 (tur-Latn)": 33.69,
+                    "STS22 (pol-Latn)": 26.77,
+                    "STS22 (spa-Latn_eng-Latn)": 53.42,
+                    "STS22 (cmn-Hans)": 44.93,
+                    "STS22 (spa-Latn)": 54.78,
+                    "STS22 (deu-Latn_fra-Latn)": 30.07,
+                    "STS22 (deu-Latn_pol-Latn)": -4.93,
+                    "STS22 (fra-Latn)": 77.0,
+                    "STS22 (deu-Latn)": 31.04,
+                    "STS22 (pol-Latn_eng-Latn)": 32.8,
+                    "STS22 (deu-Latn_eng-Latn)": 44.04,
+                    "STS22 (cmn-Hans_eng-Latn)": 41.64,
+                    "STS22 (spa-Latn_ita-Latn)": 44.27,
+                    "STS22 (fra-Latn_pol-Latn)": 50.71,
+                    "STS22 (ita-Latn)": 60.4,
+                    "STSB (cmn-Hans)": 37.8,
+                    "STSBenchmark": 82.03,
+                    "STSBenchmarkMultilingualSTS (fr)": 64.93,
+                    "STSBenchmarkMultilingualSTS (pol-Latn)": 56.42,
+                    "STSBenchmarkMultilingualSTS (por-Latn)": 61.56,
+                    "STSBenchmarkMultilingualSTS (ita-Latn)": 59.24,
+                    "STSBenchmarkMultilingualSTS (fra-Latn)": 64.93,
+                    "STSBenchmarkMultilingualSTS (deu-Latn)": 62.4,
+                    "STSBenchmarkMultilingualSTS (cmn-Hans)": 39.74,
+                    "STSBenchmarkMultilingualSTS (spa-Latn)": 61.62,
+                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 55.55,
+                    "STSBenchmarkMultilingualSTS (en)": 82.03,
+                    "STSBenchmarkMultilingualSTS (nld-Latn)": 55.46
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "all-MiniLM-L6-v2",
+                    "SummEval": 30.81,
+                    "SummEvalFr": 28.28,
+                    "SummEvalFr (fra-Latn)": 28.29
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "all-MiniLM-L6-v2",
+                    "CEDRClassification (rus-Cyrl)": 32.72,
+                    "SensitiveTopicsClassification (rus-Cyrl)": 17.82
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "all-MiniLM-L6-v2"
+                }
+            ]
+        }
+    },
+    "e5-base-v2": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "e5-base-v2"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "e5-base-v2"
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "e5-base-v2",
+                    "BiorxivClusteringP2P": 37.12,
+                    "BiorxivClusteringS2S": 33.41,
+                    "MedrxivClusteringP2P": 31.82,
+                    "MedrxivClusteringS2S": 29.68,
+                    "RedditClustering": 56.54,
+                    "RedditClusteringP2P": 63.23,
+                    "StackExchangeClustering": 64.6,
+                    "StackExchangeClusteringP2P": 33.02,
+                    "TwentyNewsgroupsClustering": 49.86
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "e5-base-v2"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "e5-base-v2"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "e5-base-v2"
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "e5-base-v2"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "e5-base-v2"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "e5-base-v2"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "e5-base-v2",
+                    "Core17InstructionRetrieval": -2.9,
+                    "News21InstructionRetrieval": -2.0,
+                    "Robust04InstructionRetrieval": -6.73
+                }
+            ]
+        }
+    },
+    "rubert-tiny-turbo": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "rubert-tiny-turbo",
+                    "Tatoeba (rus-Cyrl_eng-Latn)": 83.14
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "rubert-tiny-turbo",
+                    "AmazonPolarityClassification": 68.36,
+                    "Banking77Classification": 59.86,
+                    "EmotionClassification": 29.5,
+                    "GeoreviewClassification (rus-Cyrl)": 41.36,
+                    "HeadlineClassification (rus-Cyrl)": 68.9,
+                    "ImdbClassification": 58.36,
+                    "InappropriatenessClassification (rus-Cyrl)": 59.11,
+                    "KinopoiskClassification (rus-Cyrl)": 50.47,
+                    "MassiveIntentClassification (cmo-Hans)": 5.21,
+                    "MassiveIntentClassification (kor-Kore)": 2.53,
+                    "MassiveIntentClassification (hin-Deva)": 2.56,
+                    "MassiveIntentClassification (kan-Knda)": 2.06,
+                    "MassiveIntentClassification (kat-Geor)": 2.64,
+                    "MassiveIntentClassification (amh-Ethi)": 2.28,
+                    "MassiveIntentClassification (mya-Mymr)": 3.96,
+                    "MassiveIntentClassification (ell-Grek)": 9.66,
+                    "MassiveIntentClassification (lav-Latn)": 22.32,
+                    "MassiveIntentClassification (mal-Mlym)": 2.39,
+                    "MassiveIntentClassification (mon-Cyrl)": 28.99,
+                    "MassiveIntentClassification (urd-Arab)": 2.45,
+                    "MassiveIntentClassification (fas-Arab)": 3.34,
+                    "MassiveIntentClassification (ron-Latn)": 31.72,
+                    "MassiveIntentClassification (isl-Latn)": 24.85,
+                    "MassiveIntentClassification (en)": 50.16,
+                    "MassiveIntentClassification (hun-Latn)": 25.52,
+                    "MassiveIntentClassification (fra-Latn)": 31.51,
+                    "MassiveIntentClassification (tha-Thai)": 3.74,
+                    "MassiveIntentClassification (deu-Latn)": 32.1,
+                    "MassiveIntentClassification (tur-Latn)": 27.56,
+                    "MassiveIntentClassification (por-Latn)": 34.35,
+                    "MassiveIntentClassification (sqi-Latn)": 32.38,
+                    "MassiveIntentClassification (cmo-Hant)": 6.81,
+                    "MassiveIntentClassification (hye-Armn)": 2.72,
+                    "MassiveIntentClassification (dan-Latn)": 33.95,
+                    "MassiveIntentClassification (afr-Latn)": 30.4,
+                    "MassiveIntentClassification (ara-Arab)": 3.8,
+                    "MassiveIntentClassification (jav-Latn)": 28.53,
+                    "MassiveIntentClassification (tel-Telu)": 2.21,
+                    "MassiveIntentClassification (tgl-Latn)": 32.02,
+                    "MassiveIntentClassification (swa-Latn)": 27.79,
+                    "MassiveIntentClassification (jpn-Jpan)": 5.61,
+                    "MassiveIntentClassification (msa-Latn)": 28.94,
+                    "MassiveIntentClassification (nob-Latn)": 32.3,
+                    "MassiveIntentClassification (fin-Latn)": 31.13,
+                    "MassiveIntentClassification (ind-Latn)": 33.56,
+                    "MassiveIntentClassification (cym-Latn)": 31.68,
+                    "MassiveIntentClassification (slv-Latn)": 31.39,
+                    "MassiveIntentClassification (spa-Latn)": 31.03,
+                    "MassiveIntentClassification (ben-Beng)": 3.08,
+                    "MassiveIntentClassification (swe-Latn)": 30.23,
+                    "MassiveIntentClassification (rus-Cyrl)": 57.98,
+                    "MassiveIntentClassification (aze-Latn)": 23.58,
+                    "MassiveIntentClassification (ita-Latn)": 35.24,
+                    "MassiveIntentClassification (pol-Latn)": 26.82,
+                    "MassiveIntentClassification (vie-Latn)": 23.72,
+                    "MassiveIntentClassification (tam-Taml)": 1.5,
+                    "MassiveIntentClassification (heb-Hebr)": 2.25,
+                    "MassiveIntentClassification (nld-Latn)": 32.44,
+                    "MassiveIntentClassification (khm-Khmr)": 5.14,
+                    "MassiveScenarioClassification (cmo-Hans)": 10.6,
+                    "MassiveScenarioClassification (kor-Kore)": 5.63,
+                    "MassiveScenarioClassification (hin-Deva)": 7.41,
+                    "MassiveScenarioClassification (kan-Knda)": 7.6,
+                    "MassiveScenarioClassification (kat-Geor)": 7.01,
+                    "MassiveScenarioClassification (amh-Ethi)": 7.68,
+                    "MassiveScenarioClassification (mya-Mymr)": 10.73,
+                    "MassiveScenarioClassification (ell-Grek)": 17.95,
+                    "MassiveScenarioClassification (lav-Latn)": 29.29,
+                    "MassiveScenarioClassification (mal-Mlym)": 6.92,
+                    "MassiveScenarioClassification (mon-Cyrl)": 33.7,
+                    "MassiveScenarioClassification (urd-Arab)": 8.53,
+                    "MassiveScenarioClassification (fas-Arab)": 6.62,
+                    "MassiveScenarioClassification (ron-Latn)": 40.02,
+                    "MassiveScenarioClassification (isl-Latn)": 33.1,
+                    "MassiveScenarioClassification (en)": 61.29,
+                    "MassiveScenarioClassification (hun-Latn)": 36.41,
+                    "MassiveScenarioClassification (fra-Latn)": 42.9,
+                    "MassiveScenarioClassification (tha-Thai)": 8.26,
+                    "MassiveScenarioClassification (deu-Latn)": 42.07,
+                    "MassiveScenarioClassification (tur-Latn)": 34.85,
+                    "MassiveScenarioClassification (por-Latn)": 40.79,
+                    "MassiveScenarioClassification (sqi-Latn)": 42.66,
+                    "MassiveScenarioClassification (cmo-Hant)": 11.93,
+                    "MassiveScenarioClassification (hye-Armn)": 8.78,
+                    "MassiveScenarioClassification (dan-Latn)": 43.69,
+                    "MassiveScenarioClassification (afr-Latn)": 40.84,
+                    "MassiveScenarioClassification (ara-Arab)": 11.86,
+                    "MassiveScenarioClassification (jav-Latn)": 37.23,
+                    "MassiveScenarioClassification (tel-Telu)": 6.91,
+                    "MassiveScenarioClassification (tgl-Latn)": 38.16,
+                    "MassiveScenarioClassification (swa-Latn)": 35.66,
+                    "MassiveScenarioClassification (jpn-Jpan)": 10.6,
+                    "MassiveScenarioClassification (msa-Latn)": 38.97,
+                    "MassiveScenarioClassification (nob-Latn)": 39.05,
+                    "MassiveScenarioClassification (fin-Latn)": 35.19,
+                    "MassiveScenarioClassification (ind-Latn)": 39.54,
+                    "MassiveScenarioClassification (cym-Latn)": 39.85,
+                    "MassiveScenarioClassification (slv-Latn)": 35.98,
+                    "MassiveScenarioClassification (spa-Latn)": 37.13,
+                    "MassiveScenarioClassification (ben-Beng)": 8.85,
+                    "MassiveScenarioClassification (swe-Latn)": 36.12,
+                    "MassiveScenarioClassification (rus-Cyrl)": 62.9,
+                    "MassiveScenarioClassification (aze-Latn)": 30.32,
+                    "MassiveScenarioClassification (ita-Latn)": 42.69,
+                    "MassiveScenarioClassification (pol-Latn)": 31.62,
+                    "MassiveScenarioClassification (vie-Latn)": 31.89,
+                    "MassiveScenarioClassification (tam-Taml)": 7.01,
+                    "MassiveScenarioClassification (heb-Hebr)": 7.61,
+                    "MassiveScenarioClassification (nld-Latn)": 40.94,
+                    "MassiveScenarioClassification (khm-Khmr)": 8.51,
+                    "RuReviewsClassification (rus-Cyrl)": 60.66,
+                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 52.93,
+                    "RuSciBenchOECDClassification (rus-Cyrl)": 40.79,
+                    "ToxicConversationsClassification": 57.77,
+                    "TweetSentimentExtractionClassification": 55.3
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "rubert-tiny-turbo",
+                    "ArxivClusteringP2P": 24.83,
+                    "ArxivClusteringS2S": 16.68,
+                    "BiorxivClusteringP2P": 20.0,
+                    "BiorxivClusteringS2S": 12.67,
+                    "GeoreviewClusteringP2P (rus-Cyrl)": 59.71,
+                    "MLSUMClusteringP2P (rus-Cyrl)": 40.02,
+                    "MLSUMClusteringS2S (rus-Cyrl)": 41.36,
+                    "MedrxivClusteringP2P": 20.79,
+                    "MedrxivClusteringS2S": 18.18,
+                    "RedditClustering": 26.28,
+                    "RedditClusteringP2P": 40.48,
+                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 47.55,
+                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 41.44,
+                    "StackExchangeClustering": 33.51,
+                    "StackExchangeClusteringP2P": 27.98,
+                    "TwentyNewsgroupsClustering": 19.9
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "rubert-tiny-turbo",
+                    "OpusparcusPC (rus-Cyrl)": 87.58,
+                    "TERRa (rus-Cyrl)": 56.09
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "rubert-tiny-turbo",
+                    "RuBQReranking (rus-Cyrl)": 62.15
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "rubert-tiny-turbo",
+                    "AILACasedocs": 7.43,
+                    "AILAStatutes": 13.62,
+                    "ARCChallenge": 3.85,
+                    "AlphaNLI": 14.15,
+                    "ArguAna": 32.03,
+                    "ClimateFEVER": 5.56,
+                    "DBPedia": 9.61,
+                    "RiaNewsRetrieval (rus-Cyrl)": 51.27,
+                    "RuBQRetrieval (rus-Cyrl)": 51.73
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "rubert-tiny-turbo",
+                    "RUParaPhraserSTS (rus-Cyrl)": 72.15,
+                    "RuSTSBenchmarkSTS (rus-Cyrl)": 78.48,
+                    "STS22 (cmn-Hans)": 32.83,
+                    "STS22 (deu-Latn_fra-Latn)": 17.5,
+                    "STS22 (pol-Latn_eng-Latn)": 42.08,
+                    "STS22 (rus-Cyrl)": 60.06,
+                    "STS22 (fra-Latn)": 42.0,
+                    "STS22 (deu-Latn)": 8.16,
+                    "STS22 (tur-Latn)": 15.46,
+                    "STS22 (deu-Latn_eng-Latn)": 21.55,
+                    "STS22 (ita-Latn)": 39.69,
+                    "STS22 (pol-Latn)": 9.71,
+                    "STS22 (fra-Latn_pol-Latn)": 39.44,
+                    "STS22 (deu-Latn_pol-Latn)": 25.53,
+                    "STS22 (ara-Arab)": 27.95,
+                    "STS22 (spa-Latn_eng-Latn)": 42.77,
+                    "STS22 (spa-Latn_ita-Latn)": 32.83,
+                    "STS22 (spa-Latn)": 45.31,
+                    "STS22 (cmn-Hans_eng-Latn)": 31.25,
+                    "STS22 (en)": 47.06,
+                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 78.12
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "rubert-tiny-turbo"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "rubert-tiny-turbo",
+                    "CEDRClassification (rus-Cyrl)": 38.95,
+                    "SensitiveTopicsClassification (rus-Cyrl)": 24.44
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "rubert-tiny-turbo"
+                }
+            ]
+        }
+    },
+    "bge-large-zh-noinstruct": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "bge-large-zh-noinstruct"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "bge-large-zh-noinstruct",
+                    "AmazonReviewsClassification (zh)": 41.94,
+                    "IFlyTek": 45.32,
+                    "JDReview": 85.38,
+                    "MassiveIntentClassification (zh-CN)": 66.96,
+                    "MassiveScenarioClassification (zh-CN)": 73.39,
+                    "MultilingualSentiment": 73.7,
+                    "OnlineShopping": 91.66,
+                    "TNews": 52.05,
+                    "Waimai": 86.83
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "bge-large-zh-noinstruct",
+                    "CLSClusteringP2P": 41.23,
+                    "CLSClusteringS2S": 40.04,
+                    "ThuNewsClusteringP2P": 62.03,
+                    "ThuNewsClusteringS2S": 56.75
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "bge-large-zh-noinstruct",
+                    "Cmnli": 82.17,
+                    "Ocnli": 71.37
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "bge-large-zh-noinstruct",
+                    "CMedQAv1": 81.72,
+                    "CMedQAv2": 84.64,
+                    "MMarcoReranking": 27.1,
+                    "T2Reranking": 66.16
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "bge-large-zh-noinstruct",
+                    "CmedqaRetrieval": 41.03,
+                    "CovidRetrieval": 75.07,
+                    "DuRetrieval": 84.68,
+                    "EcomRetrieval": 65.6,
+                    "MMarcoRetrieval": 81.38,
+                    "MedicalRetrieval": 58.28,
+                    "T2Retrieval": 84.39,
+                    "VideoRetrieval": 73.93
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "bge-large-zh-noinstruct",
+                    "AFQMC": 43.06,
+                    "ATEC": 48.29,
+                    "BQ": 60.53,
+                    "LCQMC": 74.71,
+                    "PAWSX": 16.64,
+                    "QBQTC": 35.2,
+                    "STS22 (zh)": 67.19,
+                    "STSB": 78.41
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "bge-large-zh-noinstruct"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "bge-large-zh-noinstruct"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "bge-large-zh-noinstruct"
+                }
+            ]
+        }
+    },
+    "cross-en-de-roberta-sentence-transformer": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "cross-en-de-roberta-sentence-transformer"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "cross-en-de-roberta-sentence-transformer"
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "cross-en-de-roberta-sentence-transformer",
+                    "BlurbsClusteringP2P": 30.82,
+                    "BlurbsClusteringS2S": 12.69,
+                    "TenKGnadClusteringP2P": 23.5,
+                    "TenKGnadClusteringS2S": 10.94
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "cross-en-de-roberta-sentence-transformer"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "cross-en-de-roberta-sentence-transformer"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "cross-en-de-roberta-sentence-transformer"
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "cross-en-de-roberta-sentence-transformer"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "cross-en-de-roberta-sentence-transformer"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "cross-en-de-roberta-sentence-transformer"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "cross-en-de-roberta-sentence-transformer"
+                }
+            ]
+        }
+    },
+    "text-search-davinci-001": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "text-search-davinci-001"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "text-search-davinci-001"
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "text-search-davinci-001"
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "text-search-davinci-001"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "text-search-davinci-001"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "text-search-davinci-001",
+                    "ArguAna": 43.5,
+                    "ClimateFEVER": 22.3,
+                    "FEVER": 77.5,
+                    "FiQA2018": 51.2,
+                    "HotpotQA": 68.8,
+                    "NFCorpus": 40.7,
+                    "QuoraRetrieval": 63.8,
+                    "SciFact": 75.4,
+                    "TRECCOVID": 64.9,
+                    "Touche2020": 29.1
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "text-search-davinci-001"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "text-search-davinci-001"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "text-search-davinci-001"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "text-search-davinci-001"
+                }
+            ]
+        }
+    },
+    "gelectra-base": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "gelectra-base"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "gelectra-base"
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "gelectra-base",
+                    "BlurbsClusteringP2P": 10.06,
+                    "BlurbsClusteringS2S": 7.74,
+                    "TenKGnadClusteringP2P": 9.02,
+                    "TenKGnadClusteringS2S": 4.11
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "gelectra-base"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "gelectra-base"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "gelectra-base"
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "gelectra-base"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "gelectra-base"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "gelectra-base"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "gelectra-base"
+                }
+            ]
+        }
+    },
+    "distilrubert-small-cased-conversational": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "distilrubert-small-cased-conversational",
+                    "Tatoeba (rus-Cyrl_eng-Latn)": 24.16
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "distilrubert-small-cased-conversational",
+                    "GeoreviewClassification (rus-Cyrl)": 38.95,
+                    "HeadlineClassification (rus-Cyrl)": 75.59,
+                    "InappropriatenessClassification (rus-Cyrl)": 60.68,
+                    "KinopoiskClassification (rus-Cyrl)": 49.67,
+                    "MassiveIntentClassification (rus-Cyrl)": 63.12,
+                    "MassiveScenarioClassification (rus-Cyrl)": 68.08,
+                    "RuReviewsClassification (rus-Cyrl)": 54.05,
+                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 48.53,
+                    "RuSciBenchOECDClassification (rus-Cyrl)": 37.65
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "distilrubert-small-cased-conversational",
+                    "GeoreviewClusteringP2P (rus-Cyrl)": 43.26,
+                    "MLSUMClusteringP2P (rus-Cyrl)": 50.08,
+                    "MLSUMClusteringS2S (rus-Cyrl)": 51.12,
+                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 37.84,
+                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 34.12
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "distilrubert-small-cased-conversational",
+                    "OpusparcusPC (rus-Cyrl)": 84.35,
+                    "TERRa (rus-Cyrl)": 52.48
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "distilrubert-small-cased-conversational",
+                    "RuBQReranking (rus-Cyrl)": 42.58
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "distilrubert-small-cased-conversational",
+                    "RiaNewsRetrieval (rus-Cyrl)": 4.14,
+                    "RuBQRetrieval (rus-Cyrl)": 10.6
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "distilrubert-small-cased-conversational",
+                    "RUParaPhraserSTS (rus-Cyrl)": 55.01,
+                    "RuSTSBenchmarkSTS (rus-Cyrl)": 61.72,
+                    "STS22 (rus-Cyrl)": 51.87,
+                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 61.6
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "distilrubert-small-cased-conversational"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "distilrubert-small-cased-conversational",
+                    "CEDRClassification (rus-Cyrl)": 36.19,
+                    "SensitiveTopicsClassification (rus-Cyrl)": 22.45
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "distilrubert-small-cased-conversational"
+                }
+            ]
+        }
+    },
+    "SFR-Embedding-Mistral": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "SFR-Embedding-Mistral"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "SFR-Embedding-Mistral"
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "SFR-Embedding-Mistral"
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "SFR-Embedding-Mistral"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "SFR-Embedding-Mistral"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "SFR-Embedding-Mistral",
+                    "BrightRetrieval (sustainable_living)": 19.79,
+                    "BrightRetrieval (economics)": 17.84,
+                    "BrightRetrieval (theoremqa_theorems)": 24.05,
+                    "BrightRetrieval (aops)": 7.43,
+                    "BrightRetrieval (theoremqa_questions)": 23.05,
+                    "BrightRetrieval (psychology)": 18.97,
+                    "BrightRetrieval (stackoverflow)": 12.72,
+                    "BrightRetrieval (pony)": 1.97,
+                    "BrightRetrieval (leetcode)": 27.35,
+                    "BrightRetrieval (biology)": 19.49,
+                    "BrightRetrieval (earth_science)": 26.63,
+                    "BrightRetrieval (robotics)": 16.7
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "SFR-Embedding-Mistral"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "SFR-Embedding-Mistral"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "SFR-Embedding-Mistral"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "SFR-Embedding-Mistral"
+                }
+            ]
+        }
+    },
+    "sentence-croissant-llm-base": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "sentence-croissant-llm-base"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "sentence-croissant-llm-base",
+                    "AmazonReviewsClassification (fr)": 34.79,
+                    "MTOPDomainClassification (fr)": 85.52,
+                    "MTOPIntentClassification (fr)": 63.12,
+                    "MasakhaNEWSClassification (fra)": 79.29,
+                    "MassiveIntentClassification (fr)": 59.41,
+                    "MassiveScenarioClassification (fr)": 65.29
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "sentence-croissant-llm-base",
+                    "AlloProfClusteringP2P": 64.12,
+                    "AlloProfClusteringS2S": 32.52,
+                    "HALClusteringS2S": 23.4,
+                    "MLSUMClusteringP2P": 42.94,
+                    "MLSUMClusteringS2S": 33.91,
+                    "MasakhaNEWSClusteringP2P (fra)": 53.94,
+                    "MasakhaNEWSClusteringS2S (fra)": 41.05
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "sentence-croissant-llm-base",
+                    "OpusparcusPC (fr)": 91.42,
+                    "PawsXPairClassification (fr)": 63.13
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "sentence-croissant-llm-base",
+                    "AlloprofReranking": 53.0,
+                    "SyntecReranking": 82.9
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "sentence-croissant-llm-base",
+                    "AlloprofRetrieval": 29.97,
+                    "BSARDRetrieval": 0.0,
+                    "MintakaRetrieval (fr)": 21.31,
+                    "SyntecRetrieval": 74.2,
+                    "XPQARetrieval (fr)": 58.57
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "sentence-croissant-llm-base",
+                    "SICKFr": 69.6,
+                    "STS22 (fr)": 78.77,
+                    "STSBenchmarkMultilingualSTS (fr)": 79.23
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "sentence-croissant-llm-base",
+                    "SummEvalFr": 29.04
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "sentence-croissant-llm-base"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "sentence-croissant-llm-base"
+                }
+            ]
+        }
+    },
+    "monobert-large-msmarco": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "monobert-large-msmarco"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "monobert-large-msmarco"
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "monobert-large-msmarco"
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "monobert-large-msmarco"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "monobert-large-msmarco"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "monobert-large-msmarco"
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "monobert-large-msmarco"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "monobert-large-msmarco"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "monobert-large-msmarco"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "monobert-large-msmarco",
+                    "Core17InstructionRetrieval": -0.24,
+                    "News21InstructionRetrieval": -0.8,
+                    "Robust04InstructionRetrieval": -9.36
+                }
+            ]
+        }
+    },
+    "text-similarity-curie-001": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "text-similarity-curie-001"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "text-similarity-curie-001"
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "text-similarity-curie-001",
+                    "RedditClustering": 40.79,
+                    "StackExchangeClustering": 55.14,
+                    "TwentyNewsgroupsClustering": 37.64
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "text-similarity-curie-001",
+                    "SprintDuplicateQuestions": 79.85,
+                    "TwitterSemEval2015": 69.45,
+                    "TwitterURLCorpus": 84.06
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "text-similarity-curie-001",
+                    "AskUbuntuDupQuestions": 55.09,
+                    "SciDocsRR": 70.93,
+                    "StackOverflowDupQuestions": 42.42
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "text-similarity-curie-001",
+                    "FiQA2018": 5.14,
+                    "NFCorpus": 19.96,
+                    "QuoraRetrieval": 83.11,
+                    "SciFact": 46.68,
+                    "TRECCOVID": 7.61
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "text-similarity-curie-001",
+                    "BIOSSES": 77.46,
+                    "SICK-R": 77.26,
+                    "STSBenchmark": 83.02
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "text-similarity-curie-001"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "text-similarity-curie-001"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "text-similarity-curie-001"
+                }
+            ]
+        }
+    },
+    "llama-2-7b-chat": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "llama-2-7b-chat"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "llama-2-7b-chat"
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "llama-2-7b-chat"
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "llama-2-7b-chat"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "llama-2-7b-chat"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "llama-2-7b-chat"
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "llama-2-7b-chat"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "llama-2-7b-chat"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "llama-2-7b-chat"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "llama-2-7b-chat",
+                    "Core17InstructionRetrieval": 2.84,
+                    "News21InstructionRetrieval": 0.23,
+                    "Robust04InstructionRetrieval": 2.0
+                }
+            ]
+        }
+    },
+    "text-embedding-3-small": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "text-embedding-3-small"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "text-embedding-3-small",
+                    "AmazonCounterfactualClassification (en)": 76.42,
+                    "AmazonPolarityClassification": 90.84,
+                    "AmazonReviewsClassification (en)": 45.73,
+                    "Banking77Classification": 83.01,
+                    "EmotionClassification": 50.63,
+                    "ImdbClassification": 83.66,
+                    "MTOPDomainClassification (en)": 93.91,
+                    "MTOPIntentClassification (en)": 70.98,
+                    "MassiveIntentClassification (en)": 72.86,
+                    "MassiveScenarioClassification (en)": 76.84,
+                    "ToxicConversationsClassification": 71.91,
+                    "TweetSentimentExtractionClassification": 61.72
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "text-embedding-3-small",
+                    "ArxivClusteringP2P": 46.57,
+                    "ArxivClusteringS2S": 39.35,
+                    "BiorxivClusteringP2P": 37.77,
+                    "BiorxivClusteringS2S": 34.68,
+                    "MedrxivClusteringP2P": 32.77,
+                    "MedrxivClusteringS2S": 31.85,
+                    "RedditClustering": 64.09,
+                    "RedditClusteringP2P": 65.12,
+                    "StackExchangeClustering": 72.05,
+                    "StackExchangeClusteringP2P": 34.04,
+                    "TwentyNewsgroupsClustering": 54.81
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "text-embedding-3-small",
+                    "OpusparcusPC (fr)": 94.45,
+                    "SprintDuplicateQuestions": 94.58,
+                    "TwitterSemEval2015": 73.33,
+                    "TwitterURLCorpus": 87.21
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "text-embedding-3-small",
+                    "AskUbuntuDupQuestions": 62.18,
+                    "MindSmallReranking": 29.93,
+                    "SciDocsRR": 83.25,
+                    "StackOverflowDupQuestions": 51.53
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "text-embedding-3-small",
+                    "ARCChallenge": 14.63,
+                    "AlphaNLI": 30.61,
+                    "ArguAna": 55.49,
+                    "CQADupstackRetrieval": 42.58,
+                    "ClimateFEVER": 26.86,
+                    "DBPedia": 39.97,
+                    "FEVER": 79.42,
+                    "FiQA2018": 44.91,
+                    "HellaSwag": 30.94,
+                    "HotpotQA": 63.63,
+                    "MSMARCO": 37.02,
+                    "NFCorpus": 38.33,
+                    "NQ": 52.86,
+                    "PIQA": 33.69,
+                    "Quail": 6.11,
+                    "QuoraRetrieval": 88.83,
+                    "RARbCode": 72.03,
+                    "RARbMath": 71.07,
+                    "SCIDOCS": 20.8,
+                    "SIQA": 3.03,
+                    "SciFact": 73.37,
+                    "SpartQA": 6.63,
+                    "TRECCOVID": 77.9,
+                    "TempReasonL1": 2.35,
+                    "TempReasonL2Fact": 25.68,
+                    "TempReasonL2Pure": 2.76,
+                    "TempReasonL3Fact": 22.09,
+                    "TempReasonL3Pure": 9.79,
+                    "Touche2020": 24.28,
+                    "WinoGrande": 31.53
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "text-embedding-3-small",
+                    "BIOSSES": 88.72,
+                    "SICK-R": 76.73,
+                    "STS12": 73.09,
+                    "STS13": 84.92,
+                    "STS14": 79.81,
+                    "STS15": 88.01,
+                    "STS16": 84.41,
+                    "STS17 (en-en)": 90.94,
+                    "STS22 (en)": 64.96,
+                    "STSBenchmark": 84.24
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "text-embedding-3-small",
+                    "SummEval": 31.12
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "text-embedding-3-small"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "text-embedding-3-small"
+                }
+            ]
+        }
+    },
+    "bge-base-en-v1.5-instruct": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "bge-base-en-v1.5-instruct"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "bge-base-en-v1.5-instruct"
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "bge-base-en-v1.5-instruct"
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "bge-base-en-v1.5-instruct"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "bge-base-en-v1.5-instruct"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "bge-base-en-v1.5-instruct",
+                    "ARCChallenge": 8.85,
+                    "AlphaNLI": 4.13,
+                    "HellaSwag": 24.03,
+                    "PIQA": 23.03,
+                    "Quail": 1.25,
+                    "RARbCode": 46.32,
+                    "RARbMath": 45.62,
+                    "SIQA": 0.24,
+                    "SpartQA": 2.67,
+                    "TempReasonL1": 0.8,
+                    "TempReasonL2Fact": 16.56,
+                    "TempReasonL2Pure": 1.33,
+                    "TempReasonL3Fact": 12.68,
+                    "TempReasonL3Pure": 5.08,
+                    "WinoGrande": 10.27
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "bge-base-en-v1.5-instruct"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "bge-base-en-v1.5-instruct"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "bge-base-en-v1.5-instruct"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "bge-base-en-v1.5-instruct"
+                }
+            ]
+        }
+    },
+    "text-embedding-3-small-instruct": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "text-embedding-3-small-instruct"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "text-embedding-3-small-instruct"
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "text-embedding-3-small-instruct"
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "text-embedding-3-small-instruct"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "text-embedding-3-small-instruct"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "text-embedding-3-small-instruct",
+                    "ARCChallenge": 13.76,
+                    "AlphaNLI": 21.14,
+                    "HellaSwag": 27.2,
+                    "PIQA": 29.59,
+                    "Quail": 6.64,
+                    "RARbCode": 72.14,
+                    "RARbMath": 64.31,
+                    "SIQA": 2.98,
+                    "SpartQA": 3.58,
+                    "TempReasonL1": 2.29,
+                    "TempReasonL2Fact": 26.34,
+                    "TempReasonL2Pure": 3.17,
+                    "TempReasonL3Fact": 22.72,
+                    "TempReasonL3Pure": 9.98,
+                    "WinoGrande": 25.49
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "text-embedding-3-small-instruct"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "text-embedding-3-small-instruct"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "text-embedding-3-small-instruct"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "text-embedding-3-small-instruct"
+                }
+            ]
+        }
+    },
+    "text-similarity-ada-001": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "text-similarity-ada-001"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "text-similarity-ada-001",
+                    "AmazonCounterfactualClassification (en)": 76.4,
+                    "AmazonPolarityClassification": 92.83,
+                    "AmazonReviewsClassification (en)": 47.45,
+                    "Banking77Classification": 68.04,
+                    "EmotionClassification": 50.33,
+                    "ImdbClassification": 89.38,
+                    "MTOPDomainClassification (en)": 89.89,
+                    "MTOPIntentClassification (en)": 64.8,
+                    "MassiveIntentClassification (en)": 65.17,
+                    "MassiveScenarioClassification (en)": 67.67,
+                    "ToxicConversationsClassification": 70.0,
+                    "TweetSentimentExtractionClassification": 63.35
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "text-similarity-ada-001",
+                    "ArxivClusteringP2P": 41.49,
+                    "ArxivClusteringS2S": 28.47,
+                    "BiorxivClusteringP2P": 36.86,
+                    "BiorxivClusteringS2S": 27.55,
+                    "MedrxivClusteringP2P": 31.09,
+                    "MedrxivClusteringS2S": 26.5,
+                    "RedditClustering": 42.47,
+                    "RedditClusteringP2P": 58.1,
+                    "StackExchangeClustering": 53.52,
+                    "StackExchangeClusteringP2P": 30.43,
+                    "TwentyNewsgroupsClustering": 36.26
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "text-similarity-ada-001",
+                    "SprintDuplicateQuestions": 77.85,
+                    "TwitterSemEval2015": 69.04,
+                    "TwitterURLCorpus": 83.69
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "text-similarity-ada-001",
+                    "AskUbuntuDupQuestions": 53.49,
+                    "MindSmallReranking": 30.71,
+                    "SciDocsRR": 71.04,
+                    "StackOverflowDupQuestions": 40.85
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "text-similarity-ada-001",
+                    "ArguAna": 39.65,
+                    "CQADupstackRetrieval": 10.17,
+                    "ClimateFEVER": 2.83,
+                    "DBPedia": 3.48,
+                    "FEVER": 4.45,
+                    "FiQA2018": 7.54,
+                    "HotpotQA": 12.6,
+                    "MSMARCO": 10.53,
+                    "NFCorpus": 20.59,
+                    "NQ": 2.02,
+                    "QuoraRetrieval": 82.18,
+                    "SCIDOCS": 6.28,
+                    "SciFact": 45.46,
+                    "TRECCOVID": 24.56,
+                    "Touche2020": 3.1
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "text-similarity-ada-001",
+                    "BIOSSES": 78.04,
+                    "SICK-R": 77.48,
+                    "STS12": 72.3,
+                    "STS13": 81.49,
+                    "STS14": 74.74,
+                    "STS15": 84.28,
+                    "STS16": 82.06,
+                    "STS17 (en-en)": 87.08,
+                    "STS22 (en)": 64.71,
+                    "STSBenchmark": 83.78
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "text-similarity-ada-001",
+                    "SummEval": 26.94
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "text-similarity-ada-001"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "text-similarity-ada-001"
+                }
+            ]
+        }
+    },
+    "paraphrase-multilingual-mpnet-base-v2": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "paraphrase-multilingual-mpnet-base-v2",
+                    "BUCC (de-en)": 98.59,
+                    "BUCC (fr-en)": 96.89,
+                    "BUCC (ru-en)": 96.44,
+                    "BUCC (zh-en)": 97.56,
+                    "BornholmBitextMining (dan-Latn)": 18.18,
+                    "Tatoeba (afr-eng)": 72.96,
+                    "Tatoeba (amh-eng)": 53.49,
+                    "Tatoeba (ang-eng)": 16.72,
+                    "Tatoeba (ara-eng)": 90.19,
+                    "Tatoeba (arq-eng)": 19.84,
+                    "Tatoeba (arz-eng)": 55.69,
+                    "Tatoeba (ast-eng)": 70.08,
+                    "Tatoeba (awa-eng)": 42.83,
+                    "Tatoeba (aze-eng)": 76.36,
+                    "Tatoeba (bel-eng)": 79.94,
+                    "Tatoeba (ben-eng)": 64.9,
+                    "Tatoeba (ber-eng)": 4.88,
+                    "Tatoeba (bos-eng)": 94.02,
+                    "Tatoeba (bre-eng)": 6.42,
+                    "Tatoeba (bul-eng)": 93.52,
+                    "Tatoeba (cat-eng)": 96.05,
+                    "Tatoeba (cbk-eng)": 58.68,
+                    "Tatoeba (ceb-eng)": 7.39,
+                    "Tatoeba (ces-eng)": 95.73,
+                    "Tatoeba (cha-eng)": 12.59,
+                    "Tatoeba (cmn-eng)": 95.83,
+                    "Tatoeba (cor-eng)": 3.53,
+                    "Tatoeba (csb-eng)": 23.73,
+                    "Tatoeba (cym-eng)": 22.31,
+                    "Tatoeba (dan-eng)": 96.17,
+                    "Tatoeba (deu-eng)": 97.73,
+                    "Tatoeba (dsb-eng)": 36.85,
+                    "Tatoeba (dtp-eng)": 5.03,
+                    "Tatoeba (ell-eng)": 94.93,
+                    "Tatoeba (epo-eng)": 55.12,
+                    "Tatoeba (est-eng)": 98.4,
+                    "Tatoeba (eus-eng)": 31.33,
+                    "Tatoeba (fao-eng)": 38.24,
+                    "Tatoeba (fin-eng)": 95.92,
+                    "Tatoeba (fra-eng)": 93.12,
+                    "Tatoeba (fry-eng)": 43.54,
+                    "Tatoeba (gla-eng)": 4.72,
+                    "Tatoeba (gle-eng)": 16.85,
+                    "Tatoeba (glg-eng)": 95.32,
+                    "Tatoeba (gsw-eng)": 25.12,
+                    "Tatoeba (heb-eng)": 88.26,
+                    "Tatoeba (hin-eng)": 97.75,
+                    "Tatoeba (hrv-eng)": 97.0,
+                    "Tatoeba (hsb-eng)": 44.32,
+                    "Tatoeba (hun-eng)": 94.18,
+                    "Tatoeba (hye-eng)": 94.38,
+                    "Tatoeba (ido-eng)": 43.91,
+                    "Tatoeba (ile-eng)": 60.36,
+                    "Tatoeba (ina-eng)": 84.32,
+                    "Tatoeba (ind-eng)": 93.5,
+                    "Tatoeba (isl-eng)": 59.25,
+                    "Tatoeba (ita-eng)": 93.76,
+                    "Tatoeba (jav-eng)": 23.39,
+                    "Tatoeba (jpn-eng)": 92.51,
+                    "Tatoeba (kab-eng)": 1.41,
+                    "Tatoeba (kat-eng)": 95.46,
+                    "Tatoeba (kaz-eng)": 61.49,
+                    "Tatoeba (khm-eng)": 58.8,
+                    "Tatoeba (kor-eng)": 93.07,
+                    "Tatoeba (kur-eng)": 61.44,
+                    "Tatoeba (kzj-eng)": 5.88,
+                    "Tatoeba (lat-eng)": 24.25,
+                    "Tatoeba (lfn-eng)": 49.56,
+                    "Tatoeba (lit-eng)": 95.37,
+                    "Tatoeba (lvs-eng)": 97.53,
+                    "Tatoeba (mal-eng)": 88.46,
+                    "Tatoeba (mar-eng)": 93.83,
+                    "Tatoeba (max-eng)": 48.77,
+                    "Tatoeba (mhr-eng)": 7.57,
+                    "Tatoeba (mkd-eng)": 93.02,
+                    "Tatoeba (mon-eng)": 96.14,
+                    "Tatoeba (nds-eng)": 38.88,
+                    "Tatoeba (nld-eng)": 95.5,
+                    "Tatoeba (nno-eng)": 81.41,
+                    "Tatoeba (nob-eng)": 98.53,
+                    "Tatoeba (nov-eng)": 50.23,
+                    "Tatoeba (oci-eng)": 43.49,
+                    "Tatoeba (orv-eng)": 23.77,
+                    "Tatoeba (pam-eng)": 5.39,
+                    "Tatoeba (pes-eng)": 93.47,
+                    "Tatoeba (pms-eng)": 34.19,
+                    "Tatoeba (pol-eng)": 96.95,
+                    "Tatoeba (por-eng)": 93.02,
+                    "Tatoeba (ron-eng)": 96.43,
+                    "Tatoeba (rus-eng)": 92.92,
+                    "Tatoeba (slk-eng)": 96.62,
+                    "Tatoeba (slv-eng)": 97.08,
+                    "Tatoeba (spa-eng)": 97.0,
+                    "Tatoeba (sqi-eng)": 98.57,
+                    "Tatoeba (srp-eng)": 94.12,
+                    "Tatoeba (swe-eng)": 95.45,
+                    "Tatoeba (swg-eng)": 22.8,
+                    "Tatoeba (swh-eng)": 16.02,
+                    "Tatoeba (tam-eng)": 73.6,
+                    "Tatoeba (tat-eng)": 10.89,
+                    "Tatoeba (tel-eng)": 79.73,
+                    "Tatoeba (tgl-eng)": 17.67,
+                    "Tatoeba (tha-eng)": 95.99,
+                    "Tatoeba (tuk-eng)": 14.91,
+                    "Tatoeba (tur-eng)": 96.17,
+                    "Tatoeba (tzl-eng)": 34.21,
+                    "Tatoeba (uig-eng)": 48.35,
+                    "Tatoeba (ukr-eng)": 92.67,
+                    "Tatoeba (urd-eng)": 95.12,
+                    "Tatoeba (uzb-eng)": 23.19,
+                    "Tatoeba (vie-eng)": 97.23,
+                    "Tatoeba (war-eng)": 7.42,
+                    "Tatoeba (wuu-eng)": 78.25,
+                    "Tatoeba (xho-eng)": 6.53,
+                    "Tatoeba (yid-eng)": 30.73,
+                    "Tatoeba (yue-eng)": 77.58,
+                    "Tatoeba (zsm-eng)": 95.8,
+                    "Tatoeba (gsw-Latn_eng-Latn)": 25.12,
+                    "Tatoeba (spa-Latn_eng-Latn)": 97.0,
+                    "Tatoeba (lat-Latn_eng-Latn)": 24.25,
+                    "Tatoeba (hun-Latn_eng-Latn)": 94.18,
+                    "Tatoeba (eus-Latn_eng-Latn)": 31.33,
+                    "Tatoeba (heb-Hebr_eng-Latn)": 88.26,
+                    "Tatoeba (ang-Latn_eng-Latn)": 16.72,
+                    "Tatoeba (swe-Latn_eng-Latn)": 95.45,
+                    "Tatoeba (slk-Latn_eng-Latn)": 96.62,
+                    "Tatoeba (ell-Grek_eng-Latn)": 94.93,
+                    "Tatoeba (nld-Latn_eng-Latn)": 95.5,
+                    "Tatoeba (cym-Latn_eng-Latn)": 22.31,
+                    "Tatoeba (sqi-Latn_eng-Latn)": 98.57,
+                    "Tatoeba (csb-Latn_eng-Latn)": 23.73,
+                    "Tatoeba (ben-Beng_eng-Latn)": 64.9,
+                    "Tatoeba (bre-Latn_eng-Latn)": 6.42,
+                    "Tatoeba (mkd-Cyrl_eng-Latn)": 93.02,
+                    "Tatoeba (cmn-Hans_eng-Latn)": 95.83,
+                    "Tatoeba (deu-Latn_eng-Latn)": 97.73,
+                    "Tatoeba (fao-Latn_eng-Latn)": 38.24,
+                    "Tatoeba (afr-Latn_eng-Latn)": 72.96,
+                    "Tatoeba (nno-Latn_eng-Latn)": 81.41,
+                    "Tatoeba (jpn-Jpan_eng-Latn)": 92.51,
+                    "Tatoeba (tzl-Latn_eng-Latn)": 34.21,
+                    "Tatoeba (arz-Arab_eng-Latn)": 55.69,
+                    "Tatoeba (ita-Latn_eng-Latn)": 93.76,
+                    "Tatoeba (arq-Arab_eng-Latn)": 19.84,
+                    "Tatoeba (uzb-Latn_eng-Latn)": 23.19,
+                    "Tatoeba (rus-Cyrl_eng-Latn)": 92.92,
+                    "Tatoeba (tat-Cyrl_eng-Latn)": 10.89,
+                    "Tatoeba (fin-Latn_eng-Latn)": 95.92,
+                    "Tatoeba (nob-Latn_eng-Latn)": 98.53,
+                    "Tatoeba (tam-Taml_eng-Latn)": 73.6,
+                    "Tatoeba (kur-Latn_eng-Latn)": 61.44,
+                    "Tatoeba (wuu-Hans_eng-Latn)": 78.25,
+                    "Tatoeba (cor-Latn_eng-Latn)": 3.53,
+                    "Tatoeba (cha-Latn_eng-Latn)": 12.59,
+                    "Tatoeba (hsb-Latn_eng-Latn)": 44.32,
+                    "Tatoeba (max-Deva_eng-Latn)": 48.77,
+                    "Tatoeba (kat-Geor_eng-Latn)": 95.46,
+                    "Tatoeba (mal-Mlym_eng-Latn)": 88.46,
+                    "Tatoeba (ina-Latn_eng-Latn)": 84.32,
+                    "Tatoeba (cbk-Latn_eng-Latn)": 58.68,
+                    "Tatoeba (yid-Hebr_eng-Latn)": 30.73,
+                    "Tatoeba (swg-Latn_eng-Latn)": 22.8,
+                    "Tatoeba (dtp-Latn_eng-Latn)": 5.03,
+                    "Tatoeba (ber-Tfng_eng-Latn)": 4.88,
+                    "Tatoeba (epo-Latn_eng-Latn)": 55.12,
+                    "Tatoeba (mar-Deva_eng-Latn)": 93.83,
+                    "Tatoeba (kaz-Cyrl_eng-Latn)": 61.49,
+                    "Tatoeba (tgl-Latn_eng-Latn)": 17.67,
+                    "Tatoeba (hrv-Latn_eng-Latn)": 97.0,
+                    "Tatoeba (bel-Cyrl_eng-Latn)": 79.94,
+                    "Tatoeba (pam-Latn_eng-Latn)": 5.39,
+                    "Tatoeba (zsm-Latn_eng-Latn)": 95.8,
+                    "Tatoeba (ces-Latn_eng-Latn)": 95.73,
+                    "Tatoeba (gla-Latn_eng-Latn)": 4.72,
+                    "Tatoeba (hin-Deva_eng-Latn)": 97.75,
+                    "Tatoeba (slv-Latn_eng-Latn)": 97.08,
+                    "Tatoeba (cat-Latn_eng-Latn)": 96.05,
+                    "Tatoeba (war-Latn_eng-Latn)": 7.42,
+                    "Tatoeba (hye-Armn_eng-Latn)": 94.38,
+                    "Tatoeba (ind-Latn_eng-Latn)": 93.5,
+                    "Tatoeba (kor-Hang_eng-Latn)": 93.07,
+                    "Tatoeba (por-Latn_eng-Latn)": 93.02,
+                    "Tatoeba (fry-Latn_eng-Latn)": 43.54,
+                    "Tatoeba (dan-Latn_eng-Latn)": 96.17,
+                    "Tatoeba (nov-Latn_eng-Latn)": 50.23,
+                    "Tatoeba (vie-Latn_eng-Latn)": 97.23,
+                    "Tatoeba (kzj-Latn_eng-Latn)": 5.88,
+                    "Tatoeba (ido-Latn_eng-Latn)": 43.91,
+                    "Tatoeba (tuk-Latn_eng-Latn)": 14.91,
+                    "Tatoeba (glg-Latn_eng-Latn)": 95.32,
+                    "Tatoeba (bos-Latn_eng-Latn)": 94.02,
+                    "Tatoeba (gle-Latn_eng-Latn)": 16.85,
+                    "Tatoeba (fra-Latn_eng-Latn)": 93.12,
+                    "Tatoeba (lvs-Latn_eng-Latn)": 97.53,
+                    "Tatoeba (mon-Cyrl_eng-Latn)": 96.14,
+                    "Tatoeba (lit-Latn_eng-Latn)": 95.37,
+                    "Tatoeba (ron-Latn_eng-Latn)": 96.43,
+                    "Tatoeba (pms-Latn_eng-Latn)": 34.19,
+                    "Tatoeba (lfn-Latn_eng-Latn)": 49.56,
+                    "Tatoeba (isl-Latn_eng-Latn)": 59.25,
+                    "Tatoeba (xho-Latn_eng-Latn)": 6.53,
+                    "Tatoeba (orv-Cyrl_eng-Latn)": 23.77,
+                    "Tatoeba (ukr-Cyrl_eng-Latn)": 92.67,
+                    "Tatoeba (dsb-Latn_eng-Latn)": 36.85,
+                    "Tatoeba (nds-Latn_eng-Latn)": 38.88,
+                    "Tatoeba (amh-Ethi_eng-Latn)": 53.49,
+                    "Tatoeba (yue-Hant_eng-Latn)": 77.58,
+                    "Tatoeba (urd-Arab_eng-Latn)": 95.12,
+                    "Tatoeba (tel-Telu_eng-Latn)": 79.73,
+                    "Tatoeba (ile-Latn_eng-Latn)": 60.36,
+                    "Tatoeba (jav-Latn_eng-Latn)": 23.39,
+                    "Tatoeba (ast-Latn_eng-Latn)": 70.08,
+                    "Tatoeba (tha-Thai_eng-Latn)": 95.99,
+                    "Tatoeba (ara-Arab_eng-Latn)": 90.19,
+                    "Tatoeba (pes-Arab_eng-Latn)": 93.47,
+                    "Tatoeba (awa-Deva_eng-Latn)": 42.83,
+                    "Tatoeba (tur-Latn_eng-Latn)": 96.17,
+                    "Tatoeba (ceb-Latn_eng-Latn)": 7.39,
+                    "Tatoeba (swh-Latn_eng-Latn)": 16.02,
+                    "Tatoeba (srp-Cyrl_eng-Latn)": 94.12,
+                    "Tatoeba (est-Latn_eng-Latn)": 98.4,
+                    "Tatoeba (aze-Latn_eng-Latn)": 76.36,
+                    "Tatoeba (bul-Cyrl_eng-Latn)": 93.52,
+                    "Tatoeba (oci-Latn_eng-Latn)": 43.49,
+                    "Tatoeba (pol-Latn_eng-Latn)": 96.95,
+                    "Tatoeba (kab-Latn_eng-Latn)": 1.41,
+                    "Tatoeba (khm-Khmr_eng-Latn)": 58.8,
+                    "Tatoeba (uig-Arab_eng-Latn)": 48.35,
+                    "Tatoeba (mhr-Cyrl_eng-Latn)": 7.57
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "paraphrase-multilingual-mpnet-base-v2",
+                    "AllegroReviews": 33.86,
+                    "AllegroReviews (pol-Latn)": 33.89,
+                    "AmazonCounterfactualClassification (de)": 69.95,
+                    "AmazonCounterfactualClassification (en)": 75.81,
+                    "AmazonCounterfactualClassification (en-ext)": 76.25,
+                    "AmazonCounterfactualClassification (ja)": 69.79,
+                    "AmazonCounterfactualClassification (deu-Latn)": 69.96,
+                    "AmazonCounterfactualClassification (jpn-Jpan)": 69.78,
+                    "AmazonPolarityClassification": 76.41,
+                    "AmazonReviewsClassification (de)": 39.52,
+                    "AmazonReviewsClassification (en)": 38.52,
+                    "AmazonReviewsClassification (es)": 39.99,
+                    "AmazonReviewsClassification (fr)": 39.0,
+                    "AmazonReviewsClassification (ja)": 36.64,
+                    "AmazonReviewsClassification (zh)": 37.74,
+                    "AmazonReviewsClassification (deu-Latn)": 39.53,
+                    "AmazonReviewsClassification (spa-Latn)": 39.97,
+                    "AmazonReviewsClassification (fra-Latn)": 38.98,
+                    "AmazonReviewsClassification (jpn-Jpan)": 36.65,
+                    "AmazonReviewsClassification (cmn-Hans)": 37.74,
+                    "AngryTweetsClassification (dan-Latn)": 54.84,
+                    "Banking77Classification": 81.1,
+                    "CBD": 65.0,
+                    "CBD (pol-Latn)": 64.97,
+                    "DanishPoliticalCommentsClassification (dan-Latn)": 40.96,
+                    "EmotionClassification": 45.85,
+                    "GeoreviewClassification (rus-Cyrl)": 42.33,
+                    "HeadlineClassification (rus-Cyrl)": 70.35,
+                    "IFlyTek (cmn-Hans)": 43.98,
+                    "ImdbClassification": 64.58,
+                    "InappropriatenessClassification (rus-Cyrl)": 59.32,
+                    "JDReview (cmn-Hans)": 70.34,
+                    "KinopoiskClassification (rus-Cyrl)": 44.31,
+                    "LccSentimentClassification (dan-Latn)": 58.4,
+                    "MTOPDomainClassification (de)": 85.73,
+                    "MTOPDomainClassification (en)": 89.24,
+                    "MTOPDomainClassification (es)": 86.96,
+                    "MTOPDomainClassification (fr)": 81.21,
+                    "MTOPDomainClassification (hi)": 84.76,
+                    "MTOPDomainClassification (th)": 82.51,
+                    "MTOPDomainClassification (deu-Latn)": 85.73,
+                    "MTOPDomainClassification (spa-Latn)": 86.98,
+                    "MTOPDomainClassification (fra-Latn)": 81.21,
+                    "MTOPDomainClassification (hin-Deva)": 84.76,
+                    "MTOPDomainClassification (tha-Thai)": 82.51,
+                    "MTOPIntentClassification (de)": 61.27,
+                    "MTOPIntentClassification (en)": 68.69,
+                    "MTOPIntentClassification (es)": 66.59,
+                    "MTOPIntentClassification (fr)": 59.76,
+                    "MTOPIntentClassification (hi)": 62.37,
+                    "MTOPIntentClassification (th)": 64.8,
+                    "MTOPIntentClassification (deu-Latn)": 61.26,
+                    "MTOPIntentClassification (spa-Latn)": 66.6,
+                    "MTOPIntentClassification (fra-Latn)": 59.75,
+                    "MTOPIntentClassification (hin-Deva)": 62.38,
+                    "MTOPIntentClassification (tha-Thai)": 64.77,
+                    "MasakhaNEWSClassification (fra)": 78.1,
+                    "MasakhaNEWSClassification (amh-Ethi)": 78.83,
+                    "MasakhaNEWSClassification (eng)": 75.39,
+                    "MasakhaNEWSClassification (fra-Latn)": 72.94,
+                    "MasakhaNEWSClassification (hau-Latn)": 54.49,
+                    "MasakhaNEWSClassification (ibo-Latn)": 46.79,
+                    "MasakhaNEWSClassification (lin-Latn)": 69.77,
+                    "MasakhaNEWSClassification (lug-Latn)": 43.05,
+                    "MasakhaNEWSClassification (orm-Ethi)": 41.97,
+                    "MasakhaNEWSClassification (pcm-Latn)": 90.2,
+                    "MasakhaNEWSClassification (run-Latn)": 49.97,
+                    "MasakhaNEWSClassification (sna-Latn)": 59.78,
+                    "MasakhaNEWSClassification (som-Latn)": 47.65,
+                    "MasakhaNEWSClassification (swa-Latn)": 60.42,
+                    "MasakhaNEWSClassification (tir-Ethi)": 45.04,
+                    "MasakhaNEWSClassification (xho-Latn)": 48.82,
+                    "MasakhaNEWSClassification (yor-Latn)": 58.3,
+                    "MassiveIntentClassification (pl)": 64.29,
+                    "MassiveIntentClassification (fr)": 61.88,
+                    "MassiveIntentClassification (mal-Mlym)": 54.34,
+                    "MassiveIntentClassification (tel-Telu)": 52.85,
+                    "MassiveIntentClassification (jpn-Jpan)": 63.76,
+                    "MassiveIntentClassification (nld-Latn)": 63.57,
+                    "MassiveIntentClassification (jav-Latn)": 36.49,
+                    "MassiveIntentClassification (heb-Hebr)": 58.25,
+                    "MassiveIntentClassification (tam-Taml)": 50.18,
+                    "MassiveIntentClassification (slv-Latn)": 63.5,
+                    "MassiveIntentClassification (tha-Thai)": 61.12,
+                    "MassiveIntentClassification (fra-Latn)": 64.8,
+                    "MassiveIntentClassification (ind-Latn)": 65.43,
+                    "MassiveIntentClassification (amh-Ethi)": 41.56,
+                    "MassiveIntentClassification (en)": 69.32,
+                    "MassiveIntentClassification (nob-Latn)": 62.62,
+                    "MassiveIntentClassification (kan-Knda)": 50.62,
+                    "MassiveIntentClassification (dan-Latn)": 62.8,
+                    "MassiveIntentClassification (ell-Grek)": 62.63,
+                    "MassiveIntentClassification (msa-Latn)": 60.72,
+                    "MassiveIntentClassification (ita-Latn)": 64.69,
+                    "MassiveIntentClassification (tur-Latn)": 64.58,
+                    "MassiveIntentClassification (ben-Beng)": 48.79,
+                    "MassiveIntentClassification (aze-Latn)": 56.98,
+                    "MassiveIntentClassification (tgl-Latn)": 38.83,
+                    "MassiveIntentClassification (mon-Cyrl)": 56.61,
+                    "MassiveIntentClassification (urd-Arab)": 56.36,
+                    "MassiveIntentClassification (vie-Latn)": 59.71,
+                    "MassiveIntentClassification (cmo-Hans)": 65.32,
+                    "MassiveIntentClassification (cym-Latn)": 27.89,
+                    "MassiveIntentClassification (rus-Cyrl)": 63.23,
+                    "MassiveIntentClassification (mya-Mymr)": 57.08,
+                    "MassiveIntentClassification (hun-Latn)": 63.85,
+                    "MassiveIntentClassification (hin-Deva)": 62.79,
+                    "MassiveIntentClassification (hye-Armn)": 57.76,
+                    "MassiveIntentClassification (kat-Geor)": 49.88,
+                    "MassiveIntentClassification (fin-Latn)": 62.26,
+                    "MassiveIntentClassification (ara-Arab)": 51.43,
+                    "MassiveIntentClassification (por-Latn)": 64.88,
+                    "MassiveIntentClassification (pol-Latn)": 64.32,
+                    "MassiveIntentClassification (isl-Latn)": 37.09,
+                    "MassiveIntentClassification (afr-Latn)": 52.35,
+                    "MassiveIntentClassification (fas-Arab)": 65.33,
+                    "MassiveIntentClassification (khm-Khmr)": 45.48,
+                    "MassiveIntentClassification (kor-Kore)": 61.84,
+                    "MassiveIntentClassification (spa-Latn)": 64.45,
+                    "MassiveIntentClassification (cmo-Hant)": 62.33,
+                    "MassiveIntentClassification (ron-Latn)": 62.83,
+                    "MassiveIntentClassification (sqi-Latn)": 62.48,
+                    "MassiveIntentClassification (swa-Latn)": 31.93,
+                    "MassiveIntentClassification (swe-Latn)": 64.71,
+                    "MassiveIntentClassification (deu-Latn)": 59.56,
+                    "MassiveIntentClassification (lav-Latn)": 61.29,
+                    "MassiveScenarioClassification (pl)": 68.98,
+                    "MassiveScenarioClassification (fr)": 67.9,
+                    "MassiveScenarioClassification (tam-Taml)": 55.97,
+                    "MassiveScenarioClassification (heb-Hebr)": 65.16,
+                    "MassiveScenarioClassification (ind-Latn)": 70.73,
+                    "MassiveScenarioClassification (afr-Latn)": 59.68,
+                    "MassiveScenarioClassification (fin-Latn)": 67.58,
+                    "MassiveScenarioClassification (vie-Latn)": 65.7,
+                    "MassiveScenarioClassification (mon-Cyrl)": 60.84,
+                    "MassiveScenarioClassification (sqi-Latn)": 69.62,
+                    "MassiveScenarioClassification (nob-Latn)": 70.23,
+                    "MassiveScenarioClassification (por-Latn)": 70.08,
+                    "MassiveScenarioClassification (aze-Latn)": 61.52,
+                    "MassiveScenarioClassification (nld-Latn)": 70.37,
+                    "MassiveScenarioClassification (spa-Latn)": 70.4,
+                    "MassiveScenarioClassification (mal-Mlym)": 60.14,
+                    "MassiveScenarioClassification (cmo-Hant)": 68.71,
+                    "MassiveScenarioClassification (fra-Latn)": 70.71,
+                    "MassiveScenarioClassification (ita-Latn)": 69.74,
+                    "MassiveScenarioClassification (hun-Latn)": 70.31,
+                    "MassiveScenarioClassification (urd-Arab)": 62.92,
+                    "MassiveScenarioClassification (cym-Latn)": 35.27,
+                    "MassiveScenarioClassification (khm-Khmr)": 53.13,
+                    "MassiveScenarioClassification (swa-Latn)": 37.26,
+                    "MassiveScenarioClassification (mya-Mymr)": 63.03,
+                    "MassiveScenarioClassification (isl-Latn)": 44.16,
+                    "MassiveScenarioClassification (tha-Thai)": 69.44,
+                    "MassiveScenarioClassification (kat-Geor)": 57.3,
+                    "MassiveScenarioClassification (pol-Latn)": 68.99,
+                    "MassiveScenarioClassification (ell-Grek)": 68.81,
+                    "MassiveScenarioClassification (cmo-Hans)": 71.25,
+                    "MassiveScenarioClassification (tgl-Latn)": 43.98,
+                    "MassiveScenarioClassification (lav-Latn)": 66.28,
+                    "MassiveScenarioClassification (jpn-Jpan)": 69.68,
+                    "MassiveScenarioClassification (deu-Latn)": 67.35,
+                    "MassiveScenarioClassification (ara-Arab)": 57.79,
+                    "MassiveScenarioClassification (en)": 75.35,
+                    "MassiveScenarioClassification (msa-Latn)": 65.85,
+                    "MassiveScenarioClassification (tel-Telu)": 58.79,
+                    "MassiveScenarioClassification (ben-Beng)": 54.52,
+                    "MassiveScenarioClassification (kan-Knda)": 56.08,
+                    "MassiveScenarioClassification (tur-Latn)": 70.41,
+                    "MassiveScenarioClassification (kor-Kore)": 68.51,
+                    "MassiveScenarioClassification (hye-Armn)": 63.03,
+                    "MassiveScenarioClassification (jav-Latn)": 44.22,
+                    "MassiveScenarioClassification (rus-Cyrl)": 69.92,
+                    "MassiveScenarioClassification (hin-Deva)": 67.94,
+                    "MassiveScenarioClassification (amh-Ethi)": 48.96,
+                    "MassiveScenarioClassification (dan-Latn)": 71.04,
+                    "MassiveScenarioClassification (fas-Arab)": 69.88,
+                    "MassiveScenarioClassification (slv-Latn)": 70.81,
+                    "MassiveScenarioClassification (swe-Latn)": 71.6,
+                    "MassiveScenarioClassification (ron-Latn)": 67.94,
+                    "MultilingualSentiment (cmn-Hans)": 66.49,
+                    "NoRecClassification (nob-Latn)": 50.32,
+                    "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 41.57,
+                    "OnlineShopping (cmn-Hans)": 87.75,
+                    "PAC": 63.76,
+                    "PAC (pol-Latn)": 63.76,
+                    "PolEmo2.0-IN": 62.78,
+                    "PolEmo2.0-IN (pol-Latn)": 62.74,
+                    "PolEmo2.0-OUT": 19.98,
+                    "PolEmo2.0-OUT (pol-Latn)": 19.92,
+                    "RuReviewsClassification (rus-Cyrl)": 62.33,
+                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 56.01,
+                    "RuSciBenchOECDClassification (rus-Cyrl)": 44.14,
+                    "TNews (cmn-Hans)": 43.73,
+                    "ToxicConversationsClassification": 65.56,
+                    "TweetSentimentExtractionClassification": 59.04,
+                    "Waimai (cmn-Hans)": 83.97
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "paraphrase-multilingual-mpnet-base-v2",
+                    "8TagsClustering": 25.62,
+                    "AlloProfClusteringP2P": 54.49,
+                    "AlloProfClusteringS2S": 44.79,
+                    "ArxivClusteringP2P": 37.78,
+                    "ArxivClusteringS2S": 31.68,
+                    "BiorxivClusteringP2P": 33.02,
+                    "BiorxivClusteringS2S": 29.45,
+                    "BlurbsClusteringP2P": 34.38,
+                    "BlurbsClusteringS2S": 15.81,
+                    "GeoreviewClusteringP2P (rus-Cyrl)": 56.18,
+                    "HALClusteringS2S": 23.97,
+                    "MLSUMClusteringP2P": 40.55,
+                    "MLSUMClusteringP2P (rus-Cyrl)": 35.95,
+                    "MLSUMClusteringS2S": 37.53,
+                    "MLSUMClusteringS2S (rus-Cyrl)": 38.88,
+                    "MasakhaNEWSClusteringP2P (fra)": 41.57,
+                    "MasakhaNEWSClusteringP2P (amh-Ethi)": 46.85,
+                    "MasakhaNEWSClusteringP2P (eng)": 47.3,
+                    "MasakhaNEWSClusteringP2P (fra-Latn)": 53.3,
+                    "MasakhaNEWSClusteringP2P (hau-Latn)": 27.61,
+                    "MasakhaNEWSClusteringP2P (ibo-Latn)": 41.32,
+                    "MasakhaNEWSClusteringP2P (lin-Latn)": 58.37,
+                    "MasakhaNEWSClusteringP2P (lug-Latn)": 47.56,
+                    "MasakhaNEWSClusteringP2P (orm-Ethi)": 24.53,
+                    "MasakhaNEWSClusteringP2P (pcm-Latn)": 66.55,
+                    "MasakhaNEWSClusteringP2P (run-Latn)": 51.97,
+                    "MasakhaNEWSClusteringP2P (sna-Latn)": 45.55,
+                    "MasakhaNEWSClusteringP2P (som-Latn)": 33.98,
+                    "MasakhaNEWSClusteringP2P (swa-Latn)": 25.03,
+                    "MasakhaNEWSClusteringP2P (tir-Ethi)": 48.33,
+                    "MasakhaNEWSClusteringP2P (xho-Latn)": 29.47,
+                    "MasakhaNEWSClusteringP2P (yor-Latn)": 28.25,
+                    "MasakhaNEWSClusteringS2S (fra)": 30.88,
+                    "MasakhaNEWSClusteringS2S (amh-Ethi)": 51.54,
+                    "MasakhaNEWSClusteringS2S (eng)": 43.28,
+                    "MasakhaNEWSClusteringS2S (fra-Latn)": 37.92,
+                    "MasakhaNEWSClusteringS2S (hau-Latn)": 17.97,
+                    "MasakhaNEWSClusteringS2S (ibo-Latn)": 34.56,
+                    "MasakhaNEWSClusteringS2S (lin-Latn)": 57.43,
+                    "MasakhaNEWSClusteringS2S (lug-Latn)": 45.22,
+                    "MasakhaNEWSClusteringS2S (orm-Ethi)": 21.9,
+                    "MasakhaNEWSClusteringS2S (pcm-Latn)": 62.1,
+                    "MasakhaNEWSClusteringS2S (run-Latn)": 46.81,
+                    "MasakhaNEWSClusteringS2S (sna-Latn)": 43.15,
+                    "MasakhaNEWSClusteringS2S (som-Latn)": 29.44,
+                    "MasakhaNEWSClusteringS2S (swa-Latn)": 10.31,
+                    "MasakhaNEWSClusteringS2S (tir-Ethi)": 51.95,
+                    "MasakhaNEWSClusteringS2S (xho-Latn)": 21.26,
+                    "MasakhaNEWSClusteringS2S (yor-Latn)": 28.88,
+                    "MedrxivClusteringP2P": 31.93,
+                    "MedrxivClusteringS2S": 31.53,
+                    "RedditClustering": 45.65,
+                    "RedditClusteringP2P": 52.05,
+                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 48.47,
+                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 42.9,
+                    "StackExchangeClustering": 52.99,
+                    "StackExchangeClusteringP2P": 33.06,
+                    "TenKGnadClusteringP2P": 35.96,
+                    "TenKGnadClusteringS2S": 22.0,
+                    "TwentyNewsgroupsClustering": 44.36
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "paraphrase-multilingual-mpnet-base-v2",
+                    "CDSC-E": 75.76,
+                    "CDSC-E (pol-Latn)": 75.77,
+                    "OpusparcusPC (fr)": 93.45,
+                    "OpusparcusPC (deu-Latn)": 97.34,
+                    "OpusparcusPC (en)": 98.59,
+                    "OpusparcusPC (fin-Latn)": 95.33,
+                    "OpusparcusPC (fra-Latn)": 93.45,
+                    "OpusparcusPC (rus-Cyrl)": 90.47,
+                    "OpusparcusPC (swe-Latn)": 95.16,
+                    "PPC": 93.67,
+                    "PSC": 98.26,
+                    "PSC (pol-Latn)": 98.26,
+                    "PawsXPairClassification (fr)": 58.14,
+                    "PawsXPairClassification (deu-Latn)": 55.69,
+                    "PawsXPairClassification (en)": 60.12,
+                    "PawsXPairClassification (spa-Latn)": 56.94,
+                    "PawsXPairClassification (fra-Latn)": 58.14,
+                    "PawsXPairClassification (jpn-Hira)": 49.37,
+                    "PawsXPairClassification (kor-Hang)": 50.66,
+                    "PawsXPairClassification (cmn-Hans)": 55.47,
+                    "SICK-E-PL": 77.22,
+                    "SICK-E-PL (pol-Latn)": 77.22,
+                    "SprintDuplicateQuestions": 90.55,
+                    "TERRa (rus-Cyrl)": 64.57,
+                    "TwitterSemEval2015": 66.75,
+                    "TwitterURLCorpus": 85.14
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "paraphrase-multilingual-mpnet-base-v2",
+                    "AlloprofReranking": 54.34,
+                    "AlloprofReranking (fra-Latn)": 67.2,
+                    "AskUbuntuDupQuestions": 60.16,
+                    "MMarcoReranking (cmn-Hans)": 14.57,
+                    "MindSmallReranking": 30.15,
+                    "RuBQReranking (rus-Cyrl)": 58.77,
+                    "SciDocsRR": 78.09,
+                    "StackOverflowDupQuestions": 46.78,
+                    "SyntecReranking": 83.23,
+                    "SyntecReranking (fra-Latn)": 80.97,
+                    "T2Reranking (cmn-Hans)": 64.49
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "paraphrase-multilingual-mpnet-base-v2",
+                    "AILACasedocs": 17.45,
+                    "AILAStatutes": 22.24,
+                    "ARCChallenge": 7.19,
+                    "AlloprofRetrieval": 30.8,
+                    "AlloprofRetrieval (fra-Latn)": 30.8,
+                    "AlphaNLI": 21.87,
+                    "ArguAna": 48.91,
+                    "ArguAna-PL": 42.62,
+                    "ArguAna-PL (pol-Latn)": 42.61,
+                    "BSARDRetrieval": 0.0,
+                    "BSARDRetrieval (fra-Latn)": 13.19,
+                    "CQADupstackRetrieval": 31.32,
+                    "ClimateFEVER": 15.27,
+                    "CmedqaRetrieval (cmn-Hans)": 10.15,
+                    "CovidRetrieval (cmn-Hans)": 28.85,
+                    "DBPedia": 26.22,
+                    "DBPedia-PL": 20.18,
+                    "DuRetrieval (cmn-Hans)": 33.41,
+                    "EcomRetrieval (cmn-Hans)": 9.69,
+                    "FEVER": 56.76,
+                    "FiQA-PL": 14.68,
+                    "FiQA-PL (pol-Latn)": 14.71,
+                    "FiQA2018": 22.96,
+                    "GerDaLIRSmall (deu-Latn)": 3.0,
+                    "HellaSwag": 17.53,
+                    "HotpotQA": 37.03,
+                    "HotpotQA-PL": 29.36,
+                    "LEMBNarrativeQARetrieval": 16.02,
+                    "LEMBNeedleRetrieval": 14.0,
+                    "LEMBPasskeyRetrieval": 7.75,
+                    "LEMBQMSumRetrieval": 12.23,
+                    "LEMBSummScreenFDRetrieval": 41.15,
+                    "LEMBWikimQARetrieval": 38.86,
+                    "LeCaRDv2 (zho-Hans)": 33.91,
+                    "LegalBenchConsumerContractsQA": 52.37,
+                    "LegalBenchCorporateLobbying": 87.62,
+                    "LegalQuAD (deu-Latn)": 17.8,
+                    "LegalSummarization": 56.8,
+                    "MMarcoRetrieval (cmn-Hans)": 44.62,
+                    "MSMARCO": 26.6,
+                    "MSMARCO-PL": 12.45,
+                    "MedicalRetrieval (cmn-Hans)": 14.1,
+                    "MintakaRetrieval (fr)": 24.45,
+                    "MintakaRetrieval (ara-Arab)": 14.55,
+                    "MintakaRetrieval (deu-Latn)": 25.43,
+                    "MintakaRetrieval (spa-Latn)": 24.94,
+                    "MintakaRetrieval (fra-Latn)": 24.45,
+                    "MintakaRetrieval (hin-Deva)": 18.67,
+                    "MintakaRetrieval (ita-Latn)": 25.62,
+                    "MintakaRetrieval (jpn-Hira)": 15.46,
+                    "MintakaRetrieval (por-Latn)": 26.15,
+                    "NFCorpus": 25.49,
+                    "NFCorpus-PL": 18.53,
+                    "NFCorpus-PL (pol-Latn)": 18.54,
+                    "NQ": 33.6,
+                    "NQ-PL": 15.64,
+                    "PIQA": 18.65,
+                    "Quail": 2.98,
+                    "Quora-PL": 79.18,
+                    "QuoraRetrieval": 86.4,
+                    "RARbCode": 11.02,
+                    "RARbMath": 30.93,
+                    "RiaNewsRetrieval (rus-Cyrl)": 51.75,
+                    "RuBQRetrieval (rus-Cyrl)": 37.04,
+                    "SCIDOCS": 13.97,
+                    "SCIDOCS-PL": 11.18,
+                    "SCIDOCS-PL (pol-Latn)": 11.17,
+                    "SIQA": 1.21,
+                    "SciFact": 50.3,
+                    "SciFact-PL": 41.53,
+                    "SciFact-PL (pol-Latn)": 41.55,
+                    "SpartQA": 5.69,
+                    "SyntecRetrieval": 76.0,
+                    "SyntecRetrieval (fra-Latn)": 76.0,
+                    "T2Retrieval (cmn-Hans)": 28.35,
+                    "TRECCOVID": 37.87,
+                    "TRECCOVID-PL": 35.38,
+                    "TRECCOVID-PL (pol-Latn)": 35.43,
+                    "TempReasonL1": 1.94,
+                    "TempReasonL2Fact": 5.34,
+                    "TempReasonL2Pure": 0.33,
+                    "TempReasonL3Fact": 6.79,
+                    "TempReasonL3Pure": 3.19,
+                    "Touche2020": 17.4,
+                    "VideoRetrieval (cmn-Hans)": 14.18,
+                    "WinoGrande": 49.01,
+                    "XPQARetrieval (fr)": 46.22,
+                    "XPQARetrieval (ara-Arab_ara-Arab)": 24.86,
+                    "XPQARetrieval (eng-Latn_ara-Arab)": 19.6,
+                    "XPQARetrieval (ara-Arab_eng-Latn)": 28.21,
+                    "XPQARetrieval (deu-Latn_deu-Latn)": 48.81,
+                    "XPQARetrieval (eng-Latn_deu-Latn)": 31.93,
+                    "XPQARetrieval (deu-Latn_eng-Latn)": 53.26,
+                    "XPQARetrieval (spa-Latn_spa-Latn)": 41.08,
+                    "XPQARetrieval (eng-Latn_spa-Latn)": 30.05,
+                    "XPQARetrieval (spa-Latn_eng-Latn)": 43.4,
+                    "XPQARetrieval (fra-Latn_fra-Latn)": 46.22,
+                    "XPQARetrieval (eng-Latn_fra-Latn)": 29.55,
+                    "XPQARetrieval (fra-Latn_eng-Latn)": 47.3,
+                    "XPQARetrieval (hin-Deva_hin-Deva)": 50.74,
+                    "XPQARetrieval (eng-Latn_hin-Deva)": 24.97,
+                    "XPQARetrieval (hin-Deva_eng-Latn)": 49.24,
+                    "XPQARetrieval (ita-Latn_ita-Latn)": 52.87,
+                    "XPQARetrieval (eng-Latn_ita-Latn)": 33.44,
+                    "XPQARetrieval (ita-Latn_eng-Latn)": 51.49,
+                    "XPQARetrieval (jpn-Hira_jpn-Hira)": 53.17,
+                    "XPQARetrieval (eng-Latn_jpn-Hira)": 26.66,
+                    "XPQARetrieval (jpn-Hira_eng-Latn)": 49.86,
+                    "XPQARetrieval (kor-Hang_kor-Hang)": 24.9,
+                    "XPQARetrieval (eng-Latn_kor-Hang)": 24.5,
+                    "XPQARetrieval (kor-Hang_eng-Latn)": 24.57,
+                    "XPQARetrieval (pol-Latn_pol-Latn)": 29.36,
+                    "XPQARetrieval (eng-Latn_pol-Latn)": 20.48,
+                    "XPQARetrieval (pol-Latn_eng-Latn)": 29.31,
+                    "XPQARetrieval (por-Latn_por-Latn)": 34.26,
+                    "XPQARetrieval (eng-Latn_por-Latn)": 21.72,
+                    "XPQARetrieval (por-Latn_eng-Latn)": 37.62,
+                    "XPQARetrieval (tam-Taml_tam-Taml)": 19.8,
+                    "XPQARetrieval (eng-Latn_tam-Taml)": 13.93,
+                    "XPQARetrieval (tam-Taml_eng-Latn)": 18.26,
+                    "XPQARetrieval (cmn-Hans_cmn-Hans)": 42.54,
+                    "XPQARetrieval (eng-Latn_cmn-Hans)": 20.91,
+                    "XPQARetrieval (cmn-Hans_eng-Latn)": 42.81
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "paraphrase-multilingual-mpnet-base-v2",
+                    "AFQMC (cmn-Hans)": 15.69,
+                    "ATEC (cmn-Hans)": 20.27,
+                    "BIOSSES": 76.27,
+                    "BQ (cmn-Hans)": 36.33,
+                    "CDSC-R": 88.8,
+                    "CDSC-R (pol-Latn)": 88.8,
+                    "LCQMC (cmn-Hans)": 63.3,
+                    "PAWSX (cmn-Hans)": 12.16,
+                    "RUParaPhraserSTS (rus-Cyrl)": 65.74,
+                    "RuSTSBenchmarkSTS (rus-Cyrl)": 82.46,
+                    "SICK-R": 79.62,
+                    "SICK-R-PL": 73.13,
+                    "SICK-R-PL (pol-Latn)": 73.13,
+                    "SICKFr": 75.56,
+                    "SICKFr (fra-Latn)": 75.56,
+                    "STS12": 77.9,
+                    "STS13": 85.11,
+                    "STS14": 80.81,
+                    "STS15": 87.48,
+                    "STS16": 83.2,
+                    "STS17 (ar-ar)": 79.1,
+                    "STS17 (en-ar)": 80.85,
+                    "STS17 (en-de)": 83.28,
+                    "STS17 (en-en)": 86.99,
+                    "STS17 (en-tr)": 74.9,
+                    "STS17 (es-en)": 86.11,
+                    "STS17 (es-es)": 85.14,
+                    "STS17 (fr-en)": 81.17,
+                    "STS17 (it-en)": 84.24,
+                    "STS17 (ko-ko)": 83.41,
+                    "STS17 (nl-en)": 82.51,
+                    "STS17 (eng-Latn_deu-Latn)": 83.28,
+                    "STS17 (eng-Latn_tur-Latn)": 74.9,
+                    "STS17 (eng-Latn_ara-Arab)": 80.85,
+                    "STS17 (ara-Arab)": 79.1,
+                    "STS17 (nld-Latn_eng-Latn)": 82.51,
+                    "STS17 (fra-Latn_eng-Latn)": 81.17,
+                    "STS17 (ita-Latn_eng-Latn)": 84.24,
+                    "STS17 (spa-Latn_eng-Latn)": 86.11,
+                    "STS17 (spa-Latn)": 85.14,
+                    "STS17 (kor-Hang)": 83.41,
+                    "STS22 (pl)": 33.64,
+                    "STS22 (fr)": 74.3,
+                    "STS22 (rus-Cyrl)": 58.74,
+                    "STS22 (spa-Latn_eng-Latn)": 70.26,
+                    "STS22 (spa-Latn)": 59.91,
+                    "STS22 (tur-Latn)": 56.3,
+                    "STS22 (ita-Latn)": 60.65,
+                    "STS22 (ara-Arab)": 52.19,
+                    "STS22 (pol-Latn)": 33.65,
+                    "STS22 (deu-Latn)": 46.7,
+                    "STS22 (cmn-Hans)": 61.75,
+                    "STS22 (fra-Latn)": 74.3,
+                    "STS22 (deu-Latn_eng-Latn)": 50.81,
+                    "STS22 (pol-Latn_eng-Latn)": 73.07,
+                    "STS22 (en)": 63.52,
+                    "STS22 (spa-Latn_ita-Latn)": 53.7,
+                    "STS22 (deu-Latn_fra-Latn)": 62.34,
+                    "STS22 (deu-Latn_pol-Latn)": 40.53,
+                    "STS22 (cmn-Hans_eng-Latn)": 67.96,
+                    "STS22 (fra-Latn_pol-Latn)": 84.52,
+                    "STSB (cmn-Hans)": 80.84,
+                    "STSBenchmark": 86.82,
+                    "STSBenchmarkMultilingualSTS (fr)": 84.69,
+                    "STSBenchmarkMultilingualSTS (nld-Latn)": 83.36,
+                    "STSBenchmarkMultilingualSTS (deu-Latn)": 83.56,
+                    "STSBenchmarkMultilingualSTS (fra-Latn)": 84.69,
+                    "STSBenchmarkMultilingualSTS (spa-Latn)": 84.61,
+                    "STSBenchmarkMultilingualSTS (cmn-Hans)": 81.98,
+                    "STSBenchmarkMultilingualSTS (en)": 86.82,
+                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 82.45,
+                    "STSBenchmarkMultilingualSTS (por-Latn)": 84.0,
+                    "STSBenchmarkMultilingualSTS (ita-Latn)": 84.09,
+                    "STSBenchmarkMultilingualSTS (pol-Latn)": 81.46
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "paraphrase-multilingual-mpnet-base-v2",
+                    "SummEval": 31.57,
+                    "SummEvalFr": 29.47,
+                    "SummEvalFr (fra-Latn)": 29.47
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "paraphrase-multilingual-mpnet-base-v2",
+                    "CEDRClassification (rus-Cyrl)": 39.98,
+                    "SensitiveTopicsClassification (rus-Cyrl)": 25.83
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "paraphrase-multilingual-mpnet-base-v2"
+                }
+            ]
+        }
+    },
+    "text-embedding-ada-002": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "text-embedding-ada-002"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "text-embedding-ada-002",
+                    "AmazonCounterfactualClassification (en)": 75.94,
+                    "AmazonPolarityClassification": 86.72,
+                    "AmazonReviewsClassification (zh)": 38.3,
+                    "AmazonReviewsClassification (en)": 44.78,
+                    "AmazonReviewsClassification (fr)": 43.76,
+                    "Banking77Classification": 80.66,
+                    "EmotionClassification": 48.74,
+                    "IFlyTek": 44.62,
+                    "ImdbClassification": 77.98,
+                    "JDReview": 74.6,
+                    "MTOPDomainClassification (en)": 92.13,
+                    "MTOPDomainClassification (fr)": 89.38,
+                    "MTOPIntentClassification (en)": 64.68,
+                    "MTOPIntentClassification (fr)": 64.45,
+                    "MasakhaNEWSClassification (fra)": 81.52,
+                    "MassiveIntentClassification (zh-CN)": 64.81,
+                    "MassiveIntentClassification (en)": 70.15,
+                    "MassiveIntentClassification (fr)": 65.42,
+                    "MassiveScenarioClassification (zh-CN)": 71.4,
+                    "MassiveScenarioClassification (en)": 75.33,
+                    "MassiveScenarioClassification (fr)": 71.11,
+                    "MultilingualSentiment": 67.99,
+                    "OnlineShopping": 88.94,
+                    "TNews": 45.77,
+                    "ToxicConversationsClassification": 72.29,
+                    "TweetSentimentExtractionClassification": 61.81,
+                    "Waimai": 82.37
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "text-embedding-ada-002",
+                    "AlloProfClusteringP2P": 64.83,
+                    "AlloProfClusteringS2S": 53.52,
+                    "ArxivClusteringP2P": 45.01,
+                    "ArxivClusteringS2S": 36.85,
+                    "BiorxivClusteringP2P": 36.66,
+                    "BiorxivClusteringS2S": 34.21,
+                    "CLSClusteringP2P": 38.26,
+                    "CLSClusteringS2S": 35.91,
+                    "HALClusteringS2S": 26.18,
+                    "MLSUMClusteringP2P": 44.59,
+                    "MLSUMClusteringS2S": 41.67,
+                    "MasakhaNEWSClusteringP2P (fra)": 68.35,
+                    "MasakhaNEWSClusteringS2S (fra)": 48.58,
+                    "MedrxivClusteringP2P": 32.6,
+                    "MedrxivClusteringS2S": 30.8,
+                    "RedditClustering": 61.42,
+                    "RedditClusteringP2P": 64.13,
+                    "StackExchangeClustering": 72.22,
+                    "StackExchangeClusteringP2P": 38.49,
+                    "ThuNewsClusteringP2P": 58.71,
+                    "ThuNewsClusteringS2S": 49.86,
+                    "TwentyNewsgroupsClustering": 52.56
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "text-embedding-ada-002",
+                    "Cmnli": 76.03,
+                    "Ocnli": 63.08,
+                    "OpusparcusPC (fr)": 94.12,
+                    "PawsXPairClassification (fr)": 60.16,
+                    "SprintDuplicateQuestions": 92.17,
+                    "TwitterSemEval2015": 75.28,
+                    "TwitterURLCorpus": 87.22
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "text-embedding-ada-002",
+                    "AskUbuntuDupQuestions": 62.05,
+                    "CMedQAv1": 63.08,
+                    "CMedQAv2": 64.02,
+                    "MMarcoReranking": 23.39,
+                    "MindSmallReranking": 31.45,
+                    "SciDocsRR": 81.22,
+                    "StackOverflowDupQuestions": 50.54,
+                    "SyntecReranking": 89.87,
+                    "T2Reranking": 66.65
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "text-embedding-ada-002",
+                    "ARCChallenge": 13.3,
+                    "AlloprofRetrieval": 51.64,
+                    "AlphaNLI": 25.65,
+                    "ArguAna": 57.44,
+                    "BSARDRetrieval": 0.61,
+                    "CQADupstackRetrieval": 41.69,
+                    "ClimateFEVER": 21.64,
+                    "CmedqaRetrieval": 22.36,
+                    "CovidRetrieval": 57.21,
+                    "DBPedia": 39.39,
+                    "DuRetrieval": 71.17,
+                    "EcomRetrieval": 44.49,
+                    "FEVER": 74.99,
+                    "FiQA2018": 44.41,
+                    "HellaSwag": 29.29,
+                    "HotpotQA": 60.9,
+                    "MMarcoRetrieval": 69.86,
+                    "MSMARCO": 40.91,
+                    "MedicalRetrieval": 37.92,
+                    "MintakaRetrieval (fr)": 29.94,
+                    "NFCorpus": 36.97,
+                    "NQ": 51.58,
+                    "PIQA": 31.02,
+                    "Quail": 5.83,
+                    "QuoraRetrieval": 87.6,
+                    "RARbCode": 83.39,
+                    "RARbMath": 73.21,
+                    "SCIDOCS": 18.36,
+                    "SIQA": 3.14,
+                    "SciFact": 72.75,
+                    "SpartQA": 4.23,
+                    "SyntecRetrieval": 85.97,
+                    "T2Retrieval": 69.14,
+                    "TRECCOVID": 68.47,
+                    "TempReasonL1": 1.68,
+                    "TempReasonL2Fact": 19.93,
+                    "TempReasonL2Pure": 2.6,
+                    "TempReasonL3Fact": 18.02,
+                    "TempReasonL3Pure": 7.58,
+                    "Touche2020": 21.61,
+                    "VideoRetrieval": 43.85,
+                    "WinoGrande": 19.65,
+                    "XPQARetrieval (fr)": 73.0
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "text-embedding-ada-002",
+                    "AFQMC": 23.88,
+                    "ATEC": 29.25,
+                    "BIOSSES": 86.35,
+                    "BQ": 45.33,
+                    "LCQMC": 68.41,
+                    "PAWSX": 16.55,
+                    "QBQTC": 30.27,
+                    "SICK-R": 80.6,
+                    "SICKFr": 76.28,
+                    "STS12": 69.8,
+                    "STS13": 83.27,
+                    "STS14": 76.09,
+                    "STS15": 86.12,
+                    "STS16": 85.96,
+                    "STS17 (en-en)": 90.25,
+                    "STS22 (zh)": 62.53,
+                    "STS22 (en)": 68.12,
+                    "STS22 (tr)": 64.5,
+                    "STS22 (fr)": 81.09,
+                    "STSB": 70.61,
+                    "STSBenchmark": 83.17,
+                    "STSBenchmarkMultilingualSTS (fr)": 77.55
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "text-embedding-ada-002",
+                    "SummEval": 30.8,
+                    "SummEvalFr": 30.5
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "text-embedding-ada-002"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "text-embedding-ada-002"
+                }
+            ]
+        }
+    },
+    "use-cmlm-multilingual": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "use-cmlm-multilingual"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "use-cmlm-multilingual"
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "use-cmlm-multilingual",
+                    "BlurbsClusteringP2P": 29.63,
+                    "BlurbsClusteringS2S": 15.24,
+                    "TenKGnadClusteringP2P": 37.1,
+                    "TenKGnadClusteringS2S": 25.64
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "use-cmlm-multilingual"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "use-cmlm-multilingual"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "use-cmlm-multilingual"
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "use-cmlm-multilingual"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "use-cmlm-multilingual"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "use-cmlm-multilingual"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "use-cmlm-multilingual"
+                }
+            ]
+        }
+    },
+    "Cohere-embed-multilingual-light-v3.0": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "Cohere-embed-multilingual-light-v3.0"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "Cohere-embed-multilingual-light-v3.0",
+                    "AmazonReviewsClassification (fr)": 38.6,
+                    "MTOPDomainClassification (fr)": 80.79,
+                    "MTOPIntentClassification (fr)": 50.01,
+                    "MasakhaNEWSClassification (fra)": 82.58,
+                    "MassiveIntentClassification (fr)": 56.31,
+                    "MassiveScenarioClassification (fr)": 59.5
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "Cohere-embed-multilingual-light-v3.0",
+                    "AlloProfClusteringP2P": 61.96,
+                    "AlloProfClusteringS2S": 31.36,
+                    "HALClusteringS2S": 17.31,
+                    "MLSUMClusteringP2P": 42.8,
+                    "MLSUMClusteringS2S": 32.72,
+                    "MasakhaNEWSClusteringP2P (fra)": 56.81,
+                    "MasakhaNEWSClusteringS2S (fra)": 29.41
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "Cohere-embed-multilingual-light-v3.0",
+                    "OpusparcusPC (fr)": 90.92,
+                    "PawsXPairClassification (fr)": 57.32
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "Cohere-embed-multilingual-light-v3.0",
+                    "AlloprofReranking": 51.6,
+                    "SyntecReranking": 88.03
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "Cohere-embed-multilingual-light-v3.0",
+                    "AlloprofRetrieval": 35.39,
+                    "BSARDRetrieval": 0.0,
+                    "MintakaRetrieval (fr)": 23.0,
+                    "SyntecRetrieval": 76.88,
+                    "XPQARetrieval (fr)": 45.23
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "Cohere-embed-multilingual-light-v3.0",
+                    "SICKFr": 75.5,
+                    "STS22 (fr)": 82.8,
+                    "STSBenchmarkMultilingualSTS (fr)": 76.48
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "Cohere-embed-multilingual-light-v3.0",
+                    "SummEvalFr": 31.4
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "Cohere-embed-multilingual-light-v3.0"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "Cohere-embed-multilingual-light-v3.0"
+                }
+            ]
+        }
+    },
+    "bge-large-en-v1.5": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "bge-large-en-v1.5"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "bge-large-en-v1.5"
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "bge-large-en-v1.5"
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "bge-large-en-v1.5"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "bge-large-en-v1.5"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "bge-large-en-v1.5",
+                    "AILACasedocs": 25.15,
+                    "AILAStatutes": 20.74,
+                    "ARCChallenge": 9.99,
+                    "AlphaNLI": 13.13,
+                    "BrightRetrieval (stackoverflow)": 9.51,
+                    "BrightRetrieval (earth_science)": 24.15,
+                    "BrightRetrieval (aops)": 6.08,
+                    "BrightRetrieval (sustainable_living)": 13.27,
+                    "BrightRetrieval (psychology)": 17.44,
+                    "BrightRetrieval (robotics)": 12.21,
+                    "BrightRetrieval (theoremqa_theorems)": 5.51,
+                    "BrightRetrieval (pony)": 5.64,
+                    "BrightRetrieval (biology)": 11.96,
+                    "BrightRetrieval (theoremqa_questions)": 12.56,
+                    "BrightRetrieval (leetcode)": 26.68,
+                    "BrightRetrieval (economics)": 16.59,
+                    "GerDaLIRSmall": 3.96,
+                    "HellaSwag": 28.5,
+                    "LeCaRDv2": 22.68,
+                    "LegalBenchConsumerContractsQA": 73.52,
+                    "LegalBenchCorporateLobbying": 91.51,
+                    "LegalQuAD": 16.22,
+                    "LegalSummarization": 59.99,
+                    "PIQA": 27.99,
+                    "Quail": 1.83,
+                    "RARbCode": 48.12,
+                    "RARbMath": 57.36,
+                    "SIQA": 1.04,
+                    "SpartQA": 2.99,
+                    "TempReasonL1": 1.46,
+                    "TempReasonL2Fact": 24.25,
+                    "TempReasonL2Pure": 2.35,
+                    "TempReasonL3Fact": 20.64,
+                    "TempReasonL3Pure": 6.67,
+                    "WinoGrande": 19.18
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "bge-large-en-v1.5"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "bge-large-en-v1.5"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "bge-large-en-v1.5"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "bge-large-en-v1.5"
+                }
+            ]
+        }
+    },
+    "mistral-embed": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "mistral-embed"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "mistral-embed",
+                    "AmazonReviewsClassification (fr)": 41.59,
+                    "MTOPDomainClassification (fr)": 90.05,
+                    "MTOPIntentClassification (fr)": 66.09,
+                    "MasakhaNEWSClassification (fra)": 81.4,
+                    "MassiveIntentClassification (fr)": 62.83,
+                    "MassiveScenarioClassification (fr)": 69.71
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "mistral-embed",
+                    "AlloProfClusteringP2P": 62.01,
+                    "AlloProfClusteringS2S": 49.2,
+                    "HALClusteringS2S": 26.17,
+                    "MLSUMClusteringP2P": 45.28,
+                    "MLSUMClusteringS2S": 42.74,
+                    "MasakhaNEWSClusteringP2P (fra)": 48.13,
+                    "MasakhaNEWSClusteringS2S (fra)": 39.62
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "mistral-embed",
+                    "OpusparcusPC (fr)": 92.61,
+                    "PawsXPairClassification (fr)": 62.02
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "mistral-embed",
+                    "AlloprofReranking": 72.36,
+                    "SyntecReranking": 88.57
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "mistral-embed",
+                    "AILACasedocs": 38.2,
+                    "AILAStatutes": 44.81,
+                    "AlloprofRetrieval": 56.84,
+                    "BSARDRetrieval": 2.48,
+                    "GerDaLIRSmall": 17.85,
+                    "LeCaRDv2": 61.12,
+                    "LegalBenchConsumerContractsQA": 80.8,
+                    "LegalBenchCorporateLobbying": 94.11,
+                    "LegalQuAD": 47.17,
+                    "LegalSummarization": 67.39,
+                    "MintakaRetrieval (fr)": 21.73,
+                    "SyntecRetrieval": 78.77,
+                    "XPQARetrieval (fr)": 74.24
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "mistral-embed",
+                    "SICKFr": 76.21,
+                    "STS22 (fr)": 82.74,
+                    "STSBenchmarkMultilingualSTS (fr)": 79.72
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "mistral-embed",
+                    "SummEvalFr": 31.47
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "mistral-embed"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "mistral-embed"
+                }
+            ]
+        }
+    },
+    "FollowIR-7B": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "FollowIR-7B"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "FollowIR-7B"
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "FollowIR-7B"
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "FollowIR-7B"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "FollowIR-7B"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "FollowIR-7B"
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "FollowIR-7B"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "FollowIR-7B"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "FollowIR-7B"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "FollowIR-7B",
+                    "Core17InstructionRetrieval": 16.48,
+                    "News21InstructionRetrieval": 6.26,
+                    "Robust04InstructionRetrieval": 13.72
+                }
+            ]
+        }
+    },
+    "gottbert-base": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "gottbert-base"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "gottbert-base"
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "gottbert-base",
+                    "BlurbsClusteringP2P": 34.49,
+                    "BlurbsClusteringS2S": 8.37,
+                    "TenKGnadClusteringP2P": 33.66,
+                    "TenKGnadClusteringS2S": 9.34
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "gottbert-base"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "gottbert-base"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "gottbert-base"
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "gottbert-base"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "gottbert-base"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "gottbert-base"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "gottbert-base"
+                }
+            ]
+        }
+    },
+    "e5-base-4k": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "e5-base-4k"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "e5-base-4k"
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "e5-base-4k"
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "e5-base-4k"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "e5-base-4k"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "e5-base-4k",
+                    "LEMBNarrativeQARetrieval": 30.35,
+                    "LEMBNeedleRetrieval": 41.5,
+                    "LEMBPasskeyRetrieval": 67.25,
+                    "LEMBQMSumRetrieval": 35.6,
+                    "LEMBSummScreenFDRetrieval": 95.23,
+                    "LEMBWikimQARetrieval": 69.19
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "e5-base-4k"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "e5-base-4k"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "e5-base-4k"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "e5-base-4k"
+                }
+            ]
+        }
+    },
+    "LLM2Vec-Sheared-Llama-supervised": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "LLM2Vec-Sheared-Llama-supervised"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "LLM2Vec-Sheared-Llama-supervised",
+                    "AmazonCounterfactualClassification (en)": 77.42,
+                    "AmazonPolarityClassification": 82.05,
+                    "AmazonReviewsClassification (en)": 40.81,
+                    "Banking77Classification": 86.01,
+                    "EmotionClassification": 48.38,
+                    "ImdbClassification": 75.33,
+                    "MTOPDomainClassification (en)": 94.09,
+                    "MTOPIntentClassification (en)": 77.05,
+                    "MassiveIntentClassification (en)": 75.58,
+                    "MassiveScenarioClassification (en)": 79.16,
+                    "ToxicConversationsClassification": 69.92,
+                    "TweetSentimentExtractionClassification": 60.76
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "LLM2Vec-Sheared-Llama-supervised",
+                    "ArxivClusteringP2P": 43.47,
+                    "ArxivClusteringS2S": 39.85,
+                    "BiorxivClusteringP2P": 37.1,
+                    "BiorxivClusteringS2S": 34.28,
+                    "MedrxivClusteringP2P": 33.55,
+                    "MedrxivClusteringS2S": 31.11,
+                    "RedditClustering": 53.02,
+                    "RedditClusteringP2P": 60.47,
+                    "StackExchangeClustering": 63.04,
+                    "StackExchangeClusteringP2P": 34.01,
+                    "TwentyNewsgroupsClustering": 49.37
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "LLM2Vec-Sheared-Llama-supervised",
+                    "SprintDuplicateQuestions": 96.25,
+                    "TwitterSemEval2015": 76.14,
+                    "TwitterURLCorpus": 86.23
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "LLM2Vec-Sheared-Llama-supervised",
+                    "AskUbuntuDupQuestions": 60.71,
+                    "MindSmallReranking": 31.96,
+                    "SciDocsRR": 79.23,
+                    "StackOverflowDupQuestions": 49.61
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "LLM2Vec-Sheared-Llama-supervised",
+                    "ArguAna": 51.66,
+                    "CQADupstackRetrieval": 41.73,
+                    "ClimateFEVER": 33.49,
+                    "DBPedia": 43.58,
+                    "FEVER": 86.81,
+                    "FiQA2018": 41.0,
+                    "HotpotQA": 63.85,
+                    "MSMARCO": 38.32,
+                    "NFCorpus": 37.12,
+                    "NQ": 53.89,
+                    "QuoraRetrieval": 87.37,
+                    "SCIDOCS": 17.96,
+                    "SciFact": 72.08,
+                    "TRECCOVID": 80.41,
+                    "Touche2020": 22.31
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "LLM2Vec-Sheared-Llama-supervised",
+                    "BIOSSES": 85.88,
+                    "SICK-R": 82.25,
+                    "STS12": 78.28,
+                    "STS13": 85.52,
+                    "STS14": 82.49,
+                    "STS15": 88.76,
+                    "STS16": 87.11,
+                    "STS17 (en-en)": 90.1,
+                    "STS22 (en)": 68.25,
+                    "STSBenchmark": 87.16
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "LLM2Vec-Sheared-Llama-supervised",
+                    "SummEval": 30.01
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "LLM2Vec-Sheared-Llama-supervised"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "LLM2Vec-Sheared-Llama-supervised"
+                }
+            ]
+        }
+    },
+    "multilingual-e5-large": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "multilingual-e5-large",
+                    "BornholmBitextMining (dan-Latn)": 29.61,
+                    "BornholmBitextMining": 44.16,
+                    "Tatoeba (tgl-Latn_eng-Latn)": 92.0,
+                    "Tatoeba (gsw-Latn_eng-Latn)": 51.65,
+                    "Tatoeba (tzl-Latn_eng-Latn)": 53.16,
+                    "Tatoeba (slv-Latn_eng-Latn)": 89.57,
+                    "Tatoeba (jav-Latn_eng-Latn)": 75.46,
+                    "Tatoeba (uig-Arab_eng-Latn)": 72.17,
+                    "Tatoeba (ind-Latn_eng-Latn)": 92.9,
+                    "Tatoeba (rus-Cyrl_eng-Latn)": 92.32,
+                    "Tatoeba (war-Latn_eng-Latn)": 62.02,
+                    "Tatoeba (mar-Deva_eng-Latn)": 88.58,
+                    "Tatoeba (mkd-Cyrl_eng-Latn)": 85.63,
+                    "Tatoeba (jpn-Jpan_eng-Latn)": 95.28,
+                    "Tatoeba (hun-Latn_eng-Latn)": 94.01,
+                    "Tatoeba (slk-Latn_eng-Latn)": 93.13,
+                    "Tatoeba (tha-Thai_eng-Latn)": 95.38,
+                    "Tatoeba (fra-Latn_eng-Latn)": 93.42,
+                    "Tatoeba (ukr-Cyrl_eng-Latn)": 93.32,
+                    "Tatoeba (kat-Geor_eng-Latn)": 84.09,
+                    "Tatoeba (nov-Latn_eng-Latn)": 71.62,
+                    "Tatoeba (kor-Hang_eng-Latn)": 90.65,
+                    "Tatoeba (ben-Beng_eng-Latn)": 83.02,
+                    "Tatoeba (cor-Latn_eng-Latn)": 6.28,
+                    "Tatoeba (lfn-Latn_eng-Latn)": 62.91,
+                    "Tatoeba (swh-Latn_eng-Latn)": 71.61,
+                    "Tatoeba (tur-Latn_eng-Latn)": 96.27,
+                    "Tatoeba (cbk-Latn_eng-Latn)": 69.26,
+                    "Tatoeba (kur-Latn_eng-Latn)": 66.83,
+                    "Tatoeba (arq-Arab_eng-Latn)": 41.56,
+                    "Tatoeba (ceb-Latn_eng-Latn)": 55.31,
+                    "Tatoeba (max-Deva_eng-Latn)": 63.41,
+                    "Tatoeba (ang-Latn_eng-Latn)": 40.18,
+                    "Tatoeba (nds-Latn_eng-Latn)": 69.28,
+                    "Tatoeba (epo-Latn_eng-Latn)": 96.01,
+                    "Tatoeba (heb-Hebr_eng-Latn)": 86.61,
+                    "Tatoeba (yue-Hant_eng-Latn)": 88.71,
+                    "Tatoeba (dan-Latn_eng-Latn)": 95.08,
+                    "Tatoeba (swe-Latn_eng-Latn)": 95.3,
+                    "Tatoeba (lvs-Latn_eng-Latn)": 90.06,
+                    "Tatoeba (ast-Latn_eng-Latn)": 81.76,
+                    "Tatoeba (dsb-Latn_eng-Latn)": 48.44,
+                    "Tatoeba (pes-Arab_eng-Latn)": 92.14,
+                    "Tatoeba (dtp-Latn_eng-Latn)": 7.03,
+                    "Tatoeba (tuk-Latn_eng-Latn)": 33.15,
+                    "Tatoeba (isl-Latn_eng-Latn)": 92.09,
+                    "Tatoeba (khm-Khmr_eng-Latn)": 59.96,
+                    "Tatoeba (pam-Latn_eng-Latn)": 9.32,
+                    "Tatoeba (tat-Cyrl_eng-Latn)": 73.51,
+                    "Tatoeba (bos-Latn_eng-Latn)": 92.86,
+                    "Tatoeba (spa-Latn_eng-Latn)": 97.1,
+                    "Tatoeba (kaz-Cyrl_eng-Latn)": 79.67,
+                    "Tatoeba (bel-Cyrl_eng-Latn)": 91.08,
+                    "Tatoeba (zsm-Latn_eng-Latn)": 94.53,
+                    "Tatoeba (cat-Latn_eng-Latn)": 91.03,
+                    "Tatoeba (urd-Arab_eng-Latn)": 89.21,
+                    "Tatoeba (mon-Cyrl_eng-Latn)": 87.53,
+                    "Tatoeba (tam-Taml_eng-Latn)": 88.23,
+                    "Tatoeba (fry-Latn_eng-Latn)": 63.43,
+                    "Tatoeba (nob-Latn_eng-Latn)": 97.2,
+                    "Tatoeba (tel-Telu_eng-Latn)": 91.34,
+                    "Tatoeba (hye-Armn_eng-Latn)": 90.92,
+                    "Tatoeba (awa-Deva_eng-Latn)": 72.27,
+                    "Tatoeba (hrv-Latn_eng-Latn)": 96.15,
+                    "Tatoeba (ile-Latn_eng-Latn)": 79.16,
+                    "Tatoeba (amh-Ethi_eng-Latn)": 80.69,
+                    "Tatoeba (orv-Cyrl_eng-Latn)": 39.87,
+                    "Tatoeba (ara-Arab_eng-Latn)": 85.48,
+                    "Tatoeba (ido-Latn_eng-Latn)": 83.52,
+                    "Tatoeba (hin-Deva_eng-Latn)": 94.48,
+                    "Tatoeba (por-Latn_eng-Latn)": 93.63,
+                    "Tatoeba (ron-Latn_eng-Latn)": 94.87,
+                    "Tatoeba (swg-Latn_eng-Latn)": 55.64,
+                    "Tatoeba (cmn-Hans_eng-Latn)": 95.28,
+                    "Tatoeba (pol-Latn_eng-Latn)": 96.6,
+                    "Tatoeba (bul-Cyrl_eng-Latn)": 92.93,
+                    "Tatoeba (ina-Latn_eng-Latn)": 93.47,
+                    "Tatoeba (bre-Latn_eng-Latn)": 11.1,
+                    "Tatoeba (wuu-Hans_eng-Latn)": 86.37,
+                    "Tatoeba (lit-Latn_eng-Latn)": 88.48,
+                    "Tatoeba (csb-Latn_eng-Latn)": 36.98,
+                    "Tatoeba (lat-Latn_eng-Latn)": 53.37,
+                    "Tatoeba (gle-Latn_eng-Latn)": 71.48,
+                    "Tatoeba (ita-Latn_eng-Latn)": 93.29,
+                    "Tatoeba (srp-Cyrl_eng-Latn)": 93.1,
+                    "Tatoeba (arz-Arab_eng-Latn)": 74.73,
+                    "Tatoeba (cym-Latn_eng-Latn)": 76.21,
+                    "Tatoeba (ber-Tfng_eng-Latn)": 38.9,
+                    "Tatoeba (xho-Latn_eng-Latn)": 80.87,
+                    "Tatoeba (uzb-Latn_eng-Latn)": 72.35,
+                    "Tatoeba (pms-Latn_eng-Latn)": 59.85,
+                    "Tatoeba (est-Latn_eng-Latn)": 85.03,
+                    "Tatoeba (deu-Latn_eng-Latn)": 99.07,
+                    "Tatoeba (yid-Hebr_eng-Latn)": 76.33,
+                    "Tatoeba (ell-Grek_eng-Latn)": 93.88,
+                    "Tatoeba (afr-Latn_eng-Latn)": 90.22,
+                    "Tatoeba (fao-Latn_eng-Latn)": 72.62,
+                    "Tatoeba (nld-Latn_eng-Latn)": 96.63,
+                    "Tatoeba (hsb-Latn_eng-Latn)": 58.9,
+                    "Tatoeba (aze-Latn_eng-Latn)": 87.61,
+                    "Tatoeba (kzj-Latn_eng-Latn)": 7.91,
+                    "Tatoeba (kab-Latn_eng-Latn)": 36.54,
+                    "Tatoeba (mal-Mlym_eng-Latn)": 97.7,
+                    "Tatoeba (mhr-Cyrl_eng-Latn)": 6.79,
+                    "Tatoeba (ces-Latn_eng-Latn)": 94.89,
+                    "Tatoeba (gla-Latn_eng-Latn)": 59.0,
+                    "Tatoeba (cha-Latn_eng-Latn)": 27.16,
+                    "Tatoeba (glg-Latn_eng-Latn)": 93.34,
+                    "Tatoeba (vie-Latn_eng-Latn)": 97.0,
+                    "Tatoeba (oci-Latn_eng-Latn)": 54.91,
+                    "Tatoeba (nno-Latn_eng-Latn)": 91.4,
+                    "Tatoeba (fin-Latn_eng-Latn)": 95.44,
+                    "Tatoeba (eus-Latn_eng-Latn)": 77.82,
+                    "Tatoeba (sqi-Latn_eng-Latn)": 94.7
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "multilingual-e5-large",
+                    "AllegroReviews (pol-Latn)": 41.04,
+                    "AllegroReviews": 41.14,
+                    "AmazonCounterfactualClassification (en-ext)": 78.73,
+                    "AmazonCounterfactualClassification (en)": 78.67,
+                    "AmazonCounterfactualClassification (deu-Latn)": 68.66,
+                    "AmazonCounterfactualClassification (jpn-Jpan)": 78.8,
+                    "AmazonPolarityClassification": 93.26,
+                    "AmazonReviewsClassification (en)": 49.2,
+                    "AmazonReviewsClassification (deu-Latn)": 46.5,
+                    "AmazonReviewsClassification (spa-Latn)": 44.35,
+                    "AmazonReviewsClassification (fra-Latn)": 42.55,
+                    "AmazonReviewsClassification (jpn-Jpan)": 41.71,
+                    "AmazonReviewsClassification (cmn-Hans)": 38.87,
+                    "AmazonReviewsClassification (fr)": 41.91,
+                    "AngryTweetsClassification (dan-Latn)": 57.69,
+                    "AngryTweetsClassification": 54.95,
+                    "Banking77Classification": 75.88,
+                    "CBD (pol-Latn)": 69.84,
+                    "CBD": 69.9,
+                    "DKHateClassification": 66.02,
+                    "DanishPoliticalCommentsClassification (dan-Latn)": 39.43,
+                    "DanishPoliticalCommentsClassification": 38.27,
+                    "EmotionClassification": 47.58,
+                    "GeoreviewClassification (rus-Cyrl)": 49.69,
+                    "HeadlineClassification (rus-Cyrl)": 77.19,
+                    "IFlyTek (cmn-Hans)": 41.86,
+                    "IFlyTek": 45.47,
+                    "ImdbClassification": 90.23,
+                    "InappropriatenessClassification (rus-Cyrl)": 61.6,
+                    "JDReview (cmn-Hans)": 80.54,
+                    "JDReview": 80.99,
+                    "KinopoiskClassification (rus-Cyrl)": 56.59,
+                    "LccSentimentClassification (dan-Latn)": 61.53,
+                    "LccSentimentClassification": 59.6,
+                    "MTOPDomainClassification (en)": 91.81,
+                    "MTOPDomainClassification (deu-Latn)": 90.44,
+                    "MTOPDomainClassification (spa-Latn)": 88.34,
+                    "MTOPDomainClassification (fra-Latn)": 86.23,
+                    "MTOPDomainClassification (hin-Deva)": 86.84,
+                    "MTOPDomainClassification (tha-Thai)": 86.88,
+                    "MTOPDomainClassification (fr)": 86.41,
+                    "MTOPIntentClassification (en)": 64.29,
+                    "MTOPIntentClassification (deu-Latn)": 65.97,
+                    "MTOPIntentClassification (spa-Latn)": 61.9,
+                    "MTOPIntentClassification (fra-Latn)": 56.25,
+                    "MTOPIntentClassification (hin-Deva)": 59.17,
+                    "MTOPIntentClassification (tha-Thai)": 62.59,
+                    "MTOPIntentClassification (fr)": 59.43,
+                    "MasakhaNEWSClassification (amh-Ethi)": 83.7,
+                    "MasakhaNEWSClassification (eng)": 78.26,
+                    "MasakhaNEWSClassification (fra-Latn)": 76.11,
+                    "MasakhaNEWSClassification (hau-Latn)": 76.17,
+                    "MasakhaNEWSClassification (ibo-Latn)": 70.05,
+                    "MasakhaNEWSClassification (lin-Latn)": 75.89,
+                    "MasakhaNEWSClassification (lug-Latn)": 73.63,
+                    "MasakhaNEWSClassification (orm-Ethi)": 80.31,
+                    "MasakhaNEWSClassification (pcm-Latn)": 89.15,
+                    "MasakhaNEWSClassification (run-Latn)": 76.55,
+                    "MasakhaNEWSClassification (sna-Latn)": 86.99,
+                    "MasakhaNEWSClassification (som-Latn)": 64.63,
+                    "MasakhaNEWSClassification (swa-Latn)": 73.42,
+                    "MasakhaNEWSClassification (tir-Ethi)": 72.06,
+                    "MasakhaNEWSClassification (xho-Latn)": 82.56,
+                    "MasakhaNEWSClassification (yor-Latn)": 81.09,
+                    "MasakhaNEWSClassification (fra)": 79.38,
+                    "MassiveIntentClassification (kor-Kore)": 63.92,
+                    "MassiveIntentClassification (lav-Latn)": 58.31,
+                    "MassiveIntentClassification (isl-Latn)": 53.3,
+                    "MassiveIntentClassification (tel-Telu)": 53.96,
+                    "MassiveIntentClassification (mya-Mymr)": 49.73,
+                    "MassiveIntentClassification (nob-Latn)": 64.54,
+                    "MassiveIntentClassification (en)": 68.51,
+                    "MassiveIntentClassification (spa-Latn)": 64.01,
+                    "MassiveIntentClassification (swe-Latn)": 66.52,
+                    "MassiveIntentClassification (cmo-Hant)": 58.78,
+                    "MassiveIntentClassification (pol-Latn)": 65.09,
+                    "MassiveIntentClassification (rus-Cyrl)": 65.76,
+                    "MassiveIntentClassification (aze-Latn)": 54.68,
+                    "MassiveIntentClassification (fin-Latn)": 64.28,
+                    "MassiveIntentClassification (cmo-Hans)": 66.23,
+                    "MassiveIntentClassification (urd-Arab)": 54.6,
+                    "MassiveIntentClassification (tam-Taml)": 53.41,
+                    "MassiveIntentClassification (hin-Deva)": 60.93,
+                    "MassiveIntentClassification (deu-Latn)": 63.82,
+                    "MassiveIntentClassification (ell-Grek)": 64.34,
+                    "MassiveIntentClassification (hye-Armn)": 50.89,
+                    "MassiveIntentClassification (por-Latn)": 65.6,
+                    "MassiveIntentClassification (nld-Latn)": 65.0,
+                    "MassiveIntentClassification (fas-Arab)": 63.74,
+                    "MassiveIntentClassification (ron-Latn)": 59.76,
+                    "MassiveIntentClassification (slv-Latn)": 59.38,
+                    "MassiveIntentClassification (heb-Hebr)": 62.44,
+                    "MassiveIntentClassification (vie-Latn)": 63.39,
+                    "MassiveIntentClassification (sqi-Latn)": 57.3,
+                    "MassiveIntentClassification (khm-Khmr)": 34.88,
+                    "MassiveIntentClassification (ben-Beng)": 55.6,
+                    "MassiveIntentClassification (tgl-Latn)": 54.77,
+                    "MassiveIntentClassification (jpn-Jpan)": 67.11,
+                    "MassiveIntentClassification (kat-Geor)": 41.45,
+                    "MassiveIntentClassification (afr-Latn)": 53.69,
+                    "MassiveIntentClassification (cym-Latn)": 44.22,
+                    "MassiveIntentClassification (amh-Ethi)": 45.48,
+                    "MassiveIntentClassification (ita-Latn)": 63.89,
+                    "MassiveIntentClassification (mal-Mlym)": 57.58,
+                    "MassiveIntentClassification (tha-Thai)": 62.75,
+                    "MassiveIntentClassification (ind-Latn)": 63.51,
+                    "MassiveIntentClassification (jav-Latn)": 48.96,
+                    "MassiveIntentClassification (dan-Latn)": 63.7,
+                    "MassiveIntentClassification (ara-Arab)": 54.1,
+                    "MassiveIntentClassification (kan-Knda)": 53.45,
+                    "MassiveIntentClassification (hun-Latn)": 64.0,
+                    "MassiveIntentClassification (tur-Latn)": 64.61,
+                    "MassiveIntentClassification (msa-Latn)": 58.49,
+                    "MassiveIntentClassification (mon-Cyrl)": 49.6,
+                    "MassiveIntentClassification (swa-Latn)": 47.69,
+                    "MassiveIntentClassification (fra-Latn)": 63.37,
+                    "MassiveIntentClassification (da)": 60.16,
+                    "MassiveIntentClassification (nb)": 59.83,
+                    "MassiveIntentClassification (sv)": 61.78,
+                    "MassiveIntentClassification (pl)": 65.07,
+                    "MassiveScenarioClassification (heb-Hebr)": 67.72,
+                    "MassiveScenarioClassification (vie-Latn)": 68.91,
+                    "MassiveScenarioClassification (cmo-Hant)": 64.35,
+                    "MassiveScenarioClassification (urd-Arab)": 60.89,
+                    "MassiveScenarioClassification (isl-Latn)": 60.74,
+                    "MassiveScenarioClassification (ell-Grek)": 69.74,
+                    "MassiveScenarioClassification (mon-Cyrl)": 55.37,
+                    "MassiveScenarioClassification (swa-Latn)": 56.27,
+                    "MassiveScenarioClassification (tam-Taml)": 58.76,
+                    "MassiveScenarioClassification (hye-Armn)": 55.76,
+                    "MassiveScenarioClassification (amh-Ethi)": 52.69,
+                    "MassiveScenarioClassification (ben-Beng)": 61.85,
+                    "MassiveScenarioClassification (tel-Telu)": 59.49,
+                    "MassiveScenarioClassification (dan-Latn)": 71.18,
+                    "MassiveScenarioClassification (slv-Latn)": 65.33,
+                    "MassiveScenarioClassification (en)": 73.04,
+                    "MassiveScenarioClassification (rus-Cyrl)": 70.85,
+                    "MassiveScenarioClassification (mal-Mlym)": 63.17,
+                    "MassiveScenarioClassification (sqi-Latn)": 63.79,
+                    "MassiveScenarioClassification (ita-Latn)": 69.45,
+                    "MassiveScenarioClassification (kor-Kore)": 70.54,
+                    "MassiveScenarioClassification (cmo-Hans)": 72.25,
+                    "MassiveScenarioClassification (cym-Latn)": 51.25,
+                    "MassiveScenarioClassification (pol-Latn)": 69.83,
+                    "MassiveScenarioClassification (ind-Latn)": 69.43,
+                    "MassiveScenarioClassification (tur-Latn)": 68.12,
+                    "MassiveScenarioClassification (tgl-Latn)": 60.71,
+                    "MassiveScenarioClassification (hin-Deva)": 66.85,
+                    "MassiveScenarioClassification (spa-Latn)": 69.07,
+                    "MassiveScenarioClassification (lav-Latn)": 64.28,
+                    "MassiveScenarioClassification (mya-Mymr)": 54.03,
+                    "MassiveScenarioClassification (ara-Arab)": 61.0,
+                    "MassiveScenarioClassification (kan-Knda)": 59.36,
+                    "MassiveScenarioClassification (jav-Latn)": 56.24,
+                    "MassiveScenarioClassification (por-Latn)": 68.33,
+                    "MassiveScenarioClassification (tha-Thai)": 69.06,
+                    "MassiveScenarioClassification (aze-Latn)": 58.49,
+                    "MassiveScenarioClassification (fra-Latn)": 68.74,
+                    "MassiveScenarioClassification (ron-Latn)": 66.06,
+                    "MassiveScenarioClassification (nld-Latn)": 71.11,
+                    "MassiveScenarioClassification (fas-Arab)": 67.55,
+                    "MassiveScenarioClassification (deu-Latn)": 71.25,
+                    "MassiveScenarioClassification (nob-Latn)": 70.44,
+                    "MassiveScenarioClassification (msa-Latn)": 63.55,
+                    "MassiveScenarioClassification (afr-Latn)": 62.35,
+                    "MassiveScenarioClassification (hun-Latn)": 70.53,
+                    "MassiveScenarioClassification (swe-Latn)": 72.77,
+                    "MassiveScenarioClassification (kat-Geor)": 47.82,
+                    "MassiveScenarioClassification (jpn-Jpan)": 73.16,
+                    "MassiveScenarioClassification (khm-Khmr)": 41.14,
+                    "MassiveScenarioClassification (fin-Latn)": 68.62,
+                    "MassiveScenarioClassification (da)": 67.46,
+                    "MassiveScenarioClassification (nb)": 66.18,
+                    "MassiveScenarioClassification (sv)": 69.15,
+                    "MassiveScenarioClassification (pl)": 69.82,
+                    "MultilingualSentiment (cmn-Hans)": 70.81,
+                    "MultilingualSentiment": 68.58,
+                    "NoRecClassification (nob-Latn)": 58.43,
+                    "NoRecClassification": 62.76,
+                    "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 80.15,
+                    "NordicLangClassification": 82.29,
+                    "NorwegianParliament": 60.36,
+                    "OnlineShopping (cmn-Hans)": 90.45,
+                    "OnlineShopping": 90.81,
+                    "PAC (pol-Latn)": 70.33,
+                    "PAC": 70.37,
+                    "PolEmo2.0-IN (pol-Latn)": 77.06,
+                    "PolEmo2.0-IN": 77.06,
+                    "PolEmo2.0-OUT (pol-Latn)": 53.48,
+                    "PolEmo2.0-OUT": 53.38,
+                    "RuReviewsClassification (rus-Cyrl)": 65.28,
+                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 58.2,
+                    "RuSciBenchOECDClassification (rus-Cyrl)": 43.91,
+                    "ScalaDaClassification": 50.77,
+                    "ScalaNbClassification": 50.44,
+                    "TNews (cmn-Hans)": 48.8,
+                    "TNews": 48.38,
+                    "ToxicConversationsClassification": 66.01,
+                    "TweetSentimentExtractionClassification": 62.8,
+                    "Waimai (cmn-Hans)": 86.3,
+                    "Waimai": 85.02
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "multilingual-e5-large",
+                    "8TagsClustering": 33.88,
+                    "AlloProfClusteringP2P": 62.99,
+                    "AlloProfClusteringS2S": 32.26,
+                    "BiorxivClusteringP2P": 35.5,
+                    "BiorxivClusteringS2S": 33.3,
+                    "CLSClusteringP2P": 40.68,
+                    "CLSClusteringS2S": 38.59,
+                    "GeoreviewClusteringP2P (rus-Cyrl)": 60.51,
+                    "HALClusteringS2S": 22.44,
+                    "MLSUMClusteringP2P (rus-Cyrl)": 42.79,
+                    "MLSUMClusteringP2P": 44.04,
+                    "MLSUMClusteringS2S (rus-Cyrl)": 44.32,
+                    "MLSUMClusteringS2S": 37.65,
+                    "MasakhaNEWSClusteringP2P (amh-Ethi)": 67.16,
+                    "MasakhaNEWSClusteringP2P (eng)": 61.1,
+                    "MasakhaNEWSClusteringP2P (fra-Latn)": 41.66,
+                    "MasakhaNEWSClusteringP2P (hau-Latn)": 60.7,
+                    "MasakhaNEWSClusteringP2P (ibo-Latn)": 48.41,
+                    "MasakhaNEWSClusteringP2P (lin-Latn)": 57.69,
+                    "MasakhaNEWSClusteringP2P (lug-Latn)": 71.95,
+                    "MasakhaNEWSClusteringP2P (orm-Ethi)": 60.14,
+                    "MasakhaNEWSClusteringP2P (pcm-Latn)": 80.84,
+                    "MasakhaNEWSClusteringP2P (run-Latn)": 59.91,
+                    "MasakhaNEWSClusteringP2P (sna-Latn)": 53.3,
+                    "MasakhaNEWSClusteringP2P (som-Latn)": 34.38,
+                    "MasakhaNEWSClusteringP2P (swa-Latn)": 33.25,
+                    "MasakhaNEWSClusteringP2P (tir-Ethi)": 54.21,
+                    "MasakhaNEWSClusteringP2P (xho-Latn)": 41.12,
+                    "MasakhaNEWSClusteringP2P (yor-Latn)": 36.22,
+                    "MasakhaNEWSClusteringP2P (fra)": 40.94,
+                    "MasakhaNEWSClusteringS2S (amh-Ethi)": 47.24,
+                    "MasakhaNEWSClusteringS2S (eng)": 53.93,
+                    "MasakhaNEWSClusteringS2S (fra-Latn)": 39.84,
+                    "MasakhaNEWSClusteringS2S (hau-Latn)": 19.24,
+                    "MasakhaNEWSClusteringS2S (ibo-Latn)": 28.88,
+                    "MasakhaNEWSClusteringS2S (lin-Latn)": 42.22,
+                    "MasakhaNEWSClusteringS2S (lug-Latn)": 43.63,
+                    "MasakhaNEWSClusteringS2S (orm-Ethi)": 26.29,
+                    "MasakhaNEWSClusteringS2S (pcm-Latn)": 59.77,
+                    "MasakhaNEWSClusteringS2S (run-Latn)": 51.46,
+                    "MasakhaNEWSClusteringS2S (sna-Latn)": 48.14,
+                    "MasakhaNEWSClusteringS2S (som-Latn)": 25.14,
+                    "MasakhaNEWSClusteringS2S (swa-Latn)": 7.28,
+                    "MasakhaNEWSClusteringS2S (tir-Ethi)": 50.51,
+                    "MasakhaNEWSClusteringS2S (xho-Latn)": 30.98,
+                    "MasakhaNEWSClusteringS2S (yor-Latn)": 34.09,
+                    "MasakhaNEWSClusteringS2S (fra)": 30.56,
+                    "MedrxivClusteringP2P": 31.7,
+                    "MedrxivClusteringS2S": 29.76,
+                    "RedditClustering": 46.91,
+                    "RedditClusteringP2P": 63.0,
+                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 52.03,
+                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 45.11,
+                    "StackExchangeClustering": 58.37,
+                    "StackExchangeClusteringP2P": 32.9,
+                    "ThuNewsClusteringP2P": 58.05,
+                    "ThuNewsClusteringS2S": 55.59,
+                    "TwentyNewsgroupsClustering": 39.4
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "multilingual-e5-large",
+                    "CDSC-E (pol-Latn)": 74.47,
+                    "CDSC-E": 74.47,
+                    "Cmnli": 78.18,
+                    "Ocnli": 61.6,
+                    "OpusparcusPC (deu-Latn)": 97.27,
+                    "OpusparcusPC (en)": 98.74,
+                    "OpusparcusPC (fin-Latn)": 94.26,
+                    "OpusparcusPC (fra-Latn)": 93.68,
+                    "OpusparcusPC (rus-Cyrl)": 89.64,
+                    "OpusparcusPC (swe-Latn)": 94.98,
+                    "OpusparcusPC (fr)": 93.89,
+                    "PPC": 92.18,
+                    "PSC (pol-Latn)": 99.4,
+                    "PSC": 99.39,
+                    "PawsXPairClassification (deu-Latn)": 56.81,
+                    "PawsXPairClassification (en)": 62.97,
+                    "PawsXPairClassification (spa-Latn)": 56.85,
+                    "PawsXPairClassification (fra-Latn)": 58.68,
+                    "PawsXPairClassification (jpn-Hira)": 50.7,
+                    "PawsXPairClassification (kor-Hang)": 52.08,
+                    "PawsXPairClassification (cmn-Hans)": 56.82,
+                    "PawsXPairClassification (fr)": 58.5,
+                    "SICK-E-PL (pol-Latn)": 75.95,
+                    "SICK-E-PL": 75.96,
+                    "SprintDuplicateQuestions": 93.14,
+                    "TERRa (rus-Cyrl)": 58.4,
+                    "TwitterSemEval2015": 75.28,
+                    "TwitterURLCorpus": 85.83
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "multilingual-e5-large",
+                    "AlloprofReranking (fra-Latn)": 69.44,
+                    "AlloprofReranking": 57.37,
+                    "AskUbuntuDupQuestions": 59.24,
+                    "CMedQAv1": 68.25,
+                    "CMedQAv2": 68.56,
+                    "MIRACLReranking (rus-Cyrl)": 63.71,
+                    "MMarcoReranking (cmn-Hans)": 29.12,
+                    "MMarcoReranking": 21.34,
+                    "MindSmallReranking": 30.24,
+                    "RuBQReranking (rus-Cyrl)": 75.58,
+                    "SciDocsRR": 84.22,
+                    "StackOverflowDupQuestions": 50.14,
+                    "SyntecReranking (fra-Latn)": 85.45,
+                    "SyntecReranking": 86.9,
+                    "T2Reranking (cmn-Hans)": 66.32,
+                    "T2Reranking": 65.83
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "multilingual-e5-large",
+                    "AILACasedocs": 26.43,
+                    "AILAStatutes": 20.84,
+                    "ARCChallenge": 10.83,
+                    "AlloprofRetrieval (fra-Latn)": 39.34,
+                    "AlloprofRetrieval": 38.15,
+                    "AlphaNLI": 13.59,
+                    "ArguAna": 54.36,
+                    "ArguAna-PL (pol-Latn)": 52.99,
+                    "ArguAna-PL": 53.02,
+                    "BSARDRetrieval (fra-Latn)": 21.28,
+                    "BSARDRetrieval": 0.27,
+                    "CmedqaRetrieval (cmn-Hans)": 28.66,
+                    "CmedqaRetrieval": 28.67,
+                    "CovidRetrieval (cmn-Hans)": 75.61,
+                    "CovidRetrieval": 75.51,
+                    "DBPedia-PL": 35.82,
+                    "DuRetrieval (cmn-Hans)": 85.3,
+                    "DuRetrieval": 85.32,
+                    "EcomRetrieval (cmn-Hans)": 54.67,
+                    "EcomRetrieval": 54.75,
+                    "FiQA-PL (pol-Latn)": 32.97,
+                    "FiQA-PL": 33.0,
+                    "FiQA2018": 43.81,
+                    "GerDaLIRSmall (deu-Latn)": 15.72,
+                    "HellaSwag": 27.35,
+                    "HotpotQA-PL": 67.41,
+                    "LEMBNarrativeQARetrieval": 24.22,
+                    "LEMBNeedleRetrieval": 28.0,
+                    "LEMBPasskeyRetrieval": 38.25,
+                    "LEMBQMSumRetrieval": 24.26,
+                    "LEMBSummScreenFDRetrieval": 71.12,
+                    "LEMBWikimQARetrieval": 56.8,
+                    "LeCaRDv2 (zho-Hans)": 55.83,
+                    "LegalBenchConsumerContractsQA": 73.3,
+                    "LegalBenchCorporateLobbying": 89.72,
+                    "LegalQuAD (deu-Latn)": 43.17,
+                    "LegalSummarization": 62.1,
+                    "MIRACLRetrieval (rus-Cyrl)": 67.33,
+                    "MMarcoRetrieval (cmn-Hans)": 79.2,
+                    "MMarcoRetrieval": 79.2,
+                    "MSMARCO-PL": 33.38,
+                    "MedicalRetrieval (cmn-Hans)": 51.44,
+                    "MedicalRetrieval": 51.44,
+                    "MintakaRetrieval (ara-Arab)": 26.5,
+                    "MintakaRetrieval (deu-Latn)": 32.77,
+                    "MintakaRetrieval (spa-Latn)": 34.23,
+                    "MintakaRetrieval (fra-Latn)": 34.24,
+                    "MintakaRetrieval (hin-Deva)": 27.45,
+                    "MintakaRetrieval (ita-Latn)": 33.84,
+                    "MintakaRetrieval (jpn-Hira)": 26.45,
+                    "MintakaRetrieval (por-Latn)": 35.9,
+                    "MintakaRetrieval (fr)": 25.2,
+                    "NFCorpus": 33.95,
+                    "NFCorpus-PL (pol-Latn)": 30.21,
+                    "NFCorpus-PL": 30.24,
+                    "NQ-PL": 52.79,
+                    "PIQA": 28.82,
+                    "Quail": 4.85,
+                    "Quora-PL": 83.65,
+                    "RARbCode": 58.92,
+                    "RARbMath": 67.32,
+                    "RiaNewsRetrieval (rus-Cyrl)": 80.67,
+                    "RuBQRetrieval (rus-Cyrl)": 74.11,
+                    "SCIDOCS": 17.45,
+                    "SCIDOCS-PL (pol-Latn)": 13.82,
+                    "SCIDOCS-PL": 13.81,
+                    "SIQA": 5.36,
+                    "SciFact": 70.42,
+                    "SciFact-PL (pol-Latn)": 65.66,
+                    "SciFact-PL": 65.66,
+                    "SpartQA": 5.64,
+                    "SyntecRetrieval (fra-Latn)": 82.39,
+                    "SyntecRetrieval": 81.07,
+                    "T2Retrieval (cmn-Hans)": 76.07,
+                    "T2Retrieval": 76.11,
+                    "TRECCOVID": 71.21,
+                    "TRECCOVID-PL (pol-Latn)": 69.9,
+                    "TRECCOVID-PL": 70.03,
+                    "TempReasonL1": 1.14,
+                    "TempReasonL2Fact": 42.97,
+                    "TempReasonL2Pure": 2.05,
+                    "TempReasonL3Fact": 38.22,
+                    "TempReasonL3Pure": 8.31,
+                    "Touche2020": 23.13,
+                    "VideoRetrieval (cmn-Hans)": 58.28,
+                    "VideoRetrieval": 58.25,
+                    "WinoGrande": 54.99,
+                    "XPQARetrieval (ara-Arab_ara-Arab)": 43.69,
+                    "XPQARetrieval (eng-Latn_ara-Arab)": 30.86,
+                    "XPQARetrieval (ara-Arab_eng-Latn)": 39.11,
+                    "XPQARetrieval (deu-Latn_deu-Latn)": 76.83,
+                    "XPQARetrieval (eng-Latn_deu-Latn)": 42.87,
+                    "XPQARetrieval (deu-Latn_eng-Latn)": 68.25,
+                    "XPQARetrieval (spa-Latn_spa-Latn)": 61.77,
+                    "XPQARetrieval (eng-Latn_spa-Latn)": 37.55,
+                    "XPQARetrieval (spa-Latn_eng-Latn)": 52.86,
+                    "XPQARetrieval (fra-Latn_fra-Latn)": 61.38,
+                    "XPQARetrieval (eng-Latn_fra-Latn)": 39.12,
+                    "XPQARetrieval (fra-Latn_eng-Latn)": 57.93,
+                    "XPQARetrieval (hin-Deva_hin-Deva)": 71.09,
+                    "XPQARetrieval (eng-Latn_hin-Deva)": 32.39,
+                    "XPQARetrieval (hin-Deva_eng-Latn)": 68.31,
+                    "XPQARetrieval (ita-Latn_ita-Latn)": 74.32,
+                    "XPQARetrieval (eng-Latn_ita-Latn)": 37.95,
+                    "XPQARetrieval (ita-Latn_eng-Latn)": 64.54,
+                    "XPQARetrieval (jpn-Hira_jpn-Hira)": 74.11,
+                    "XPQARetrieval (eng-Latn_jpn-Hira)": 38.31,
+                    "XPQARetrieval (jpn-Hira_eng-Latn)": 65.42,
+                    "XPQARetrieval (kor-Hang_kor-Hang)": 35.72,
+                    "XPQARetrieval (eng-Latn_kor-Hang)": 31.09,
+                    "XPQARetrieval (kor-Hang_eng-Latn)": 34.06,
+                    "XPQARetrieval (pol-Latn_pol-Latn)": 51.01,
+                    "XPQARetrieval (eng-Latn_pol-Latn)": 30.49,
+                    "XPQARetrieval (pol-Latn_eng-Latn)": 44.66,
+                    "XPQARetrieval (por-Latn_por-Latn)": 41.1,
+                    "XPQARetrieval (eng-Latn_por-Latn)": 22.03,
+                    "XPQARetrieval (por-Latn_eng-Latn)": 35.15,
+                    "XPQARetrieval (tam-Taml_tam-Taml)": 39.51,
+                    "XPQARetrieval (eng-Latn_tam-Taml)": 17.33,
+                    "XPQARetrieval (tam-Taml_eng-Latn)": 33.67,
+                    "XPQARetrieval (cmn-Hans_cmn-Hans)": 66.27,
+                    "XPQARetrieval (eng-Latn_cmn-Hans)": 26.24,
+                    "XPQARetrieval (cmn-Hans_eng-Latn)": 55.15,
+                    "XPQARetrieval (fr)": 66.15
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "multilingual-e5-large",
+                    "AFQMC (cmn-Hans)": 33.01,
+                    "AFQMC": 33.02,
+                    "ATEC (cmn-Hans)": 39.8,
+                    "ATEC": 39.81,
+                    "BIOSSES": 82.49,
+                    "BQ (cmn-Hans)": 46.44,
+                    "BQ": 46.44,
+                    "CDSC-R (pol-Latn)": 91.0,
+                    "CDSC-R": 91.0,
+                    "LCQMC (cmn-Hans)": 75.95,
+                    "LCQMC": 75.95,
+                    "PAWSX (cmn-Hans)": 14.63,
+                    "PAWSX": 14.63,
+                    "QBQTC": 29.77,
+                    "RUParaPhraserSTS (rus-Cyrl)": 71.82,
+                    "RuSTSBenchmarkSTS (rus-Cyrl)": 83.15,
+                    "SICK-R": 80.23,
+                    "SICK-R-PL (pol-Latn)": 75.08,
+                    "SICK-R-PL": 75.08,
+                    "SICKFr (fra-Latn)": 78.81,
+                    "SICKFr": 78.78,
+                    "STS12": 80.02,
+                    "STS13": 81.55,
+                    "STS14": 77.72,
+                    "STS15": 89.31,
+                    "STS16": 85.79,
+                    "STS17 (en-en)": 88.12,
+                    "STS17 (spa-Latn)": 86.71,
+                    "STS17 (spa-Latn_eng-Latn)": 80.74,
+                    "STS17 (eng-Latn_ara-Arab)": 75.03,
+                    "STS17 (fra-Latn_eng-Latn)": 85.62,
+                    "STS17 (kor-Hang)": 82.27,
+                    "STS17 (ita-Latn_eng-Latn)": 84.52,
+                    "STS17 (ara-Arab)": 77.83,
+                    "STS17 (eng-Latn_tur-Latn)": 71.22,
+                    "STS17 (eng-Latn_deu-Latn)": 86.15,
+                    "STS17 (nld-Latn_eng-Latn)": 85.29,
+                    "STS22 (deu-Latn)": 56.58,
+                    "STS22 (deu-Latn_fra-Latn)": 67.96,
+                    "STS22 (ara-Arab)": 56.95,
+                    "STS22 (rus-Cyrl)": 59.89,
+                    "STS22 (deu-Latn_eng-Latn)": 56.59,
+                    "STS22 (en)": 63.66,
+                    "STS22 (tur-Latn)": 63.56,
+                    "STS22 (pol-Latn_eng-Latn)": 65.54,
+                    "STS22 (cmn-Hans)": 66.82,
+                    "STS22 (ita-Latn)": 76.99,
+                    "STS22 (pol-Latn)": 34.65,
+                    "STS22 (spa-Latn_ita-Latn)": 68.92,
+                    "STS22 (fra-Latn)": 76.77,
+                    "STS22 (cmn-Hans_eng-Latn)": 65.95,
+                    "STS22 (fra-Latn_pol-Latn)": 50.71,
+                    "STS22 (spa-Latn_eng-Latn)": 72.51,
+                    "STS22 (spa-Latn)": 64.6,
+                    "STS22 (deu-Latn_pol-Latn)": 49.58,
+                    "STS22 (zh)": 65.64,
+                    "STS22 (pl)": 34.66,
+                    "STSB (cmn-Hans)": 81.08,
+                    "STSB": 81.08,
+                    "STSBenchmark": 87.29,
+                    "STSBenchmarkMultilingualSTS (cmn-Hans)": 81.22,
+                    "STSBenchmarkMultilingualSTS (en)": 87.29,
+                    "STSBenchmarkMultilingualSTS (pol-Latn)": 81.06,
+                    "STSBenchmarkMultilingualSTS (nld-Latn)": 81.63,
+                    "STSBenchmarkMultilingualSTS (ita-Latn)": 81.75,
+                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 83.05,
+                    "STSBenchmarkMultilingualSTS (por-Latn)": 73.31,
+                    "STSBenchmarkMultilingualSTS (spa-Latn)": 83.81,
+                    "STSBenchmarkMultilingualSTS (fra-Latn)": 83.28,
+                    "STSBenchmarkMultilingualSTS (deu-Latn)": 84.27,
+                    "STSBenchmarkMultilingualSTS (fr)": 82.53
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "multilingual-e5-large",
+                    "SummEval": 29.65,
+                    "SummEvalFr (fra-Latn)": 30.92,
+                    "SummEvalFr": 30.92
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "multilingual-e5-large",
+                    "CEDRClassification (rus-Cyrl)": 44.84,
+                    "SensitiveTopicsClassification (rus-Cyrl)": 27.17
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "multilingual-e5-large"
+                }
+            ]
+        }
+    },
+    "st-polish-paraphrase-from-distilroberta": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "st-polish-paraphrase-from-distilroberta"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "st-polish-paraphrase-from-distilroberta",
+                    "AllegroReviews": 34.5,
+                    "CBD": 70.27,
+                    "MassiveIntentClassification (pl)": 64.81,
+                    "MassiveScenarioClassification (pl)": 70.01,
+                    "PAC": 64.6,
+                    "PolEmo2.0-IN": 67.06,
+                    "PolEmo2.0-OUT": 38.58
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "st-polish-paraphrase-from-distilroberta",
+                    "8TagsClustering": 31.68
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "st-polish-paraphrase-from-distilroberta",
+                    "CDSC-E": 75.99,
+                    "PPC": 93.29,
+                    "PSC": 99.1,
+                    "SICK-E-PL": 79.63
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "st-polish-paraphrase-from-distilroberta"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "st-polish-paraphrase-from-distilroberta",
+                    "ArguAna-PL": 49.42,
+                    "DBPedia-PL": 19.82,
+                    "FiQA-PL": 19.58,
+                    "HotpotQA-PL": 23.47,
+                    "MSMARCO-PL": 16.51,
+                    "NFCorpus-PL": 22.49,
+                    "NQ-PL": 19.83,
+                    "Quora-PL": 81.17,
+                    "SCIDOCS-PL": 12.15,
+                    "SciFact-PL": 49.49,
+                    "TRECCOVID-PL": 38.97
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "st-polish-paraphrase-from-distilroberta",
+                    "CDSC-R": 89.62,
+                    "SICK-R-PL": 76.37,
+                    "STS22 (pl)": 40.36
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "st-polish-paraphrase-from-distilroberta"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "st-polish-paraphrase-from-distilroberta"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "st-polish-paraphrase-from-distilroberta"
+                }
+            ]
+        }
+    },
+    "sbert_large_nlu_ru": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "sbert_large_nlu_ru"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "sbert_large_nlu_ru",
+                    "GeoreviewClassification (rus-Cyrl)": 39.97,
+                    "HeadlineClassification (rus-Cyrl)": 79.26,
+                    "InappropriatenessClassification (rus-Cyrl)": 62.52,
+                    "KinopoiskClassification (rus-Cyrl)": 49.51,
+                    "MassiveIntentClassification (rus-Cyrl)": 61.09,
+                    "MassiveScenarioClassification (rus-Cyrl)": 67.6,
+                    "RuReviewsClassification (rus-Cyrl)": 58.27,
+                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 53.9,
+                    "RuSciBenchOECDClassification (rus-Cyrl)": 43.04
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "sbert_large_nlu_ru",
+                    "GeoreviewClusteringP2P (rus-Cyrl)": 59.02,
+                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 50.4,
+                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 46.41
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "sbert_large_nlu_ru",
+                    "TERRa (rus-Cyrl)": 50.17
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "sbert_large_nlu_ru",
+                    "MIRACLReranking (rus-Cyrl)": 18.8,
+                    "RuBQReranking (rus-Cyrl)": 46.81
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "sbert_large_nlu_ru",
+                    "MIRACLRetrieval (rus-Cyrl)": 1.98,
+                    "RiaNewsRetrieval (rus-Cyrl)": 11.11,
+                    "RuBQRetrieval (rus-Cyrl)": 12.45
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "sbert_large_nlu_ru",
+                    "RUParaPhraserSTS (rus-Cyrl)": 62.06,
+                    "RuSTSBenchmarkSTS (rus-Cyrl)": 58.82,
+                    "STS22 (rus-Cyrl)": 50.75
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "sbert_large_nlu_ru"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "sbert_large_nlu_ru",
+                    "CEDRClassification (rus-Cyrl)": 35.84,
+                    "SensitiveTopicsClassification (rus-Cyrl)": 27.97
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "sbert_large_nlu_ru"
+                }
+            ]
+        }
+    },
+    "LASER2": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "LASER2",
+                    "BUCC (de-en)": 99.21,
+                    "BUCC (fr-en)": 98.39,
+                    "BUCC (ru-en)": 97.62,
+                    "BUCC (zh-en)": 97.7,
+                    "Tatoeba (afr-eng)": 92.59,
+                    "Tatoeba (amh-eng)": 80.82,
+                    "Tatoeba (ang-eng)": 25.22,
+                    "Tatoeba (ara-eng)": 90.14,
+                    "Tatoeba (arq-eng)": 26.63,
+                    "Tatoeba (arz-eng)": 66.16,
+                    "Tatoeba (ast-eng)": 76.35,
+                    "Tatoeba (awa-eng)": 33.74,
+                    "Tatoeba (aze-eng)": 82.41,
+                    "Tatoeba (bel-eng)": 79.54,
+                    "Tatoeba (ben-eng)": 89.43,
+                    "Tatoeba (ber-eng)": 77.63,
+                    "Tatoeba (bos-eng)": 95.86,
+                    "Tatoeba (bre-eng)": 31.2,
+                    "Tatoeba (bul-eng)": 93.57,
+                    "Tatoeba (cat-eng)": 95.8,
+                    "Tatoeba (cbk-eng)": 77.17,
+                    "Tatoeba (ceb-eng)": 9.93,
+                    "Tatoeba (ces-eng)": 95.52,
+                    "Tatoeba (cha-eng)": 14.86,
+                    "Tatoeba (cmn-eng)": 85.62,
+                    "Tatoeba (cor-eng)": 4.45,
+                    "Tatoeba (csb-eng)": 27.03,
+                    "Tatoeba (cym-eng)": 5.85,
+                    "Tatoeba (dan-eng)": 95.22,
+                    "Tatoeba (deu-eng)": 99.07,
+                    "Tatoeba (dsb-eng)": 42.34,
+                    "Tatoeba (dtp-eng)": 7.39,
+                    "Tatoeba (ell-eng)": 96.2,
+                    "Tatoeba (epo-eng)": 96.61,
+                    "Tatoeba (est-eng)": 96.43,
+                    "Tatoeba (eus-eng)": 93.32,
+                    "Tatoeba (fao-eng)": 57.04,
+                    "Tatoeba (fin-eng)": 96.98,
+                    "Tatoeba (fra-eng)": 94.28,
+                    "Tatoeba (fry-eng)": 42.07,
+                    "Tatoeba (gla-eng)": 1.52,
+                    "Tatoeba (gle-eng)": 4.2,
+                    "Tatoeba (glg-eng)": 96.14,
+                    "Tatoeba (gsw-eng)": 27.52,
+                    "Tatoeba (heb-eng)": 0.0,
+                    "Tatoeba (hin-eng)": 95.32,
+                    "Tatoeba (hrv-eng)": 96.72,
+                    "Tatoeba (hsb-eng)": 45.75,
+                    "Tatoeba (hun-eng)": 95.2,
+                    "Tatoeba (hye-eng)": 88.72,
+                    "Tatoeba (ido-eng)": 80.86,
+                    "Tatoeba (ile-eng)": 87.88,
+                    "Tatoeba (ina-eng)": 93.93,
+                    "Tatoeba (ind-eng)": 92.98,
+                    "Tatoeba (isl-eng)": 94.32,
+                    "Tatoeba (ita-eng)": 94.32,
+                    "Tatoeba (jav-eng)": 9.95,
+                    "Tatoeba (jpn-eng)": 93.78,
+                    "Tatoeba (kab-eng)": 65.88,
+                    "Tatoeba (kat-eng)": 81.16,
+                    "Tatoeba (kaz-eng)": 53.3,
+                    "Tatoeba (khm-eng)": 74.19,
+                    "Tatoeba (kor-eng)": 87.97,
+                    "Tatoeba (kur-eng)": 19.09,
+                    "Tatoeba (kzj-eng)": 4.46,
+                    "Tatoeba (lat-eng)": 64.81,
+                    "Tatoeba (lfn-eng)": 63.39,
+                    "Tatoeba (lit-eng)": 96.2,
+                    "Tatoeba (lvs-eng)": 95.33,
+                    "Tatoeba (mal-eng)": 98.16,
+                    "Tatoeba (mar-eng)": 92.93,
+                    "Tatoeba (max-eng)": 36.96,
+                    "Tatoeba (mhr-eng)": 6.86,
+                    "Tatoeba (mkd-eng)": 93.63,
+                    "Tatoeba (mon-eng)": 3.42,
+                    "Tatoeba (nds-eng)": 77.13,
+                    "Tatoeba (nld-eng)": 95.35,
+                    "Tatoeba (nno-eng)": 72.75,
+                    "Tatoeba (nob-eng)": 95.77,
+                    "Tatoeba (nov-eng)": 60.02,
+                    "Tatoeba (oci-eng)": 58.13,
+                    "Tatoeba (orv-eng)": 23.24,
+                    "Tatoeba (pam-eng)": 3.24,
+                    "Tatoeba (pes-eng)": 93.13,
+                    "Tatoeba (pms-eng)": 36.23,
+                    "Tatoeba (pol-eng)": 97.32,
+                    "Tatoeba (por-eng)": 94.54,
+                    "Tatoeba (ron-eng)": 96.52,
+                    "Tatoeba (rus-eng)": 92.58,
+                    "Tatoeba (slk-eng)": 95.82,
+                    "Tatoeba (slv-eng)": 95.4,
+                    "Tatoeba (spa-eng)": 97.33,
+                    "Tatoeba (sqi-eng)": 97.22,
+                    "Tatoeba (srp-eng)": 93.64,
+                    "Tatoeba (swe-eng)": 95.31,
+                    "Tatoeba (swg-eng)": 33.1,
+                    "Tatoeba (swh-eng)": 55.66,
+                    "Tatoeba (tam-eng)": 87.32,
+                    "Tatoeba (tat-eng)": 34.74,
+                    "Tatoeba (tel-eng)": 96.72,
+                    "Tatoeba (tgl-eng)": 63.19,
+                    "Tatoeba (tha-eng)": 96.38,
+                    "Tatoeba (tuk-eng)": 16.35,
+                    "Tatoeba (tur-eng)": 98.03,
+                    "Tatoeba (tzl-eng)": 36.56,
+                    "Tatoeba (uig-eng)": 56.49,
+                    "Tatoeba (ukr-eng)": 93.52,
+                    "Tatoeba (urd-eng)": 84.23,
+                    "Tatoeba (uzb-eng)": 23.2,
+                    "Tatoeba (vie-eng)": 96.73,
+                    "Tatoeba (war-eng)": 8.25,
+                    "Tatoeba (wuu-eng)": 75.09,
+                    "Tatoeba (xho-eng)": 4.68,
+                    "Tatoeba (yid-eng)": 2.49,
+                    "Tatoeba (yue-eng)": 87.75,
+                    "Tatoeba (zsm-eng)": 95.41
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "LASER2",
+                    "AmazonCounterfactualClassification (de)": 67.82,
+                    "AmazonCounterfactualClassification (en)": 76.84,
+                    "AmazonCounterfactualClassification (en-ext)": 76.17,
+                    "AmazonCounterfactualClassification (ja)": 68.76,
+                    "AmazonPolarityClassification": 61.01,
+                    "AmazonReviewsClassification (de)": 31.07,
+                    "AmazonReviewsClassification (en)": 28.71,
+                    "AmazonReviewsClassification (es)": 32.72,
+                    "AmazonReviewsClassification (fr)": 31.12,
+                    "AmazonReviewsClassification (ja)": 28.94,
+                    "AmazonReviewsClassification (zh)": 30.89,
+                    "Banking77Classification": 57.76,
+                    "EmotionClassification": 24.83,
+                    "ImdbClassification": 57.58,
+                    "MTOPDomainClassification (de)": 74.08,
+                    "MTOPDomainClassification (en)": 75.36,
+                    "MTOPDomainClassification (es)": 73.47,
+                    "MTOPDomainClassification (fr)": 72.26,
+                    "MTOPDomainClassification (hi)": 72.95,
+                    "MTOPDomainClassification (th)": 72.68,
+                    "MTOPIntentClassification (de)": 51.62,
+                    "MTOPIntentClassification (en)": 49.47,
+                    "MTOPIntentClassification (es)": 52.75,
+                    "MTOPIntentClassification (fr)": 50.12,
+                    "MTOPIntentClassification (hi)": 45.55,
+                    "MTOPIntentClassification (th)": 50.07,
+                    "MasakhaNEWSClassification (fra)": 65.9,
+                    "MassiveIntentClassification (af)": 38.01,
+                    "MassiveIntentClassification (am)": 12.7,
+                    "MassiveIntentClassification (ar)": 37.16,
+                    "MassiveIntentClassification (az)": 19.98,
+                    "MassiveIntentClassification (bn)": 42.51,
+                    "MassiveIntentClassification (cy)": 17.33,
+                    "MassiveIntentClassification (da)": 45.61,
+                    "MassiveIntentClassification (de)": 44.79,
+                    "MassiveIntentClassification (el)": 46.71,
+                    "MassiveIntentClassification (en)": 47.91,
+                    "MassiveIntentClassification (es)": 45.44,
+                    "MassiveIntentClassification (fa)": 45.01,
+                    "MassiveIntentClassification (fi)": 45.94,
+                    "MassiveIntentClassification (fr)": 46.13,
+                    "MassiveIntentClassification (he)": 42.55,
+                    "MassiveIntentClassification (hi)": 40.2,
+                    "MassiveIntentClassification (hu)": 42.77,
+                    "MassiveIntentClassification (hy)": 28.07,
+                    "MassiveIntentClassification (id)": 45.81,
+                    "MassiveIntentClassification (is)": 39.86,
+                    "MassiveIntentClassification (it)": 48.25,
+                    "MassiveIntentClassification (ja)": 45.3,
+                    "MassiveIntentClassification (jv)": 24.3,
+                    "MassiveIntentClassification (ka)": 22.7,
+                    "MassiveIntentClassification (km)": 22.48,
+                    "MassiveIntentClassification (kn)": 4.32,
+                    "MassiveIntentClassification (ko)": 44.26,
+                    "MassiveIntentClassification (lv)": 39.75,
+                    "MassiveIntentClassification (ml)": 41.33,
+                    "MassiveIntentClassification (mn)": 16.2,
+                    "MassiveIntentClassification (ms)": 43.23,
+                    "MassiveIntentClassification (my)": 25.37,
+                    "MassiveIntentClassification (nb)": 37.74,
+                    "MassiveIntentClassification (nl)": 45.0,
+                    "MassiveIntentClassification (pl)": 44.99,
+                    "MassiveIntentClassification (pt)": 48.55,
+                    "MassiveIntentClassification (ro)": 44.3,
+                    "MassiveIntentClassification (ru)": 44.29,
+                    "MassiveIntentClassification (sl)": 44.72,
+                    "MassiveIntentClassification (sq)": 46.12,
+                    "MassiveIntentClassification (sv)": 45.95,
+                    "MassiveIntentClassification (sw)": 31.89,
+                    "MassiveIntentClassification (ta)": 29.63,
+                    "MassiveIntentClassification (te)": 36.03,
+                    "MassiveIntentClassification (th)": 43.39,
+                    "MassiveIntentClassification (tl)": 29.73,
+                    "MassiveIntentClassification (tr)": 43.93,
+                    "MassiveIntentClassification (ur)": 26.11,
+                    "MassiveIntentClassification (vi)": 44.33,
+                    "MassiveIntentClassification (zh-CN)": 40.62,
+                    "MassiveIntentClassification (zh-TW)": 32.93,
+                    "MassiveScenarioClassification (af)": 47.1,
+                    "MassiveScenarioClassification (am)": 17.7,
+                    "MassiveScenarioClassification (ar)": 45.21,
+                    "MassiveScenarioClassification (az)": 28.21,
+                    "MassiveScenarioClassification (bn)": 50.52,
+                    "MassiveScenarioClassification (cy)": 22.58,
+                    "MassiveScenarioClassification (da)": 54.87,
+                    "MassiveScenarioClassification (de)": 54.34,
+                    "MassiveScenarioClassification (el)": 55.47,
+                    "MassiveScenarioClassification (en)": 55.92,
+                    "MassiveScenarioClassification (es)": 52.77,
+                    "MassiveScenarioClassification (fa)": 52.5,
+                    "MassiveScenarioClassification (fi)": 52.63,
+                    "MassiveScenarioClassification (fr)": 54.32,
+                    "MassiveScenarioClassification (he)": 52.41,
+                    "MassiveScenarioClassification (hi)": 47.37,
+                    "MassiveScenarioClassification (hu)": 53.43,
+                    "MassiveScenarioClassification (hy)": 33.57,
+                    "MassiveScenarioClassification (id)": 54.38,
+                    "MassiveScenarioClassification (is)": 49.78,
+                    "MassiveScenarioClassification (it)": 54.84,
+                    "MassiveScenarioClassification (ja)": 54.12,
+                    "MassiveScenarioClassification (jv)": 32.71,
+                    "MassiveScenarioClassification (ka)": 26.92,
+                    "MassiveScenarioClassification (km)": 27.23,
+                    "MassiveScenarioClassification (kn)": 10.06,
+                    "MassiveScenarioClassification (ko)": 52.01,
+                    "MassiveScenarioClassification (lv)": 44.82,
+                    "MassiveScenarioClassification (ml)": 49.1,
+                    "MassiveScenarioClassification (mn)": 21.51,
+                    "MassiveScenarioClassification (ms)": 53.6,
+                    "MassiveScenarioClassification (my)": 29.72,
+                    "MassiveScenarioClassification (nb)": 43.9,
+                    "MassiveScenarioClassification (nl)": 53.33,
+                    "MassiveScenarioClassification (pl)": 52.92,
+                    "MassiveScenarioClassification (pt)": 53.41,
+                    "MassiveScenarioClassification (ro)": 50.48,
+                    "MassiveScenarioClassification (ru)": 51.84,
+                    "MassiveScenarioClassification (sl)": 51.29,
+                    "MassiveScenarioClassification (sq)": 55.65,
+                    "MassiveScenarioClassification (sv)": 54.64,
+                    "MassiveScenarioClassification (sw)": 42.04,
+                    "MassiveScenarioClassification (ta)": 36.72,
+                    "MassiveScenarioClassification (te)": 42.08,
+                    "MassiveScenarioClassification (th)": 52.15,
+                    "MassiveScenarioClassification (tl)": 37.34,
+                    "MassiveScenarioClassification (tr)": 52.56,
+                    "MassiveScenarioClassification (ur)": 32.6,
+                    "MassiveScenarioClassification (vi)": 50.97,
+                    "MassiveScenarioClassification (zh-CN)": 50.22,
+                    "MassiveScenarioClassification (zh-TW)": 42.32,
+                    "ToxicConversationsClassification": 54.05,
+                    "TweetSentimentExtractionClassification": 48.73
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "LASER2",
+                    "AlloProfClusteringP2P": 48.45,
+                    "AlloProfClusteringS2S": 25.81,
+                    "ArxivClusteringP2P": 17.77,
+                    "ArxivClusteringS2S": 12.39,
+                    "BiorxivClusteringP2P": 12.4,
+                    "BiorxivClusteringS2S": 8.83,
+                    "HALClusteringS2S": 11.52,
+                    "MLSUMClusteringP2P": 34.53,
+                    "MLSUMClusteringS2S": 27.35,
+                    "MasakhaNEWSClusteringP2P (fra)": 32.04,
+                    "MasakhaNEWSClusteringS2S (fra)": 29.77,
+                    "MedrxivClusteringP2P": 17.91,
+                    "MedrxivClusteringS2S": 16.63,
+                    "RedditClustering": 9.96,
+                    "RedditClusteringP2P": 26.42,
+                    "StackExchangeClustering": 15.79,
+                    "StackExchangeClusteringP2P": 18.63,
+                    "TwentyNewsgroupsClustering": 11.38
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "LASER2",
+                    "OpusparcusPC (fr)": 93.77,
+                    "PawsXPairClassification (fr)": 69.53,
+                    "SprintDuplicateQuestions": 65.54,
+                    "TwitterSemEval2015": 59.57,
+                    "TwitterURLCorpus": 81.47
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "LASER2",
+                    "AlloprofReranking": 35.29,
+                    "AskUbuntuDupQuestions": 48.99,
+                    "MindSmallReranking": 24.79,
+                    "SciDocsRR": 54.99,
+                    "StackOverflowDupQuestions": 36.98,
+                    "SyntecReranking": 55.93
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "LASER2",
+                    "AlloprofRetrieval": 3.1,
+                    "ArguAna": 12.86,
+                    "BSARDRetrieval": 0.36,
+                    "CQADupstackRetrieval": 4.12,
+                    "ClimateFEVER": 0.36,
+                    "DBPedia": 1.53,
+                    "FEVER": 0.77,
+                    "FiQA2018": 1.73,
+                    "HotpotQA": 5.5,
+                    "MSMARCO": 1.09,
+                    "MintakaRetrieval (fr)": 6.31,
+                    "NFCorpus": 2.44,
+                    "NQ": 0.64,
+                    "QuoraRetrieval": 71.14,
+                    "SCIDOCS": 0.78,
+                    "SciFact": 4.04,
+                    "SyntecRetrieval": 28.58,
+                    "TRECCOVID": 10.97,
+                    "Touche2020": 1.06,
+                    "XPQARetrieval (fr)": 42.59
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "LASER2",
+                    "BIOSSES": 62.01,
+                    "SICK-R": 62.86,
+                    "SICKFr": 64.95,
+                    "STS12": 62.6,
+                    "STS13": 59.62,
+                    "STS14": 57.03,
+                    "STS15": 71.57,
+                    "STS16": 70.75,
+                    "STS17 (ar-ar)": 67.47,
+                    "STS17 (en-ar)": 65.05,
+                    "STS17 (en-de)": 66.66,
+                    "STS17 (en-en)": 76.73,
+                    "STS17 (en-tr)": 70.05,
+                    "STS17 (es-en)": 55.3,
+                    "STS17 (es-es)": 79.67,
+                    "STS17 (fr-en)": 70.82,
+                    "STS17 (it-en)": 70.98,
+                    "STS17 (ko-ko)": 70.52,
+                    "STS17 (nl-en)": 68.12,
+                    "STS22 (ar)": 42.57,
+                    "STS22 (de)": 25.69,
+                    "STS22 (de-en)": 32.35,
+                    "STS22 (de-fr)": 37.41,
+                    "STS22 (de-pl)": 15.67,
+                    "STS22 (en)": 39.76,
+                    "STS22 (es)": 54.92,
+                    "STS22 (es-en)": 54.34,
+                    "STS22 (es-it)": 42.21,
+                    "STS22 (fr)": 58.61,
+                    "STS22 (fr-pl)": 39.44,
+                    "STS22 (it)": 60.31,
+                    "STS22 (pl)": 18.34,
+                    "STS22 (pl-en)": 53.63,
+                    "STS22 (ru)": 39.24,
+                    "STS22 (tr)": 36.97,
+                    "STS22 (zh)": 49.41,
+                    "STS22 (zh-en)": 46.19,
+                    "STSBenchmark": 69.77,
+                    "STSBenchmarkMultilingualSTS (fr)": 69.82
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "LASER2",
+                    "SummEval": 26.8,
+                    "SummEvalFr": 31.56
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "LASER2"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "LASER2"
+                }
+            ]
+        }
+    },
+    "gbert-large": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "gbert-large"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "gbert-large"
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "gbert-large",
+                    "BlurbsClusteringP2P": 39.3,
+                    "BlurbsClusteringS2S": 13.38,
+                    "TenKGnadClusteringP2P": 41.69,
+                    "TenKGnadClusteringS2S": 34.97
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "gbert-large"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "gbert-large"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "gbert-large"
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "gbert-large"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "gbert-large"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "gbert-large"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "gbert-large"
+                }
+            ]
+        }
+    },
+    "voyage-2": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "voyage-2"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "voyage-2",
+                    "AmazonReviewsClassification (fr)": 37.26,
+                    "MTOPDomainClassification (fr)": 79.79,
+                    "MTOPIntentClassification (fr)": 45.62,
+                    "MasakhaNEWSClassification (fra)": 80.19,
+                    "MassiveIntentClassification (fr)": 53.7,
+                    "MassiveScenarioClassification (fr)": 62.46
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "voyage-2",
+                    "AlloProfClusteringP2P": 57.96,
+                    "AlloProfClusteringS2S": 41.65,
+                    "HALClusteringS2S": 24.84,
+                    "MLSUMClusteringP2P": 45.08,
+                    "MLSUMClusteringS2S": 38.77,
+                    "MasakhaNEWSClusteringP2P (fra)": 48.54,
+                    "MasakhaNEWSClusteringS2S (fra)": 36.33
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "voyage-2",
+                    "OpusparcusPC (fr)": 89.76,
+                    "PawsXPairClassification (fr)": 58.96
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "voyage-2",
+                    "AlloprofReranking": 63.54,
+                    "SyntecReranking": 82.65
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "voyage-2",
+                    "AlloprofRetrieval": 45.5,
+                    "BSARDRetrieval": 0.15,
+                    "MintakaRetrieval (fr)": 15.51,
+                    "SyntecRetrieval": 75.83,
+                    "XPQARetrieval (fr)": 67.07
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "voyage-2",
+                    "SICKFr": 68.51,
+                    "STS22 (fr)": 70.51,
+                    "STSBenchmarkMultilingualSTS (fr)": 76.43
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "voyage-2",
+                    "SummEvalFr": 30.88
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "voyage-2"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "voyage-2"
+                }
+            ]
+        }
+    },
+    "e5-mistral-7b-instruct": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "e5-mistral-7b-instruct",
+                    "Tatoeba (rus-Cyrl_eng-Latn)": 93.75
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "e5-mistral-7b-instruct",
+                    "AmazonReviewsClassification (fr)": 36.71,
+                    "GeoreviewClassification (rus-Cyrl)": 50.25,
+                    "HeadlineClassification (rus-Cyrl)": 85.68,
+                    "InappropriatenessClassification (rus-Cyrl)": 67.19,
+                    "KinopoiskClassification (rus-Cyrl)": 65.49,
+                    "MTOPDomainClassification (fr)": 74.8,
+                    "MTOPIntentClassification (fr)": 53.97,
+                    "MasakhaNEWSClassification (fra)": 80.59,
+                    "MassiveIntentClassification (rus-Cyrl)": 76.08,
+                    "MassiveIntentClassification (fr)": 46.39,
+                    "MassiveScenarioClassification (rus-Cyrl)": 79.61,
+                    "MassiveScenarioClassification (fr)": 53.86,
+                    "RuReviewsClassification (rus-Cyrl)": 67.68,
+                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 64.59,
+                    "RuSciBenchOECDClassification (rus-Cyrl)": 51.13
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "e5-mistral-7b-instruct",
+                    "AlloProfClusteringP2P": 61.06,
+                    "AlloProfClusteringS2S": 28.12,
+                    "GeoreviewClusteringP2P (rus-Cyrl)": 65.68,
+                    "HALClusteringS2S": 19.69,
+                    "MLSUMClusteringP2P": 45.59,
+                    "MLSUMClusteringS2S": 32.0,
+                    "MasakhaNEWSClusteringP2P (fra)": 52.47,
+                    "MasakhaNEWSClusteringS2S (fra)": 49.2,
+                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 61.55,
+                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 52.72
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "e5-mistral-7b-instruct",
+                    "OpusparcusPC (rus-Cyrl)": 91.44,
+                    "OpusparcusPC (fr)": 88.5,
+                    "PawsXPairClassification (fr)": 63.65,
+                    "TERRa (rus-Cyrl)": 59.38
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "e5-mistral-7b-instruct",
+                    "AlloprofReranking": 47.36,
+                    "RuBQReranking (rus-Cyrl)": 74.61,
+                    "SyntecReranking": 77.05
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "e5-mistral-7b-instruct",
+                    "AILACasedocs": 38.76,
+                    "AILAStatutes": 38.07,
+                    "ARCChallenge": 17.81,
+                    "AlloprofRetrieval": 16.46,
+                    "AlphaNLI": 26.12,
+                    "BSARDRetrieval": 0.0,
+                    "BrightRetrieval (sustainable_living)": 18.51,
+                    "BrightRetrieval (economics)": 15.49,
+                    "BrightRetrieval (theoremqa_theorems)": 23.78,
+                    "BrightRetrieval (aops)": 7.1,
+                    "BrightRetrieval (theoremqa_questions)": 23.94,
+                    "BrightRetrieval (stackoverflow)": 9.83,
+                    "BrightRetrieval (psychology)": 15.79,
+                    "BrightRetrieval (pony)": 4.81,
+                    "BrightRetrieval (leetcode)": 28.72,
+                    "BrightRetrieval (biology)": 18.84,
+                    "BrightRetrieval (earth_science)": 25.96,
+                    "BrightRetrieval (robotics)": 16.37,
+                    "GerDaLIRSmall": 37.18,
+                    "HellaSwag": 34.85,
+                    "LEMBNarrativeQARetrieval": 44.62,
+                    "LEMBNeedleRetrieval": 48.25,
+                    "LEMBPasskeyRetrieval": 71.0,
+                    "LEMBQMSumRetrieval": 43.63,
+                    "LEMBSummScreenFDRetrieval": 96.82,
+                    "LEMBWikimQARetrieval": 82.11,
+                    "LeCaRDv2": 68.56,
+                    "LegalBenchConsumerContractsQA": 75.46,
+                    "LegalBenchCorporateLobbying": 94.01,
+                    "LegalQuAD": 59.64,
+                    "LegalSummarization": 66.51,
+                    "MintakaRetrieval (fr)": 3.57,
+                    "PIQA": 39.37,
+                    "Quail": 7.01,
+                    "RARbCode": 78.46,
+                    "RARbMath": 72.16,
+                    "RiaNewsRetrieval (rus-Cyrl)": 81.94,
+                    "RuBQRetrieval (rus-Cyrl)": 73.98,
+                    "SIQA": 5.42,
+                    "SpartQA": 9.92,
+                    "SyntecRetrieval": 55.9,
+                    "TempReasonL1": 3.31,
+                    "TempReasonL2Fact": 36.9,
+                    "TempReasonL2Pure": 9.18,
+                    "TempReasonL3Fact": 30.18,
+                    "TempReasonL3Pure": 14.31,
+                    "WinoGrande": 41.21,
+                    "XPQARetrieval (fr)": 41.29
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "e5-mistral-7b-instruct",
+                    "RUParaPhraserSTS (rus-Cyrl)": 76.17,
+                    "RuSTSBenchmarkSTS (rus-Cyrl)": 84.13,
+                    "SICKFr": 64.39,
+                    "STS22 (fr)": 69.82,
+                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 84.25,
+                    "STSBenchmarkMultilingualSTS (fr)": 61.87
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "e5-mistral-7b-instruct",
+                    "SummEvalFr": 32.22
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "e5-mistral-7b-instruct",
+                    "CEDRClassification (rus-Cyrl)": 40.8,
+                    "SensitiveTopicsClassification (rus-Cyrl)": 25.94
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "e5-mistral-7b-instruct",
+                    "Core17InstructionRetrieval": 0.09,
+                    "News21InstructionRetrieval": -0.86,
+                    "Robust04InstructionRetrieval": -9.59
+                }
+            ]
+        }
+    },
+    "gte-Qwen1.5-7B-instruct": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "gte-Qwen1.5-7B-instruct"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "gte-Qwen1.5-7B-instruct",
+                    "AmazonCounterfactualClassification (en)": 83.16,
+                    "AmazonPolarityClassification": 96.7,
+                    "AmazonReviewsClassification (en)": 62.17,
+                    "AmazonReviewsClassification (zh)": 52.95,
+                    "Banking77Classification": 81.68,
+                    "EmotionClassification": 54.53,
+                    "IFlyTek": 53.77,
+                    "ImdbClassification": 95.58,
+                    "JDReview": 88.2,
+                    "MTOPDomainClassification (en)": 95.75,
+                    "MTOPIntentClassification (en)": 84.26,
+                    "MassiveIntentClassification (zh-CN)": 76.25,
+                    "MassiveIntentClassification (en)": 78.47,
+                    "MassiveScenarioClassification (en)": 78.19,
+                    "MassiveScenarioClassification (zh-CN)": 77.26,
+                    "MultilingualSentiment": 77.42,
+                    "OnlineShopping": 94.48,
+                    "TNews": 51.24,
+                    "ToxicConversationsClassification": 78.75,
+                    "TweetSentimentExtractionClassification": 66.0,
+                    "Waimai": 88.63
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "gte-Qwen1.5-7B-instruct",
+                    "ArxivClusteringP2P": 56.4,
+                    "ArxivClusteringS2S": 51.45,
+                    "BiorxivClusteringP2P": 49.01,
+                    "BiorxivClusteringS2S": 45.06,
+                    "CLSClusteringP2P": 47.21,
+                    "CLSClusteringS2S": 45.79,
+                    "MedrxivClusteringP2P": 44.37,
+                    "MedrxivClusteringS2S": 42.0,
+                    "RedditClustering": 73.37,
+                    "RedditClusteringP2P": 72.51,
+                    "StackExchangeClustering": 79.07,
+                    "StackExchangeClusteringP2P": 49.57,
+                    "ThuNewsClusteringP2P": 87.43,
+                    "ThuNewsClusteringS2S": 87.9,
+                    "TwentyNewsgroupsClustering": 51.31
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "gte-Qwen1.5-7B-instruct",
+                    "Cmnli": 91.81,
+                    "Ocnli": 85.22,
+                    "SprintDuplicateQuestions": 95.99,
+                    "TwitterSemEval2015": 79.36,
+                    "TwitterURLCorpus": 86.79
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "gte-Qwen1.5-7B-instruct",
+                    "AskUbuntuDupQuestions": 66.0,
+                    "CMedQAv1": 86.37,
+                    "CMedQAv2": 87.41,
+                    "MindSmallReranking": 32.71,
+                    "SciDocsRR": 87.89,
+                    "StackOverflowDupQuestions": 53.93,
+                    "T2Reranking": 68.11
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "gte-Qwen1.5-7B-instruct",
+                    "ArguAna": 62.65,
+                    "BrightRetrieval (stackoverflow)": 19.85,
+                    "BrightRetrieval (earth_science)": 36.22,
+                    "BrightRetrieval (leetcode)": 25.46,
+                    "BrightRetrieval (theoremqa_questions)": 26.97,
+                    "BrightRetrieval (economics)": 17.72,
+                    "BrightRetrieval (robotics)": 13.47,
+                    "BrightRetrieval (pony)": 9.79,
+                    "BrightRetrieval (aops)": 14.36,
+                    "BrightRetrieval (psychology)": 24.61,
+                    "BrightRetrieval (theoremqa_theorems)": 26.66,
+                    "BrightRetrieval (biology)": 30.92,
+                    "BrightRetrieval (sustainable_living)": 14.93,
+                    "CQADupstackRetrieval": 40.64,
+                    "ClimateFEVER": 44.0,
+                    "CmedqaRetrieval": 43.47,
+                    "CovidRetrieval": 80.87,
+                    "DBPedia": 48.04,
+                    "DuRetrieval": 86.01,
+                    "EcomRetrieval": 66.46,
+                    "FEVER": 93.35,
+                    "FiQA2018": 55.31,
+                    "HotpotQA": 72.25,
+                    "MMarcoRetrieval": 73.83,
+                    "MSMARCO": 41.68,
+                    "MedicalRetrieval": 61.33,
+                    "NFCorpus": 38.25,
+                    "NQ": 61.79,
+                    "QuoraRetrieval": 89.61,
+                    "SCIDOCS": 27.69,
+                    "SciFact": 75.31,
+                    "T2Retrieval": 83.58,
+                    "TRECCOVID": 72.72,
+                    "Touche2020": 20.3,
+                    "VideoRetrieval": 69.41
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "gte-Qwen1.5-7B-instruct",
+                    "AFQMC": 58.47,
+                    "ATEC": 55.46,
+                    "BIOSSES": 81.12,
+                    "BQ": 77.59,
+                    "LCQMC": 76.29,
+                    "PAWSX": 50.22,
+                    "QBQTC": 31.82,
+                    "SICK-R": 79.15,
+                    "STS12": 76.52,
+                    "STS13": 88.63,
+                    "STS14": 83.32,
+                    "STS15": 87.5,
+                    "STS16": 86.39,
+                    "STS17 (en-en)": 87.79,
+                    "STS22 (en)": 66.4,
+                    "STS22 (zh)": 67.36,
+                    "STSB": 81.37,
+                    "STSBenchmark": 87.35
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "gte-Qwen1.5-7B-instruct",
+                    "SummEval": 31.46
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "gte-Qwen1.5-7B-instruct"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "gte-Qwen1.5-7B-instruct"
+                }
+            ]
+        }
+    },
+    "allenai-specter": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "allenai-specter"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "allenai-specter",
+                    "AmazonCounterfactualClassification (de)": 54.46,
+                    "AmazonCounterfactualClassification (en)": 58.7,
+                    "AmazonCounterfactualClassification (en-ext)": 59.28,
+                    "AmazonCounterfactualClassification (ja)": 43.87,
+                    "AmazonPolarityClassification": 57.77,
+                    "AmazonReviewsClassification (de)": 24.08,
+                    "AmazonReviewsClassification (en)": 26.26,
+                    "AmazonReviewsClassification (es)": 23.88,
+                    "AmazonReviewsClassification (fr)": 23.31,
+                    "AmazonReviewsClassification (ja)": 20.25,
+                    "AmazonReviewsClassification (zh)": 20.49,
+                    "Banking77Classification": 66.66,
+                    "EmotionClassification": 24.82,
+                    "ImdbClassification": 56.35,
+                    "MTOPDomainClassification (de)": 48.55,
+                    "MTOPDomainClassification (en)": 74.53,
+                    "MTOPDomainClassification (es)": 58.39,
+                    "MTOPDomainClassification (fr)": 54.61,
+                    "MTOPDomainClassification (hi)": 21.22,
+                    "MTOPDomainClassification (th)": 14.98,
+                    "MTOPIntentClassification (de)": 35.55,
+                    "MTOPIntentClassification (en)": 50.05,
+                    "MTOPIntentClassification (es)": 36.72,
+                    "MTOPIntentClassification (fr)": 34.71,
+                    "MTOPIntentClassification (hi)": 4.44,
+                    "MTOPIntentClassification (th)": 4.67,
+                    "MassiveIntentClassification (af)": 33.68,
+                    "MassiveIntentClassification (am)": 2.94,
+                    "MassiveIntentClassification (ar)": 10.04,
+                    "MassiveIntentClassification (az)": 30.74,
+                    "MassiveIntentClassification (bn)": 3.02,
+                    "MassiveIntentClassification (cy)": 33.94,
+                    "MassiveIntentClassification (da)": 38.47,
+                    "MassiveIntentClassification (de)": 36.06,
+                    "MassiveIntentClassification (el)": 27.7,
+                    "MassiveIntentClassification (en)": 51.73,
+                    "MassiveIntentClassification (es)": 35.6,
+                    "MassiveIntentClassification (fa)": 17.97,
+                    "MassiveIntentClassification (fi)": 35.53,
+                    "MassiveIntentClassification (fr)": 38.41,
+                    "MassiveIntentClassification (he)": 2.69,
+                    "MassiveIntentClassification (hi)": 3.43,
+                    "MassiveIntentClassification (hu)": 34.05,
+                    "MassiveIntentClassification (hy)": 3.11,
+                    "MassiveIntentClassification (id)": 40.02,
+                    "MassiveIntentClassification (is)": 32.63,
+                    "MassiveIntentClassification (it)": 39.28,
+                    "MassiveIntentClassification (ja)": 4.95,
+                    "MassiveIntentClassification (jv)": 34.95,
+                    "MassiveIntentClassification (ka)": 2.57,
+                    "MassiveIntentClassification (km)": 4.73,
+                    "MassiveIntentClassification (kn)": 3.54,
+                    "MassiveIntentClassification (ko)": 2.68,
+                    "MassiveIntentClassification (lv)": 37.91,
+                    "MassiveIntentClassification (ml)": 2.88,
+                    "MassiveIntentClassification (mn)": 16.94,
+                    "MassiveIntentClassification (ms)": 36.6,
+                    "MassiveIntentClassification (my)": 3.96,
+                    "MassiveIntentClassification (nb)": 34.75,
+                    "MassiveIntentClassification (nl)": 33.95,
+                    "MassiveIntentClassification (pl)": 35.77,
+                    "MassiveIntentClassification (pt)": 43.05,
+                    "MassiveIntentClassification (ro)": 36.2,
+                    "MassiveIntentClassification (ru)": 25.3,
+                    "MassiveIntentClassification (sl)": 35.9,
+                    "MassiveIntentClassification (sq)": 36.6,
+                    "MassiveIntentClassification (sv)": 36.0,
+                    "MassiveIntentClassification (sw)": 34.81,
+                    "MassiveIntentClassification (ta)": 3.11,
+                    "MassiveIntentClassification (te)": 2.53,
+                    "MassiveIntentClassification (th)": 4.38,
+                    "MassiveIntentClassification (tl)": 35.51,
+                    "MassiveIntentClassification (tr)": 32.02,
+                    "MassiveIntentClassification (ur)": 9.61,
+                    "MassiveIntentClassification (vi)": 37.07,
+                    "MassiveIntentClassification (zh-CN)": 2.81,
+                    "MassiveIntentClassification (zh-TW)": 4.79,
+                    "MassiveScenarioClassification (af)": 36.17,
+                    "MassiveScenarioClassification (am)": 7.64,
+                    "MassiveScenarioClassification (ar)": 15.26,
+                    "MassiveScenarioClassification (az)": 30.73,
+                    "MassiveScenarioClassification (bn)": 7.15,
+                    "MassiveScenarioClassification (cy)": 34.73,
+                    "MassiveScenarioClassification (da)": 39.93,
+                    "MassiveScenarioClassification (de)": 38.62,
+                    "MassiveScenarioClassification (el)": 27.18,
+                    "MassiveScenarioClassification (en)": 58.58,
+                    "MassiveScenarioClassification (es)": 39.44,
+                    "MassiveScenarioClassification (fa)": 21.43,
+                    "MassiveScenarioClassification (fi)": 33.21,
+                    "MassiveScenarioClassification (fr)": 40.26,
+                    "MassiveScenarioClassification (he)": 7.42,
+                    "MassiveScenarioClassification (hi)": 8.06,
+                    "MassiveScenarioClassification (hu)": 34.54,
+                    "MassiveScenarioClassification (hy)": 8.61,
+                    "MassiveScenarioClassification (id)": 40.04,
+                    "MassiveScenarioClassification (is)": 33.57,
+                    "MassiveScenarioClassification (it)": 40.1,
+                    "MassiveScenarioClassification (ja)": 9.96,
+                    "MassiveScenarioClassification (jv)": 36.11,
+                    "MassiveScenarioClassification (ka)": 7.13,
+                    "MassiveScenarioClassification (km)": 9.66,
+                    "MassiveScenarioClassification (kn)": 7.55,
+                    "MassiveScenarioClassification (ko)": 7.27,
+                    "MassiveScenarioClassification (lv)": 37.03,
+                    "MassiveScenarioClassification (ml)": 7.22,
+                    "MassiveScenarioClassification (mn)": 21.53,
+                    "MassiveScenarioClassification (ms)": 37.57,
+                    "MassiveScenarioClassification (my)": 9.54,
+                    "MassiveScenarioClassification (nb)": 35.71,
+                    "MassiveScenarioClassification (nl)": 34.62,
+                    "MassiveScenarioClassification (pl)": 36.87,
+                    "MassiveScenarioClassification (pt)": 44.68,
+                    "MassiveScenarioClassification (ro)": 37.29,
+                    "MassiveScenarioClassification (ru)": 28.16,
+                    "MassiveScenarioClassification (sl)": 37.95,
+                    "MassiveScenarioClassification (sq)": 37.82,
+                    "MassiveScenarioClassification (sv)": 35.35,
+                    "MassiveScenarioClassification (sw)": 35.37,
+                    "MassiveScenarioClassification (ta)": 7.19,
+                    "MassiveScenarioClassification (te)": 7.29,
+                    "MassiveScenarioClassification (th)": 9.47,
+                    "MassiveScenarioClassification (tl)": 37.31,
+                    "MassiveScenarioClassification (tr)": 34.57,
+                    "MassiveScenarioClassification (ur)": 16.17,
+                    "MassiveScenarioClassification (vi)": 35.91,
+                    "MassiveScenarioClassification (zh-CN)": 9.19,
+                    "MassiveScenarioClassification (zh-TW)": 10.19,
+                    "ToxicConversationsClassification": 57.44,
+                    "TweetSentimentExtractionClassification": 45.52
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "allenai-specter",
+                    "ArxivClusteringP2P": 44.75,
+                    "ArxivClusteringS2S": 35.27,
+                    "BiorxivClusteringP2P": 39.52,
+                    "BiorxivClusteringS2S": 34.53,
+                    "MedrxivClusteringP2P": 35.04,
+                    "MedrxivClusteringS2S": 31.66,
+                    "RedditClustering": 24.13,
+                    "RedditClusteringP2P": 35.06,
+                    "StackExchangeClustering": 39.01,
+                    "StackExchangeClusteringP2P": 31.46,
+                    "TwentyNewsgroupsClustering": 24.22
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "allenai-specter",
+                    "SprintDuplicateQuestions": 71.63,
+                    "TwitterSemEval2015": 43.25,
+                    "TwitterURLCorpus": 69.22
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "allenai-specter",
+                    "AskUbuntuDupQuestions": 50.07,
+                    "MindSmallReranking": 24.8,
+                    "SciDocsRR": 81.31,
+                    "StackOverflowDupQuestions": 36.22
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "allenai-specter",
+                    "ArguAna": 32.67,
+                    "CQADupstackRetrieval": 14.6,
+                    "ClimateFEVER": 6.86,
+                    "DBPedia": 4.14,
+                    "FEVER": 5.45,
+                    "FiQA2018": 5.64,
+                    "HotpotQA": 5.46,
+                    "MSMARCO": 5.59,
+                    "NFCorpus": 0.85,
+                    "NQ": 5.99,
+                    "QuoraRetrieval": 64.65,
+                    "SCIDOCS": 0.0,
+                    "SciFact": 47.88,
+                    "TRECCOVID": 29.91,
+                    "Touche2020": 8.46
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "allenai-specter",
+                    "BIOSSES": 64.95,
+                    "SICK-R": 56.39,
+                    "STS12": 62.49,
+                    "STS13": 58.7,
+                    "STS14": 54.87,
+                    "STS15": 62.54,
+                    "STS16": 64.27,
+                    "STS17 (ar-ar)": 27.14,
+                    "STS17 (en-ar)": 6.9,
+                    "STS17 (en-de)": 11.59,
+                    "STS17 (en-en)": 69.63,
+                    "STS17 (en-tr)": 6.46,
+                    "STS17 (es-en)": 10.86,
+                    "STS17 (es-es)": 55.45,
+                    "STS17 (fr-en)": 16.02,
+                    "STS17 (it-en)": 19.87,
+                    "STS17 (ko-ko)": 8.08,
+                    "STS17 (nl-en)": 24.92,
+                    "STS22 (ar)": 19.57,
+                    "STS22 (de)": 17.31,
+                    "STS22 (de-en)": 26.03,
+                    "STS22 (de-fr)": 10.26,
+                    "STS22 (de-pl)": 16.94,
+                    "STS22 (en)": 55.06,
+                    "STS22 (es)": 48.89,
+                    "STS22 (es-en)": 51.79,
+                    "STS22 (es-it)": 25.24,
+                    "STS22 (fr)": 53.92,
+                    "STS22 (fr-pl)": 39.44,
+                    "STS22 (it)": 39.43,
+                    "STS22 (pl)": 13.56,
+                    "STS22 (pl-en)": 25.36,
+                    "STS22 (ru)": 1.11,
+                    "STS22 (tr)": 31.73,
+                    "STS22 (zh)": 16.35,
+                    "STS22 (zh-en)": 8.44,
+                    "STSBenchmark": 61.26
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "allenai-specter",
+                    "SummEval": 27.66
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "allenai-specter"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "allenai-specter"
+                }
+            ]
+        }
+    },
+    "m3e-base": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "m3e-base"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "m3e-base",
+                    "AmazonReviewsClassification (zh)": 43.02,
+                    "IFlyTek": 44.42,
+                    "JDReview": 85.33,
+                    "MassiveIntentClassification (zh-CN)": 68.4,
+                    "MassiveScenarioClassification (zh-CN)": 74.6,
+                    "MultilingualSentiment": 71.9,
+                    "OnlineShopping": 87.77,
+                    "TNews": 48.28,
+                    "Waimai": 83.99
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "m3e-base",
+                    "CLSClusteringP2P": 39.81,
+                    "CLSClusteringS2S": 37.34,
+                    "ThuNewsClusteringP2P": 59.77,
+                    "ThuNewsClusteringS2S": 53.78
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "m3e-base",
+                    "Cmnli": 69.98,
+                    "Ocnli": 58.0
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "m3e-base",
+                    "CMedQAv1": 77.05,
+                    "CMedQAv2": 76.76,
+                    "MMarcoReranking": 17.51,
+                    "T2Reranking": 66.03
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "m3e-base",
+                    "CmedqaRetrieval": 30.33,
+                    "CovidRetrieval": 66.42,
+                    "DuRetrieval": 75.76,
+                    "EcomRetrieval": 50.27,
+                    "MMarcoRetrieval": 65.46,
+                    "MedicalRetrieval": 42.79,
+                    "T2Retrieval": 73.14,
+                    "VideoRetrieval": 51.11
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "m3e-base",
+                    "AFQMC": 35.87,
+                    "ATEC": 41.27,
+                    "BQ": 63.81,
+                    "LCQMC": 74.88,
+                    "PAWSX": 12.19,
+                    "QBQTC": 32.07,
+                    "STS22 (zh)": 66.73,
+                    "STSB": 76.97
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "m3e-base"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "m3e-base"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "m3e-base"
+                }
+            ]
+        }
+    },
+    "multilingual-e5-small": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "multilingual-e5-small",
+                    "BornholmBitextMining (dan-Latn)": 37.15,
+                    "BornholmBitextMining": 43.89,
+                    "Tatoeba (swh-Latn_eng-Latn)": 65.43,
+                    "Tatoeba (jpn-Jpan_eng-Latn)": 77.43,
+                    "Tatoeba (tuk-Latn_eng-Latn)": 16.99,
+                    "Tatoeba (lat-Latn_eng-Latn)": 37.76,
+                    "Tatoeba (mal-Mlym_eng-Latn)": 94.78,
+                    "Tatoeba (ast-Latn_eng-Latn)": 62.81,
+                    "Tatoeba (est-Latn_eng-Latn)": 56.47,
+                    "Tatoeba (cym-Latn_eng-Latn)": 62.3,
+                    "Tatoeba (pol-Latn_eng-Latn)": 88.85,
+                    "Tatoeba (ukr-Cyrl_eng-Latn)": 82.98,
+                    "Tatoeba (ido-Latn_eng-Latn)": 70.07,
+                    "Tatoeba (zsm-Latn_eng-Latn)": 91.37,
+                    "Tatoeba (bul-Cyrl_eng-Latn)": 85.47,
+                    "Tatoeba (dsb-Latn_eng-Latn)": 29.87,
+                    "Tatoeba (tha-Thai_eng-Latn)": 90.88,
+                    "Tatoeba (arz-Arab_eng-Latn)": 53.35,
+                    "Tatoeba (cbk-Latn_eng-Latn)": 55.36,
+                    "Tatoeba (pms-Latn_eng-Latn)": 35.47,
+                    "Tatoeba (ber-Tfng_eng-Latn)": 18.22,
+                    "Tatoeba (slk-Latn_eng-Latn)": 79.86,
+                    "Tatoeba (ang-Latn_eng-Latn)": 30.3,
+                    "Tatoeba (ind-Latn_eng-Latn)": 88.28,
+                    "Tatoeba (cha-Latn_eng-Latn)": 24.88,
+                    "Tatoeba (slv-Latn_eng-Latn)": 73.93,
+                    "Tatoeba (kab-Latn_eng-Latn)": 18.06,
+                    "Tatoeba (ina-Latn_eng-Latn)": 86.39,
+                    "Tatoeba (lfn-Latn_eng-Latn)": 51.46,
+                    "Tatoeba (hye-Armn_eng-Latn)": 83.81,
+                    "Tatoeba (war-Latn_eng-Latn)": 39.14,
+                    "Tatoeba (dtp-Latn_eng-Latn)": 6.42,
+                    "Tatoeba (nds-Latn_eng-Latn)": 52.46,
+                    "Tatoeba (urd-Arab_eng-Latn)": 85.07,
+                    "Tatoeba (rus-Cyrl_eng-Latn)": 89.77,
+                    "Tatoeba (fao-Latn_eng-Latn)": 56.57,
+                    "Tatoeba (cat-Latn_eng-Latn)": 79.3,
+                    "Tatoeba (gla-Latn_eng-Latn)": 35.96,
+                    "Tatoeba (kur-Latn_eng-Latn)": 39.99,
+                    "Tatoeba (cor-Latn_eng-Latn)": 5.24,
+                    "Tatoeba (nov-Latn_eng-Latn)": 64.2,
+                    "Tatoeba (max-Deva_eng-Latn)": 48.29,
+                    "Tatoeba (nno-Latn_eng-Latn)": 70.29,
+                    "Tatoeba (kor-Hang_eng-Latn)": 73.74,
+                    "Tatoeba (vie-Latn_eng-Latn)": 89.03,
+                    "Tatoeba (tur-Latn_eng-Latn)": 88.42,
+                    "Tatoeba (spa-Latn_eng-Latn)": 93.01,
+                    "Tatoeba (gsw-Latn_eng-Latn)": 40.13,
+                    "Tatoeba (yid-Hebr_eng-Latn)": 65.9,
+                    "Tatoeba (orv-Cyrl_eng-Latn)": 14.89,
+                    "Tatoeba (wuu-Hans_eng-Latn)": 67.3,
+                    "Tatoeba (heb-Hebr_eng-Latn)": 73.68,
+                    "Tatoeba (arq-Arab_eng-Latn)": 23.62,
+                    "Tatoeba (nld-Latn_eng-Latn)": 91.87,
+                    "Tatoeba (kaz-Cyrl_eng-Latn)": 70.57,
+                    "Tatoeba (mon-Cyrl_eng-Latn)": 77.7,
+                    "Tatoeba (fin-Latn_eng-Latn)": 70.23,
+                    "Tatoeba (hrv-Latn_eng-Latn)": 84.42,
+                    "Tatoeba (fra-Latn_eng-Latn)": 90.51,
+                    "Tatoeba (khm-Khmr_eng-Latn)": 44.34,
+                    "Tatoeba (amh-Ethi_eng-Latn)": 74.11,
+                    "Tatoeba (eus-Latn_eng-Latn)": 50.9,
+                    "Tatoeba (lvs-Latn_eng-Latn)": 61.84,
+                    "Tatoeba (pes-Arab_eng-Latn)": 85.51,
+                    "Tatoeba (tzl-Latn_eng-Latn)": 34.83,
+                    "Tatoeba (oci-Latn_eng-Latn)": 38.27,
+                    "Tatoeba (ell-Grek_eng-Latn)": 86.81,
+                    "Tatoeba (tgl-Latn_eng-Latn)": 77.54,
+                    "Tatoeba (uig-Arab_eng-Latn)": 60.59,
+                    "Tatoeba (ben-Beng_eng-Latn)": 81.4,
+                    "Tatoeba (uzb-Latn_eng-Latn)": 59.11,
+                    "Tatoeba (epo-Latn_eng-Latn)": 88.96,
+                    "Tatoeba (sqi-Latn_eng-Latn)": 86.21,
+                    "Tatoeba (kzj-Latn_eng-Latn)": 6.56,
+                    "Tatoeba (mkd-Cyrl_eng-Latn)": 63.74,
+                    "Tatoeba (bre-Latn_eng-Latn)": 7.09,
+                    "Tatoeba (dan-Latn_eng-Latn)": 86.38,
+                    "Tatoeba (mhr-Cyrl_eng-Latn)": 5.58,
+                    "Tatoeba (csb-Latn_eng-Latn)": 26.23,
+                    "Tatoeba (xho-Latn_eng-Latn)": 63.2,
+                    "Tatoeba (swe-Latn_eng-Latn)": 87.46,
+                    "Tatoeba (tat-Cyrl_eng-Latn)": 66.8,
+                    "Tatoeba (srp-Cyrl_eng-Latn)": 83.06,
+                    "Tatoeba (cmn-Hans_eng-Latn)": 89.85,
+                    "Tatoeba (ces-Latn_eng-Latn)": 80.99,
+                    "Tatoeba (bel-Cyrl_eng-Latn)": 80.89,
+                    "Tatoeba (yue-Hant_eng-Latn)": 69.33,
+                    "Tatoeba (lit-Latn_eng-Latn)": 59.95,
+                    "Tatoeba (tel-Telu_eng-Latn)": 86.82,
+                    "Tatoeba (nob-Latn_eng-Latn)": 90.18,
+                    "Tatoeba (mar-Deva_eng-Latn)": 85.94,
+                    "Tatoeba (ara-Arab_eng-Latn)": 76.09,
+                    "Tatoeba (swg-Latn_eng-Latn)": 44.0,
+                    "Tatoeba (bos-Latn_eng-Latn)": 81.15,
+                    "Tatoeba (pam-Latn_eng-Latn)": 5.76,
+                    "Tatoeba (fry-Latn_eng-Latn)": 49.05,
+                    "Tatoeba (hun-Latn_eng-Latn)": 74.44,
+                    "Tatoeba (ron-Latn_eng-Latn)": 85.68,
+                    "Tatoeba (afr-Latn_eng-Latn)": 85.17,
+                    "Tatoeba (isl-Latn_eng-Latn)": 62.32,
+                    "Tatoeba (aze-Latn_eng-Latn)": 80.79,
+                    "Tatoeba (hsb-Latn_eng-Latn)": 36.49,
+                    "Tatoeba (tam-Taml_eng-Latn)": 82.82,
+                    "Tatoeba (ceb-Latn_eng-Latn)": 42.35,
+                    "Tatoeba (jav-Latn_eng-Latn)": 53.39,
+                    "Tatoeba (glg-Latn_eng-Latn)": 79.65,
+                    "Tatoeba (por-Latn_eng-Latn)": 89.63,
+                    "Tatoeba (awa-Deva_eng-Latn)": 74.55,
+                    "Tatoeba (hin-Deva_eng-Latn)": 92.36,
+                    "Tatoeba (ita-Latn_eng-Latn)": 88.54,
+                    "Tatoeba (deu-Latn_eng-Latn)": 97.22,
+                    "Tatoeba (gle-Latn_eng-Latn)": 56.32,
+                    "Tatoeba (kat-Geor_eng-Latn)": 77.6,
+                    "Tatoeba (ile-Latn_eng-Latn)": 70.31
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "multilingual-e5-small",
+                    "AllegroReviews (pol-Latn)": 37.33,
+                    "AllegroReviews": 37.42,
+                    "AmazonCounterfactualClassification (en-ext)": 73.07,
+                    "AmazonCounterfactualClassification (en)": 71.87,
+                    "AmazonCounterfactualClassification (deu-Latn)": 71.72,
+                    "AmazonCounterfactualClassification (jpn-Jpan)": 61.46,
+                    "AmazonPolarityClassification": 88.61,
+                    "AmazonReviewsClassification (en)": 45.75,
+                    "AmazonReviewsClassification (deu-Latn)": 41.07,
+                    "AmazonReviewsClassification (spa-Latn)": 41.37,
+                    "AmazonReviewsClassification (fra-Latn)": 39.47,
+                    "AmazonReviewsClassification (jpn-Jpan)": 38.55,
+                    "AmazonReviewsClassification (cmn-Hans)": 38.31,
+                    "AmazonReviewsClassification (fr)": 39.68,
+                    "AngryTweetsClassification (dan-Latn)": 56.27,
+                    "AngryTweetsClassification": 53.57,
+                    "Banking77Classification": 70.44,
+                    "CBD (pol-Latn)": 63.33,
+                    "CBD": 63.25,
+                    "DKHateClassification": 60.73,
+                    "DanishPoliticalCommentsClassification (dan-Latn)": 34.82,
+                    "DanishPoliticalCommentsClassification": 34.38,
+                    "EmotionClassification": 42.86,
+                    "GeoreviewClassification (rus-Cyrl)": 44.66,
+                    "HeadlineClassification (rus-Cyrl)": 73.94,
+                    "IFlyTek (cmn-Hans)": 40.74,
+                    "IFlyTek": 47.35,
+                    "ImdbClassification": 79.57,
+                    "InappropriatenessClassification (rus-Cyrl)": 59.16,
+                    "JDReview (cmn-Hans)": 78.37,
+                    "JDReview": 79.34,
+                    "KinopoiskClassification (rus-Cyrl)": 49.96,
+                    "LccSentimentClassification (dan-Latn)": 58.6,
+                    "LccSentimentClassification": 57.87,
+                    "MTOPDomainClassification (en)": 88.99,
+                    "MTOPDomainClassification (deu-Latn)": 86.15,
+                    "MTOPDomainClassification (spa-Latn)": 85.53,
+                    "MTOPDomainClassification (fra-Latn)": 81.5,
+                    "MTOPDomainClassification (hin-Deva)": 84.07,
+                    "MTOPDomainClassification (tha-Thai)": 83.16,
+                    "MTOPDomainClassification (fr)": 81.2,
+                    "MTOPIntentClassification (en)": 56.69,
+                    "MTOPIntentClassification (deu-Latn)": 55.88,
+                    "MTOPIntentClassification (spa-Latn)": 53.15,
+                    "MTOPIntentClassification (fra-Latn)": 44.35,
+                    "MTOPIntentClassification (hin-Deva)": 52.26,
+                    "MTOPIntentClassification (tha-Thai)": 54.61,
+                    "MTOPIntentClassification (fr)": 46.01,
+                    "MasakhaNEWSClassification (amh-Ethi)": 84.28,
+                    "MasakhaNEWSClassification (eng)": 75.61,
+                    "MasakhaNEWSClassification (fra-Latn)": 74.67,
+                    "MasakhaNEWSClassification (hau-Latn)": 73.08,
+                    "MasakhaNEWSClassification (ibo-Latn)": 63.9,
+                    "MasakhaNEWSClassification (lin-Latn)": 73.37,
+                    "MasakhaNEWSClassification (lug-Latn)": 67.89,
+                    "MasakhaNEWSClassification (orm-Ethi)": 68.77,
+                    "MasakhaNEWSClassification (pcm-Latn)": 90.79,
+                    "MasakhaNEWSClassification (run-Latn)": 75.4,
+                    "MasakhaNEWSClassification (sna-Latn)": 82.76,
+                    "MasakhaNEWSClassification (som-Latn)": 59.8,
+                    "MasakhaNEWSClassification (swa-Latn)": 69.85,
+                    "MasakhaNEWSClassification (tir-Ethi)": 68.01,
+                    "MasakhaNEWSClassification (xho-Latn)": 72.22,
+                    "MasakhaNEWSClassification (yor-Latn)": 73.84,
+                    "MasakhaNEWSClassification (fra)": 77.65,
+                    "MassiveIntentClassification (ben-Beng)": 50.68,
+                    "MassiveIntentClassification (tur-Latn)": 56.88,
+                    "MassiveIntentClassification (ind-Latn)": 56.2,
+                    "MassiveIntentClassification (khm-Khmr)": 33.45,
+                    "MassiveIntentClassification (en)": 63.87,
+                    "MassiveIntentClassification (mal-Mlym)": 52.81,
+                    "MassiveIntentClassification (pol-Latn)": 57.33,
+                    "MassiveIntentClassification (lav-Latn)": 44.93,
+                    "MassiveIntentClassification (isl-Latn)": 41.53,
+                    "MassiveIntentClassification (sqi-Latn)": 48.68,
+                    "MassiveIntentClassification (amh-Ethi)": 43.52,
+                    "MassiveIntentClassification (cmo-Hans)": 62.04,
+                    "MassiveIntentClassification (nld-Latn)": 59.27,
+                    "MassiveIntentClassification (deu-Latn)": 55.52,
+                    "MassiveIntentClassification (nob-Latn)": 55.36,
+                    "MassiveIntentClassification (cmo-Hant)": 53.75,
+                    "MassiveIntentClassification (urd-Arab)": 50.51,
+                    "MassiveIntentClassification (slv-Latn)": 47.71,
+                    "MassiveIntentClassification (hun-Latn)": 53.21,
+                    "MassiveIntentClassification (jpn-Jpan)": 61.58,
+                    "MassiveIntentClassification (swa-Latn)": 44.84,
+                    "MassiveIntentClassification (fra-Latn)": 57.9,
+                    "MassiveIntentClassification (spa-Latn)": 59.19,
+                    "MassiveIntentClassification (mon-Cyrl)": 47.38,
+                    "MassiveIntentClassification (dan-Latn)": 56.12,
+                    "MassiveIntentClassification (msa-Latn)": 50.8,
+                    "MassiveIntentClassification (aze-Latn)": 49.32,
+                    "MassiveIntentClassification (fas-Arab)": 57.73,
+                    "MassiveIntentClassification (kan-Knda)": 47.85,
+                    "MassiveIntentClassification (kor-Kore)": 57.12,
+                    "MassiveIntentClassification (tha-Thai)": 56.26,
+                    "MassiveIntentClassification (heb-Hebr)": 51.11,
+                    "MassiveIntentClassification (hin-Deva)": 55.69,
+                    "MassiveIntentClassification (ara-Arab)": 47.78,
+                    "MassiveIntentClassification (por-Latn)": 60.12,
+                    "MassiveIntentClassification (vie-Latn)": 56.19,
+                    "MassiveIntentClassification (hye-Armn)": 47.89,
+                    "MassiveIntentClassification (ita-Latn)": 58.8,
+                    "MassiveIntentClassification (ell-Grek)": 54.14,
+                    "MassiveIntentClassification (cym-Latn)": 36.62,
+                    "MassiveIntentClassification (tel-Telu)": 48.85,
+                    "MassiveIntentClassification (kat-Geor)": 39.52,
+                    "MassiveIntentClassification (swe-Latn)": 58.2,
+                    "MassiveIntentClassification (tam-Taml)": 47.65,
+                    "MassiveIntentClassification (fin-Latn)": 55.14,
+                    "MassiveIntentClassification (tgl-Latn)": 48.7,
+                    "MassiveIntentClassification (ron-Latn)": 52.82,
+                    "MassiveIntentClassification (jav-Latn)": 42.96,
+                    "MassiveIntentClassification (rus-Cyrl)": 58.43,
+                    "MassiveIntentClassification (afr-Latn)": 48.74,
+                    "MassiveIntentClassification (mya-Mymr)": 45.64,
+                    "MassiveIntentClassification (da)": 54.63,
+                    "MassiveIntentClassification (nb)": 53.96,
+                    "MassiveIntentClassification (sv)": 56.6,
+                    "MassiveIntentClassification (pl)": 57.4,
+                    "MassiveScenarioClassification (nld-Latn)": 67.01,
+                    "MassiveScenarioClassification (tur-Latn)": 62.14,
+                    "MassiveScenarioClassification (cym-Latn)": 44.63,
+                    "MassiveScenarioClassification (jav-Latn)": 51.39,
+                    "MassiveScenarioClassification (hin-Deva)": 62.22,
+                    "MassiveScenarioClassification (fra-Latn)": 63.9,
+                    "MassiveScenarioClassification (cmo-Hans)": 68.96,
+                    "MassiveScenarioClassification (kan-Knda)": 52.73,
+                    "MassiveScenarioClassification (isl-Latn)": 49.66,
+                    "MassiveScenarioClassification (jpn-Jpan)": 67.75,
+                    "MassiveScenarioClassification (mal-Mlym)": 60.31,
+                    "MassiveScenarioClassification (pol-Latn)": 64.27,
+                    "MassiveScenarioClassification (mya-Mymr)": 51.07,
+                    "MassiveScenarioClassification (slv-Latn)": 54.05,
+                    "MassiveScenarioClassification (rus-Cyrl)": 63.89,
+                    "MassiveScenarioClassification (urd-Arab)": 55.91,
+                    "MassiveScenarioClassification (fas-Arab)": 63.32,
+                    "MassiveScenarioClassification (fin-Latn)": 61.89,
+                    "MassiveScenarioClassification (kat-Geor)": 44.96,
+                    "MassiveScenarioClassification (sqi-Latn)": 56.15,
+                    "MassiveScenarioClassification (en)": 69.28,
+                    "MassiveScenarioClassification (hun-Latn)": 61.93,
+                    "MassiveScenarioClassification (aze-Latn)": 53.27,
+                    "MassiveScenarioClassification (heb-Hebr)": 59.22,
+                    "MassiveScenarioClassification (kor-Kore)": 65.7,
+                    "MassiveScenarioClassification (nob-Latn)": 61.96,
+                    "MassiveScenarioClassification (dan-Latn)": 64.03,
+                    "MassiveScenarioClassification (cmo-Hant)": 61.15,
+                    "MassiveScenarioClassification (ron-Latn)": 60.0,
+                    "MassiveScenarioClassification (amh-Ethi)": 50.53,
+                    "MassiveScenarioClassification (spa-Latn)": 64.43,
+                    "MassiveScenarioClassification (afr-Latn)": 58.0,
+                    "MassiveScenarioClassification (lav-Latn)": 51.0,
+                    "MassiveScenarioClassification (deu-Latn)": 65.88,
+                    "MassiveScenarioClassification (ita-Latn)": 64.03,
+                    "MassiveScenarioClassification (tha-Thai)": 65.72,
+                    "MassiveScenarioClassification (msa-Latn)": 59.18,
+                    "MassiveScenarioClassification (tam-Taml)": 52.74,
+                    "MassiveScenarioClassification (ara-Arab)": 54.56,
+                    "MassiveScenarioClassification (tgl-Latn)": 55.3,
+                    "MassiveScenarioClassification (por-Latn)": 62.75,
+                    "MassiveScenarioClassification (swe-Latn)": 67.33,
+                    "MassiveScenarioClassification (tel-Telu)": 54.86,
+                    "MassiveScenarioClassification (khm-Khmr)": 39.01,
+                    "MassiveScenarioClassification (swa-Latn)": 52.42,
+                    "MassiveScenarioClassification (vie-Latn)": 62.67,
+                    "MassiveScenarioClassification (ind-Latn)": 62.0,
+                    "MassiveScenarioClassification (hye-Armn)": 52.93,
+                    "MassiveScenarioClassification (ben-Beng)": 57.38,
+                    "MassiveScenarioClassification (mon-Cyrl)": 52.41,
+                    "MassiveScenarioClassification (ell-Grek)": 62.29,
+                    "MassiveScenarioClassification (da)": 62.34,
+                    "MassiveScenarioClassification (nb)": 59.9,
+                    "MassiveScenarioClassification (sv)": 65.54,
+                    "MassiveScenarioClassification (pl)": 64.25,
+                    "MultilingualSentiment (cmn-Hans)": 66.0,
+                    "MultilingualSentiment": 64.74,
+                    "NoRecClassification (nob-Latn)": 50.08,
+                    "NoRecClassification": 53.96,
+                    "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 72.15,
+                    "NordicLangClassification": 75.15,
+                    "NorwegianParliament": 60.15,
+                    "OnlineShopping (cmn-Hans)": 88.7,
+                    "OnlineShopping": 88.73,
+                    "PAC (pol-Latn)": 70.48,
+                    "PAC": 70.55,
+                    "PolEmo2.0-IN (pol-Latn)": 67.31,
+                    "PolEmo2.0-IN": 67.35,
+                    "PolEmo2.0-OUT (pol-Latn)": 39.17,
+                    "PolEmo2.0-OUT": 39.13,
+                    "RuReviewsClassification (rus-Cyrl)": 61.18,
+                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 54.99,
+                    "RuSciBenchOECDClassification (rus-Cyrl)": 41.72,
+                    "ScalaDaClassification": 50.3,
+                    "ScalaNbClassification": 50.06,
+                    "TNews (cmn-Hans)": 46.6,
+                    "TNews": 48.38,
+                    "ToxicConversationsClassification": 63.59,
+                    "TweetSentimentExtractionClassification": 62.79,
+                    "Waimai (cmn-Hans)": 84.15,
+                    "Waimai": 83.9
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "multilingual-e5-small",
+                    "8TagsClustering": 23.92,
+                    "AlloProfClusteringP2P": 60.89,
+                    "AlloProfClusteringS2S": 32.52,
+                    "BiorxivClusteringP2P": 35.84,
+                    "BiorxivClusteringS2S": 27.35,
+                    "CLSClusteringP2P": 39.14,
+                    "CLSClusteringS2S": 37.79,
+                    "GeoreviewClusteringP2P (rus-Cyrl)": 58.57,
+                    "HALClusteringS2S": 18.95,
+                    "MLSUMClusteringP2P (rus-Cyrl)": 39.69,
+                    "MLSUMClusteringP2P": 43.2,
+                    "MLSUMClusteringS2S (rus-Cyrl)": 39.9,
+                    "MLSUMClusteringS2S": 37.61,
+                    "MasakhaNEWSClusteringP2P (amh-Ethi)": 66.2,
+                    "MasakhaNEWSClusteringP2P (eng)": 50.08,
+                    "MasakhaNEWSClusteringP2P (fra-Latn)": 56.32,
+                    "MasakhaNEWSClusteringP2P (hau-Latn)": 53.63,
+                    "MasakhaNEWSClusteringP2P (ibo-Latn)": 49.19,
+                    "MasakhaNEWSClusteringP2P (lin-Latn)": 55.06,
+                    "MasakhaNEWSClusteringP2P (lug-Latn)": 59.97,
+                    "MasakhaNEWSClusteringP2P (orm-Ethi)": 32.72,
+                    "MasakhaNEWSClusteringP2P (pcm-Latn)": 62.22,
+                    "MasakhaNEWSClusteringP2P (run-Latn)": 57.52,
+                    "MasakhaNEWSClusteringP2P (sna-Latn)": 45.11,
+                    "MasakhaNEWSClusteringP2P (som-Latn)": 42.39,
+                    "MasakhaNEWSClusteringP2P (swa-Latn)": 23.77,
+                    "MasakhaNEWSClusteringP2P (tir-Ethi)": 57.68,
+                    "MasakhaNEWSClusteringP2P (xho-Latn)": 39.96,
+                    "MasakhaNEWSClusteringP2P (yor-Latn)": 26.56,
+                    "MasakhaNEWSClusteringP2P (fra)": 40.12,
+                    "MasakhaNEWSClusteringS2S (amh-Ethi)": 55.48,
+                    "MasakhaNEWSClusteringS2S (eng)": 37.79,
+                    "MasakhaNEWSClusteringS2S (fra-Latn)": 35.8,
+                    "MasakhaNEWSClusteringS2S (hau-Latn)": 20.22,
+                    "MasakhaNEWSClusteringS2S (ibo-Latn)": 35.67,
+                    "MasakhaNEWSClusteringS2S (lin-Latn)": 41.12,
+                    "MasakhaNEWSClusteringS2S (lug-Latn)": 48.63,
+                    "MasakhaNEWSClusteringS2S (orm-Ethi)": 29.16,
+                    "MasakhaNEWSClusteringS2S (pcm-Latn)": 65.36,
+                    "MasakhaNEWSClusteringS2S (run-Latn)": 45.5,
+                    "MasakhaNEWSClusteringS2S (sna-Latn)": 47.61,
+                    "MasakhaNEWSClusteringS2S (som-Latn)": 28.59,
+                    "MasakhaNEWSClusteringS2S (swa-Latn)": 13.91,
+                    "MasakhaNEWSClusteringS2S (tir-Ethi)": 50.51,
+                    "MasakhaNEWSClusteringS2S (xho-Latn)": 37.26,
+                    "MasakhaNEWSClusteringS2S (yor-Latn)": 23.38,
+                    "MasakhaNEWSClusteringS2S (fra)": 39.22,
+                    "MedrxivClusteringP2P": 30.72,
+                    "MedrxivClusteringS2S": 27.0,
+                    "RedditClustering": 40.12,
+                    "RedditClusteringP2P": 59.49,
+                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 51.1,
+                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 45.29,
+                    "StackExchangeClustering": 53.32,
+                    "StackExchangeClusteringP2P": 31.87,
+                    "ThuNewsClusteringP2P": 55.18,
+                    "ThuNewsClusteringS2S": 48.93,
+                    "TwentyNewsgroupsClustering": 33.67
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "multilingual-e5-small",
+                    "CDSC-E (pol-Latn)": 69.69,
+                    "CDSC-E": 69.7,
+                    "Cmnli": 72.12,
+                    "Ocnli": 60.77,
+                    "OpusparcusPC (deu-Latn)": 94.9,
+                    "OpusparcusPC (en)": 98.42,
+                    "OpusparcusPC (fin-Latn)": 88.29,
+                    "OpusparcusPC (fra-Latn)": 91.77,
+                    "OpusparcusPC (rus-Cyrl)": 84.79,
+                    "OpusparcusPC (swe-Latn)": 91.07,
+                    "OpusparcusPC (fr)": 92.52,
+                    "PPC": 86.72,
+                    "PSC (pol-Latn)": 99.23,
+                    "PSC": 99.24,
+                    "PawsXPairClassification (deu-Latn)": 52.13,
+                    "PawsXPairClassification (en)": 53.91,
+                    "PawsXPairClassification (spa-Latn)": 51.39,
+                    "PawsXPairClassification (fra-Latn)": 52.69,
+                    "PawsXPairClassification (jpn-Hira)": 48.24,
+                    "PawsXPairClassification (kor-Hang)": 49.95,
+                    "PawsXPairClassification (cmn-Hans)": 54.01,
+                    "PawsXPairClassification (fr)": 55.68,
+                    "SICK-E-PL (pol-Latn)": 66.35,
+                    "SICK-E-PL": 66.34,
+                    "SprintDuplicateQuestions": 92.18,
+                    "TERRa (rus-Cyrl)": 55.14,
+                    "TwitterSemEval2015": 70.75,
+                    "TwitterURLCorpus": 85.03
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "multilingual-e5-small",
+                    "AlloprofReranking (fra-Latn)": 64.41,
+                    "AlloprofReranking": 56.17,
+                    "AskUbuntuDupQuestions": 56.42,
+                    "CMedQAv1": 63.44,
+                    "CMedQAv2": 62.41,
+                    "MIRACLReranking (rus-Cyrl)": 59.12,
+                    "MMarcoReranking (cmn-Hans)": 29.98,
+                    "MMarcoReranking": 24.33,
+                    "MindSmallReranking": 29.96,
+                    "RuBQReranking (rus-Cyrl)": 71.46,
+                    "SciDocsRR": 78.26,
+                    "StackOverflowDupQuestions": 46.97,
+                    "SyntecReranking (fra-Latn)": 81.22,
+                    "SyntecReranking": 86.7,
+                    "T2Reranking (cmn-Hans)": 65.72,
+                    "T2Reranking": 65.24
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "multilingual-e5-small",
+                    "AILACasedocs": 23.43,
+                    "AILAStatutes": 19.01,
+                    "ARCChallenge": 7.14,
+                    "AlloprofRetrieval (fra-Latn)": 27.38,
+                    "AlloprofRetrieval": 27.01,
+                    "AlphaNLI": 13.0,
+                    "ArguAna": 39.09,
+                    "ArguAna-PL (pol-Latn)": 37.49,
+                    "ArguAna-PL": 37.43,
+                    "BSARDRetrieval (fra-Latn)": 14.54,
+                    "BSARDRetrieval": 0.0,
+                    "CmedqaRetrieval (cmn-Hans)": 24.36,
+                    "CmedqaRetrieval": 24.38,
+                    "CovidRetrieval (cmn-Hans)": 72.82,
+                    "CovidRetrieval": 72.82,
+                    "DBPedia-PL": 29.27,
+                    "DuRetrieval (cmn-Hans)": 81.36,
+                    "DuRetrieval": 81.35,
+                    "EcomRetrieval (cmn-Hans)": 53.53,
+                    "EcomRetrieval": 53.56,
+                    "FiQA-PL (pol-Latn)": 22.02,
+                    "FiQA-PL": 22.03,
+                    "FiQA2018": 33.13,
+                    "GerDaLIRSmall (deu-Latn)": 14.81,
+                    "HellaSwag": 23.73,
+                    "HotpotQA-PL": 60.15,
+                    "LEMBNarrativeQARetrieval": 22.6,
+                    "LEMBNeedleRetrieval": 30.75,
+                    "LEMBPasskeyRetrieval": 38.25,
+                    "LEMBQMSumRetrieval": 21.51,
+                    "LEMBSummScreenFDRetrieval": 62.75,
+                    "LEMBWikimQARetrieval": 57.13,
+                    "LeCaRDv2 (zho-Hans)": 61.58,
+                    "LegalBenchConsumerContractsQA": 66.98,
+                    "LegalBenchCorporateLobbying": 89.47,
+                    "LegalQuAD (deu-Latn)": 47.8,
+                    "LegalSummarization": 55.76,
+                    "MIRACLRetrieval (rus-Cyrl)": 59.01,
+                    "MMarcoRetrieval (cmn-Hans)": 73.17,
+                    "MMarcoRetrieval": 73.17,
+                    "MSMARCO-PL": 26.94,
+                    "MedicalRetrieval (cmn-Hans)": 44.84,
+                    "MedicalRetrieval": 44.84,
+                    "MintakaRetrieval (ara-Arab)": 21.22,
+                    "MintakaRetrieval (deu-Latn)": 25.6,
+                    "MintakaRetrieval (spa-Latn)": 26.4,
+                    "MintakaRetrieval (fra-Latn)": 25.0,
+                    "MintakaRetrieval (hin-Deva)": 21.1,
+                    "MintakaRetrieval (ita-Latn)": 26.25,
+                    "MintakaRetrieval (jpn-Hira)": 20.69,
+                    "MintakaRetrieval (por-Latn)": 24.44,
+                    "MintakaRetrieval (fr)": 22.53,
+                    "NFCorpus": 31.0,
+                    "NFCorpus-PL (pol-Latn)": 26.5,
+                    "NFCorpus-PL": 26.48,
+                    "NQ-PL": 40.46,
+                    "PIQA": 21.08,
+                    "Quail": 2.38,
+                    "Quora-PL": 78.7,
+                    "RARbCode": 46.96,
+                    "RARbMath": 63.91,
+                    "RiaNewsRetrieval (rus-Cyrl)": 70.01,
+                    "RuBQRetrieval (rus-Cyrl)": 68.53,
+                    "SCIDOCS": 13.9,
+                    "SCIDOCS-PL (pol-Latn)": 11.59,
+                    "SCIDOCS-PL": 11.6,
+                    "SIQA": 2.57,
+                    "SciFact": 67.7,
+                    "SciFact-PL (pol-Latn)": 62.76,
+                    "SciFact-PL": 62.76,
+                    "SpartQA": 5.43,
+                    "SyntecRetrieval (fra-Latn)": 73.46,
+                    "SyntecRetrieval": 75.76,
+                    "T2Retrieval (cmn-Hans)": 71.36,
+                    "T2Retrieval": 71.39,
+                    "TRECCOVID": 72.57,
+                    "TRECCOVID-PL (pol-Latn)": 70.92,
+                    "TRECCOVID-PL": 70.92,
+                    "TempReasonL1": 0.8,
+                    "TempReasonL2Fact": 36.76,
+                    "TempReasonL2Pure": 0.62,
+                    "TempReasonL3Fact": 32.42,
+                    "TempReasonL3Pure": 6.36,
+                    "Touche2020": 21.16,
+                    "VideoRetrieval (cmn-Hans)": 58.06,
+                    "VideoRetrieval": 58.09,
+                    "WinoGrande": 37.46,
+                    "XPQARetrieval (ara-Arab_ara-Arab)": 39.93,
+                    "XPQARetrieval (eng-Latn_ara-Arab)": 18.09,
+                    "XPQARetrieval (ara-Arab_eng-Latn)": 31.64,
+                    "XPQARetrieval (deu-Latn_deu-Latn)": 69.43,
+                    "XPQARetrieval (eng-Latn_deu-Latn)": 25.14,
+                    "XPQARetrieval (deu-Latn_eng-Latn)": 52.36,
+                    "XPQARetrieval (spa-Latn_spa-Latn)": 55.71,
+                    "XPQARetrieval (eng-Latn_spa-Latn)": 22.5,
+                    "XPQARetrieval (spa-Latn_eng-Latn)": 42.4,
+                    "XPQARetrieval (fra-Latn_fra-Latn)": 57.17,
+                    "XPQARetrieval (eng-Latn_fra-Latn)": 27.69,
+                    "XPQARetrieval (fra-Latn_eng-Latn)": 47.46,
+                    "XPQARetrieval (hin-Deva_hin-Deva)": 68.15,
+                    "XPQARetrieval (eng-Latn_hin-Deva)": 25.82,
+                    "XPQARetrieval (hin-Deva_eng-Latn)": 63.79,
+                    "XPQARetrieval (ita-Latn_ita-Latn)": 67.71,
+                    "XPQARetrieval (eng-Latn_ita-Latn)": 22.97,
+                    "XPQARetrieval (ita-Latn_eng-Latn)": 46.61,
+                    "XPQARetrieval (jpn-Hira_jpn-Hira)": 69.49,
+                    "XPQARetrieval (eng-Latn_jpn-Hira)": 25.08,
+                    "XPQARetrieval (jpn-Hira_eng-Latn)": 54.6,
+                    "XPQARetrieval (kor-Hang_kor-Hang)": 33.0,
+                    "XPQARetrieval (eng-Latn_kor-Hang)": 22.49,
+                    "XPQARetrieval (kor-Hang_eng-Latn)": 23.02,
+                    "XPQARetrieval (pol-Latn_pol-Latn)": 43.37,
+                    "XPQARetrieval (eng-Latn_pol-Latn)": 19.89,
+                    "XPQARetrieval (pol-Latn_eng-Latn)": 28.72,
+                    "XPQARetrieval (por-Latn_por-Latn)": 41.8,
+                    "XPQARetrieval (eng-Latn_por-Latn)": 15.79,
+                    "XPQARetrieval (por-Latn_eng-Latn)": 33.74,
+                    "XPQARetrieval (tam-Taml_tam-Taml)": 31.65,
+                    "XPQARetrieval (eng-Latn_tam-Taml)": 13.18,
+                    "XPQARetrieval (tam-Taml_eng-Latn)": 26.44,
+                    "XPQARetrieval (cmn-Hans_cmn-Hans)": 63.98,
+                    "XPQARetrieval (eng-Latn_cmn-Hans)": 16.52,
+                    "XPQARetrieval (cmn-Hans_eng-Latn)": 45.32,
+                    "XPQARetrieval (fr)": 57.47
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "multilingual-e5-small",
+                    "AFQMC (cmn-Hans)": 25.21,
+                    "AFQMC": 25.21,
+                    "ATEC (cmn-Hans)": 35.14,
+                    "ATEC": 35.14,
+                    "BIOSSES": 82.46,
+                    "BQ (cmn-Hans)": 43.27,
+                    "BQ": 43.27,
+                    "CDSC-R (pol-Latn)": 90.27,
+                    "CDSC-R": 90.27,
+                    "LCQMC (cmn-Hans)": 72.7,
+                    "LCQMC": 72.7,
+                    "PAWSX (cmn-Hans)": 11.0,
+                    "PAWSX": 11.01,
+                    "QBQTC": 30.25,
+                    "RUParaPhraserSTS (rus-Cyrl)": 70.46,
+                    "RuSTSBenchmarkSTS (rus-Cyrl)": 78.08,
+                    "SICK-R": 77.51,
+                    "SICK-R-PL (pol-Latn)": 69.45,
+                    "SICK-R-PL": 69.46,
+                    "SICKFr (fra-Latn)": 74.67,
+                    "SICKFr": 75.62,
+                    "STS12": 76.56,
+                    "STS13": 76.97,
+                    "STS14": 75.52,
+                    "STS15": 87.12,
+                    "STS16": 83.63,
+                    "STS17 (ita-Latn_eng-Latn)": 77.31,
+                    "STS17 (en-en)": 86.42,
+                    "STS17 (eng-Latn_ara-Arab)": 57.39,
+                    "STS17 (eng-Latn_tur-Latn)": 55.93,
+                    "STS17 (spa-Latn_eng-Latn)": 72.43,
+                    "STS17 (kor-Hang)": 78.87,
+                    "STS17 (spa-Latn)": 84.83,
+                    "STS17 (eng-Latn_deu-Latn)": 76.82,
+                    "STS17 (fra-Latn_eng-Latn)": 72.28,
+                    "STS17 (nld-Latn_eng-Latn)": 75.43,
+                    "STS17 (ara-Arab)": 73.0,
+                    "STS22 (pol-Latn_eng-Latn)": 72.69,
+                    "STS22 (deu-Latn_eng-Latn)": 56.07,
+                    "STS22 (spa-Latn)": 66.86,
+                    "STS22 (ara-Arab)": 56.65,
+                    "STS22 (rus-Cyrl)": 59.9,
+                    "STS22 (deu-Latn)": 53.45,
+                    "STS22 (cmn-Hans_eng-Latn)": 65.32,
+                    "STS22 (en)": 61.25,
+                    "STS22 (fra-Latn)": 76.58,
+                    "STS22 (ita-Latn)": 76.53,
+                    "STS22 (spa-Latn_ita-Latn)": 71.74,
+                    "STS22 (spa-Latn_eng-Latn)": 74.2,
+                    "STS22 (deu-Latn_fra-Latn)": 60.62,
+                    "STS22 (tur-Latn)": 63.69,
+                    "STS22 (pol-Latn)": 35.78,
+                    "STS22 (fra-Latn_pol-Latn)": 84.52,
+                    "STS22 (cmn-Hans)": 66.85,
+                    "STS22 (deu-Latn_pol-Latn)": 28.24,
+                    "STS22 (pl)": 35.8,
+                    "STSB (cmn-Hans)": 77.73,
+                    "STSB": 77.73,
+                    "STSBenchmark": 84.11,
+                    "STSBenchmarkMultilingualSTS (en)": 84.11,
+                    "STSBenchmarkMultilingualSTS (cmn-Hans)": 78.49,
+                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 78.24,
+                    "STSBenchmarkMultilingualSTS (spa-Latn)": 80.31,
+                    "STSBenchmarkMultilingualSTS (deu-Latn)": 79.17,
+                    "STSBenchmarkMultilingualSTS (fra-Latn)": 79.2,
+                    "STSBenchmarkMultilingualSTS (nld-Latn)": 76.04,
+                    "STSBenchmarkMultilingualSTS (pol-Latn)": 72.61,
+                    "STSBenchmarkMultilingualSTS (por-Latn)": 77.39,
+                    "STSBenchmarkMultilingualSTS (ita-Latn)": 78.21,
+                    "STSBenchmarkMultilingualSTS (fr)": 79.32
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "multilingual-e5-small",
+                    "SummEval": 30.04,
+                    "SummEvalFr (fra-Latn)": 31.14,
+                    "SummEvalFr": 31.85
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "multilingual-e5-small",
+                    "CEDRClassification (rus-Cyrl)": 40.07,
+                    "SensitiveTopicsClassification (rus-Cyrl)": 23.91
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "multilingual-e5-small"
+                }
+            ]
+        }
+    },
+    "rubert-base-cased": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "rubert-base-cased",
+                    "Tatoeba (rus-Cyrl_eng-Latn)": 16.76
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "rubert-base-cased",
+                    "GeoreviewClassification (rus-Cyrl)": 37.22,
+                    "HeadlineClassification (rus-Cyrl)": 75.23,
+                    "InappropriatenessClassification (rus-Cyrl)": 57.34,
+                    "KinopoiskClassification (rus-Cyrl)": 49.91,
+                    "MassiveIntentClassification (rus-Cyrl)": 53.02,
+                    "MassiveScenarioClassification (rus-Cyrl)": 56.79,
+                    "RuReviewsClassification (rus-Cyrl)": 50.74,
+                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 48.03,
+                    "RuSciBenchOECDClassification (rus-Cyrl)": 36.13
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "rubert-base-cased",
+                    "GeoreviewClusteringP2P (rus-Cyrl)": 28.77,
+                    "MLSUMClusteringP2P (rus-Cyrl)": 41.42,
+                    "MLSUMClusteringS2S (rus-Cyrl)": 40.52,
+                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 28.29,
+                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 26.67
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "rubert-base-cased",
+                    "OpusparcusPC (rus-Cyrl)": 81.65,
+                    "TERRa (rus-Cyrl)": 52.12
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "rubert-base-cased",
+                    "RuBQReranking (rus-Cyrl)": 41.65
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "rubert-base-cased",
+                    "RiaNewsRetrieval (rus-Cyrl)": 5.58,
+                    "RuBQRetrieval (rus-Cyrl)": 9.52
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "rubert-base-cased",
+                    "RUParaPhraserSTS (rus-Cyrl)": 49.72,
+                    "RuSTSBenchmarkSTS (rus-Cyrl)": 53.95,
+                    "STS22 (rus-Cyrl)": 34.98,
+                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 53.76
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "rubert-base-cased"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "rubert-base-cased",
+                    "CEDRClassification (rus-Cyrl)": 33.59,
+                    "SensitiveTopicsClassification (rus-Cyrl)": 18.8
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "rubert-base-cased"
+                }
+            ]
+        }
+    },
+    "bge-small-en-v1.5": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "bge-small-en-v1.5"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "bge-small-en-v1.5"
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "bge-small-en-v1.5"
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "bge-small-en-v1.5"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "bge-small-en-v1.5"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "bge-small-en-v1.5",
+                    "ARCChallenge": 8.95,
+                    "AlphaNLI": 11.64,
+                    "HellaSwag": 25.44,
+                    "PIQA": 23.92,
+                    "Quail": 1.75,
+                    "RARbCode": 42.36,
+                    "RARbMath": 44.98,
+                    "SIQA": 0.77,
+                    "SpartQA": 3.55,
+                    "TempReasonL1": 1.41,
+                    "TempReasonL2Fact": 17.56,
+                    "TempReasonL2Pure": 1.05,
+                    "TempReasonL3Fact": 13.88,
+                    "TempReasonL3Pure": 4.76,
+                    "WinoGrande": 10.28
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "bge-small-en-v1.5"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "bge-small-en-v1.5"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "bge-small-en-v1.5"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "bge-small-en-v1.5"
+                }
+            ]
+        }
+    },
+    "Cohere-embed-english-v3.0-instruct": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "Cohere-embed-english-v3.0-instruct"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "Cohere-embed-english-v3.0-instruct"
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "Cohere-embed-english-v3.0-instruct"
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "Cohere-embed-english-v3.0-instruct"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "Cohere-embed-english-v3.0-instruct"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "Cohere-embed-english-v3.0-instruct",
+                    "ARCChallenge": 10.1,
+                    "AlphaNLI": 18.75,
+                    "HellaSwag": 29.02,
+                    "PIQA": 27.89,
+                    "Quail": 7.77,
+                    "RARbCode": 56.56,
+                    "RARbMath": 72.05,
+                    "SIQA": 5.03,
+                    "SpartQA": 3.33,
+                    "TempReasonL1": 1.43,
+                    "TempReasonL2Fact": 40.46,
+                    "TempReasonL2Pure": 2.39,
+                    "TempReasonL3Fact": 33.87,
+                    "TempReasonL3Pure": 7.52,
+                    "WinoGrande": 65.02
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "Cohere-embed-english-v3.0-instruct"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "Cohere-embed-english-v3.0-instruct"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "Cohere-embed-english-v3.0-instruct"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "Cohere-embed-english-v3.0-instruct"
+                }
+            ]
+        }
+    },
+    "universal-sentence-encoder-multilingual-3": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "universal-sentence-encoder-multilingual-3"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "universal-sentence-encoder-multilingual-3",
+                    "AmazonReviewsClassification (fr)": 33.51,
+                    "MTOPDomainClassification (fr)": 85.5,
+                    "MTOPIntentClassification (fr)": 53.98,
+                    "MasakhaNEWSClassification (fra)": 82.06,
+                    "MassiveIntentClassification (fr)": 61.19,
+                    "MassiveScenarioClassification (fr)": 70.22
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "universal-sentence-encoder-multilingual-3",
+                    "AlloProfClusteringP2P": 56.9,
+                    "AlloProfClusteringS2S": 37.84,
+                    "HALClusteringS2S": 18.95,
+                    "MLSUMClusteringP2P": 43.9,
+                    "MLSUMClusteringS2S": 35.5,
+                    "MasakhaNEWSClusteringP2P (fra)": 60.57,
+                    "MasakhaNEWSClusteringS2S (fra)": 40.31
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "universal-sentence-encoder-multilingual-3",
+                    "OpusparcusPC (fr)": 91.46,
+                    "PawsXPairClassification (fr)": 52.39
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "universal-sentence-encoder-multilingual-3",
+                    "AlloprofReranking": 56.23,
+                    "SyntecReranking": 73.85
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "universal-sentence-encoder-multilingual-3",
+                    "AlloprofRetrieval": 35.27,
+                    "BSARDRetrieval": 0.0,
+                    "MintakaRetrieval (fr)": 26.12,
+                    "SyntecRetrieval": 69.82,
+                    "XPQARetrieval (fr)": 59.59
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "universal-sentence-encoder-multilingual-3",
+                    "SICKFr": 71.37,
+                    "STS22 (fr)": 77.91,
+                    "STSBenchmarkMultilingualSTS (fr)": 75.48
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "universal-sentence-encoder-multilingual-3",
+                    "SummEvalFr": 28.21
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "universal-sentence-encoder-multilingual-3"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "universal-sentence-encoder-multilingual-3"
+                }
+            ]
+        }
+    },
+    "USER-bge-m3": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "USER-bge-m3",
+                    "Tatoeba (rus-Cyrl_eng-Latn)": 93.52
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "USER-bge-m3",
+                    "GeoreviewClassification (rus-Cyrl)": 50.98,
+                    "HeadlineClassification (rus-Cyrl)": 70.09,
+                    "InappropriatenessClassification (rus-Cyrl)": 60.76,
+                    "KinopoiskClassification (rus-Cyrl)": 63.33,
+                    "MassiveIntentClassification (rus-Cyrl)": 68.85,
+                    "MassiveScenarioClassification (rus-Cyrl)": 72.9,
+                    "RuReviewsClassification (rus-Cyrl)": 68.52,
+                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 57.67,
+                    "RuSciBenchOECDClassification (rus-Cyrl)": 44.2
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "USER-bge-m3",
+                    "GeoreviewClusteringP2P (rus-Cyrl)": 62.79,
+                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 53.11,
+                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 44.93
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "USER-bge-m3",
+                    "OpusparcusPC (rus-Cyrl)": 90.73,
+                    "TERRa (rus-Cyrl)": 64.99
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "USER-bge-m3",
+                    "RuBQReranking (rus-Cyrl)": 73.08
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "USER-bge-m3",
+                    "RiaNewsRetrieval (rus-Cyrl)": 83.53,
+                    "RuBQRetrieval (rus-Cyrl)": 70.03
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "USER-bge-m3",
+                    "RUParaPhraserSTS (rus-Cyrl)": 76.36,
+                    "RuSTSBenchmarkSTS (rus-Cyrl)": 83.35,
+                    "STS22 (rus-Cyrl)": 66.42,
+                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 82.96
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "USER-bge-m3"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "USER-bge-m3",
+                    "CEDRClassification (rus-Cyrl)": 45.48,
+                    "SensitiveTopicsClassification (rus-Cyrl)": 26.29
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "USER-bge-m3"
+                }
+            ]
+        }
+    },
+    "DanskBERT": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "DanskBERT",
+                    "BornholmBitextMining": 6.34
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "DanskBERT",
+                    "AngryTweetsClassification": 54.28,
+                    "DKHateClassification": 59.3,
+                    "DanishPoliticalCommentsClassification": 39.81,
+                    "LccSentimentClassification": 58.0,
+                    "MassiveIntentClassification (da)": 54.68,
+                    "MassiveIntentClassification (nb)": 45.38,
+                    "MassiveIntentClassification (sv)": 40.82,
+                    "MassiveScenarioClassification (da)": 59.56,
+                    "MassiveScenarioClassification (nb)": 47.55,
+                    "MassiveScenarioClassification (sv)": 40.14,
+                    "NoRecClassification": 46.06,
+                    "NordicLangClassification": 74.25,
+                    "NorwegianParliament": 56.79,
+                    "ScalaDaClassification": 66.59,
+                    "ScalaNbClassification": 59.99
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "DanskBERT"
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "DanskBERT"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "DanskBERT"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "DanskBERT"
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "DanskBERT"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "DanskBERT"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "DanskBERT"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "DanskBERT"
+                }
+            ]
+        }
+    },
+    "rubert-tiny2": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "rubert-tiny2"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "rubert-tiny2",
+                    "GeoreviewClassification (rus-Cyrl)": 39.64,
+                    "HeadlineClassification (rus-Cyrl)": 74.19,
+                    "InappropriatenessClassification (rus-Cyrl)": 58.57,
+                    "KinopoiskClassification (rus-Cyrl)": 49.06,
+                    "MassiveIntentClassification (rus-Cyrl)": 50.83,
+                    "MassiveScenarioClassification (rus-Cyrl)": 59.15,
+                    "RuReviewsClassification (rus-Cyrl)": 56.99,
+                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 45.63,
+                    "RuSciBenchOECDClassification (rus-Cyrl)": 35.48
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "rubert-tiny2",
+                    "GeoreviewClusteringP2P (rus-Cyrl)": 44.18,
+                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 41.41,
+                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 38.09
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "rubert-tiny2",
+                    "TERRa (rus-Cyrl)": 51.87
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "rubert-tiny2",
+                    "MIRACLReranking (rus-Cyrl)": 15.81,
+                    "RuBQReranking (rus-Cyrl)": 46.09
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "rubert-tiny2",
+                    "MIRACLRetrieval (rus-Cyrl)": 1.89,
+                    "RiaNewsRetrieval (rus-Cyrl)": 13.92,
+                    "RuBQRetrieval (rus-Cyrl)": 10.87
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "rubert-tiny2",
+                    "RUParaPhraserSTS (rus-Cyrl)": 65.14,
+                    "RuSTSBenchmarkSTS (rus-Cyrl)": 69.43,
+                    "STS22 (rus-Cyrl)": 50.23
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "rubert-tiny2"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "rubert-tiny2",
+                    "CEDRClassification (rus-Cyrl)": 36.87,
+                    "SensitiveTopicsClassification (rus-Cyrl)": 22.02
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "rubert-tiny2"
+                }
+            ]
+        }
+    },
+    "bert-base-multilingual-uncased": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "bert-base-multilingual-uncased"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "bert-base-multilingual-uncased",
+                    "AmazonReviewsClassification (fr)": 29.02,
+                    "MTOPDomainClassification (fr)": 64.49,
+                    "MTOPIntentClassification (fr)": 39.4,
+                    "MasakhaNEWSClassification (fra)": 75.69,
+                    "MassiveIntentClassification (fr)": 38.01,
+                    "MassiveScenarioClassification (fr)": 43.63
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "bert-base-multilingual-uncased",
+                    "AlloProfClusteringP2P": 60.66,
+                    "AlloProfClusteringS2S": 35.05,
+                    "HALClusteringS2S": 20.9,
+                    "MLSUMClusteringP2P": 43.5,
+                    "MLSUMClusteringS2S": 30.99,
+                    "MasakhaNEWSClusteringP2P (fra)": 49.71,
+                    "MasakhaNEWSClusteringS2S (fra)": 42.23
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "bert-base-multilingual-uncased",
+                    "OpusparcusPC (fr)": 87.43,
+                    "PawsXPairClassification (fr)": 53.22
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "bert-base-multilingual-uncased",
+                    "AlloprofReranking": 38.85,
+                    "SyntecReranking": 66.4
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "bert-base-multilingual-uncased",
+                    "AlloprofRetrieval": 5.51,
+                    "BSARDRetrieval": 0.0,
+                    "MintakaRetrieval (fr)": 2.87,
+                    "SyntecRetrieval": 34.95,
+                    "XPQARetrieval (fr)": 26.12
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "bert-base-multilingual-uncased",
+                    "SICKFr": 58.26,
+                    "STS22 (fr)": 56.47,
+                    "STSBenchmarkMultilingualSTS (fr)": 54.97
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "bert-base-multilingual-uncased",
+                    "SummEvalFr": 30.72
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "bert-base-multilingual-uncased"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "bert-base-multilingual-uncased"
+                }
+            ]
+        }
+    },
+    "bert-base-multilingual-cased": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "bert-base-multilingual-cased"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "bert-base-multilingual-cased",
+                    "AmazonReviewsClassification (fr)": 29.39,
+                    "MTOPDomainClassification (fr)": 63.61,
+                    "MTOPIntentClassification (fr)": 37.84,
+                    "MasakhaNEWSClassification (fra)": 64.0,
+                    "MassiveIntentClassification (fr)": 37.3,
+                    "MassiveScenarioClassification (fr)": 44.47
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "bert-base-multilingual-cased",
+                    "AlloProfClusteringP2P": 51.5,
+                    "AlloProfClusteringS2S": 43.06,
+                    "HALClusteringS2S": 20.81,
+                    "MLSUMClusteringP2P": 40.9,
+                    "MLSUMClusteringS2S": 31.8,
+                    "MasakhaNEWSClusteringP2P (fra)": 24.23,
+                    "MasakhaNEWSClusteringS2S (fra)": 24.46
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "bert-base-multilingual-cased",
+                    "OpusparcusPC (fr)": 86.77,
+                    "PawsXPairClassification (fr)": 53.39
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "bert-base-multilingual-cased",
+                    "AlloprofReranking": 36.23,
+                    "SyntecReranking": 53.25
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "bert-base-multilingual-cased",
+                    "AlloprofRetrieval": 1.63,
+                    "BSARDRetrieval": 0.0,
+                    "MintakaRetrieval (fr)": 3.55,
+                    "SyntecRetrieval": 18.95,
+                    "XPQARetrieval (fr)": 18.49
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "bert-base-multilingual-cased",
+                    "SICKFr": 58.75,
+                    "STS22 (fr)": 39.05,
+                    "STSBenchmarkMultilingualSTS (fr)": 52.25
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "bert-base-multilingual-cased",
+                    "SummEvalFr": 28.81
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "bert-base-multilingual-cased"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "bert-base-multilingual-cased"
+                }
+            ]
+        }
+    },
+    "all-mpnet-base-v2": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "all-mpnet-base-v2",
+                    "BornholmBitextMining (dan-Latn)": 27.44,
+                    "Tatoeba (pol-Latn_eng-Latn)": 4.09,
+                    "Tatoeba (ita-Latn_eng-Latn)": 11.1,
+                    "Tatoeba (cat-Latn_eng-Latn)": 9.44,
+                    "Tatoeba (aze-Latn_eng-Latn)": 1.49,
+                    "Tatoeba (eus-Latn_eng-Latn)": 3.94,
+                    "Tatoeba (epo-Latn_eng-Latn)": 7.15,
+                    "Tatoeba (lit-Latn_eng-Latn)": 1.02,
+                    "Tatoeba (ast-Latn_eng-Latn)": 9.78,
+                    "Tatoeba (bul-Cyrl_eng-Latn)": 0.35,
+                    "Tatoeba (ceb-Latn_eng-Latn)": 4.41,
+                    "Tatoeba (mkd-Cyrl_eng-Latn)": 0.0,
+                    "Tatoeba (tzl-Latn_eng-Latn)": 3.55,
+                    "Tatoeba (zsm-Latn_eng-Latn)": 4.75,
+                    "Tatoeba (mhr-Cyrl_eng-Latn)": 0.17,
+                    "Tatoeba (pam-Latn_eng-Latn)": 4.32,
+                    "Tatoeba (amh-Ethi_eng-Latn)": 0.0,
+                    "Tatoeba (slv-Latn_eng-Latn)": 3.73,
+                    "Tatoeba (lvs-Latn_eng-Latn)": 2.98,
+                    "Tatoeba (sqi-Latn_eng-Latn)": 3.45,
+                    "Tatoeba (orv-Cyrl_eng-Latn)": 0.0,
+                    "Tatoeba (vie-Latn_eng-Latn)": 4.96,
+                    "Tatoeba (pes-Arab_eng-Latn)": 0.2,
+                    "Tatoeba (por-Latn_eng-Latn)": 10.48,
+                    "Tatoeba (dtp-Latn_eng-Latn)": 3.54,
+                    "Tatoeba (yid-Hebr_eng-Latn)": 0.08,
+                    "Tatoeba (isl-Latn_eng-Latn)": 3.86,
+                    "Tatoeba (cha-Latn_eng-Latn)": 12.2,
+                    "Tatoeba (ron-Latn_eng-Latn)": 7.34,
+                    "Tatoeba (hye-Armn_eng-Latn)": 0.14,
+                    "Tatoeba (mar-Deva_eng-Latn)": 0.11,
+                    "Tatoeba (hin-Deva_eng-Latn)": 0.02,
+                    "Tatoeba (kor-Hang_eng-Latn)": 0.32,
+                    "Tatoeba (srp-Cyrl_eng-Latn)": 1.89,
+                    "Tatoeba (csb-Latn_eng-Latn)": 4.19,
+                    "Tatoeba (jpn-Jpan_eng-Latn)": 1.71,
+                    "Tatoeba (ber-Tfng_eng-Latn)": 4.56,
+                    "Tatoeba (wuu-Hans_eng-Latn)": 0.91,
+                    "Tatoeba (jav-Latn_eng-Latn)": 3.17,
+                    "Tatoeba (nob-Latn_eng-Latn)": 4.37,
+                    "Tatoeba (bre-Latn_eng-Latn)": 3.65,
+                    "Tatoeba (kzj-Latn_eng-Latn)": 3.62,
+                    "Tatoeba (urd-Arab_eng-Latn)": 0.0,
+                    "Tatoeba (ces-Latn_eng-Latn)": 3.56,
+                    "Tatoeba (cbk-Latn_eng-Latn)": 9.33,
+                    "Tatoeba (gla-Latn_eng-Latn)": 2.04,
+                    "Tatoeba (war-Latn_eng-Latn)": 5.14,
+                    "Tatoeba (swh-Latn_eng-Latn)": 6.01,
+                    "Tatoeba (swg-Latn_eng-Latn)": 7.86,
+                    "Tatoeba (glg-Latn_eng-Latn)": 12.0,
+                    "Tatoeba (fao-Latn_eng-Latn)": 7.08,
+                    "Tatoeba (gsw-Latn_eng-Latn)": 10.67,
+                    "Tatoeba (rus-Cyrl_eng-Latn)": 0.14,
+                    "Tatoeba (kaz-Cyrl_eng-Latn)": 0.52,
+                    "Tatoeba (gle-Latn_eng-Latn)": 2.19,
+                    "Tatoeba (slk-Latn_eng-Latn)": 3.4,
+                    "Tatoeba (nno-Latn_eng-Latn)": 5.75,
+                    "Tatoeba (cor-Latn_eng-Latn)": 2.42,
+                    "Tatoeba (nov-Latn_eng-Latn)": 16.61,
+                    "Tatoeba (swe-Latn_eng-Latn)": 6.55,
+                    "Tatoeba (max-Deva_eng-Latn)": 6.46,
+                    "Tatoeba (oci-Latn_eng-Latn)": 8.57,
+                    "Tatoeba (lfn-Latn_eng-Latn)": 6.1,
+                    "Tatoeba (fra-Latn_eng-Latn)": 16.9,
+                    "Tatoeba (ben-Beng_eng-Latn)": 0.0,
+                    "Tatoeba (bel-Cyrl_eng-Latn)": 0.65,
+                    "Tatoeba (lat-Latn_eng-Latn)": 5.78,
+                    "Tatoeba (cmn-Hans_eng-Latn)": 2.22,
+                    "Tatoeba (kat-Geor_eng-Latn)": 0.43,
+                    "Tatoeba (bos-Latn_eng-Latn)": 4.6,
+                    "Tatoeba (xho-Latn_eng-Latn)": 3.3,
+                    "Tatoeba (tha-Thai_eng-Latn)": 0.0,
+                    "Tatoeba (cym-Latn_eng-Latn)": 4.88,
+                    "Tatoeba (deu-Latn_eng-Latn)": 11.46,
+                    "Tatoeba (awa-Deva_eng-Latn)": 0.44,
+                    "Tatoeba (ido-Latn_eng-Latn)": 9.84,
+                    "Tatoeba (tat-Cyrl_eng-Latn)": 0.24,
+                    "Tatoeba (kab-Latn_eng-Latn)": 1.31,
+                    "Tatoeba (uzb-Latn_eng-Latn)": 1.98,
+                    "Tatoeba (heb-Hebr_eng-Latn)": 0.28,
+                    "Tatoeba (ara-Arab_eng-Latn)": 0.1,
+                    "Tatoeba (fry-Latn_eng-Latn)": 12.43,
+                    "Tatoeba (afr-Latn_eng-Latn)": 6.08,
+                    "Tatoeba (kur-Latn_eng-Latn)": 3.65,
+                    "Tatoeba (pms-Latn_eng-Latn)": 7.63,
+                    "Tatoeba (ell-Grek_eng-Latn)": 0.0,
+                    "Tatoeba (spa-Latn_eng-Latn)": 10.12,
+                    "Tatoeba (dsb-Latn_eng-Latn)": 2.96,
+                    "Tatoeba (uig-Arab_eng-Latn)": 0.33,
+                    "Tatoeba (nld-Latn_eng-Latn)": 9.29,
+                    "Tatoeba (tel-Telu_eng-Latn)": 0.73,
+                    "Tatoeba (hrv-Latn_eng-Latn)": 3.77,
+                    "Tatoeba (nds-Latn_eng-Latn)": 10.96,
+                    "Tatoeba (hun-Latn_eng-Latn)": 3.23,
+                    "Tatoeba (est-Latn_eng-Latn)": 2.35,
+                    "Tatoeba (mal-Mlym_eng-Latn)": 0.15,
+                    "Tatoeba (khm-Khmr_eng-Latn)": 0.28,
+                    "Tatoeba (hsb-Latn_eng-Latn)": 3.12,
+                    "Tatoeba (tgl-Latn_eng-Latn)": 4.06,
+                    "Tatoeba (ang-Latn_eng-Latn)": 9.77,
+                    "Tatoeba (tur-Latn_eng-Latn)": 3.16,
+                    "Tatoeba (tuk-Latn_eng-Latn)": 2.23,
+                    "Tatoeba (ile-Latn_eng-Latn)": 17.84,
+                    "Tatoeba (mon-Cyrl_eng-Latn)": 0.81,
+                    "Tatoeba (yue-Hant_eng-Latn)": 1.16,
+                    "Tatoeba (ina-Latn_eng-Latn)": 22.55,
+                    "Tatoeba (tam-Taml_eng-Latn)": 0.73,
+                    "Tatoeba (ukr-Cyrl_eng-Latn)": 0.5,
+                    "Tatoeba (dan-Latn_eng-Latn)": 10.01,
+                    "Tatoeba (arq-Arab_eng-Latn)": 0.33,
+                    "Tatoeba (arz-Arab_eng-Latn)": 0.0,
+                    "Tatoeba (fin-Latn_eng-Latn)": 3.82,
+                    "Tatoeba (ind-Latn_eng-Latn)": 4.88
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "all-mpnet-base-v2",
+                    "AllegroReviews (pol-Latn)": 22.99,
+                    "AmazonCounterfactualClassification (en-ext)": 67.5,
+                    "AmazonCounterfactualClassification (en)": 65.03,
+                    "AmazonCounterfactualClassification (deu-Latn)": 55.66,
+                    "AmazonCounterfactualClassification (jpn-Jpan)": 60.69,
+                    "AmazonPolarityClassification": 67.14,
+                    "AmazonReviewsClassification (en)": 31.44,
+                    "AmazonReviewsClassification (deu-Latn)": 26.05,
+                    "AmazonReviewsClassification (spa-Latn)": 27.73,
+                    "AmazonReviewsClassification (fra-Latn)": 28.49,
+                    "AmazonReviewsClassification (jpn-Jpan)": 23.65,
+                    "AmazonReviewsClassification (cmn-Hans)": 23.62,
+                    "AngryTweetsClassification (dan-Latn)": 44.13,
+                    "Banking77Classification": 81.7,
+                    "CBD (pol-Latn)": 50.25,
+                    "DanishPoliticalCommentsClassification (dan-Latn)": 28.31,
+                    "EmotionClassification": 42.22,
+                    "GeoreviewClassification (rus-Cyrl)": 25.93,
+                    "HeadlineClassification (rus-Cyrl)": 28.53,
+                    "IFlyTek (cmn-Hans)": 17.18,
+                    "ImdbClassification": 71.17,
+                    "InappropriatenessClassification (rus-Cyrl)": 51.82,
+                    "JDReview (cmn-Hans)": 60.19,
+                    "KinopoiskClassification (rus-Cyrl)": 34.18,
+                    "LccSentimentClassification (dan-Latn)": 39.27,
+                    "MTOPDomainClassification (en)": 91.89,
+                    "MTOPDomainClassification (deu-Latn)": 71.86,
+                    "MTOPDomainClassification (spa-Latn)": 71.3,
+                    "MTOPDomainClassification (fra-Latn)": 74.88,
+                    "MTOPDomainClassification (hin-Deva)": 39.93,
+                    "MTOPDomainClassification (tha-Thai)": 17.54,
+                    "MTOPIntentClassification (en)": 68.27,
+                    "MTOPIntentClassification (deu-Latn)": 44.36,
+                    "MTOPIntentClassification (spa-Latn)": 39.48,
+                    "MTOPIntentClassification (fra-Latn)": 37.57,
+                    "MTOPIntentClassification (hin-Deva)": 18.63,
+                    "MTOPIntentClassification (tha-Thai)": 5.42,
+                    "MasakhaNEWSClassification (amh-Ethi)": 36.49,
+                    "MasakhaNEWSClassification (eng)": 79.75,
+                    "MasakhaNEWSClassification (fra-Latn)": 77.77,
+                    "MasakhaNEWSClassification (hau-Latn)": 59.22,
+                    "MasakhaNEWSClassification (ibo-Latn)": 61.64,
+                    "MasakhaNEWSClassification (lin-Latn)": 74.0,
+                    "MasakhaNEWSClassification (lug-Latn)": 58.43,
+                    "MasakhaNEWSClassification (orm-Ethi)": 48.15,
+                    "MasakhaNEWSClassification (pcm-Latn)": 92.2,
+                    "MasakhaNEWSClassification (run-Latn)": 64.72,
+                    "MasakhaNEWSClassification (sna-Latn)": 73.69,
+                    "MasakhaNEWSClassification (som-Latn)": 49.97,
+                    "MasakhaNEWSClassification (swa-Latn)": 55.15,
+                    "MasakhaNEWSClassification (tir-Ethi)": 27.46,
+                    "MasakhaNEWSClassification (xho-Latn)": 60.98,
+                    "MasakhaNEWSClassification (yor-Latn)": 63.33,
+                    "MassiveIntentClassification (en)": 69.76,
+                    "MassiveIntentClassification (jav-Latn)": 31.75,
+                    "MassiveIntentClassification (fra-Latn)": 44.27,
+                    "MassiveIntentClassification (msa-Latn)": 30.53,
+                    "MassiveIntentClassification (hun-Latn)": 34.38,
+                    "MassiveIntentClassification (pol-Latn)": 34.26,
+                    "MassiveIntentClassification (nld-Latn)": 38.49,
+                    "MassiveIntentClassification (tha-Thai)": 8.51,
+                    "MassiveIntentClassification (tur-Latn)": 32.02,
+                    "MassiveIntentClassification (tam-Taml)": 9.25,
+                    "MassiveIntentClassification (hye-Armn)": 10.11,
+                    "MassiveIntentClassification (khm-Khmr)": 4.74,
+                    "MassiveIntentClassification (lav-Latn)": 35.08,
+                    "MassiveIntentClassification (deu-Latn)": 44.54,
+                    "MassiveIntentClassification (spa-Latn)": 39.75,
+                    "MassiveIntentClassification (ben-Beng)": 12.35,
+                    "MassiveIntentClassification (por-Latn)": 42.83,
+                    "MassiveIntentClassification (ara-Arab)": 20.42,
+                    "MassiveIntentClassification (cym-Latn)": 30.82,
+                    "MassiveIntentClassification (dan-Latn)": 42.36,
+                    "MassiveIntentClassification (mya-Mymr)": 4.6,
+                    "MassiveIntentClassification (heb-Hebr)": 23.6,
+                    "MassiveIntentClassification (kan-Knda)": 3.76,
+                    "MassiveIntentClassification (swa-Latn)": 31.82,
+                    "MassiveIntentClassification (fas-Arab)": 22.45,
+                    "MassiveIntentClassification (hin-Deva)": 17.68,
+                    "MassiveIntentClassification (kat-Geor)": 7.66,
+                    "MassiveIntentClassification (mal-Mlym)": 2.64,
+                    "MassiveIntentClassification (fin-Latn)": 34.58,
+                    "MassiveIntentClassification (slv-Latn)": 34.49,
+                    "MassiveIntentClassification (afr-Latn)": 36.49,
+                    "MassiveIntentClassification (urd-Arab)": 12.86,
+                    "MassiveIntentClassification (ron-Latn)": 38.07,
+                    "MassiveIntentClassification (sqi-Latn)": 37.26,
+                    "MassiveIntentClassification (cmo-Hant)": 22.43,
+                    "MassiveIntentClassification (ita-Latn)": 40.29,
+                    "MassiveIntentClassification (ind-Latn)": 36.31,
+                    "MassiveIntentClassification (nob-Latn)": 39.3,
+                    "MassiveIntentClassification (jpn-Jpan)": 33.13,
+                    "MassiveIntentClassification (aze-Latn)": 28.92,
+                    "MassiveIntentClassification (mon-Cyrl)": 19.65,
+                    "MassiveIntentClassification (ell-Grek)": 24.52,
+                    "MassiveIntentClassification (rus-Cyrl)": 23.98,
+                    "MassiveIntentClassification (kor-Kore)": 13.35,
+                    "MassiveIntentClassification (cmo-Hans)": 24.36,
+                    "MassiveIntentClassification (isl-Latn)": 31.46,
+                    "MassiveIntentClassification (swe-Latn)": 39.02,
+                    "MassiveIntentClassification (tel-Telu)": 2.26,
+                    "MassiveIntentClassification (vie-Latn)": 31.47,
+                    "MassiveIntentClassification (tgl-Latn)": 36.33,
+                    "MassiveIntentClassification (amh-Ethi)": 2.39,
+                    "MassiveScenarioClassification (en)": 75.67,
+                    "MassiveScenarioClassification (tur-Latn)": 39.11,
+                    "MassiveScenarioClassification (kat-Geor)": 13.45,
+                    "MassiveScenarioClassification (jpn-Jpan)": 40.57,
+                    "MassiveScenarioClassification (spa-Latn)": 50.92,
+                    "MassiveScenarioClassification (fas-Arab)": 27.8,
+                    "MassiveScenarioClassification (hun-Latn)": 41.01,
+                    "MassiveScenarioClassification (jav-Latn)": 40.0,
+                    "MassiveScenarioClassification (por-Latn)": 52.06,
+                    "MassiveScenarioClassification (sqi-Latn)": 44.67,
+                    "MassiveScenarioClassification (lav-Latn)": 39.28,
+                    "MassiveScenarioClassification (deu-Latn)": 54.09,
+                    "MassiveScenarioClassification (nld-Latn)": 47.79,
+                    "MassiveScenarioClassification (mon-Cyrl)": 25.58,
+                    "MassiveScenarioClassification (swa-Latn)": 40.34,
+                    "MassiveScenarioClassification (ben-Beng)": 17.49,
+                    "MassiveScenarioClassification (cym-Latn)": 34.82,
+                    "MassiveScenarioClassification (swe-Latn)": 44.53,
+                    "MassiveScenarioClassification (rus-Cyrl)": 28.71,
+                    "MassiveScenarioClassification (fra-Latn)": 54.26,
+                    "MassiveScenarioClassification (dan-Latn)": 49.45,
+                    "MassiveScenarioClassification (mya-Mymr)": 10.8,
+                    "MassiveScenarioClassification (ron-Latn)": 47.86,
+                    "MassiveScenarioClassification (cmo-Hans)": 35.33,
+                    "MassiveScenarioClassification (hin-Deva)": 23.13,
+                    "MassiveScenarioClassification (cmo-Hant)": 31.7,
+                    "MassiveScenarioClassification (afr-Latn)": 43.63,
+                    "MassiveScenarioClassification (aze-Latn)": 36.42,
+                    "MassiveScenarioClassification (msa-Latn)": 37.28,
+                    "MassiveScenarioClassification (ell-Grek)": 33.85,
+                    "MassiveScenarioClassification (isl-Latn)": 39.36,
+                    "MassiveScenarioClassification (fin-Latn)": 38.41,
+                    "MassiveScenarioClassification (ind-Latn)": 43.05,
+                    "MassiveScenarioClassification (pol-Latn)": 42.66,
+                    "MassiveScenarioClassification (tam-Taml)": 14.55,
+                    "MassiveScenarioClassification (ita-Latn)": 51.37,
+                    "MassiveScenarioClassification (urd-Arab)": 20.0,
+                    "MassiveScenarioClassification (kan-Knda)": 8.34,
+                    "MassiveScenarioClassification (tel-Telu)": 7.81,
+                    "MassiveScenarioClassification (mal-Mlym)": 7.69,
+                    "MassiveScenarioClassification (ara-Arab)": 27.8,
+                    "MassiveScenarioClassification (kor-Kore)": 17.28,
+                    "MassiveScenarioClassification (vie-Latn)": 35.9,
+                    "MassiveScenarioClassification (amh-Ethi)": 7.43,
+                    "MassiveScenarioClassification (heb-Hebr)": 25.49,
+                    "MassiveScenarioClassification (hye-Armn)": 16.86,
+                    "MassiveScenarioClassification (khm-Khmr)": 9.63,
+                    "MassiveScenarioClassification (slv-Latn)": 39.88,
+                    "MassiveScenarioClassification (tgl-Latn)": 47.04,
+                    "MassiveScenarioClassification (nob-Latn)": 45.75,
+                    "MassiveScenarioClassification (tha-Thai)": 17.01,
+                    "MultilingualSentiment (cmn-Hans)": 41.2,
+                    "NoRecClassification (nob-Latn)": 38.34,
+                    "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 50.15,
+                    "OnlineShopping (cmn-Hans)": 56.94,
+                    "PAC (pol-Latn)": 62.1,
+                    "PolEmo2.0-IN (pol-Latn)": 41.63,
+                    "PolEmo2.0-OUT (pol-Latn)": 25.0,
+                    "RuReviewsClassification (rus-Cyrl)": 42.33,
+                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 13.29,
+                    "RuSciBenchOECDClassification (rus-Cyrl)": 10.62,
+                    "TNews (cmn-Hans)": 21.05,
+                    "ToxicConversationsClassification": 61.05,
+                    "TweetSentimentExtractionClassification": 55.05,
+                    "Waimai (cmn-Hans)": 63.31
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "all-mpnet-base-v2",
+                    "ArxivClusteringP2P": 48.38,
+                    "ArxivClusteringS2S": 39.72,
+                    "BiorxivClusteringP2P": 39.62,
+                    "BiorxivClusteringS2S": 35.02,
+                    "GeoreviewClusteringP2P (rus-Cyrl)": 20.33,
+                    "MasakhaNEWSClusteringP2P (amh-Ethi)": 42.49,
+                    "MasakhaNEWSClusteringP2P (eng)": 67.24,
+                    "MasakhaNEWSClusteringP2P (fra-Latn)": 61.99,
+                    "MasakhaNEWSClusteringP2P (hau-Latn)": 37.17,
+                    "MasakhaNEWSClusteringP2P (ibo-Latn)": 52.0,
+                    "MasakhaNEWSClusteringP2P (lin-Latn)": 69.68,
+                    "MasakhaNEWSClusteringP2P (lug-Latn)": 50.96,
+                    "MasakhaNEWSClusteringP2P (orm-Ethi)": 28.42,
+                    "MasakhaNEWSClusteringP2P (pcm-Latn)": 64.01,
+                    "MasakhaNEWSClusteringP2P (run-Latn)": 57.6,
+                    "MasakhaNEWSClusteringP2P (sna-Latn)": 54.99,
+                    "MasakhaNEWSClusteringP2P (som-Latn)": 31.16,
+                    "MasakhaNEWSClusteringP2P (swa-Latn)": 28.29,
+                    "MasakhaNEWSClusteringP2P (tir-Ethi)": 41.85,
+                    "MasakhaNEWSClusteringP2P (xho-Latn)": 35.24,
+                    "MasakhaNEWSClusteringP2P (yor-Latn)": 42.15,
+                    "MasakhaNEWSClusteringS2S (amh-Ethi)": 44.48,
+                    "MasakhaNEWSClusteringS2S (eng)": 35.69,
+                    "MasakhaNEWSClusteringS2S (fra-Latn)": 41.05,
+                    "MasakhaNEWSClusteringS2S (hau-Latn)": 16.64,
+                    "MasakhaNEWSClusteringS2S (ibo-Latn)": 38.63,
+                    "MasakhaNEWSClusteringS2S (lin-Latn)": 70.72,
+                    "MasakhaNEWSClusteringS2S (lug-Latn)": 46.97,
+                    "MasakhaNEWSClusteringS2S (orm-Ethi)": 23.85,
+                    "MasakhaNEWSClusteringS2S (pcm-Latn)": 68.7,
+                    "MasakhaNEWSClusteringS2S (run-Latn)": 52.27,
+                    "MasakhaNEWSClusteringS2S (sna-Latn)": 47.64,
+                    "MasakhaNEWSClusteringS2S (som-Latn)": 30.94,
+                    "MasakhaNEWSClusteringS2S (swa-Latn)": 17.12,
+                    "MasakhaNEWSClusteringS2S (tir-Ethi)": 42.01,
+                    "MasakhaNEWSClusteringS2S (xho-Latn)": 24.16,
+                    "MasakhaNEWSClusteringS2S (yor-Latn)": 35.04,
+                    "MedrxivClusteringP2P": 35.58,
+                    "MedrxivClusteringS2S": 32.87,
+                    "RedditClustering": 54.82,
+                    "RedditClusteringP2P": 56.77,
+                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 14.66,
+                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 12.49,
+                    "StackExchangeClustering": 53.8,
+                    "StackExchangeClusteringP2P": 34.28,
+                    "TwentyNewsgroupsClustering": 49.74
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "all-mpnet-base-v2",
+                    "CDSC-E (pol-Latn)": 45.37,
+                    "OpusparcusPC (deu-Latn)": 89.78,
+                    "OpusparcusPC (en)": 97.75,
+                    "OpusparcusPC (fin-Latn)": 85.82,
+                    "OpusparcusPC (fra-Latn)": 86.61,
+                    "OpusparcusPC (rus-Cyrl)": 79.85,
+                    "OpusparcusPC (swe-Latn)": 81.81,
+                    "PSC (pol-Latn)": 83.28,
+                    "PawsXPairClassification (deu-Latn)": 52.17,
+                    "PawsXPairClassification (en)": 61.99,
+                    "PawsXPairClassification (spa-Latn)": 55.06,
+                    "PawsXPairClassification (fra-Latn)": 56.42,
+                    "PawsXPairClassification (jpn-Hira)": 47.43,
+                    "PawsXPairClassification (kor-Hang)": 49.75,
+                    "PawsXPairClassification (cmn-Hans)": 52.47,
+                    "SICK-E-PL (pol-Latn)": 46.51,
+                    "SprintDuplicateQuestions": 90.15,
+                    "TERRa (rus-Cyrl)": 44.52,
+                    "TwitterSemEval2015": 73.85,
+                    "TwitterURLCorpus": 85.11
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "all-mpnet-base-v2",
+                    "AlloprofReranking (fra-Latn)": 69.63,
+                    "AskUbuntuDupQuestions": 65.85,
+                    "MMarcoReranking (cmn-Hans)": 4.65,
+                    "MindSmallReranking": 30.97,
+                    "RuBQReranking (rus-Cyrl)": 30.96,
+                    "SciDocsRR": 88.65,
+                    "StackOverflowDupQuestions": 51.98,
+                    "SyntecReranking (fra-Latn)": 66.12,
+                    "T2Reranking (cmn-Hans)": 58.3
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "all-mpnet-base-v2",
+                    "AILACasedocs": 22.51,
+                    "AILAStatutes": 21.27,
+                    "ARCChallenge": 11.8,
+                    "AlloprofRetrieval (fra-Latn)": 34.27,
+                    "AlphaNLI": 22.41,
+                    "ArguAna": 46.52,
+                    "ArguAna-PL (pol-Latn)": 14.72,
+                    "BSARDRetrieval (fra-Latn)": 6.98,
+                    "CQADupstackRetrieval": 44.96,
+                    "ClimateFEVER": 21.97,
+                    "CmedqaRetrieval (cmn-Hans)": 2.0,
+                    "CovidRetrieval (cmn-Hans)": 3.7,
+                    "DBPedia": 32.09,
+                    "DuRetrieval (cmn-Hans)": 4.92,
+                    "EcomRetrieval (cmn-Hans)": 3.94,
+                    "FEVER": 50.86,
+                    "FiQA-PL (pol-Latn)": 3.6,
+                    "FiQA2018": 49.96,
+                    "GerDaLIRSmall (deu-Latn)": 3.78,
+                    "HellaSwag": 26.27,
+                    "HotpotQA": 39.29,
+                    "LEMBNarrativeQARetrieval": 19.34,
+                    "LEMBNeedleRetrieval": 16.0,
+                    "LEMBPasskeyRetrieval": 24.5,
+                    "LEMBQMSumRetrieval": 21.54,
+                    "LEMBSummScreenFDRetrieval": 60.43,
+                    "LEMBWikimQARetrieval": 44.92,
+                    "LeCaRDv2 (zho-Hans)": 18.09,
+                    "LegalBenchConsumerContractsQA": 75.25,
+                    "LegalBenchCorporateLobbying": 89.04,
+                    "LegalQuAD (deu-Latn)": 10.67,
+                    "LegalSummarization": 58.55,
+                    "MMarcoRetrieval (cmn-Hans)": 7.13,
+                    "MSMARCO": 39.75,
+                    "MedicalRetrieval (cmn-Hans)": 1.71,
+                    "MintakaRetrieval (ara-Arab)": 1.97,
+                    "MintakaRetrieval (deu-Latn)": 17.21,
+                    "MintakaRetrieval (spa-Latn)": 10.11,
+                    "MintakaRetrieval (fra-Latn)": 12.93,
+                    "MintakaRetrieval (hin-Deva)": 2.05,
+                    "MintakaRetrieval (ita-Latn)": 5.63,
+                    "MintakaRetrieval (jpn-Hira)": 6.72,
+                    "MintakaRetrieval (por-Latn)": 8.05,
+                    "NFCorpus": 33.29,
+                    "NFCorpus-PL (pol-Latn)": 8.77,
+                    "NQ": 50.45,
+                    "PIQA": 29.03,
+                    "Quail": 3.41,
+                    "QuoraRetrieval": 87.46,
+                    "RARbCode": 53.21,
+                    "RARbMath": 71.85,
+                    "RuBQRetrieval (rus-Cyrl)": 4.75,
+                    "SCIDOCS": 23.76,
+                    "SCIDOCS-PL (pol-Latn)": 4.02,
+                    "SIQA": 2.38,
+                    "SciFact": 65.57,
+                    "SciFact-PL (pol-Latn)": 13.31,
+                    "SpartQA": 0.22,
+                    "SyntecRetrieval (fra-Latn)": 57.39,
+                    "T2Retrieval (cmn-Hans)": 2.98,
+                    "TRECCOVID": 51.33,
+                    "TRECCOVID-PL (pol-Latn)": 12.12,
+                    "TempReasonL1": 1.77,
+                    "TempReasonL2Fact": 11.2,
+                    "TempReasonL2Pure": 1.15,
+                    "TempReasonL3Fact": 9.42,
+                    "TempReasonL3Pure": 5.59,
+                    "Touche2020": 19.93,
+                    "VideoRetrieval (cmn-Hans)": 8.48,
+                    "WinoGrande": 20.8,
+                    "XPQARetrieval (ara-Arab_ara-Arab)": 9.42,
+                    "XPQARetrieval (eng-Latn_ara-Arab)": 2.39,
+                    "XPQARetrieval (ara-Arab_eng-Latn)": 8.98,
+                    "XPQARetrieval (deu-Latn_deu-Latn)": 55.82,
+                    "XPQARetrieval (eng-Latn_deu-Latn)": 11.74,
+                    "XPQARetrieval (deu-Latn_eng-Latn)": 30.44,
+                    "XPQARetrieval (spa-Latn_spa-Latn)": 40.01,
+                    "XPQARetrieval (eng-Latn_spa-Latn)": 6.12,
+                    "XPQARetrieval (spa-Latn_eng-Latn)": 29.44,
+                    "XPQARetrieval (fra-Latn_fra-Latn)": 51.94,
+                    "XPQARetrieval (eng-Latn_fra-Latn)": 11.48,
+                    "XPQARetrieval (fra-Latn_eng-Latn)": 32.52,
+                    "XPQARetrieval (hin-Deva_hin-Deva)": 37.48,
+                    "XPQARetrieval (eng-Latn_hin-Deva)": 5.11,
+                    "XPQARetrieval (hin-Deva_eng-Latn)": 7.37,
+                    "XPQARetrieval (ita-Latn_ita-Latn)": 54.2,
+                    "XPQARetrieval (eng-Latn_ita-Latn)": 6.08,
+                    "XPQARetrieval (ita-Latn_eng-Latn)": 30.32,
+                    "XPQARetrieval (jpn-Hira_jpn-Hira)": 37.45,
+                    "XPQARetrieval (eng-Latn_jpn-Hira)": 5.79,
+                    "XPQARetrieval (jpn-Hira_eng-Latn)": 14.77,
+                    "XPQARetrieval (kor-Hang_kor-Hang)": 10.4,
+                    "XPQARetrieval (eng-Latn_kor-Hang)": 7.09,
+                    "XPQARetrieval (kor-Hang_eng-Latn)": 6.95,
+                    "XPQARetrieval (pol-Latn_pol-Latn)": 23.67,
+                    "XPQARetrieval (eng-Latn_pol-Latn)": 8.83,
+                    "XPQARetrieval (pol-Latn_eng-Latn)": 15.94,
+                    "XPQARetrieval (por-Latn_por-Latn)": 33.56,
+                    "XPQARetrieval (eng-Latn_por-Latn)": 3.76,
+                    "XPQARetrieval (por-Latn_eng-Latn)": 23.45,
+                    "XPQARetrieval (tam-Taml_tam-Taml)": 5.53,
+                    "XPQARetrieval (eng-Latn_tam-Taml)": 3.3,
+                    "XPQARetrieval (tam-Taml_eng-Latn)": 4.0,
+                    "XPQARetrieval (cmn-Hans_cmn-Hans)": 23.84,
+                    "XPQARetrieval (eng-Latn_cmn-Hans)": 7.2,
+                    "XPQARetrieval (cmn-Hans_eng-Latn)": 12.84
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "all-mpnet-base-v2",
+                    "AFQMC (cmn-Hans)": 8.01,
+                    "ATEC (cmn-Hans)": 14.03,
+                    "BIOSSES": 80.43,
+                    "BQ (cmn-Hans)": 21.39,
+                    "CDSC-R (pol-Latn)": 77.04,
+                    "LCQMC (cmn-Hans)": 22.84,
+                    "PAWSX (cmn-Hans)": 6.44,
+                    "RUParaPhraserSTS (rus-Cyrl)": 42.15,
+                    "RuSTSBenchmarkSTS (rus-Cyrl)": 55.68,
+                    "SICK-R": 80.59,
+                    "SICK-R-PL (pol-Latn)": 50.2,
+                    "SICKFr (fra-Latn)": 67.05,
+                    "STS12": 72.63,
+                    "STS13": 83.48,
+                    "STS14": 78.0,
+                    "STS15": 85.66,
+                    "STS16": 80.03,
+                    "STS17 (en-en)": 90.6,
+                    "STS17 (eng-Latn_ara-Arab)": 6.76,
+                    "STS17 (fra-Latn_eng-Latn)": 41.64,
+                    "STS17 (eng-Latn_tur-Latn)": -4.58,
+                    "STS17 (eng-Latn_deu-Latn)": 35.5,
+                    "STS17 (spa-Latn_eng-Latn)": 25.28,
+                    "STS17 (ita-Latn_eng-Latn)": 31.8,
+                    "STS17 (spa-Latn)": 78.4,
+                    "STS17 (kor-Hang)": 39.11,
+                    "STS17 (ara-Arab)": 55.42,
+                    "STS17 (nld-Latn_eng-Latn)": 32.89,
+                    "STS22 (en)": 68.39,
+                    "STS22 (pol-Latn)": 24.21,
+                    "STS22 (ita-Latn)": 58.02,
+                    "STS22 (spa-Latn_eng-Latn)": 55.09,
+                    "STS22 (fra-Latn)": 77.1,
+                    "STS22 (tur-Latn)": 29.35,
+                    "STS22 (cmn-Hans)": 42.24,
+                    "STS22 (deu-Latn)": 27.0,
+                    "STS22 (spa-Latn_ita-Latn)": 41.61,
+                    "STS22 (fra-Latn_pol-Latn)": 73.25,
+                    "STS22 (deu-Latn_eng-Latn)": 49.73,
+                    "STS22 (cmn-Hans_eng-Latn)": 40.47,
+                    "STS22 (spa-Latn)": 55.98,
+                    "STS22 (ara-Arab)": 38.96,
+                    "STS22 (deu-Latn_pol-Latn)": 23.53,
+                    "STS22 (rus-Cyrl)": 15.83,
+                    "STS22 (pol-Latn_eng-Latn)": 51.07,
+                    "STS22 (deu-Latn_fra-Latn)": 31.39,
+                    "STSB (cmn-Hans)": 37.7,
+                    "STSBenchmark": 83.42,
+                    "STSBenchmarkMultilingualSTS (nld-Latn)": 57.01,
+                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 55.54,
+                    "STSBenchmarkMultilingualSTS (fra-Latn)": 65.15,
+                    "STSBenchmarkMultilingualSTS (ita-Latn)": 62.72,
+                    "STSBenchmarkMultilingualSTS (spa-Latn)": 65.78,
+                    "STSBenchmarkMultilingualSTS (en)": 83.42,
+                    "STSBenchmarkMultilingualSTS (deu-Latn)": 61.43,
+                    "STSBenchmarkMultilingualSTS (por-Latn)": 62.12,
+                    "STSBenchmarkMultilingualSTS (cmn-Hans)": 39.43,
+                    "STSBenchmarkMultilingualSTS (pol-Latn)": 52.36
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "all-mpnet-base-v2",
+                    "SummEval": 27.49,
+                    "SummEvalFr (fra-Latn)": 28.11
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "all-mpnet-base-v2",
+                    "CEDRClassification (rus-Cyrl)": 35.98,
+                    "SensitiveTopicsClassification (rus-Cyrl)": 17.83
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "all-mpnet-base-v2"
+                }
+            ]
+        }
+    },
+    "nb-bert-base": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "nb-bert-base",
+                    "BornholmBitextMining": 9.88
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "nb-bert-base",
+                    "AngryTweetsClassification": 52.14,
+                    "DKHateClassification": 61.73,
+                    "DanishPoliticalCommentsClassification": 34.84,
+                    "LccSentimentClassification": 51.4,
+                    "MassiveIntentClassification (da)": 56.69,
+                    "MassiveIntentClassification (nb)": 60.67,
+                    "MassiveIntentClassification (sv)": 53.89,
+                    "MassiveScenarioClassification (da)": 61.93,
+                    "MassiveScenarioClassification (nb)": 67.31,
+                    "MassiveScenarioClassification (sv)": 55.37,
+                    "NoRecClassification": 51.32,
+                    "NordicLangClassification": 84.69,
+                    "NorwegianParliament": 57.41,
+                    "ScalaDaClassification": 57.99,
+                    "ScalaNbClassification": 62.25
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "nb-bert-base"
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "nb-bert-base"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "nb-bert-base"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "nb-bert-base"
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "nb-bert-base"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "nb-bert-base"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "nb-bert-base"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "nb-bert-base"
+                }
+            ]
+        }
+    },
+    "herbert-base-retrieval-v2": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "herbert-base-retrieval-v2"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "herbert-base-retrieval-v2",
+                    "AllegroReviews": 34.11,
+                    "CBD": 68.35,
+                    "MassiveIntentClassification (pl)": 65.53,
+                    "MassiveScenarioClassification (pl)": 68.51,
+                    "PAC": 68.4,
+                    "PolEmo2.0-IN": 64.18,
+                    "PolEmo2.0-OUT": 45.73
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "herbert-base-retrieval-v2",
+                    "8TagsClustering": 28.15
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "herbert-base-retrieval-v2",
+                    "CDSC-E": 63.31,
+                    "PPC": 84.18,
+                    "PSC": 98.87,
+                    "SICK-E-PL": 54.93
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "herbert-base-retrieval-v2"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "herbert-base-retrieval-v2",
+                    "ArguAna-PL": 41.97,
+                    "DBPedia-PL": 24.07,
+                    "FiQA-PL": 24.25,
+                    "HotpotQA-PL": 43.41,
+                    "MSMARCO-PL": 51.56,
+                    "NFCorpus-PL": 25.95,
+                    "NQ-PL": 35.09,
+                    "Quora-PL": 78.86,
+                    "SCIDOCS-PL": 11.0,
+                    "SciFact-PL": 51.92,
+                    "TRECCOVID-PL": 42.64
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "herbert-base-retrieval-v2",
+                    "CDSC-R": 86.18,
+                    "SICK-R-PL": 64.67,
+                    "STS22 (pl)": 39.73
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "herbert-base-retrieval-v2"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "herbert-base-retrieval-v2"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "herbert-base-retrieval-v2"
+                }
+            ]
+        }
+    },
+    "voyage-lite-02-instruct": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "voyage-lite-02-instruct"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "voyage-lite-02-instruct",
+                    "AmazonCounterfactualClassification (en)": 88.31,
+                    "AmazonPolarityClassification": 96.32,
+                    "AmazonReviewsClassification (en)": 56.25,
+                    "Banking77Classification": 88.59,
+                    "EmotionClassification": 50.28,
+                    "ImdbClassification": 95.75,
+                    "MTOPDomainClassification (en)": 97.65,
+                    "MTOPIntentClassification (en)": 75.16,
+                    "MassiveIntentClassification (en)": 73.97,
+                    "MassiveScenarioClassification (en)": 83.99,
+                    "ToxicConversationsClassification": 81.75,
+                    "TweetSentimentExtractionClassification": 62.98
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "voyage-lite-02-instruct",
+                    "ArxivClusteringP2P": 51.95,
+                    "ArxivClusteringS2S": 42.48,
+                    "BiorxivClusteringP2P": 50.15,
+                    "BiorxivClusteringS2S": 42.84,
+                    "MedrxivClusteringP2P": 47.24,
+                    "MedrxivClusteringS2S": 43.48,
+                    "RedditClustering": 63.73,
+                    "RedditClusteringP2P": 64.09,
+                    "StackExchangeClustering": 70.71,
+                    "StackExchangeClusteringP2P": 40.34,
+                    "TwentyNewsgroupsClustering": 59.56
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "voyage-lite-02-instruct",
+                    "SprintDuplicateQuestions": 98.07,
+                    "TwitterSemEval2015": 74.44,
+                    "TwitterURLCorpus": 88.11
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "voyage-lite-02-instruct",
+                    "AskUbuntuDupQuestions": 63.24,
+                    "MindSmallReranking": 31.48,
+                    "SciDocsRR": 84.68,
+                    "StackOverflowDupQuestions": 53.56
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "voyage-lite-02-instruct",
+                    "ArguAna": 70.28,
+                    "CQADupstackRetrieval": 46.2,
+                    "ClimateFEVER": 31.95,
+                    "DBPedia": 39.79,
+                    "FEVER": 91.35,
+                    "FiQA2018": 52.51,
+                    "HotpotQA": 75.51,
+                    "MSMARCO": 37.93,
+                    "NFCorpus": 43.7,
+                    "NQ": 64.26,
+                    "QuoraRetrieval": 87.62,
+                    "SCIDOCS": 20.24,
+                    "SciFact": 79.91,
+                    "TRECCOVID": 81.02,
+                    "Touche2020": 26.8
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "voyage-lite-02-instruct",
+                    "BIOSSES": 89.7,
+                    "SICK-R": 78.44,
+                    "STS12": 86.46,
+                    "STS13": 87.76,
+                    "STS14": 86.6,
+                    "STS15": 90.1,
+                    "STS16": 86.39,
+                    "STS17 (en-en)": 86.98,
+                    "STS22 (en)": 76.89,
+                    "STSBenchmark": 88.56
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "voyage-lite-02-instruct",
+                    "SummEval": 31.01
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "voyage-lite-02-instruct"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "voyage-lite-02-instruct"
+                }
+            ]
+        }
+    },
+    "norbert3-large": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "norbert3-large",
+                    "BornholmBitextMining": 2.9
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "norbert3-large",
+                    "AngryTweetsClassification": 49.04,
+                    "DKHateClassification": 62.71,
+                    "DanishPoliticalCommentsClassification": 33.53,
+                    "LccSentimentClassification": 46.93,
+                    "MassiveIntentClassification (da)": 45.98,
+                    "MassiveIntentClassification (nb)": 47.42,
+                    "MassiveIntentClassification (sv)": 48.47,
+                    "MassiveScenarioClassification (da)": 50.51,
+                    "MassiveScenarioClassification (nb)": 54.25,
+                    "MassiveScenarioClassification (sv)": 50.6,
+                    "NoRecClassification": 50.46,
+                    "NordicLangClassification": 84.25,
+                    "NorwegianParliament": 58.85,
+                    "ScalaDaClassification": 60.72,
+                    "ScalaNbClassification": 66.79
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "norbert3-large"
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "norbert3-large"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "norbert3-large"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "norbert3-large"
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "norbert3-large"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "norbert3-large"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "norbert3-large"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "norbert3-large"
+                }
+            ]
+        }
+    },
+    "text2vec-large-chinese": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "text2vec-large-chinese"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "text2vec-large-chinese",
+                    "AmazonReviewsClassification (zh)": 33.77,
+                    "IFlyTek": 41.54,
+                    "JDReview": 81.56,
+                    "MassiveIntentClassification (zh-CN)": 63.23,
+                    "MassiveScenarioClassification (zh-CN)": 68.45,
+                    "MultilingualSentiment": 58.97,
+                    "OnlineShopping": 83.51,
+                    "TNews": 38.92,
+                    "Waimai": 76.01
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "text2vec-large-chinese",
+                    "CLSClusteringP2P": 30.13,
+                    "CLSClusteringS2S": 28.77,
+                    "ThuNewsClusteringP2P": 35.05,
+                    "ThuNewsClusteringS2S": 26.14
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "text2vec-large-chinese",
+                    "Cmnli": 77.67,
+                    "Ocnli": 64.04
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "text2vec-large-chinese",
+                    "CMedQAv1": 58.92,
+                    "CMedQAv2": 60.41,
+                    "MMarcoReranking": 12.48,
+                    "T2Reranking": 64.82
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "text2vec-large-chinese",
+                    "CmedqaRetrieval": 15.53,
+                    "CovidRetrieval": 60.48,
+                    "DuRetrieval": 51.87,
+                    "EcomRetrieval": 37.58,
+                    "MMarcoRetrieval": 45.96,
+                    "MedicalRetrieval": 30.93,
+                    "T2Retrieval": 50.52,
+                    "VideoRetrieval": 42.65
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "text2vec-large-chinese",
+                    "AFQMC": 24.51,
+                    "ATEC": 32.45,
+                    "BQ": 44.22,
+                    "LCQMC": 69.16,
+                    "PAWSX": 14.55,
+                    "QBQTC": 29.51,
+                    "STS22 (zh)": 65.94,
+                    "STSB": 79.45
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "text2vec-large-chinese"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "text2vec-large-chinese"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "text2vec-large-chinese"
+                }
+            ]
+        }
+    },
+    "deberta-v1-base": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "deberta-v1-base",
+                    "Tatoeba (rus-Cyrl_eng-Latn)": 13.21
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "deberta-v1-base",
+                    "GeoreviewClassification (rus-Cyrl)": 40.19,
+                    "HeadlineClassification (rus-Cyrl)": 78.75,
+                    "InappropriatenessClassification (rus-Cyrl)": 61.33,
+                    "KinopoiskClassification (rus-Cyrl)": 48.78,
+                    "MassiveIntentClassification (rus-Cyrl)": 61.32,
+                    "MassiveScenarioClassification (rus-Cyrl)": 64.71,
+                    "RuReviewsClassification (rus-Cyrl)": 55.66,
+                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 53.53,
+                    "RuSciBenchOECDClassification (rus-Cyrl)": 41.34
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "deberta-v1-base",
+                    "GeoreviewClusteringP2P (rus-Cyrl)": 58.79,
+                    "MLSUMClusteringP2P (rus-Cyrl)": 47.33,
+                    "MLSUMClusteringS2S (rus-Cyrl)": 44.6,
+                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 36.66,
+                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 33.31
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "deberta-v1-base",
+                    "OpusparcusPC (rus-Cyrl)": 83.31,
+                    "TERRa (rus-Cyrl)": 53.78
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "deberta-v1-base",
+                    "RuBQReranking (rus-Cyrl)": 34.01
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "deberta-v1-base",
+                    "RiaNewsRetrieval (rus-Cyrl)": 4.84,
+                    "RuBQRetrieval (rus-Cyrl)": 10.15
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "deberta-v1-base",
+                    "RUParaPhraserSTS (rus-Cyrl)": 54.03,
+                    "RuSTSBenchmarkSTS (rus-Cyrl)": 58.47,
+                    "STS22 (rus-Cyrl)": 47.67,
+                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 58.45
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "deberta-v1-base"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "deberta-v1-base",
+                    "CEDRClassification (rus-Cyrl)": 34.14,
+                    "SensitiveTopicsClassification (rus-Cyrl)": 23.67
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "deberta-v1-base"
+                }
+            ]
+        }
+    },
+    "tart-full-flan-t5-xl": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "tart-full-flan-t5-xl"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "tart-full-flan-t5-xl"
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "tart-full-flan-t5-xl"
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "tart-full-flan-t5-xl"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "tart-full-flan-t5-xl"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "tart-full-flan-t5-xl"
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "tart-full-flan-t5-xl"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "tart-full-flan-t5-xl"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "tart-full-flan-t5-xl"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "tart-full-flan-t5-xl",
+                    "Core17InstructionRetrieval": 2.82,
+                    "News21InstructionRetrieval": 1.99,
+                    "Robust04InstructionRetrieval": -0.72
+                }
+            ]
+        }
+    },
+    "instructor-xl": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "instructor-xl"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "instructor-xl"
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "instructor-xl"
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "instructor-xl"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "instructor-xl"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "instructor-xl",
+                    "BrightRetrieval (aops)": 8.26,
+                    "BrightRetrieval (robotics)": 17.39,
+                    "BrightRetrieval (economics)": 22.81,
+                    "BrightRetrieval (stackoverflow)": 19.06,
+                    "BrightRetrieval (leetcode)": 27.5,
+                    "BrightRetrieval (theoremqa_questions)": 14.59,
+                    "BrightRetrieval (psychology)": 27.43,
+                    "BrightRetrieval (biology)": 21.91,
+                    "BrightRetrieval (theoremqa_theorems)": 6.5,
+                    "BrightRetrieval (earth_science)": 34.35,
+                    "BrightRetrieval (sustainable_living)": 18.82,
+                    "BrightRetrieval (pony)": 5.02
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "instructor-xl"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "instructor-xl"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "instructor-xl"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "instructor-xl",
+                    "Core17InstructionRetrieval": 0.69,
+                    "News21InstructionRetrieval": -0.9,
+                    "Robust04InstructionRetrieval": -8.08
+                }
+            ]
+        }
+    },
+    "flan-t5-base": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "flan-t5-base"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "flan-t5-base"
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "flan-t5-base"
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "flan-t5-base"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "flan-t5-base"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "flan-t5-base"
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "flan-t5-base"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "flan-t5-base"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "flan-t5-base"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "flan-t5-base",
+                    "Core17InstructionRetrieval": -3.31,
+                    "News21InstructionRetrieval": -0.12,
+                    "Robust04InstructionRetrieval": 5.35
+                }
+            ]
+        }
+    },
+    "all-MiniLM-L6-v2-instruct": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "all-MiniLM-L6-v2-instruct"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "all-MiniLM-L6-v2-instruct"
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "all-MiniLM-L6-v2-instruct"
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "all-MiniLM-L6-v2-instruct"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "all-MiniLM-L6-v2-instruct"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "all-MiniLM-L6-v2-instruct",
+                    "ARCChallenge": 9.4,
+                    "AlphaNLI": 15.09,
+                    "HellaSwag": 20.51,
+                    "PIQA": 24.68,
+                    "Quail": 3.46,
+                    "RARbCode": 42.47,
+                    "RARbMath": 62.39,
+                    "SIQA": 1.53,
+                    "SpartQA": 0.57,
+                    "TempReasonL1": 1.05,
+                    "TempReasonL2Fact": 16.57,
+                    "TempReasonL2Pure": 0.49,
+                    "TempReasonL3Fact": 14.01,
+                    "TempReasonL3Pure": 6.27,
+                    "WinoGrande": 20.73
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "all-MiniLM-L6-v2-instruct"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "all-MiniLM-L6-v2-instruct"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "all-MiniLM-L6-v2-instruct"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "all-MiniLM-L6-v2-instruct"
+                }
+            ]
+        }
+    },
+    "Baichuan-text-embedding": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "Baichuan-text-embedding"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "Baichuan-text-embedding",
+                    "AmazonReviewsClassification (zh)": 48.3,
+                    "IFlyTek": 50.75,
+                    "JDReview": 87.69,
+                    "MassiveIntentClassification (zh-CN)": 74.91,
+                    "MassiveScenarioClassification (zh-CN)": 81.28,
+                    "MultilingualSentiment": 76.83,
+                    "OnlineShopping": 94.42,
+                    "TNews": 52.62,
+                    "Waimai": 88.77
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "Baichuan-text-embedding",
+                    "CLSClusteringP2P": 60.37,
+                    "CLSClusteringS2S": 51.09,
+                    "ThuNewsClusteringP2P": 58.23,
+                    "ThuNewsClusteringS2S": 57.83
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "Baichuan-text-embedding",
+                    "Cmnli": 85.31,
+                    "Ocnli": 79.33
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "Baichuan-text-embedding",
+                    "CMedQAv1": 88.06,
+                    "CMedQAv2": 88.46,
+                    "MMarcoReranking": 34.3,
+                    "T2Reranking": 67.85
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "Baichuan-text-embedding",
+                    "CmedqaRetrieval": 47.64,
+                    "CovidRetrieval": 86.86,
+                    "DuRetrieval": 88.43,
+                    "EcomRetrieval": 66.39,
+                    "MMarcoRetrieval": 80.17,
+                    "MedicalRetrieval": 61.1,
+                    "T2Retrieval": 80.11,
+                    "VideoRetrieval": 74.28
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "Baichuan-text-embedding",
+                    "AFQMC": 50.8,
+                    "ATEC": 53.23,
+                    "BQ": 66.49,
+                    "LCQMC": 76.6,
+                    "PAWSX": 47.56,
+                    "QBQTC": 39.96,
+                    "STS22 (zh)": 65.78,
+                    "STSB": 80.14
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "Baichuan-text-embedding"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "Baichuan-text-embedding"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "Baichuan-text-embedding"
+                }
+            ]
+        }
+    },
+    "distiluse-base-multilingual-cased-v2": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "distiluse-base-multilingual-cased-v2"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "distiluse-base-multilingual-cased-v2",
+                    "AllegroReviews": 28.03,
+                    "AmazonCounterfactualClassification (de)": 68.14,
+                    "AmazonCounterfactualClassification (en)": 71.81,
+                    "AmazonCounterfactualClassification (en-ext)": 72.96,
+                    "AmazonCounterfactualClassification (ja)": 65.39,
+                    "AmazonPolarityClassification": 68.0,
+                    "AmazonReviewsClassification (de)": 35.03,
+                    "AmazonReviewsClassification (en)": 35.45,
+                    "AmazonReviewsClassification (es)": 36.24,
+                    "AmazonReviewsClassification (fr)": 35.7,
+                    "AmazonReviewsClassification (ja)": 31.08,
+                    "AmazonReviewsClassification (zh)": 33.89,
+                    "Banking77Classification": 71.48,
+                    "CBD": 60.0,
+                    "EmotionClassification": 40.04,
+                    "ImdbClassification": 61.52,
+                    "MTOPDomainClassification (de)": 86.19,
+                    "MTOPDomainClassification (en)": 91.59,
+                    "MTOPDomainClassification (es)": 87.75,
+                    "MTOPDomainClassification (fr)": 84.61,
+                    "MTOPDomainClassification (hi)": 76.41,
+                    "MTOPDomainClassification (th)": 73.62,
+                    "MTOPIntentClassification (de)": 59.21,
+                    "MTOPIntentClassification (en)": 66.4,
+                    "MTOPIntentClassification (es)": 57.21,
+                    "MTOPIntentClassification (fr)": 53.41,
+                    "MTOPIntentClassification (hi)": 45.54,
+                    "MTOPIntentClassification (th)": 47.73,
+                    "MasakhaNEWSClassification (fra)": 76.87,
+                    "MassiveIntentClassification (af)": 40.02,
+                    "MassiveIntentClassification (am)": 2.35,
+                    "MassiveIntentClassification (ar)": 43.14,
+                    "MassiveIntentClassification (az)": 25.6,
+                    "MassiveIntentClassification (bn)": 4.84,
+                    "MassiveIntentClassification (cy)": 15.43,
+                    "MassiveIntentClassification (da)": 52.33,
+                    "MassiveIntentClassification (de)": 51.57,
+                    "MassiveIntentClassification (el)": 49.65,
+                    "MassiveIntentClassification (en)": 66.71,
+                    "MassiveIntentClassification (es)": 56.57,
+                    "MassiveIntentClassification (fa)": 55.36,
+                    "MassiveIntentClassification (fi)": 45.72,
+                    "MassiveIntentClassification (fr)": 57.02,
+                    "MassiveIntentClassification (he)": 46.74,
+                    "MassiveIntentClassification (hi)": 48.55,
+                    "MassiveIntentClassification (hu)": 50.65,
+                    "MassiveIntentClassification (hy)": 40.79,
+                    "MassiveIntentClassification (id)": 56.0,
+                    "MassiveIntentClassification (is)": 16.08,
+                    "MassiveIntentClassification (it)": 57.65,
+                    "MassiveIntentClassification (ja)": 55.33,
+                    "MassiveIntentClassification (jv)": 28.16,
+                    "MassiveIntentClassification (ka)": 29.41,
+                    "MassiveIntentClassification (km)": 4.79,
+                    "MassiveIntentClassification (kn)": 3.37,
+                    "MassiveIntentClassification (ko)": 49.97,
+                    "MassiveIntentClassification (lv)": 44.31,
+                    "MassiveIntentClassification (ml)": 3.24,
+                    "MassiveIntentClassification (mn)": 40.37,
+                    "MassiveIntentClassification (ms)": 47.97,
+                    "MassiveIntentClassification (my)": 38.48,
+                    "MassiveIntentClassification (nb)": 46.01,
+                    "MassiveIntentClassification (nl)": 58.29,
+                    "MassiveIntentClassification (pl)": 53.1,
+                    "MassiveIntentClassification (pt)": 58.63,
+                    "MassiveIntentClassification (ro)": 50.63,
+                    "MassiveIntentClassification (ru)": 57.96,
+                    "MassiveIntentClassification (sl)": 50.66,
+                    "MassiveIntentClassification (sq)": 50.25,
+                    "MassiveIntentClassification (sv)": 52.41,
+                    "MassiveIntentClassification (sw)": 19.29,
+                    "MassiveIntentClassification (ta)": 3.79,
+                    "MassiveIntentClassification (te)": 3.36,
+                    "MassiveIntentClassification (th)": 45.28,
+                    "MassiveIntentClassification (tl)": 28.44,
+                    "MassiveIntentClassification (tr)": 50.47,
+                    "MassiveIntentClassification (ur)": 46.03,
+                    "MassiveIntentClassification (vi)": 45.25,
+                    "MassiveIntentClassification (zh-CN)": 59.22,
+                    "MassiveIntentClassification (zh-TW)": 54.96,
+                    "MassiveScenarioClassification (af)": 53.67,
+                    "MassiveScenarioClassification (am)": 7.72,
+                    "MassiveScenarioClassification (ar)": 52.19,
+                    "MassiveScenarioClassification (az)": 34.75,
+                    "MassiveScenarioClassification (bn)": 10.65,
+                    "MassiveScenarioClassification (cy)": 21.24,
+                    "MassiveScenarioClassification (da)": 62.55,
+                    "MassiveScenarioClassification (de)": 61.4,
+                    "MassiveScenarioClassification (el)": 60.68,
+                    "MassiveScenarioClassification (en)": 74.0,
+                    "MassiveScenarioClassification (es)": 64.61,
+                    "MassiveScenarioClassification (fa)": 59.24,
+                    "MassiveScenarioClassification (fi)": 54.66,
+                    "MassiveScenarioClassification (fr)": 65.2,
+                    "MassiveScenarioClassification (he)": 54.74,
+                    "MassiveScenarioClassification (hi)": 55.99,
+                    "MassiveScenarioClassification (hu)": 61.2,
+                    "MassiveScenarioClassification (hy)": 49.63,
+                    "MassiveScenarioClassification (id)": 65.25,
+                    "MassiveScenarioClassification (is)": 22.6,
+                    "MassiveScenarioClassification (it)": 64.63,
+                    "MassiveScenarioClassification (ja)": 62.32,
+                    "MassiveScenarioClassification (jv)": 35.77,
+                    "MassiveScenarioClassification (ka)": 39.08,
+                    "MassiveScenarioClassification (km)": 9.24,
+                    "MassiveScenarioClassification (kn)": 8.28,
+                    "MassiveScenarioClassification (ko)": 57.6,
+                    "MassiveScenarioClassification (lv)": 51.72,
+                    "MassiveScenarioClassification (ml)": 8.25,
+                    "MassiveScenarioClassification (mn)": 47.21,
+                    "MassiveScenarioClassification (ms)": 55.65,
+                    "MassiveScenarioClassification (my)": 43.31,
+                    "MassiveScenarioClassification (nb)": 54.98,
+                    "MassiveScenarioClassification (nl)": 67.49,
+                    "MassiveScenarioClassification (pl)": 61.29,
+                    "MassiveScenarioClassification (pt)": 64.26,
+                    "MassiveScenarioClassification (ro)": 58.03,
+                    "MassiveScenarioClassification (ru)": 65.41,
+                    "MassiveScenarioClassification (sl)": 59.36,
+                    "MassiveScenarioClassification (sq)": 62.69,
+                    "MassiveScenarioClassification (sv)": 64.35,
+                    "MassiveScenarioClassification (sw)": 25.12,
+                    "MassiveScenarioClassification (ta)": 8.67,
+                    "MassiveScenarioClassification (te)": 7.82,
+                    "MassiveScenarioClassification (th)": 54.65,
+                    "MassiveScenarioClassification (tl)": 36.09,
+                    "MassiveScenarioClassification (tr)": 60.89,
+                    "MassiveScenarioClassification (ur)": 54.71,
+                    "MassiveScenarioClassification (vi)": 55.15,
+                    "MassiveScenarioClassification (zh-CN)": 66.44,
+                    "MassiveScenarioClassification (zh-TW)": 62.89,
+                    "PAC": 68.17,
+                    "PolEmo2.0-IN": 48.84,
+                    "PolEmo2.0-OUT": 30.0,
+                    "ToxicConversationsClassification": 69.09,
+                    "TweetSentimentExtractionClassification": 59.97
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "distiluse-base-multilingual-cased-v2",
+                    "8TagsClustering": 12.51,
+                    "AlloProfClusteringP2P": 55.95,
+                    "AlloProfClusteringS2S": 35.39,
+                    "ArxivClusteringP2P": 33.59,
+                    "HALClusteringS2S": 18.2,
+                    "MLSUMClusteringP2P": 40.17,
+                    "MLSUMClusteringS2S": 34.65,
+                    "MasakhaNEWSClusteringP2P (fra)": 53.76,
+                    "MasakhaNEWSClusteringS2S (fra)": 32.76
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "LaBSE",
-                    "CDSC-E (pol-Latn)": 68.92,
-                    "CDSC-E": 68.91,
-                    "OpusparcusPC (deu-Latn)": 96.58,
-                    "OpusparcusPC (en)": 98.12,
-                    "OpusparcusPC (fin-Latn)": 94.44,
-                    "OpusparcusPC (fra-Latn)": 93.96,
-                    "OpusparcusPC (rus-Cyrl)": 87.3,
-                    "OpusparcusPC (swe-Latn)": 93.69,
-                    "OpusparcusPC (fr)": 93.96,
-                    "PPC": 86.97,
-                    "PSC (pol-Latn)": 97.42,
-                    "PSC": 97.42,
-                    "PawsXPairClassification (deu-Latn)": 51.07,
-                    "PawsXPairClassification (en)": 54.07,
-                    "PawsXPairClassification (spa-Latn)": 52.19,
-                    "PawsXPairClassification (fra-Latn)": 54.63,
-                    "PawsXPairClassification (jpn-Hira)": 47.56,
-                    "PawsXPairClassification (kor-Hang)": 49.39,
-                    "PawsXPairClassification (cmn-Hans)": 54.26,
-                    "PawsXPairClassification (fr)": 54.63,
-                    "SICK-E-PL (pol-Latn)": 63.77,
-                    "SICK-E-PL": 63.77,
-                    "SprintDuplicateQuestions": 89.26,
-                    "TERRa (rus-Cyrl)": 55.71,
-                    "TwitterSemEval2015": 62.78,
-                    "TwitterURLCorpus": 84.58
+                    "Model": "distiluse-base-multilingual-cased-v2",
+                    "CDSC-E": 71.83,
+                    "OpusparcusPC (fr)": 92.07,
+                    "PPC": 86.83,
+                    "PSC": 96.35,
+                    "PawsXPairClassification (fr)": 51.08,
+                    "SICK-E-PL": 62.05,
+                    "SprintDuplicateQuestions": 87.15,
+                    "TwitterSemEval2015": 61.67,
+                    "TwitterURLCorpus": 84.02
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "LaBSE",
-                    "AlloprofReranking (fra-Latn)": 55.37,
-                    "AlloprofReranking": 49.51,
-                    "AskUbuntuDupQuestions": 52.75,
-                    "MMarcoReranking (cmn-Hans)": 14.83,
-                    "MindSmallReranking": 29.81,
-                    "RuBQReranking (rus-Cyrl)": 55.13,
-                    "SciDocsRR": 68.72,
-                    "StackOverflowDupQuestions": 42.42,
-                    "SyntecReranking (fra-Latn)": 67.62,
-                    "SyntecReranking": 73.28,
-                    "T2Reranking (cmn-Hans)": 63.29
+                    "Model": "distiluse-base-multilingual-cased-v2",
+                    "AlloprofReranking": 51.77,
+                    "AskUbuntuDupQuestions": 53.75,
+                    "MindSmallReranking": 30.39,
+                    "SciDocsRR": 69.22,
+                    "StackOverflowDupQuestions": 41.92,
+                    "SyntecReranking": 74.78
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "distiluse-base-multilingual-cased-v2",
+                    "AlloprofRetrieval": 26.99,
+                    "ArguAna-PL": 36.7,
+                    "BSARDRetrieval": 0.0,
+                    "DBPedia-PL": 12.36,
+                    "FiQA-PL": 8.02,
+                    "HotpotQA-PL": 20.83,
+                    "MSMARCO-PL": 4.57,
+                    "MintakaRetrieval (fr)": 22.55,
+                    "NFCorpus-PL": 16.28,
+                    "NQ-PL": 5.85,
+                    "Quora-PL": 71.95,
+                    "SCIDOCS-PL": 6.5,
+                    "SciFact-PL": 33.03,
+                    "SyntecRetrieval": 65.34,
+                    "TRECCOVID-PL": 16.91,
+                    "XPQARetrieval (fr)": 51.2
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "distiluse-base-multilingual-cased-v2",
+                    "BIOSSES": 78.34,
+                    "CDSC-R": 87.67,
+                    "SICK-R": 75.25,
+                    "SICK-R-PL": 65.53,
+                    "SICKFr": 72.49,
+                    "STS12": 72.96,
+                    "STS13": 70.58,
+                    "STS14": 70.29,
+                    "STS15": 81.94,
+                    "STS16": 76.8,
+                    "STS17 (ar-ar)": 77.34,
+                    "STS17 (en-ar)": 77.46,
+                    "STS17 (en-de)": 80.24,
+                    "STS17 (en-en)": 86.19,
+                    "STS17 (en-tr)": 74.34,
+                    "STS17 (es-en)": 77.4,
+                    "STS17 (es-es)": 83.71,
+                    "STS17 (fr-en)": 79.28,
+                    "STS17 (it-en)": 80.82,
+                    "STS17 (ko-ko)": 76.4,
+                    "STS17 (nl-en)": 80.51,
+                    "STS22 (ar)": 49.04,
+                    "STS22 (de)": 35.73,
+                    "STS22 (de-en)": 47.51,
+                    "STS22 (de-fr)": 60.76,
+                    "STS22 (de-pl)": 36.09,
+                    "STS22 (en)": 62.88,
+                    "STS22 (es)": 59.34,
+                    "STS22 (es-en)": 68.96,
+                    "STS22 (es-it)": 63.28,
+                    "STS22 (fr)": 76.41,
+                    "STS22 (fr-pl)": 61.98,
+                    "STS22 (it)": 65.1,
+                    "STS22 (pl)": 34.58,
+                    "STS22 (pl-en)": 71.33,
+                    "STS22 (ru)": 52.4,
+                    "STS22 (tr)": 54.07,
+                    "STS22 (zh)": 54.32,
+                    "STS22 (zh-en)": 61.75,
+                    "STSBenchmark": 80.75,
+                    "STSBenchmarkMultilingualSTS (fr)": 77.49
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "distiluse-base-multilingual-cased-v2",
+                    "SummEvalFr": 28.12
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "distiluse-base-multilingual-cased-v2"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "distiluse-base-multilingual-cased-v2"
+                }
+            ]
+        }
+    },
+    "LaBSE-ru-turbo": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "LaBSE-ru-turbo",
+                    "Tatoeba (rus-Cyrl_eng-Latn)": 93.22
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "LaBSE-ru-turbo",
+                    "GeoreviewClassification (rus-Cyrl)": 46.04,
+                    "HeadlineClassification (rus-Cyrl)": 69.98,
+                    "InappropriatenessClassification (rus-Cyrl)": 61.39,
+                    "KinopoiskClassification (rus-Cyrl)": 53.59,
+                    "MassiveIntentClassification (rus-Cyrl)": 66.08,
+                    "MassiveScenarioClassification (rus-Cyrl)": 71.13,
+                    "RuReviewsClassification (rus-Cyrl)": 64.58,
+                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 56.67,
+                    "RuSciBenchOECDClassification (rus-Cyrl)": 43.58
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "LaBSE-ru-turbo",
+                    "GeoreviewClusteringP2P (rus-Cyrl)": 64.55,
+                    "MLSUMClusteringP2P (rus-Cyrl)": 45.7,
+                    "MLSUMClusteringS2S (rus-Cyrl)": 42.93,
+                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 50.64,
+                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 44.48
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "LaBSE-ru-turbo",
+                    "OpusparcusPC (rus-Cyrl)": 89.32,
+                    "TERRa (rus-Cyrl)": 57.81
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "LaBSE-ru-turbo",
+                    "RuBQReranking (rus-Cyrl)": 68.65
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "LaBSE",
-                    "AILACasedocs": 17.67,
-                    "AILAStatutes": 16.72,
-                    "ARCChallenge": 3.78,
-                    "AlloprofRetrieval (fra-Latn)": 19.77,
-                    "AlloprofRetrieval": 19.77,
-                    "AlphaNLI": 13.11,
-                    "ArguAna": 34.18,
-                    "ArguAna-PL (pol-Latn)": 38.56,
-                    "ArguAna-PL": 38.52,
-                    "BSARDRetrieval (fra-Latn)": 4.44,
-                    "BSARDRetrieval": 0.0,
-                    "CQADupstackRetrieval": 18.75,
-                    "ClimateFEVER": 3.83,
-                    "CmedqaRetrieval (cmn-Hans)": 5.49,
-                    "CovidRetrieval (cmn-Hans)": 28.6,
-                    "DBPedia": 15.57,
-                    "DBPedia-PL": 16.1,
-                    "DuRetrieval (cmn-Hans)": 26.34,
-                    "EcomRetrieval (cmn-Hans)": 25.42,
-                    "FEVER": 12.18,
-                    "FiQA-PL (pol-Latn)": 7.66,
-                    "FiQA-PL": 7.63,
-                    "FiQA2018": 7.0,
-                    "GerDaLIRSmall (deu-Latn)": 4.59,
-                    "HellaSwag": 5.59,
-                    "HotpotQA": 18.75,
-                    "HotpotQA-PL": 19.72,
-                    "LEMBNarrativeQARetrieval": 11.45,
-                    "LEMBNeedleRetrieval": 17.5,
-                    "LEMBPasskeyRetrieval": 20.25,
-                    "LEMBQMSumRetrieval": 14.07,
-                    "LEMBSummScreenFDRetrieval": 40.52,
-                    "LEMBWikimQARetrieval": 28.1,
-                    "LeCaRDv2 (zho-Hans)": 24.68,
-                    "LegalBenchConsumerContractsQA": 54.66,
-                    "LegalBenchCorporateLobbying": 69.39,
-                    "LegalQuAD (deu-Latn)": 16.64,
-                    "LegalSummarization": 53.89,
-                    "MMarcoRetrieval (cmn-Hans)": 34.78,
-                    "MSMARCO": 7.6,
-                    "MSMARCO-PL": 7.22,
-                    "MedicalRetrieval (cmn-Hans)": 6.68,
-                    "MintakaRetrieval (ara-Arab)": 14.06,
-                    "MintakaRetrieval (deu-Latn)": 15.26,
-                    "MintakaRetrieval (spa-Latn)": 15.65,
-                    "MintakaRetrieval (fra-Latn)": 15.53,
-                    "MintakaRetrieval (hin-Deva)": 13.67,
-                    "MintakaRetrieval (ita-Latn)": 15.94,
-                    "MintakaRetrieval (jpn-Hira)": 12.8,
-                    "MintakaRetrieval (por-Latn)": 15.03,
-                    "MintakaRetrieval (fr)": 15.53,
-                    "NFCorpus": 16.54,
-                    "NFCorpus-PL (pol-Latn)": 17.45,
-                    "NFCorpus-PL": 17.45,
-                    "NQ": 8.42,
-                    "NQ-PL": 9.65,
-                    "PIQA": 6.53,
-                    "Quail": 1.91,
-                    "Quora-PL": 74.96,
-                    "QuoraRetrieval": 77.03,
-                    "RARbCode": 2.31,
-                    "RARbMath": 27.19,
-                    "RiaNewsRetrieval (rus-Cyrl)": 42.75,
-                    "RuBQRetrieval (rus-Cyrl)": 30.02,
-                    "SCIDOCS": 5.63,
-                    "SCIDOCS-PL (pol-Latn)": 7.47,
-                    "SCIDOCS-PL": 7.48,
-                    "SIQA": 1.07,
-                    "SciFact": 38.2,
-                    "SciFact-PL (pol-Latn)": 39.79,
-                    "SciFact-PL": 39.79,
-                    "SpartQA": 1.56,
-                    "SyntecRetrieval (fra-Latn)": 55.31,
-                    "SyntecRetrieval": 55.31,
-                    "T2Retrieval (cmn-Hans)": 25.32,
-                    "TRECCOVID": 16.34,
-                    "TRECCOVID-PL (pol-Latn)": 18.51,
-                    "TRECCOVID-PL": 18.45,
-                    "TempReasonL1": 1.56,
-                    "TempReasonL2Fact": 7.06,
-                    "TempReasonL2Pure": 0.14,
-                    "TempReasonL3Fact": 8.74,
-                    "TempReasonL3Pure": 4.73,
-                    "Touche2020": 4.88,
-                    "VideoRetrieval (cmn-Hans)": 22.04,
-                    "WinoGrande": 54.3,
-                    "XPQARetrieval (ara-Arab_ara-Arab)": 35.19,
-                    "XPQARetrieval (eng-Latn_ara-Arab)": 20.64,
-                    "XPQARetrieval (ara-Arab_eng-Latn)": 32.47,
-                    "XPQARetrieval (deu-Latn_deu-Latn)": 53.56,
-                    "XPQARetrieval (eng-Latn_deu-Latn)": 24.31,
-                    "XPQARetrieval (deu-Latn_eng-Latn)": 54.87,
-                    "XPQARetrieval (spa-Latn_spa-Latn)": 44.49,
-                    "XPQARetrieval (eng-Latn_spa-Latn)": 25.31,
-                    "XPQARetrieval (spa-Latn_eng-Latn)": 43.4,
-                    "XPQARetrieval (fra-Latn_fra-Latn)": 51.74,
-                    "XPQARetrieval (eng-Latn_fra-Latn)": 21.29,
-                    "XPQARetrieval (fra-Latn_eng-Latn)": 49.4,
-                    "XPQARetrieval (hin-Deva_hin-Deva)": 66.64,
-                    "XPQARetrieval (eng-Latn_hin-Deva)": 23.25,
-                    "XPQARetrieval (hin-Deva_eng-Latn)": 64.54,
-                    "XPQARetrieval (ita-Latn_ita-Latn)": 56.27,
-                    "XPQARetrieval (eng-Latn_ita-Latn)": 25.8,
-                    "XPQARetrieval (ita-Latn_eng-Latn)": 52.69,
-                    "XPQARetrieval (jpn-Hira_jpn-Hira)": 58.6,
-                    "XPQARetrieval (eng-Latn_jpn-Hira)": 21.49,
-                    "XPQARetrieval (jpn-Hira_eng-Latn)": 52.41,
-                    "XPQARetrieval (kor-Hang_kor-Hang)": 27.66,
-                    "XPQARetrieval (eng-Latn_kor-Hang)": 23.33,
-                    "XPQARetrieval (kor-Hang_eng-Latn)": 23.96,
-                    "XPQARetrieval (pol-Latn_pol-Latn)": 37.33,
-                    "XPQARetrieval (eng-Latn_pol-Latn)": 16.19,
-                    "XPQARetrieval (pol-Latn_eng-Latn)": 37.7,
-                    "XPQARetrieval (por-Latn_por-Latn)": 38.49,
-                    "XPQARetrieval (eng-Latn_por-Latn)": 19.41,
-                    "XPQARetrieval (por-Latn_eng-Latn)": 37.33,
-                    "XPQARetrieval (tam-Taml_tam-Taml)": 37.32,
-                    "XPQARetrieval (eng-Latn_tam-Taml)": 20.53,
-                    "XPQARetrieval (tam-Taml_eng-Latn)": 30.14,
-                    "XPQARetrieval (cmn-Hans_cmn-Hans)": 50.7,
-                    "XPQARetrieval (eng-Latn_cmn-Hans)": 20.59,
-                    "XPQARetrieval (cmn-Hans_eng-Latn)": 48.23,
-                    "XPQARetrieval (fr)": 51.74
+                    "Model": "LaBSE-ru-turbo",
+                    "RiaNewsRetrieval (rus-Cyrl)": 69.36,
+                    "RuBQRetrieval (rus-Cyrl)": 65.71
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "LaBSE",
-                    "AFQMC (cmn-Hans)": 21.02,
-                    "ATEC (cmn-Hans)": 26.61,
-                    "BIOSSES": 78.7,
-                    "BQ (cmn-Hans)": 42.6,
-                    "CDSC-R (pol-Latn)": 85.53,
-                    "CDSC-R": 85.53,
-                    "LCQMC (cmn-Hans)": 52.19,
-                    "PAWSX (cmn-Hans)": 10.23,
-                    "RUParaPhraserSTS (rus-Cyrl)": 65.74,
-                    "RuSTSBenchmarkSTS (rus-Cyrl)": 73.34,
-                    "SICK-R": 69.99,
-                    "SICK-R-PL (pol-Latn)": 65.9,
-                    "SICK-R-PL": 65.9,
-                    "SICKFr (fra-Latn)": 69.94,
-                    "SICKFr": 69.94,
-                    "STS12": 65.08,
-                    "STS13": 67.98,
-                    "STS14": 64.03,
-                    "STS15": 76.59,
-                    "STS16": 72.98,
-                    "STS17 (nld-Latn_eng-Latn)": 75.22,
-                    "STS17 (eng-Latn_tur-Latn)": 72.07,
-                    "STS17 (spa-Latn)": 80.83,
-                    "STS17 (kor-Hang)": 71.32,
-                    "STS17 (eng-Latn_deu-Latn)": 73.85,
-                    "STS17 (ita-Latn_eng-Latn)": 76.99,
-                    "STS17 (eng-Latn_ara-Arab)": 74.51,
-                    "STS17 (ara-Arab)": 69.07,
-                    "STS17 (fra-Latn_eng-Latn)": 76.98,
-                    "STS17 (spa-Latn_eng-Latn)": 65.71,
-                    "STS17 (en-en)": 79.45,
-                    "STS17 (ar-ar)": 69.07,
-                    "STS17 (en-ar)": 74.51,
-                    "STS17 (en-de)": 73.85,
-                    "STS17 (en-tr)": 72.07,
-                    "STS17 (es-en)": 65.71,
-                    "STS17 (es-es)": 80.83,
-                    "STS17 (fr-en)": 76.98,
-                    "STS17 (it-en)": 76.99,
-                    "STS17 (ko-ko)": 71.32,
-                    "STS17 (nl-en)": 75.22,
-                    "STS22 (cmn-Hans)": 63.02,
-                    "STS22 (spa-Latn)": 63.18,
-                    "STS22 (en)": 60.97,
-                    "STS22 (spa-Latn_ita-Latn)": 69.69,
-                    "STS22 (deu-Latn)": 48.58,
-                    "STS22 (fra-Latn)": 77.95,
-                    "STS22 (ara-Arab)": 57.67,
-                    "STS22 (spa-Latn_eng-Latn)": 71.86,
-                    "STS22 (pol-Latn_eng-Latn)": 69.41,
-                    "STS22 (ita-Latn)": 72.22,
-                    "STS22 (pol-Latn)": 39.3,
-                    "STS22 (deu-Latn_fra-Latn)": 53.28,
-                    "STS22 (deu-Latn_pol-Latn)": 58.69,
-                    "STS22 (fra-Latn_pol-Latn)": 61.98,
-                    "STS22 (cmn-Hans_eng-Latn)": 64.02,
-                    "STS22 (tur-Latn)": 58.15,
-                    "STS22 (deu-Latn_eng-Latn)": 50.14,
-                    "STS22 (rus-Cyrl)": 57.49,
-                    "STS22 (ar)": 57.67,
-                    "STS22 (de)": 48.58,
-                    "STS22 (de-en)": 50.14,
-                    "STS22 (de-fr)": 53.28,
-                    "STS22 (de-pl)": 58.69,
-                    "STS22 (es)": 63.18,
-                    "STS22 (es-en)": 71.86,
-                    "STS22 (es-it)": 69.69,
-                    "STS22 (fr)": 77.95,
-                    "STS22 (fr-pl)": 61.98,
-                    "STS22 (it)": 72.22,
-                    "STS22 (pl)": 39.28,
-                    "STS22 (pl-en)": 69.41,
-                    "STS22 (ru)": 57.49,
-                    "STS22 (tr)": 58.15,
-                    "STS22 (zh)": 63.02,
-                    "STS22 (zh-en)": 64.02,
-                    "STSB (cmn-Hans)": 68.38,
-                    "STSBenchmark": 72.25,
-                    "STSBenchmarkMultilingualSTS (en)": 72.25,
-                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 73.06,
-                    "STSBenchmarkMultilingualSTS (fra-Latn)": 75.1,
-                    "STSBenchmarkMultilingualSTS (spa-Latn)": 72.92,
-                    "STSBenchmarkMultilingualSTS (nld-Latn)": 70.22,
-                    "STSBenchmarkMultilingualSTS (cmn-Hans)": 69.5,
-                    "STSBenchmarkMultilingualSTS (ita-Latn)": 72.97,
-                    "STSBenchmarkMultilingualSTS (por-Latn)": 71.65,
-                    "STSBenchmarkMultilingualSTS (deu-Latn)": 72.43,
-                    "STSBenchmarkMultilingualSTS (pol-Latn)": 72.58,
-                    "STSBenchmarkMultilingualSTS (fr)": 75.1
+                    "Model": "LaBSE-ru-turbo",
+                    "RUParaPhraserSTS (rus-Cyrl)": 72.97,
+                    "RuSTSBenchmarkSTS (rus-Cyrl)": 81.77,
+                    "STS22 (rus-Cyrl)": 62.89,
+                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 81.81
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "LaBSE-ru-turbo"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "LaBSE-ru-turbo",
+                    "CEDRClassification (rus-Cyrl)": 45.11,
+                    "SensitiveTopicsClassification (rus-Cyrl)": 27.52
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "LaBSE-ru-turbo"
+                }
+            ]
+        }
+    },
+    "voyage-multilingual-2": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "voyage-multilingual-2"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "voyage-multilingual-2",
+                    "AmazonReviewsClassification (fr)": 43.36,
+                    "MTOPDomainClassification (fr)": 90.33,
+                    "MTOPIntentClassification (fr)": 60.52,
+                    "MasakhaNEWSClassification (fra)": 74.81,
+                    "MassiveIntentClassification (fr)": 68.06,
+                    "MassiveScenarioClassification (fr)": 74.29
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "voyage-multilingual-2",
+                    "AlloProfClusteringP2P": 65.37,
+                    "AlloProfClusteringS2S": 47.03,
+                    "HALClusteringS2S": 27.67,
+                    "MLSUMClusteringP2P (fr)": 45.99,
+                    "MLSUMClusteringS2S (fr)": 45.57,
+                    "MasakhaNEWSClusteringP2P (fra)": 44.53,
+                    "MasakhaNEWSClusteringS2S (fra)": 49.8
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "voyage-multilingual-2",
+                    "OpusparcusPC (fr)": 93.68,
+                    "PawsXPairClassification (fr)": 63.64
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "voyage-multilingual-2",
+                    "AlloprofReranking": 74.78,
+                    "SyntecReranking": 90.4
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "voyage-multilingual-2",
+                    "AlloprofRetrieval": 58.27,
+                    "BSARDRetrieval": 5.14,
+                    "LEMBNarrativeQARetrieval": 64.69,
+                    "LEMBNeedleRetrieval": 75.25,
+                    "LEMBPasskeyRetrieval": 97.0,
+                    "LEMBQMSumRetrieval": 51.49,
+                    "LEMBSummScreenFDRetrieval": 99.11,
+                    "LEMBWikimQARetrieval": 87.49,
+                    "MintakaRetrieval (fr)": 49.19,
+                    "SyntecRetrieval": 87.28,
+                    "XPQARetrieval (fr)": 72.92
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "voyage-multilingual-2",
+                    "SICKFr": 74.9,
+                    "STS22 (fr)": 82.76,
+                    "STSBenchmarkMultilingualSTS (fr)": 82.72
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "LaBSE",
-                    "SummEval": 31.05,
-                    "SummEvalFr (fra-Latn)": 30.16,
-                    "SummEvalFr": 30.16
+                    "Model": "voyage-multilingual-2",
+                    "SummEvalFr": 29.96
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "voyage-multilingual-2"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "LaBSE"
+                    "Model": "voyage-multilingual-2"
                 }
             ]
         }
     },
-    "text-embedding-3-small": {
+    "bge-large-en-v1.5-instruct": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "text-embedding-3-small"
+                    "Model": "bge-large-en-v1.5-instruct"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "text-embedding-3-small",
-                    "AmazonCounterfactualClassification (en)": 76.42,
-                    "AmazonPolarityClassification": 90.84,
-                    "AmazonReviewsClassification (en)": 45.73,
-                    "Banking77Classification": 83.01,
-                    "EmotionClassification": 50.63,
-                    "ImdbClassification": 83.66,
-                    "MTOPDomainClassification (en)": 93.91,
-                    "MTOPIntentClassification (en)": 70.98,
-                    "MassiveIntentClassification (en)": 72.86,
-                    "MassiveScenarioClassification (en)": 76.84,
-                    "ToxicConversationsClassification": 71.91,
-                    "TweetSentimentExtractionClassification": 61.72
+                    "Model": "bge-large-en-v1.5-instruct"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "text-embedding-3-small",
-                    "ArxivClusteringP2P": 46.57,
-                    "ArxivClusteringS2S": 39.35,
-                    "BiorxivClusteringP2P": 37.77,
-                    "BiorxivClusteringS2S": 34.68,
-                    "MedrxivClusteringP2P": 32.77,
-                    "MedrxivClusteringS2S": 31.85,
-                    "RedditClustering": 64.09,
-                    "RedditClusteringP2P": 65.12,
-                    "StackExchangeClustering": 72.05,
-                    "StackExchangeClusteringP2P": 34.04,
-                    "TwentyNewsgroupsClustering": 54.81
+                    "Model": "bge-large-en-v1.5-instruct"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "text-embedding-3-small",
-                    "OpusparcusPC (fr)": 94.45,
-                    "SprintDuplicateQuestions": 94.58,
-                    "TwitterSemEval2015": 73.33,
-                    "TwitterURLCorpus": 87.21
+                    "Model": "bge-large-en-v1.5-instruct"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "text-embedding-3-small",
-                    "AskUbuntuDupQuestions": 62.18,
-                    "MindSmallReranking": 29.93,
-                    "SciDocsRR": 83.25,
-                    "StackOverflowDupQuestions": 51.53
+                    "Model": "bge-large-en-v1.5-instruct"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "text-embedding-3-small",
-                    "ARCChallenge": 14.63,
-                    "AlphaNLI": 30.61,
-                    "ArguAna": 55.49,
-                    "CQADupstackRetrieval": 42.58,
-                    "ClimateFEVER": 26.86,
-                    "DBPedia": 39.97,
-                    "FEVER": 79.42,
-                    "FiQA2018": 44.91,
-                    "HellaSwag": 30.94,
-                    "HotpotQA": 63.63,
-                    "MSMARCO": 37.02,
-                    "NFCorpus": 38.33,
-                    "NQ": 52.86,
-                    "PIQA": 33.69,
-                    "Quail": 6.11,
-                    "QuoraRetrieval": 88.83,
-                    "RARbCode": 72.03,
-                    "RARbMath": 71.07,
-                    "SCIDOCS": 20.8,
-                    "SIQA": 3.03,
-                    "SciFact": 73.37,
-                    "SpartQA": 6.63,
-                    "TRECCOVID": 77.9,
-                    "TempReasonL1": 2.35,
-                    "TempReasonL2Fact": 25.68,
-                    "TempReasonL2Pure": 2.76,
-                    "TempReasonL3Fact": 22.09,
-                    "TempReasonL3Pure": 9.79,
-                    "Touche2020": 24.28,
-                    "WinoGrande": 31.53
+                    "Model": "bge-large-en-v1.5-instruct",
+                    "ARCChallenge": 8.86,
+                    "AlphaNLI": 0.86,
+                    "HellaSwag": 26.24,
+                    "PIQA": 23.26,
+                    "Quail": 2.72,
+                    "RARbCode": 45.25,
+                    "RARbMath": 49.82,
+                    "SIQA": 0.59,
+                    "SpartQA": 2.34,
+                    "TempReasonL1": 1.17,
+                    "TempReasonL2Fact": 21.19,
+                    "TempReasonL2Pure": 2.1,
+                    "TempReasonL3Fact": 17.59,
+                    "TempReasonL3Pure": 5.99,
+                    "WinoGrande": 10.31
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "text-embedding-3-small",
-                    "BIOSSES": 88.72,
-                    "SICK-R": 76.73,
-                    "STS12": 73.09,
-                    "STS13": 84.92,
-                    "STS14": 79.81,
-                    "STS15": 88.01,
-                    "STS16": 84.41,
-                    "STS17 (en-en)": 90.94,
-                    "STS22 (en)": 64.96,
-                    "STSBenchmark": 84.24
+                    "Model": "bge-large-en-v1.5-instruct"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "text-embedding-3-small",
-                    "SummEval": 31.12
+                    "Model": "bge-large-en-v1.5-instruct"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "bge-large-en-v1.5-instruct"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "text-embedding-3-small"
+                    "Model": "bge-large-en-v1.5-instruct"
                 }
             ]
         }
     },
-    "flan-t5-large": {
+    "rubert-base-cased-sentence": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "flan-t5-large"
+                    "Model": "rubert-base-cased-sentence",
+                    "Tatoeba (rus-Cyrl_eng-Latn)": 20.26
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "flan-t5-large"
+                    "Model": "rubert-base-cased-sentence",
+                    "GeoreviewClassification (rus-Cyrl)": 38.05,
+                    "HeadlineClassification (rus-Cyrl)": 67.64,
+                    "InappropriatenessClassification (rus-Cyrl)": 58.27,
+                    "KinopoiskClassification (rus-Cyrl)": 45.86,
+                    "MassiveIntentClassification (rus-Cyrl)": 49.1,
+                    "MassiveScenarioClassification (rus-Cyrl)": 51.91,
+                    "RuReviewsClassification (rus-Cyrl)": 58.34,
+                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 52.18,
+                    "RuSciBenchOECDClassification (rus-Cyrl)": 40.11
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "flan-t5-large"
+                    "Model": "rubert-base-cased-sentence",
+                    "GeoreviewClusteringP2P (rus-Cyrl)": 41.82,
+                    "MLSUMClusteringP2P (rus-Cyrl)": 43.71,
+                    "MLSUMClusteringS2S (rus-Cyrl)": 45.94,
+                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 46.29,
+                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 41.28
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "flan-t5-large"
+                    "Model": "rubert-base-cased-sentence",
+                    "OpusparcusPC (rus-Cyrl)": 81.52,
+                    "TERRa (rus-Cyrl)": 59.12
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "flan-t5-large"
+                    "Model": "rubert-base-cased-sentence",
+                    "RuBQReranking (rus-Cyrl)": 39.89
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "flan-t5-large"
+                    "Model": "rubert-base-cased-sentence",
+                    "RiaNewsRetrieval (rus-Cyrl)": 6.72,
+                    "RuBQRetrieval (rus-Cyrl)": 12.63
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "flan-t5-large"
+                    "Model": "rubert-base-cased-sentence",
+                    "RUParaPhraserSTS (rus-Cyrl)": 66.24,
+                    "RuSTSBenchmarkSTS (rus-Cyrl)": 66.03,
+                    "STS22 (rus-Cyrl)": 51.27,
+                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 66.71
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "flan-t5-large"
+                    "Model": "rubert-base-cased-sentence"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "rubert-base-cased-sentence",
+                    "CEDRClassification (rus-Cyrl)": 35.55,
+                    "SensitiveTopicsClassification (rus-Cyrl)": 20.05
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "flan-t5-large",
-                    "Core17InstructionRetrieval": 1.32,
-                    "News21InstructionRetrieval": 8.95,
-                    "Robust04InstructionRetrieval": 3.9
+                    "Model": "rubert-base-cased-sentence"
                 }
             ]
         }
     },
-    "st-polish-paraphrase-from-distilroberta": {
+    "voyage-lite-01-instruct": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "st-polish-paraphrase-from-distilroberta"
+                    "Model": "voyage-lite-01-instruct"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "st-polish-paraphrase-from-distilroberta",
-                    "AllegroReviews": 34.5,
-                    "CBD": 70.27,
-                    "MassiveIntentClassification (pl)": 64.81,
-                    "MassiveScenarioClassification (pl)": 70.01,
-                    "PAC": 64.6,
-                    "PolEmo2.0-IN": 67.06,
-                    "PolEmo2.0-OUT": 38.58
+                    "Model": "voyage-lite-01-instruct",
+                    "AmazonCounterfactualClassification (en)": 71.43,
+                    "AmazonPolarityClassification": 96.41,
+                    "AmazonReviewsClassification (en)": 57.06,
+                    "Banking77Classification": 81.64,
+                    "EmotionClassification": 48.29,
+                    "ImdbClassification": 95.49,
+                    "MTOPDomainClassification (en)": 96.3,
+                    "MTOPIntentClassification (en)": 67.93,
+                    "MassiveIntentClassification (en)": 71.29,
+                    "MassiveScenarioClassification (en)": 76.74,
+                    "ToxicConversationsClassification": 75.45,
+                    "TweetSentimentExtractionClassification": 59.44
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "st-polish-paraphrase-from-distilroberta",
-                    "8TagsClustering": 31.68
+                    "Model": "voyage-lite-01-instruct",
+                    "ArxivClusteringP2P": 47.92,
+                    "ArxivClusteringS2S": 42.42,
+                    "BiorxivClusteringP2P": 38.72,
+                    "BiorxivClusteringS2S": 36.6,
+                    "MedrxivClusteringP2P": 34.04,
+                    "MedrxivClusteringS2S": 32.81,
+                    "RedditClustering": 61.56,
+                    "RedditClusteringP2P": 65.35,
+                    "StackExchangeClustering": 70.16,
+                    "StackExchangeClusteringP2P": 38.23,
+                    "TwentyNewsgroupsClustering": 53.56
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "st-polish-paraphrase-from-distilroberta",
-                    "CDSC-E": 75.99,
-                    "PPC": 93.29,
-                    "PSC": 99.1,
-                    "SICK-E-PL": 79.63
+                    "Model": "voyage-lite-01-instruct",
+                    "SprintDuplicateQuestions": 96.01,
+                    "TwitterSemEval2015": 76.87,
+                    "TwitterURLCorpus": 86.84
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "st-polish-paraphrase-from-distilroberta"
+                    "Model": "voyage-lite-01-instruct",
+                    "AskUbuntuDupQuestions": 65.77,
+                    "MindSmallReranking": 31.69,
+                    "SciDocsRR": 87.03,
+                    "StackOverflowDupQuestions": 54.49
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "st-polish-paraphrase-from-distilroberta",
-                    "ArguAna-PL": 49.42,
-                    "DBPedia-PL": 19.82,
-                    "FiQA-PL": 19.58,
-                    "HotpotQA-PL": 23.47,
-                    "MSMARCO-PL": 16.51,
-                    "NFCorpus-PL": 22.49,
-                    "NQ-PL": 19.83,
-                    "Quora-PL": 81.17,
-                    "SCIDOCS-PL": 12.15,
-                    "SciFact-PL": 49.49,
-                    "TRECCOVID-PL": 38.97
+                    "Model": "voyage-lite-01-instruct",
+                    "ArguAna": 58.73,
+                    "CQADupstackRetrieval": 45.11,
+                    "ClimateFEVER": 37.47,
+                    "DBPedia": 43.42,
+                    "FEVER": 89.71,
+                    "FiQA2018": 44.79,
+                    "HotpotQA": 70.46,
+                    "MSMARCO": 39.66,
+                    "NFCorpus": 43.33,
+                    "NQ": 60.65,
+                    "QuoraRetrieval": 87.83,
+                    "SCIDOCS": 23.19,
+                    "SciFact": 73.64,
+                    "TRECCOVID": 78.92,
+                    "Touche2020": 36.83
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "st-polish-paraphrase-from-distilroberta",
-                    "CDSC-R": 89.62,
-                    "SICK-R-PL": 76.37,
-                    "STS22 (pl)": 40.36
+                    "Model": "voyage-lite-01-instruct",
+                    "BIOSSES": 84.85,
+                    "SICK-R": 79.71,
+                    "STS12": 77.09,
+                    "STS13": 88.91,
+                    "STS14": 82.08,
+                    "STS15": 89.21,
+                    "STS16": 84.74,
+                    "STS17 (en-en)": 90.73,
+                    "STS22 (en)": 62.1,
+                    "STSBenchmark": 89.86
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "st-polish-paraphrase-from-distilroberta"
+                    "Model": "voyage-lite-01-instruct",
+                    "SummEval": 30.97
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "voyage-lite-01-instruct"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "st-polish-paraphrase-from-distilroberta"
+                    "Model": "voyage-lite-01-instruct"
                 }
             ]
         }
     },
-    "LLM2Vec-Sheared-Llama-unsupervised": {
+    "udever-bloom-560m": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "LLM2Vec-Sheared-Llama-unsupervised"
+                    "Model": "udever-bloom-560m"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "LLM2Vec-Sheared-Llama-unsupervised",
-                    "AmazonCounterfactualClassification (en)": 72.93,
-                    "AmazonPolarityClassification": 74.28,
-                    "AmazonReviewsClassification (en)": 36.14,
-                    "Banking77Classification": 79.0,
-                    "EmotionClassification": 42.85,
-                    "ImdbClassification": 71.92,
-                    "MTOPDomainClassification (en)": 91.24,
-                    "MTOPIntentClassification (en)": 74.08,
-                    "MassiveIntentClassification (en)": 69.99,
-                    "MassiveScenarioClassification (en)": 75.15,
-                    "ToxicConversationsClassification": 68.4,
-                    "TweetSentimentExtractionClassification": 56.08
+                    "Model": "udever-bloom-560m",
+                    "AmazonReviewsClassification (fr)": 26.85,
+                    "MTOPDomainClassification (fr)": 34.99,
+                    "MTOPIntentClassification (fr)": 15.76,
+                    "MasakhaNEWSClassification (fra)": 67.94,
+                    "MassiveIntentClassification (fr)": 15.09,
+                    "MassiveScenarioClassification (fr)": 21.67
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "LLM2Vec-Sheared-Llama-unsupervised",
-                    "ArxivClusteringP2P": 42.92,
-                    "ArxivClusteringS2S": 35.2,
-                    "BiorxivClusteringP2P": 35.02,
-                    "BiorxivClusteringS2S": 27.21,
-                    "MedrxivClusteringP2P": 30.15,
-                    "MedrxivClusteringS2S": 26.96,
-                    "RedditClustering": 38.67,
-                    "RedditClusteringP2P": 53.42,
-                    "StackExchangeClustering": 59.35,
-                    "StackExchangeClusteringP2P": 31.47,
-                    "TwentyNewsgroupsClustering": 31.54
+                    "Model": "udever-bloom-560m",
+                    "AlloProfClusteringP2P": 53.57,
+                    "AlloProfClusteringS2S": 22.13,
+                    "HALClusteringS2S": 7.68,
+                    "MLSUMClusteringP2P": 36.43,
+                    "MLSUMClusteringS2S": 25.26,
+                    "MasakhaNEWSClusteringP2P (fra)": 37.57,
+                    "MasakhaNEWSClusteringS2S (fra)": 20.58
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "LLM2Vec-Sheared-Llama-unsupervised",
-                    "SprintDuplicateQuestions": 77.36,
-                    "TwitterSemEval2015": 61.54,
-                    "TwitterURLCorpus": 77.73
+                    "Model": "udever-bloom-560m",
+                    "OpusparcusPC (fr)": 82.1,
+                    "PawsXPairClassification (fr)": 59.69
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "LLM2Vec-Sheared-Llama-unsupervised",
-                    "AskUbuntuDupQuestions": 52.7,
-                    "MindSmallReranking": 29.52,
-                    "SciDocsRR": 67.76,
-                    "StackOverflowDupQuestions": 40.82
+                    "Model": "udever-bloom-560m",
+                    "AlloprofReranking": 28.75,
+                    "SyntecReranking": 50.88
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "LLM2Vec-Sheared-Llama-unsupervised",
-                    "ArguAna": 43.64,
-                    "CQADupstackRetrieval": 18.5,
-                    "ClimateFEVER": 18.95,
-                    "DBPedia": 13.21,
-                    "FEVER": 16.96,
-                    "FiQA2018": 16.99,
-                    "HotpotQA": 22.64,
-                    "MSMARCO": 7.03,
-                    "NFCorpus": 15.73,
-                    "NQ": 17.96,
-                    "QuoraRetrieval": 78.23,
-                    "SCIDOCS": 5.53,
-                    "SciFact": 38.31,
-                    "TRECCOVID": 56.04,
-                    "Touche2020": 19.17
+                    "Model": "udever-bloom-560m",
+                    "AlloprofRetrieval": 1.98,
+                    "BSARDRetrieval": 0.0,
+                    "MintakaRetrieval (fr)": 0.48,
+                    "SyntecRetrieval": 24.45,
+                    "XPQARetrieval (fr)": 12.98
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "LLM2Vec-Sheared-Llama-unsupervised",
-                    "BIOSSES": 75.12,
-                    "SICK-R": 69.34,
-                    "STS12": 60.09,
-                    "STS13": 72.52,
-                    "STS14": 66.7,
-                    "STS15": 77.69,
-                    "STS16": 75.94,
-                    "STS17 (en-en)": 81.67,
-                    "STS22 (en)": 63.7,
-                    "STSBenchmark": 73.36
+                    "Model": "udever-bloom-560m",
+                    "SICKFr": 54.54,
+                    "STS22 (fr)": 61.35,
+                    "STSBenchmarkMultilingualSTS (fr)": 36.78
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "LLM2Vec-Sheared-Llama-unsupervised",
-                    "SummEval": 31.23
+                    "Model": "udever-bloom-560m",
+                    "SummEvalFr": 23.63
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "udever-bloom-560m"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "LLM2Vec-Sheared-Llama-unsupervised"
+                    "Model": "udever-bloom-560m"
                 }
             ]
         }
     },
-    "DanskBERT": {
+    "text-search-curie-001": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "DanskBERT",
-                    "BornholmBitextMining": 6.34
+                    "Model": "text-search-curie-001"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "DanskBERT",
-                    "AngryTweetsClassification": 54.28,
-                    "DKHateClassification": 59.3,
-                    "DanishPoliticalCommentsClassification": 39.81,
-                    "LccSentimentClassification": 58.0,
-                    "MassiveIntentClassification (da)": 54.68,
-                    "MassiveIntentClassification (nb)": 45.38,
-                    "MassiveIntentClassification (sv)": 40.82,
-                    "MassiveScenarioClassification (da)": 59.56,
-                    "MassiveScenarioClassification (nb)": 47.55,
-                    "MassiveScenarioClassification (sv)": 40.14,
-                    "NoRecClassification": 46.06,
-                    "NordicLangClassification": 74.25,
-                    "NorwegianParliament": 56.79,
-                    "ScalaDaClassification": 66.59,
-                    "ScalaNbClassification": 59.99
+                    "Model": "text-search-curie-001"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "DanskBERT"
+                    "Model": "text-search-curie-001"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "DanskBERT"
+                    "Model": "text-search-curie-001"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "DanskBERT"
+                    "Model": "text-search-curie-001"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "text-search-curie-001",
+                    "ArguAna": 46.98,
+                    "ClimateFEVER": 19.4,
+                    "FEVER": 75.6,
+                    "FiQA2018": 45.21,
+                    "HotpotQA": 64.8,
+                    "NFCorpus": 38.01,
+                    "QuoraRetrieval": 67.7,
+                    "SCIDOCS": 17.74,
+                    "SciFact": 74.35,
+                    "TRECCOVID": 56.14,
+                    "Touche2020": 30.9
                 }
             ]
         },
-        "Retrieval": {
-            "ndcg_at_10": [
+        "STS": {
+            "spearman": [
                 {
-                    "Model": "DanskBERT"
+                    "Model": "text-search-curie-001"
                 }
             ]
         },
-        "STS": {
+        "Summarization": {
             "spearman": [
                 {
-                    "Model": "DanskBERT"
+                    "Model": "text-search-curie-001"
                 }
             ]
         },
-        "Summarization": {
-            "spearman": [
+        "MultilabelClassification": {
+            "accuracy": [
                 {
-                    "Model": "DanskBERT"
+                    "Model": "text-search-curie-001"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "DanskBERT"
+                    "Model": "text-search-curie-001"
                 }
             ]
         }
     },
-    "mistral-7b-instruct-v0.2": {
+    "gtr-t5-xl": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "mistral-7b-instruct-v0.2"
+                    "Model": "gtr-t5-xl",
+                    "BUCC (de-en)": 90.99,
+                    "BUCC (fr-en)": 88.55,
+                    "BUCC (ru-en)": 2.07,
+                    "BUCC (zh-en)": 1.49,
+                    "Tatoeba (afr-eng)": 33.47,
+                    "Tatoeba (amh-eng)": 0.01,
+                    "Tatoeba (ang-eng)": 30.74,
+                    "Tatoeba (ara-eng)": 0.47,
+                    "Tatoeba (arq-eng)": 0.34,
+                    "Tatoeba (arz-eng)": 0.14,
+                    "Tatoeba (ast-eng)": 51.74,
+                    "Tatoeba (awa-eng)": 0.49,
+                    "Tatoeba (aze-eng)": 7.43,
+                    "Tatoeba (bel-eng)": 3.45,
+                    "Tatoeba (ben-eng)": 0.06,
+                    "Tatoeba (ber-eng)": 5.79,
+                    "Tatoeba (bos-eng)": 17.43,
+                    "Tatoeba (bre-eng)": 5.69,
+                    "Tatoeba (bul-eng)": 7.55,
+                    "Tatoeba (cat-eng)": 48.06,
+                    "Tatoeba (cbk-eng)": 54.56,
+                    "Tatoeba (ceb-eng)": 8.72,
+                    "Tatoeba (ces-eng)": 8.76,
+                    "Tatoeba (cha-eng)": 27.56,
+                    "Tatoeba (cmn-eng)": 2.26,
+                    "Tatoeba (cor-eng)": 3.69,
+                    "Tatoeba (csb-eng)": 13.18,
+                    "Tatoeba (cym-eng)": 6.97,
+                    "Tatoeba (dan-eng)": 47.36,
+                    "Tatoeba (deu-eng)": 91.54,
+                    "Tatoeba (dsb-eng)": 13.2,
+                    "Tatoeba (dtp-eng)": 4.54,
+                    "Tatoeba (ell-eng)": 0.55,
+                    "Tatoeba (epo-eng)": 27.86,
+                    "Tatoeba (est-eng)": 5.13,
+                    "Tatoeba (eus-eng)": 10.23,
+                    "Tatoeba (fao-eng)": 21.44,
+                    "Tatoeba (fin-eng)": 6.62,
+                    "Tatoeba (fra-eng)": 79.66,
+                    "Tatoeba (fry-eng)": 32.92,
+                    "Tatoeba (gla-eng)": 2.87,
+                    "Tatoeba (gle-eng)": 3.26,
+                    "Tatoeba (glg-eng)": 63.81,
+                    "Tatoeba (gsw-eng)": 29.71,
+                    "Tatoeba (heb-eng)": 0.33,
+                    "Tatoeba (hin-eng)": 0.25,
+                    "Tatoeba (hrv-eng)": 17.16,
+                    "Tatoeba (hsb-eng)": 12.02,
+                    "Tatoeba (hun-eng)": 7.21,
+                    "Tatoeba (hye-eng)": 0.78,
+                    "Tatoeba (ido-eng)": 40.83,
+                    "Tatoeba (ile-eng)": 54.95,
+                    "Tatoeba (ina-eng)": 72.28,
+                    "Tatoeba (ind-eng)": 30.95,
+                    "Tatoeba (isl-eng)": 11.29,
+                    "Tatoeba (ita-eng)": 73.83,
+                    "Tatoeba (jav-eng)": 8.66,
+                    "Tatoeba (jpn-eng)": 0.61,
+                    "Tatoeba (kab-eng)": 1.78,
+                    "Tatoeba (kat-eng)": 0.79,
+                    "Tatoeba (kaz-eng)": 0.95,
+                    "Tatoeba (khm-eng)": 0.49,
+                    "Tatoeba (kor-eng)": 1.87,
+                    "Tatoeba (kur-eng)": 10.91,
+                    "Tatoeba (kzj-eng)": 5.72,
+                    "Tatoeba (lat-eng)": 18.24,
+                    "Tatoeba (lfn-eng)": 43.49,
+                    "Tatoeba (lit-eng)": 7.13,
+                    "Tatoeba (lvs-eng)": 7.04,
+                    "Tatoeba (mal-eng)": 0.44,
+                    "Tatoeba (mar-eng)": 0.03,
+                    "Tatoeba (max-eng)": 18.99,
+                    "Tatoeba (mhr-eng)": 1.11,
+                    "Tatoeba (mkd-eng)": 2.49,
+                    "Tatoeba (mon-eng)": 2.01,
+                    "Tatoeba (nds-eng)": 39.96,
+                    "Tatoeba (nld-eng)": 58.86,
+                    "Tatoeba (nno-eng)": 29.07,
+                    "Tatoeba (nob-eng)": 40.25,
+                    "Tatoeba (nov-eng)": 50.19,
+                    "Tatoeba (oci-eng)": 30.72,
+                    "Tatoeba (orv-eng)": 0.85,
+                    "Tatoeba (pam-eng)": 7.21,
+                    "Tatoeba (pes-eng)": 0.53,
+                    "Tatoeba (pms-eng)": 31.07,
+                    "Tatoeba (pol-eng)": 18.06,
+                    "Tatoeba (por-eng)": 81.92,
+                    "Tatoeba (ron-eng)": 62.6,
+                    "Tatoeba (rus-eng)": 22.24,
+                    "Tatoeba (slk-eng)": 10.59,
+                    "Tatoeba (slv-eng)": 11.4,
+                    "Tatoeba (spa-eng)": 85.78,
+                    "Tatoeba (sqi-eng)": 14.92,
+                    "Tatoeba (srp-eng)": 9.87,
+                    "Tatoeba (swe-eng)": 55.08,
+                    "Tatoeba (swg-eng)": 32.66,
+                    "Tatoeba (swh-eng)": 7.64,
+                    "Tatoeba (tam-eng)": 0.49,
+                    "Tatoeba (tat-eng)": 1.28,
+                    "Tatoeba (tel-eng)": 0.45,
+                    "Tatoeba (tgl-eng)": 23.63,
+                    "Tatoeba (tha-eng)": 0.61,
+                    "Tatoeba (tuk-eng)": 5.71,
+                    "Tatoeba (tur-eng)": 8.25,
+                    "Tatoeba (tzl-eng)": 28.4,
+                    "Tatoeba (uig-eng)": 0.57,
+                    "Tatoeba (ukr-eng)": 5.69,
+                    "Tatoeba (urd-eng)": 0.0,
+                    "Tatoeba (uzb-eng)": 4.19,
+                    "Tatoeba (vie-eng)": 9.07,
+                    "Tatoeba (war-eng)": 12.31,
+                    "Tatoeba (wuu-eng)": 1.38,
+                    "Tatoeba (xho-eng)": 7.6,
+                    "Tatoeba (yid-eng)": 0.41,
+                    "Tatoeba (yue-eng)": 1.31,
+                    "Tatoeba (zsm-eng)": 29.74
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "mistral-7b-instruct-v0.2"
+                    "Model": "gtr-t5-xl",
+                    "AmazonCounterfactualClassification (de)": 59.79,
+                    "AmazonCounterfactualClassification (en)": 68.6,
+                    "AmazonCounterfactualClassification (en-ext)": 69.03,
+                    "AmazonCounterfactualClassification (ja)": 50.59,
+                    "AmazonPolarityClassification": 74.58,
+                    "AmazonReviewsClassification (de)": 35.06,
+                    "AmazonReviewsClassification (en)": 38.2,
+                    "AmazonReviewsClassification (es)": 37.18,
+                    "AmazonReviewsClassification (fr)": 35.48,
+                    "AmazonReviewsClassification (ja)": 22.24,
+                    "AmazonReviewsClassification (zh)": 21.89,
+                    "Banking77Classification": 82.22,
+                    "EmotionClassification": 45.54,
+                    "ImdbClassification": 68.15,
+                    "MTOPDomainClassification (de)": 85.42,
+                    "MTOPDomainClassification (en)": 93.6,
+                    "MTOPDomainClassification (es)": 88.2,
+                    "MTOPDomainClassification (fr)": 85.05,
+                    "MTOPDomainClassification (hi)": 21.74,
+                    "MTOPDomainClassification (th)": 15.87,
+                    "MTOPIntentClassification (de)": 55.75,
+                    "MTOPIntentClassification (en)": 65.93,
+                    "MTOPIntentClassification (es)": 57.73,
+                    "MTOPIntentClassification (fr)": 51.07,
+                    "MTOPIntentClassification (hi)": 3.19,
+                    "MTOPIntentClassification (th)": 5.55,
+                    "MassiveIntentClassification (af)": 42.6,
+                    "MassiveIntentClassification (am)": 2.12,
+                    "MassiveIntentClassification (ar)": 4.64,
+                    "MassiveIntentClassification (az)": 35.05,
+                    "MassiveIntentClassification (bn)": 2.84,
+                    "MassiveIntentClassification (cy)": 36.19,
+                    "MassiveIntentClassification (da)": 48.42,
+                    "MassiveIntentClassification (de)": 55.49,
+                    "MassiveIntentClassification (el)": 10.14,
+                    "MassiveIntentClassification (en)": 70.23,
+                    "MassiveIntentClassification (es)": 56.72,
+                    "MassiveIntentClassification (fa)": 3.54,
+                    "MassiveIntentClassification (fi)": 37.13,
+                    "MassiveIntentClassification (fr)": 57.67,
+                    "MassiveIntentClassification (he)": 2.56,
+                    "MassiveIntentClassification (hi)": 3.24,
+                    "MassiveIntentClassification (hu)": 34.22,
+                    "MassiveIntentClassification (hy)": 3.01,
+                    "MassiveIntentClassification (id)": 46.54,
+                    "MassiveIntentClassification (is)": 34.77,
+                    "MassiveIntentClassification (it)": 54.13,
+                    "MassiveIntentClassification (ja)": 4.27,
+                    "MassiveIntentClassification (jv)": 36.97,
+                    "MassiveIntentClassification (ka)": 2.72,
+                    "MassiveIntentClassification (km)": 5.35,
+                    "MassiveIntentClassification (kn)": 3.17,
+                    "MassiveIntentClassification (ko)": 2.64,
+                    "MassiveIntentClassification (lv)": 36.32,
+                    "MassiveIntentClassification (ml)": 3.18,
+                    "MassiveIntentClassification (mn)": 22.85,
+                    "MassiveIntentClassification (ms)": 42.87,
+                    "MassiveIntentClassification (my)": 4.04,
+                    "MassiveIntentClassification (nb)": 45.87,
+                    "MassiveIntentClassification (nl)": 49.53,
+                    "MassiveIntentClassification (pl)": 42.64,
+                    "MassiveIntentClassification (pt)": 57.03,
+                    "MassiveIntentClassification (ro)": 49.95,
+                    "MassiveIntentClassification (ru)": 36.58,
+                    "MassiveIntentClassification (sl)": 39.44,
+                    "MassiveIntentClassification (sq)": 41.78,
+                    "MassiveIntentClassification (sv)": 47.95,
+                    "MassiveIntentClassification (sw)": 35.85,
+                    "MassiveIntentClassification (ta)": 2.32,
+                    "MassiveIntentClassification (te)": 2.2,
+                    "MassiveIntentClassification (th)": 3.74,
+                    "MassiveIntentClassification (tl)": 43.12,
+                    "MassiveIntentClassification (tr)": 35.24,
+                    "MassiveIntentClassification (ur)": 3.0,
+                    "MassiveIntentClassification (vi)": 30.01,
+                    "MassiveIntentClassification (zh-CN)": 1.72,
+                    "MassiveIntentClassification (zh-TW)": 3.35,
+                    "MassiveScenarioClassification (af)": 52.54,
+                    "MassiveScenarioClassification (am)": 6.3,
+                    "MassiveScenarioClassification (ar)": 11.96,
+                    "MassiveScenarioClassification (az)": 40.17,
+                    "MassiveScenarioClassification (bn)": 8.29,
+                    "MassiveScenarioClassification (cy)": 42.24,
+                    "MassiveScenarioClassification (da)": 57.28,
+                    "MassiveScenarioClassification (de)": 68.09,
+                    "MassiveScenarioClassification (el)": 16.66,
+                    "MassiveScenarioClassification (en)": 75.94,
+                    "MassiveScenarioClassification (es)": 64.32,
+                    "MassiveScenarioClassification (fa)": 6.9,
+                    "MassiveScenarioClassification (fi)": 43.96,
+                    "MassiveScenarioClassification (fr)": 66.72,
+                    "MassiveScenarioClassification (he)": 7.51,
+                    "MassiveScenarioClassification (hi)": 7.82,
+                    "MassiveScenarioClassification (hu)": 42.16,
+                    "MassiveScenarioClassification (hy)": 9.33,
+                    "MassiveScenarioClassification (id)": 53.54,
+                    "MassiveScenarioClassification (is)": 42.84,
+                    "MassiveScenarioClassification (it)": 62.44,
+                    "MassiveScenarioClassification (ja)": 7.29,
+                    "MassiveScenarioClassification (jv)": 43.13,
+                    "MassiveScenarioClassification (ka)": 7.63,
+                    "MassiveScenarioClassification (km)": 9.08,
+                    "MassiveScenarioClassification (kn)": 8.1,
+                    "MassiveScenarioClassification (ko)": 6.35,
+                    "MassiveScenarioClassification (lv)": 40.24,
+                    "MassiveScenarioClassification (ml)": 7.65,
+                    "MassiveScenarioClassification (mn)": 27.98,
+                    "MassiveScenarioClassification (ms)": 52.41,
+                    "MassiveScenarioClassification (my)": 9.21,
+                    "MassiveScenarioClassification (nb)": 54.44,
+                    "MassiveScenarioClassification (nl)": 60.35,
+                    "MassiveScenarioClassification (pl)": 49.97,
+                    "MassiveScenarioClassification (pt)": 62.78,
+                    "MassiveScenarioClassification (ro)": 59.62,
+                    "MassiveScenarioClassification (ru)": 43.44,
+                    "MassiveScenarioClassification (sl)": 44.79,
+                    "MassiveScenarioClassification (sq)": 50.84,
+                    "MassiveScenarioClassification (sv)": 58.21,
+                    "MassiveScenarioClassification (sw)": 44.63,
+                    "MassiveScenarioClassification (ta)": 7.95,
+                    "MassiveScenarioClassification (te)": 7.5,
+                    "MassiveScenarioClassification (th)": 8.79,
+                    "MassiveScenarioClassification (tl)": 53.54,
+                    "MassiveScenarioClassification (tr)": 42.47,
+                    "MassiveScenarioClassification (ur)": 9.58,
+                    "MassiveScenarioClassification (vi)": 34.68,
+                    "MassiveScenarioClassification (zh-CN)": 5.21,
+                    "MassiveScenarioClassification (zh-TW)": 8.77,
+                    "ToxicConversationsClassification": 67.56,
+                    "TweetSentimentExtractionClassification": 54.77
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "mistral-7b-instruct-v0.2"
+                    "Model": "gtr-t5-xl",
+                    "ArxivClusteringP2P": 37.9,
+                    "ArxivClusteringS2S": 30.45,
+                    "BiorxivClusteringP2P": 30.52,
+                    "BiorxivClusteringS2S": 26.06,
+                    "MedrxivClusteringP2P": 28.69,
+                    "MedrxivClusteringS2S": 26.69,
+                    "RedditClustering": 61.34,
+                    "RedditClusteringP2P": 61.11,
+                    "StackExchangeClustering": 69.95,
+                    "StackExchangeClusteringP2P": 32.73,
+                    "TwentyNewsgroupsClustering": 51.15
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "mistral-7b-instruct-v0.2"
+                    "Model": "gtr-t5-xl",
+                    "SprintDuplicateQuestions": 95.45,
+                    "TwitterSemEval2015": 77.81,
+                    "TwitterURLCorpus": 85.14
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "mistral-7b-instruct-v0.2"
+                    "Model": "gtr-t5-xl",
+                    "AskUbuntuDupQuestions": 63.08,
+                    "MindSmallReranking": 31.5,
+                    "SciDocsRR": 76.49,
+                    "StackOverflowDupQuestions": 52.79
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "mistral-7b-instruct-v0.2"
+                    "Model": "gtr-t5-xl",
+                    "ArguAna": 52.81,
+                    "CQADupstackRetrieval": 37.35,
+                    "ClimateFEVER": 27.01,
+                    "DBPedia": 39.74,
+                    "FEVER": 72.18,
+                    "FiQA2018": 44.19,
+                    "HotpotQA": 58.91,
+                    "MSMARCO": 43.52,
+                    "NFCorpus": 33.34,
+                    "NQ": 56.16,
+                    "QuoraRetrieval": 88.91,
+                    "SCIDOCS": 15.71,
+                    "SciFact": 64.2,
+                    "TRECCOVID": 60.09,
+                    "Touche2020": 25.26
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "mistral-7b-instruct-v0.2"
+                    "Model": "gtr-t5-xl",
+                    "BIOSSES": 78.94,
+                    "SICK-R": 73.63,
+                    "STS12": 69.11,
+                    "STS13": 81.82,
+                    "STS14": 77.07,
+                    "STS15": 86.01,
+                    "STS16": 82.23,
+                    "STS17 (ar-ar)": 9.06,
+                    "STS17 (en-ar)": -3.22,
+                    "STS17 (en-de)": 70.38,
+                    "STS17 (en-en)": 84.9,
+                    "STS17 (en-tr)": 17.17,
+                    "STS17 (es-en)": 60.24,
+                    "STS17 (es-es)": 81.93,
+                    "STS17 (fr-en)": 62.17,
+                    "STS17 (it-en)": 59.11,
+                    "STS17 (ko-ko)": 8.9,
+                    "STS17 (nl-en)": 56.91,
+                    "STS22 (ar)": 37.66,
+                    "STS22 (de)": 50.58,
+                    "STS22 (de-en)": 53.63,
+                    "STS22 (de-fr)": 55.72,
+                    "STS22 (de-pl)": 27.99,
+                    "STS22 (en)": 66.61,
+                    "STS22 (es)": 59.14,
+                    "STS22 (es-en)": 69.99,
+                    "STS22 (es-it)": 60.94,
+                    "STS22 (fr)": 79.43,
+                    "STS22 (fr-pl)": 61.98,
+                    "STS22 (it)": 67.14,
+                    "STS22 (pl)": 33.74,
+                    "STS22 (pl-en)": 60.18,
+                    "STS22 (ru)": 32.69,
+                    "STS22 (tr)": 55.79,
+                    "STS22 (zh)": 31.16,
+                    "STS22 (zh-en)": 28.85,
+                    "STSBenchmark": 77.65
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "mistral-7b-instruct-v0.2"
+                    "Model": "gtr-t5-xl",
+                    "SummEval": 30.21
                 }
             ]
         },
-        "InstructionRetrieval": {
-            "p-MRR": [
+        "MultilabelClassification": {
+            "accuracy": [
                 {
-                    "Model": "mistral-7b-instruct-v0.2",
-                    "Core17InstructionRetrieval": 13.03,
-                    "News21InstructionRetrieval": 4.81,
-                    "Robust04InstructionRetrieval": 12.61
+                    "Model": "gtr-t5-xl"
                 }
             ]
-        }
-    },
-    "gtr-t5-large": {
-        "BitextMining": {
-            "f1": [
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
                 {
-                    "Model": "gtr-t5-large"
+                    "Model": "gtr-t5-xl"
                 }
             ]
-        },
-        "Classification": {
-            "accuracy": [
-                {
-                    "Model": "gtr-t5-large",
-                    "AmazonCounterfactualClassification (de)": 59.38,
-                    "AmazonCounterfactualClassification (en)": 70.03,
-                    "AmazonCounterfactualClassification (en-ext)": 69.86,
-                    "AmazonCounterfactualClassification (ja)": 45.87,
-                    "AmazonPolarityClassification": 73.92,
-                    "AmazonReviewsClassification (de)": 33.06,
-                    "AmazonReviewsClassification (en)": 37.21,
-                    "AmazonReviewsClassification (es)": 34.0,
-                    "AmazonReviewsClassification (fr)": 33.48,
-                    "AmazonReviewsClassification (ja)": 21.78,
-                    "AmazonReviewsClassification (zh)": 21.83,
-                    "Banking77Classification": 81.21,
-                    "EmotionClassification": 46.33,
-                    "ImdbClassification": 70.86,
-                    "MTOPDomainClassification (de)": 81.91,
-                    "MTOPDomainClassification (en)": 94.01,
-                    "MTOPDomainClassification (es)": 84.7,
-                    "MTOPDomainClassification (fr)": 82.48,
-                    "MTOPDomainClassification (hi)": 22.11,
-                    "MTOPDomainClassification (th)": 16.36,
-                    "MTOPIntentClassification (de)": 52.13,
-                    "MTOPIntentClassification (en)": 63.86,
-                    "MTOPIntentClassification (es)": 52.62,
-                    "MTOPIntentClassification (fr)": 46.39,
-                    "MTOPIntentClassification (hi)": 3.9,
-                    "MTOPIntentClassification (th)": 5.38,
-                    "MassiveIntentClassification (af)": 41.02,
-                    "MassiveIntentClassification (am)": 2.34,
-                    "MassiveIntentClassification (ar)": 4.87,
-                    "MassiveIntentClassification (az)": 34.92,
-                    "MassiveIntentClassification (bn)": 2.52,
-                    "MassiveIntentClassification (cy)": 35.87,
-                    "MassiveIntentClassification (da)": 45.3,
-                    "MassiveIntentClassification (de)": 51.48,
-                    "MassiveIntentClassification (el)": 10.0,
-                    "MassiveIntentClassification (en)": 70.06,
-                    "MassiveIntentClassification (es)": 53.3,
-                    "MassiveIntentClassification (fa)": 3.59,
-                    "MassiveIntentClassification (fi)": 37.35,
-                    "MassiveIntentClassification (fr)": 54.83,
-                    "MassiveIntentClassification (he)": 2.52,
-                    "MassiveIntentClassification (hi)": 2.88,
-                    "MassiveIntentClassification (hu)": 33.52,
-                    "MassiveIntentClassification (hy)": 3.13,
-                    "MassiveIntentClassification (id)": 40.11,
-                    "MassiveIntentClassification (is)": 34.77,
-                    "MassiveIntentClassification (it)": 51.21,
-                    "MassiveIntentClassification (ja)": 4.75,
-                    "MassiveIntentClassification (jv)": 35.6,
-                    "MassiveIntentClassification (ka)": 2.71,
-                    "MassiveIntentClassification (km)": 5.48,
-                    "MassiveIntentClassification (kn)": 2.44,
-                    "MassiveIntentClassification (ko)": 2.59,
-                    "MassiveIntentClassification (lv)": 38.15,
-                    "MassiveIntentClassification (ml)": 2.67,
-                    "MassiveIntentClassification (mn)": 18.47,
-                    "MassiveIntentClassification (ms)": 35.58,
-                    "MassiveIntentClassification (my)": 4.35,
-                    "MassiveIntentClassification (nb)": 43.78,
-                    "MassiveIntentClassification (nl)": 45.96,
-                    "MassiveIntentClassification (pl)": 39.08,
-                    "MassiveIntentClassification (pt)": 52.27,
-                    "MassiveIntentClassification (ro)": 46.39,
-                    "MassiveIntentClassification (ru)": 16.82,
-                    "MassiveIntentClassification (sl)": 37.3,
-                    "MassiveIntentClassification (sq)": 41.73,
-                    "MassiveIntentClassification (sv)": 43.51,
-                    "MassiveIntentClassification (sw)": 35.97,
-                    "MassiveIntentClassification (ta)": 1.52,
-                    "MassiveIntentClassification (te)": 2.57,
-                    "MassiveIntentClassification (th)": 3.94,
-                    "MassiveIntentClassification (tl)": 41.03,
-                    "MassiveIntentClassification (tr)": 33.75,
-                    "MassiveIntentClassification (ur)": 2.57,
-                    "MassiveIntentClassification (vi)": 25.23,
-                    "MassiveIntentClassification (zh-CN)": 2.41,
-                    "MassiveIntentClassification (zh-TW)": 4.64,
-                    "MassiveScenarioClassification (af)": 51.48,
-                    "MassiveScenarioClassification (am)": 7.74,
-                    "MassiveScenarioClassification (ar)": 12.03,
-                    "MassiveScenarioClassification (az)": 41.77,
-                    "MassiveScenarioClassification (bn)": 8.07,
-                    "MassiveScenarioClassification (cy)": 43.67,
-                    "MassiveScenarioClassification (da)": 54.88,
-                    "MassiveScenarioClassification (de)": 63.63,
-                    "MassiveScenarioClassification (el)": 16.83,
-                    "MassiveScenarioClassification (en)": 75.49,
-                    "MassiveScenarioClassification (es)": 61.48,
-                    "MassiveScenarioClassification (fa)": 6.48,
-                    "MassiveScenarioClassification (fi)": 43.54,
-                    "MassiveScenarioClassification (fr)": 64.06,
-                    "MassiveScenarioClassification (he)": 8.03,
-                    "MassiveScenarioClassification (hi)": 7.5,
-                    "MassiveScenarioClassification (hu)": 42.59,
-                    "MassiveScenarioClassification (hy)": 9.22,
-                    "MassiveScenarioClassification (id)": 48.67,
-                    "MassiveScenarioClassification (is)": 43.87,
-                    "MassiveScenarioClassification (it)": 59.83,
-                    "MassiveScenarioClassification (ja)": 5.62,
-                    "MassiveScenarioClassification (jv)": 42.18,
-                    "MassiveScenarioClassification (ka)": 7.52,
-                    "MassiveScenarioClassification (km)": 9.55,
-                    "MassiveScenarioClassification (kn)": 8.34,
-                    "MassiveScenarioClassification (ko)": 6.11,
-                    "MassiveScenarioClassification (lv)": 43.35,
-                    "MassiveScenarioClassification (ml)": 7.28,
-                    "MassiveScenarioClassification (mn)": 23.94,
-                    "MassiveScenarioClassification (ms)": 45.18,
-                    "MassiveScenarioClassification (my)": 9.33,
-                    "MassiveScenarioClassification (nb)": 52.71,
-                    "MassiveScenarioClassification (nl)": 57.02,
-                    "MassiveScenarioClassification (pl)": 46.79,
-                    "MassiveScenarioClassification (pt)": 59.45,
-                    "MassiveScenarioClassification (ro)": 56.8,
-                    "MassiveScenarioClassification (ru)": 25.85,
-                    "MassiveScenarioClassification (sl)": 42.51,
-                    "MassiveScenarioClassification (sq)": 50.41,
-                    "MassiveScenarioClassification (sv)": 54.16,
-                    "MassiveScenarioClassification (sw)": 43.02,
-                    "MassiveScenarioClassification (ta)": 7.21,
-                    "MassiveScenarioClassification (te)": 6.9,
-                    "MassiveScenarioClassification (th)": 8.7,
-                    "MassiveScenarioClassification (tl)": 51.76,
-                    "MassiveScenarioClassification (tr)": 42.54,
-                    "MassiveScenarioClassification (ur)": 9.32,
-                    "MassiveScenarioClassification (vi)": 31.51,
-                    "MassiveScenarioClassification (zh-CN)": 3.84,
-                    "MassiveScenarioClassification (zh-TW)": 8.16,
-                    "ToxicConversationsClassification": 68.65,
-                    "TweetSentimentExtractionClassification": 54.09
+        }
+    },
+    "st-polish-paraphrase-from-mpnet": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "st-polish-paraphrase-from-mpnet"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "st-polish-paraphrase-from-mpnet",
+                    "AllegroReviews": 34.55,
+                    "CBD": 67.48,
+                    "MassiveIntentClassification (pl)": 65.93,
+                    "MassiveScenarioClassification (pl)": 71.85,
+                    "PAC": 63.25,
+                    "PolEmo2.0-IN": 68.37,
+                    "PolEmo2.0-OUT": 30.99
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "gtr-t5-large",
-                    "ArxivClusteringP2P": 37.5,
-                    "ArxivClusteringS2S": 30.55,
-                    "BiorxivClusteringP2P": 29.59,
-                    "BiorxivClusteringS2S": 25.72,
-                    "MedrxivClusteringP2P": 28.72,
-                    "MedrxivClusteringS2S": 27.39,
-                    "RedditClustering": 61.69,
-                    "RedditClusteringP2P": 61.67,
-                    "StackExchangeClustering": 69.93,
-                    "StackExchangeClusteringP2P": 33.21,
-                    "TwentyNewsgroupsClustering": 51.64
+                    "Model": "st-polish-paraphrase-from-mpnet",
+                    "8TagsClustering": 33.15
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "gtr-t5-large",
-                    "SprintDuplicateQuestions": 95.05,
-                    "TwitterSemEval2015": 76.03,
-                    "TwitterURLCorpus": 84.89
+                    "Model": "st-polish-paraphrase-from-mpnet",
+                    "CDSC-E": 75.06,
+                    "PPC": 93.49,
+                    "PSC": 99.05,
+                    "SICK-E-PL": 80.56
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "gtr-t5-large",
-                    "AskUbuntuDupQuestions": 61.64,
-                    "MindSmallReranking": 31.84,
-                    "SciDocsRR": 76.39,
-                    "StackOverflowDupQuestions": 51.58
+                    "Model": "st-polish-paraphrase-from-mpnet"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "gtr-t5-large",
-                    "ArguAna": 52.09,
-                    "CQADupstackRetrieval": 36.62,
-                    "ClimateFEVER": 26.9,
-                    "DBPedia": 39.55,
-                    "FEVER": 72.66,
-                    "FiQA2018": 42.79,
-                    "HotpotQA": 57.85,
-                    "MSMARCO": 42.73,
-                    "NFCorpus": 32.63,
-                    "NQ": 55.09,
-                    "QuoraRetrieval": 88.47,
-                    "SCIDOCS": 15.51,
-                    "SciFact": 63.42,
-                    "TRECCOVID": 56.68,
-                    "Touche2020": 28.29
+                    "Model": "st-polish-paraphrase-from-mpnet",
+                    "ArguAna-PL": 51.87,
+                    "DBPedia-PL": 24.59,
+                    "FiQA-PL": 22.27,
+                    "HotpotQA-PL": 32.11,
+                    "MSMARCO-PL": 17.91,
+                    "NFCorpus-PL": 24.05,
+                    "NQ-PL": 23.54,
+                    "Quora-PL": 81.49,
+                    "SCIDOCS-PL": 13.23,
+                    "SciFact-PL": 52.51,
+                    "TRECCOVID-PL": 35.23
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "gtr-t5-large",
-                    "BIOSSES": 84.86,
-                    "SICK-R": 73.39,
-                    "STS12": 70.33,
-                    "STS13": 82.19,
-                    "STS14": 77.16,
-                    "STS15": 86.31,
-                    "STS16": 81.85,
-                    "STS17 (ar-ar)": 10.19,
-                    "STS17 (en-ar)": -5.77,
-                    "STS17 (en-de)": 67.43,
-                    "STS17 (en-en)": 83.93,
-                    "STS17 (en-tr)": 8.75,
-                    "STS17 (es-en)": 54.96,
-                    "STS17 (es-es)": 82.74,
-                    "STS17 (fr-en)": 60.5,
-                    "STS17 (it-en)": 46.26,
-                    "STS17 (ko-ko)": 8.96,
-                    "STS17 (nl-en)": 47.48,
-                    "STS22 (ar)": 34.97,
-                    "STS22 (de)": 51.7,
-                    "STS22 (de-en)": 48.76,
-                    "STS22 (de-fr)": 57.5,
-                    "STS22 (de-pl)": 32.76,
-                    "STS22 (en)": 64.3,
-                    "STS22 (es)": 57.49,
-                    "STS22 (es-en)": 67.76,
-                    "STS22 (es-it)": 57.18,
-                    "STS22 (fr)": 78.7,
-                    "STS22 (fr-pl)": 61.98,
-                    "STS22 (it)": 67.67,
-                    "STS22 (pl)": 30.68,
-                    "STS22 (pl-en)": 54.17,
-                    "STS22 (ru)": 15.36,
-                    "STS22 (tr)": 58.12,
-                    "STS22 (zh)": 27.32,
-                    "STS22 (zh-en)": 29.42,
-                    "STSBenchmark": 77.6
+                    "Model": "st-polish-paraphrase-from-mpnet",
+                    "CDSC-R": 88.55,
+                    "SICK-R-PL": 76.18,
+                    "STS22 (pl)": 37.34
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "gtr-t5-large",
-                    "SummEval": 29.5
+                    "Model": "st-polish-paraphrase-from-mpnet"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "st-polish-paraphrase-from-mpnet"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "gtr-t5-large"
+                    "Model": "st-polish-paraphrase-from-mpnet"
                 }
             ]
         }
     },
-    "monobert-large-msmarco": {
+    "xlm-roberta-large": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "monobert-large-msmarco"
+                    "Model": "xlm-roberta-large"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "monobert-large-msmarco"
+                    "Model": "xlm-roberta-large",
+                    "AmazonReviewsClassification (fr)": 26.62,
+                    "MTOPDomainClassification (fr)": 36.77,
+                    "MTOPIntentClassification (fr)": 15.37,
+                    "MasakhaNEWSClassification (fra)": 65.76,
+                    "MassiveIntentClassification (fr)": 15.82,
+                    "MassiveScenarioClassification (fr)": 23.92
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "monobert-large-msmarco"
+                    "Model": "xlm-roberta-large",
+                    "AlloProfClusteringP2P": 56.54,
+                    "AlloProfClusteringS2S": 21.18,
+                    "BlurbsClusteringP2P": 29.84,
+                    "BlurbsClusteringS2S": 7.29,
+                    "HALClusteringS2S": 5.94,
+                    "MLSUMClusteringP2P": 42.67,
+                    "MLSUMClusteringS2S": 18.5,
+                    "MasakhaNEWSClusteringP2P (fra)": 34.02,
+                    "MasakhaNEWSClusteringS2S (fra)": 21.52,
+                    "TenKGnadClusteringP2P": 32.46,
+                    "TenKGnadClusteringS2S": 6.16
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "monobert-large-msmarco"
+                    "Model": "xlm-roberta-large",
+                    "OpusparcusPC (fr)": 83.73,
+                    "PawsXPairClassification (fr)": 53.38
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "monobert-large-msmarco"
+                    "Model": "xlm-roberta-large",
+                    "AlloprofReranking": 28.62,
+                    "SyntecReranking": 49.4
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "monobert-large-msmarco"
+                    "Model": "xlm-roberta-large",
+                    "AlloprofRetrieval": 0.52,
+                    "BSARDRetrieval": 0.0,
+                    "MintakaRetrieval (fr)": 0.9,
+                    "SyntecRetrieval": 6.6,
+                    "XPQARetrieval (fr)": 12.7
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "monobert-large-msmarco"
+                    "Model": "xlm-roberta-large",
+                    "SICKFr": 50.01,
+                    "STS22 (fr)": 55.49,
+                    "STSBenchmarkMultilingualSTS (fr)": 42.32
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "monobert-large-msmarco"
+                    "Model": "xlm-roberta-large",
+                    "SummEvalFr": 28.89
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "xlm-roberta-large"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "monobert-large-msmarco",
-                    "Core17InstructionRetrieval": -0.24,
-                    "News21InstructionRetrieval": -0.8,
-                    "Robust04InstructionRetrieval": -9.36
+                    "Model": "xlm-roberta-large"
                 }
             ]
         }
     },
-    "text-search-curie-001": {
+    "bge-base-zh-v1.5": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "text-search-curie-001"
+                    "Model": "bge-base-zh-v1.5"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "text-search-curie-001"
+                    "Model": "bge-base-zh-v1.5",
+                    "AmazonReviewsClassification (zh)": 40.15,
+                    "IFlyTek": 48.62,
+                    "JDReview": 83.62,
+                    "MassiveIntentClassification (zh-CN)": 67.93,
+                    "MassiveScenarioClassification (zh-CN)": 73.98,
+                    "MultilingualSentiment": 70.67,
+                    "OnlineShopping": 91.26,
+                    "TNews": 51.08,
+                    "Waimai": 85.36
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "text-search-curie-001"
+                    "Model": "bge-base-zh-v1.5",
+                    "CLSClusteringP2P": 39.91,
+                    "CLSClusteringS2S": 37.63,
+                    "ThuNewsClusteringP2P": 58.45,
+                    "ThuNewsClusteringS2S": 54.12
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "text-search-curie-001"
+                    "Model": "bge-base-zh-v1.5",
+                    "Cmnli": 84.1,
+                    "Ocnli": 75.41
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "text-search-curie-001"
+                    "Model": "bge-base-zh-v1.5",
+                    "CMedQAv1": 80.47,
+                    "CMedQAv2": 84.88,
+                    "MMarcoReranking": 29.74,
+                    "T2Reranking": 66.49
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "text-search-curie-001",
-                    "ArguAna": 46.98,
-                    "ClimateFEVER": 19.4,
-                    "FEVER": 75.6,
-                    "FiQA2018": 45.21,
-                    "HotpotQA": 64.8,
-                    "NFCorpus": 38.01,
-                    "QuoraRetrieval": 67.7,
-                    "SCIDOCS": 17.74,
-                    "SciFact": 74.35,
-                    "TRECCOVID": 56.14,
-                    "Touche2020": 30.9
+                    "Model": "bge-base-zh-v1.5",
+                    "CmedqaRetrieval": 41.61,
+                    "CovidRetrieval": 74.7,
+                    "DuRetrieval": 85.07,
+                    "EcomRetrieval": 64.25,
+                    "MMarcoRetrieval": 77.69,
+                    "MedicalRetrieval": 56.51,
+                    "T2Retrieval": 83.71,
+                    "VideoRetrieval": 72.35
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "text-search-curie-001"
+                    "Model": "bge-base-zh-v1.5",
+                    "AFQMC": 42.4,
+                    "ATEC": 48.17,
+                    "BQ": 61.78,
+                    "LCQMC": 74.45,
+                    "PAWSX": 20.4,
+                    "QBQTC": 36.22,
+                    "STS22 (zh)": 68.01,
+                    "STSB": 78.31
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "text-search-curie-001"
+                    "Model": "bge-base-zh-v1.5"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "bge-base-zh-v1.5"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "text-search-curie-001"
+                    "Model": "bge-base-zh-v1.5"
                 }
             ]
         }
     },
-    "flaubert_large_cased": {
+    "dragon-plus-instruct": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "flaubert_large_cased"
+                    "Model": "dragon-plus-instruct"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "flaubert_large_cased",
-                    "AmazonReviewsClassification (fr)": 22.45,
-                    "MTOPDomainClassification (fr)": 24.27,
-                    "MTOPIntentClassification (fr)": 9.79,
-                    "MasakhaNEWSClassification (fra)": 55.64,
-                    "MassiveIntentClassification (fr)": 16.41,
-                    "MassiveScenarioClassification (fr)": 22.72
+                    "Model": "dragon-plus-instruct"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "flaubert_large_cased",
-                    "AlloProfClusteringP2P": 40.85,
-                    "AlloProfClusteringS2S": 21.76,
-                    "HALClusteringS2S": 5.26,
-                    "MLSUMClusteringP2P": 38.09,
-                    "MLSUMClusteringS2S": 18.71,
-                    "MasakhaNEWSClusteringP2P (fra)": 26.43,
-                    "MasakhaNEWSClusteringS2S (fra)": 24.68
+                    "Model": "dragon-plus-instruct"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "flaubert_large_cased",
-                    "OpusparcusPC (fr)": 74.78,
-                    "PawsXPairClassification (fr)": 54.14
+                    "Model": "dragon-plus-instruct"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "flaubert_large_cased",
-                    "AlloprofReranking": 26.29,
-                    "SyntecReranking": 42.8
+                    "Model": "dragon-plus-instruct"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "flaubert_large_cased",
-                    "AlloprofRetrieval": 0.58,
-                    "BSARDRetrieval": 0.0,
-                    "MintakaRetrieval (fr)": 0.26,
-                    "SyntecRetrieval": 1.58,
-                    "XPQARetrieval (fr)": 3.69
+                    "Model": "dragon-plus-instruct",
+                    "ARCChallenge": 8.24,
+                    "AlphaNLI": 25.18,
+                    "HellaSwag": 24.06,
+                    "PIQA": 26.35,
+                    "Quail": 4.2,
+                    "RARbCode": 12.84,
+                    "RARbMath": 36.15,
+                    "SIQA": 1.75,
+                    "SpartQA": 10.82,
+                    "TempReasonL1": 1.54,
+                    "TempReasonL2Fact": 16.11,
+                    "TempReasonL2Pure": 0.57,
+                    "TempReasonL3Fact": 14.81,
+                    "TempReasonL3Pure": 7.46,
+                    "WinoGrande": 60.84
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "flaubert_large_cased",
-                    "SICKFr": 34.6,
-                    "STS22 (fr)": 48.52,
-                    "STSBenchmarkMultilingualSTS (fr)": 15.66
+                    "Model": "dragon-plus-instruct"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "flaubert_large_cased",
-                    "SummEvalFr": 29.25
+                    "Model": "dragon-plus-instruct"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "dragon-plus-instruct"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "flaubert_large_cased"
+                    "Model": "dragon-plus-instruct"
                 }
             ]
         }
     },
-    "OpenSearch-text-hybrid": {
+    "LLM2Vec-Mistral-supervised": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "OpenSearch-text-hybrid"
+                    "Model": "LLM2Vec-Mistral-supervised"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "OpenSearch-text-hybrid",
-                    "AmazonReviewsClassification (zh)": 46.18,
-                    "IFlyTek": 51.8,
-                    "JDReview": 86.02,
-                    "MassiveIntentClassification (zh-CN)": 73.85,
-                    "MassiveScenarioClassification (zh-CN)": 77.13,
-                    "MultilingualSentiment": 76.35,
-                    "OnlineShopping": 93.2,
-                    "TNews": 53.06,
-                    "Waimai": 88.1
+                    "Model": "LLM2Vec-Mistral-supervised",
+                    "AmazonCounterfactualClassification (en)": 77.58,
+                    "AmazonPolarityClassification": 91.12,
+                    "AmazonReviewsClassification (en)": 49.97,
+                    "Banking77Classification": 88.31,
+                    "EmotionClassification": 52.04,
+                    "ImdbClassification": 87.42,
+                    "MTOPDomainClassification (en)": 96.04,
+                    "MTOPIntentClassification (en)": 84.77,
+                    "MassiveIntentClassification (en)": 79.29,
+                    "MassiveScenarioClassification (en)": 81.64,
+                    "ToxicConversationsClassification": 69.26,
+                    "TweetSentimentExtractionClassification": 62.14
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "OpenSearch-text-hybrid",
-                    "CLSClusteringP2P": 41.64,
-                    "CLSClusteringS2S": 40.33,
-                    "ThuNewsClusteringP2P": 69.28,
-                    "ThuNewsClusteringS2S": 63.75
+                    "Model": "LLM2Vec-Mistral-supervised",
+                    "ArxivClusteringP2P": 42.81,
+                    "ArxivClusteringS2S": 44.24,
+                    "BiorxivClusteringP2P": 34.27,
+                    "BiorxivClusteringS2S": 35.53,
+                    "MedrxivClusteringP2P": 31.07,
+                    "MedrxivClusteringS2S": 31.27,
+                    "RedditClustering": 60.24,
+                    "RedditClusteringP2P": 64.12,
+                    "StackExchangeClustering": 70.73,
+                    "StackExchangeClusteringP2P": 34.5,
+                    "TwentyNewsgroupsClustering": 52.18
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "OpenSearch-text-hybrid",
-                    "Cmnli": 90.77,
-                    "Ocnli": 85.44
+                    "Model": "LLM2Vec-Mistral-supervised",
+                    "SprintDuplicateQuestions": 96.82,
+                    "TwitterSemEval2015": 80.6,
+                    "TwitterURLCorpus": 86.56
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "OpenSearch-text-hybrid",
-                    "CMedQAv1": 88.99,
-                    "CMedQAv2": 89.6,
-                    "MMarcoReranking": 28.12,
-                    "T2Reranking": 66.38
+                    "Model": "LLM2Vec-Mistral-supervised",
+                    "AskUbuntuDupQuestions": 63.98,
+                    "MindSmallReranking": 31.5,
+                    "SciDocsRR": 83.8,
+                    "StackOverflowDupQuestions": 54.41
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "OpenSearch-text-hybrid",
-                    "CmedqaRetrieval": 46.56,
-                    "CovidRetrieval": 84.03,
-                    "DuRetrieval": 87.85,
-                    "EcomRetrieval": 68.79,
-                    "MMarcoRetrieval": 79.93,
-                    "MedicalRetrieval": 65.92,
-                    "T2Retrieval": 86.76,
-                    "VideoRetrieval": 75.43
+                    "Model": "LLM2Vec-Mistral-supervised",
+                    "ArguAna": 57.48,
+                    "CQADupstackRetrieval": 48.84,
+                    "ClimateFEVER": 35.19,
+                    "DBPedia": 49.58,
+                    "FEVER": 89.4,
+                    "FiQA2018": 53.11,
+                    "HotpotQA": 74.07,
+                    "MSMARCO": 42.17,
+                    "NFCorpus": 39.33,
+                    "NQ": 61.7,
+                    "QuoraRetrieval": 87.75,
+                    "SCIDOCS": 22.5,
+                    "SciFact": 78.86,
+                    "TRECCOVID": 77.69,
+                    "Touche2020": 22.18
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "OpenSearch-text-hybrid",
-                    "AFQMC": 59.11,
-                    "ATEC": 58.19,
-                    "BQ": 71.07,
-                    "LCQMC": 78.27,
-                    "PAWSX": 44.98,
-                    "QBQTC": 38.69,
-                    "STS22 (zh)": 66.53,
-                    "STSB": 82.8
+                    "Model": "LLM2Vec-Mistral-supervised",
+                    "BIOSSES": 85.24,
+                    "SICK-R": 83.7,
+                    "STS12": 78.8,
+                    "STS13": 86.37,
+                    "STS14": 84.04,
+                    "STS15": 88.99,
+                    "STS16": 87.22,
+                    "STS17 (en-en)": 90.19,
+                    "STS22 (en)": 67.68,
+                    "STSBenchmark": 88.65
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "OpenSearch-text-hybrid"
+                    "Model": "LLM2Vec-Mistral-supervised",
+                    "SummEval": 29.96
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "LLM2Vec-Mistral-supervised"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "OpenSearch-text-hybrid"
+                    "Model": "LLM2Vec-Mistral-supervised"
                 }
             ]
         }
     },
-    "nomic-embed-text-v1.5-64": {
+    "msmarco-bert-co-condensor": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "nomic-embed-text-v1.5-64"
+                    "Model": "msmarco-bert-co-condensor"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "nomic-embed-text-v1.5-64",
-                    "AmazonCounterfactualClassification (en)": 66.85,
-                    "AmazonPolarityClassification": 85.92,
-                    "AmazonReviewsClassification (en)": 41.02,
-                    "Banking77Classification": 80.63,
-                    "EmotionClassification": 40.55,
-                    "ImdbClassification": 76.6,
-                    "MTOPDomainClassification (en)": 86.31,
-                    "MTOPIntentClassification (en)": 62.77,
-                    "MassiveIntentClassification (en)": 64.95,
-                    "MassiveScenarioClassification (en)": 70.38,
-                    "ToxicConversationsClassification": 66.53,
-                    "TweetSentimentExtractionClassification": 55.23
+                    "Model": "msmarco-bert-co-condensor",
+                    "AmazonCounterfactualClassification (en)": 64.06,
+                    "AmazonPolarityClassification": 66.88,
+                    "AmazonReviewsClassification (en)": 34.85,
+                    "Banking77Classification": 82.35,
+                    "EmotionClassification": 41.91,
+                    "ImdbClassification": 60.17,
+                    "MTOPDomainClassification (en)": 91.34,
+                    "MTOPIntentClassification (en)": 71.07,
+                    "MassiveIntentClassification (en)": 70.4,
+                    "MassiveScenarioClassification (en)": 73.73,
+                    "ToxicConversationsClassification": 64.01,
+                    "TweetSentimentExtractionClassification": 55.74
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "nomic-embed-text-v1.5-64",
-                    "ArxivClusteringP2P": 41.8,
-                    "ArxivClusteringS2S": 32.41,
-                    "BiorxivClusteringP2P": 34.81,
-                    "BiorxivClusteringS2S": 28.59,
-                    "MedrxivClusteringP2P": 32.73,
-                    "MedrxivClusteringS2S": 29.91,
-                    "RedditClustering": 50.31,
-                    "RedditClusteringP2P": 56.57,
-                    "StackExchangeClustering": 57.99,
-                    "StackExchangeClusteringP2P": 33.64,
-                    "TwentyNewsgroupsClustering": 44.61
+                    "Model": "msmarco-bert-co-condensor",
+                    "ArxivClusteringP2P": 36.94,
+                    "ArxivClusteringS2S": 29.03,
+                    "BiorxivClusteringP2P": 32.35,
+                    "BiorxivClusteringS2S": 28.16,
+                    "MedrxivClusteringP2P": 30.23,
+                    "MedrxivClusteringS2S": 27.01,
+                    "RedditClustering": 48.04,
+                    "RedditClusteringP2P": 53.53,
+                    "StackExchangeClustering": 59.54,
+                    "StackExchangeClusteringP2P": 30.48,
+                    "TwentyNewsgroupsClustering": 38.68
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "nomic-embed-text-v1.5-64",
-                    "SprintDuplicateQuestions": 90.06,
-                    "TwitterSemEval2015": 71.68,
-                    "TwitterURLCorpus": 85.03
+                    "Model": "msmarco-bert-co-condensor",
+                    "SprintDuplicateQuestions": 96.09,
+                    "TwitterSemEval2015": 65.95,
+                    "TwitterURLCorpus": 83.17
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "nomic-embed-text-v1.5-64",
-                    "AskUbuntuDupQuestions": 60.79,
-                    "MindSmallReranking": 29.7,
-                    "SciDocsRR": 75.79,
-                    "StackOverflowDupQuestions": 47.42
+                    "Model": "msmarco-bert-co-condensor",
+                    "AskUbuntuDupQuestions": 58.99,
+                    "MindSmallReranking": 27.13,
+                    "SciDocsRR": 72.78,
+                    "StackOverflowDupQuestions": 48.48
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "nomic-embed-text-v1.5-64",
-                    "ArguAna": 37.16,
-                    "CQADupstackRetrieval": 28.72,
-                    "ClimateFEVER": 31.48,
-                    "DBPedia": 28.19,
-                    "FEVER": 70.24,
-                    "FiQA2018": 25.78,
-                    "HotpotQA": 43.07,
-                    "MSMARCO": 35.95,
-                    "NFCorpus": 26.03,
-                    "NQ": 45.54,
-                    "QuoraRetrieval": 85.83,
-                    "SCIDOCS": 12.09,
-                    "SciFact": 52.71,
-                    "TRECCOVID": 67.83,
-                    "Touche2020": 23.13
+                    "Model": "msmarco-bert-co-condensor",
+                    "ArguAna": 45.15,
+                    "CQADupstackRetrieval": 27.72,
+                    "ClimateFEVER": 16.96,
+                    "DBPedia": 27.86,
+                    "FEVER": 45.68,
+                    "FiQA2018": 15.62,
+                    "HotpotQA": 35.61,
+                    "MSMARCO": 29.57,
+                    "NFCorpus": 22.29,
+                    "NQ": 29.85,
+                    "QuoraRetrieval": 86.51,
+                    "SCIDOCS": 10.13,
+                    "SciFact": 52.31,
+                    "TRECCOVID": 40.54,
+                    "Touche2020": 8.57
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "nomic-embed-text-v1.5-64",
-                    "BIOSSES": 77.18,
-                    "SICK-R": 78.76,
-                    "STS12": 77.3,
-                    "STS13": 84.18,
-                    "STS14": 79.37,
-                    "STS15": 84.69,
-                    "STS16": 83.36,
-                    "STS17 (en-en)": 85.73,
-                    "STS22 (en)": 63.83,
-                    "STSBenchmark": 83.46
+                    "Model": "msmarco-bert-co-condensor",
+                    "BIOSSES": 77.32,
+                    "SICK-R": 72.0,
+                    "STS12": 68.19,
+                    "STS13": 80.4,
+                    "STS14": 74.02,
+                    "STS15": 82.57,
+                    "STS16": 79.78,
+                    "STS17 (en-en)": 85.94,
+                    "STS22 (en)": 67.54,
+                    "STSBenchmark": 76.97
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "nomic-embed-text-v1.5-64",
-                    "SummEval": 28.41
+                    "Model": "msmarco-bert-co-condensor",
+                    "SummEval": 29.5
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "msmarco-bert-co-condensor"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "nomic-embed-text-v1.5-64"
+                    "Model": "msmarco-bert-co-condensor"
                 }
             ]
         }
     },
-    "sentence-camembert-large": {
+    "luotuo-bert-medium": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "sentence-camembert-large"
+                    "Model": "luotuo-bert-medium"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "sentence-camembert-large",
-                    "AmazonReviewsClassification (fr)": 37.97,
-                    "MTOPDomainClassification (fr)": 85.74,
-                    "MTOPIntentClassification (fr)": 58.62,
-                    "MasakhaNEWSClassification (fra)": 80.62,
-                    "MassiveIntentClassification (fr)": 62.65,
-                    "MassiveScenarioClassification (fr)": 69.29
+                    "Model": "luotuo-bert-medium",
+                    "AmazonReviewsClassification (zh)": 34.46,
+                    "IFlyTek": 41.75,
+                    "JDReview": 79.68,
+                    "MassiveIntentClassification (zh-CN)": 57.47,
+                    "MassiveScenarioClassification (zh-CN)": 65.32,
+                    "MultilingualSentiment": 61.21,
+                    "OnlineShopping": 84.3,
+                    "TNews": 45.22,
+                    "Waimai": 79.57
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "sentence-camembert-large",
-                    "AlloProfClusteringP2P": 62.69,
-                    "AlloProfClusteringS2S": 42.06,
-                    "HALClusteringS2S": 23.9,
-                    "MLSUMClusteringP2P": 42.04,
-                    "MLSUMClusteringS2S": 32.29,
-                    "MasakhaNEWSClusteringP2P (fra)": 54.51,
-                    "MasakhaNEWSClusteringS2S (fra)": 44.73
+                    "Model": "luotuo-bert-medium",
+                    "CLSClusteringP2P": 37.01,
+                    "CLSClusteringS2S": 33.46,
+                    "ThuNewsClusteringP2P": 58.83,
+                    "ThuNewsClusteringS2S": 48.26
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "sentence-camembert-large",
-                    "OpusparcusPC (fr)": 94.63,
-                    "PawsXPairClassification (fr)": 59.59
+                    "Model": "luotuo-bert-medium",
+                    "Cmnli": 72.55,
+                    "Ocnli": 60.7
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "sentence-camembert-large",
-                    "AlloprofReranking": 57.62,
-                    "SyntecReranking": 88.15
+                    "Model": "luotuo-bert-medium",
+                    "CMedQAv1": 57.82,
+                    "CMedQAv2": 58.88,
+                    "MMarcoReranking": 14.55,
+                    "T2Reranking": 65.76
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "sentence-camembert-large",
-                    "AlloprofRetrieval": 31.62,
-                    "BSARDRetrieval": 0.0,
-                    "MintakaRetrieval (fr)": 21.87,
-                    "SyntecRetrieval": 81.11,
-                    "XPQARetrieval (fr)": 65.62
+                    "Model": "luotuo-bert-medium",
+                    "CmedqaRetrieval": 18.04,
+                    "CovidRetrieval": 55.48,
+                    "DuRetrieval": 59.36,
+                    "EcomRetrieval": 40.48,
+                    "MMarcoRetrieval": 55.31,
+                    "MedicalRetrieval": 29.8,
+                    "T2Retrieval": 58.67,
+                    "VideoRetrieval": 38.04
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "sentence-camembert-large",
-                    "SICKFr": 77.7,
-                    "STS22 (fr)": 81.73,
-                    "STSBenchmarkMultilingualSTS (fr)": 85.79
+                    "Model": "luotuo-bert-medium",
+                    "AFQMC": 22.24,
+                    "ATEC": 30.84,
+                    "BQ": 43.33,
+                    "LCQMC": 66.74,
+                    "PAWSX": 12.31,
+                    "QBQTC": 27.2,
+                    "STS22 (zh)": 66.4,
+                    "STSB": 73.22
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "sentence-camembert-large",
-                    "SummEvalFr": 30.88
+                    "Model": "luotuo-bert-medium"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "luotuo-bert-medium"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "sentence-camembert-large"
+                    "Model": "luotuo-bert-medium"
                 }
             ]
         }
     },
-    "LLM2Vec-Meta-Llama-3-supervised": {
+    "multilingual-e5-base": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "LLM2Vec-Meta-Llama-3-supervised"
+                    "Model": "multilingual-e5-base",
+                    "BornholmBitextMining (dan-Latn)": 33.22,
+                    "BornholmBitextMining": 46.4,
+                    "Tatoeba (kzj-Latn_eng-Latn)": 6.26,
+                    "Tatoeba (ina-Latn_eng-Latn)": 86.11,
+                    "Tatoeba (bre-Latn_eng-Latn)": 5.44,
+                    "Tatoeba (kab-Latn_eng-Latn)": 21.77,
+                    "Tatoeba (ind-Latn_eng-Latn)": 90.26,
+                    "Tatoeba (mkd-Cyrl_eng-Latn)": 73.76,
+                    "Tatoeba (yue-Hant_eng-Latn)": 80.66,
+                    "Tatoeba (amh-Ethi_eng-Latn)": 74.93,
+                    "Tatoeba (ceb-Latn_eng-Latn)": 45.46,
+                    "Tatoeba (lit-Latn_eng-Latn)": 75.53,
+                    "Tatoeba (nds-Latn_eng-Latn)": 53.86,
+                    "Tatoeba (kur-Latn_eng-Latn)": 52.96,
+                    "Tatoeba (bel-Cyrl_eng-Latn)": 86.7,
+                    "Tatoeba (ile-Latn_eng-Latn)": 72.56,
+                    "Tatoeba (oci-Latn_eng-Latn)": 35.79,
+                    "Tatoeba (heb-Hebr_eng-Latn)": 74.26,
+                    "Tatoeba (mhr-Cyrl_eng-Latn)": 5.52,
+                    "Tatoeba (afr-Latn_eng-Latn)": 87.04,
+                    "Tatoeba (uig-Arab_eng-Latn)": 62.97,
+                    "Tatoeba (mar-Deva_eng-Latn)": 86.62,
+                    "Tatoeba (fry-Latn_eng-Latn)": 50.82,
+                    "Tatoeba (tat-Cyrl_eng-Latn)": 66.92,
+                    "Tatoeba (khm-Khmr_eng-Latn)": 47.27,
+                    "Tatoeba (dtp-Latn_eng-Latn)": 5.13,
+                    "Tatoeba (ben-Beng_eng-Latn)": 81.05,
+                    "Tatoeba (ido-Latn_eng-Latn)": 74.41,
+                    "Tatoeba (cha-Latn_eng-Latn)": 16.95,
+                    "Tatoeba (zsm-Latn_eng-Latn)": 92.45,
+                    "Tatoeba (pes-Arab_eng-Latn)": 87.18,
+                    "Tatoeba (hye-Armn_eng-Latn)": 85.85,
+                    "Tatoeba (cat-Latn_eng-Latn)": 84.09,
+                    "Tatoeba (cym-Latn_eng-Latn)": 65.69,
+                    "Tatoeba (aze-Latn_eng-Latn)": 84.71,
+                    "Tatoeba (yid-Hebr_eng-Latn)": 63.2,
+                    "Tatoeba (swg-Latn_eng-Latn)": 42.33,
+                    "Tatoeba (war-Latn_eng-Latn)": 47.18,
+                    "Tatoeba (swe-Latn_eng-Latn)": 91.33,
+                    "Tatoeba (slk-Latn_eng-Latn)": 86.42,
+                    "Tatoeba (gla-Latn_eng-Latn)": 43.08,
+                    "Tatoeba (xho-Latn_eng-Latn)": 73.24,
+                    "Tatoeba (dan-Latn_eng-Latn)": 91.23,
+                    "Tatoeba (ara-Arab_eng-Latn)": 82.86,
+                    "Tatoeba (ast-Latn_eng-Latn)": 74.36,
+                    "Tatoeba (hrv-Latn_eng-Latn)": 92.5,
+                    "Tatoeba (nob-Latn_eng-Latn)": 95.9,
+                    "Tatoeba (eus-Latn_eng-Latn)": 56.26,
+                    "Tatoeba (kaz-Cyrl_eng-Latn)": 75.56,
+                    "Tatoeba (tuk-Latn_eng-Latn)": 19.67,
+                    "Tatoeba (pam-Latn_eng-Latn)": 6.92,
+                    "Tatoeba (gsw-Latn_eng-Latn)": 43.53,
+                    "Tatoeba (slv-Latn_eng-Latn)": 81.93,
+                    "Tatoeba (dsb-Latn_eng-Latn)": 34.36,
+                    "Tatoeba (cor-Latn_eng-Latn)": 4.38,
+                    "Tatoeba (ces-Latn_eng-Latn)": 88.75,
+                    "Tatoeba (tam-Taml_eng-Latn)": 85.12,
+                    "Tatoeba (glg-Latn_eng-Latn)": 82.69,
+                    "Tatoeba (bul-Cyrl_eng-Latn)": 88.95,
+                    "Tatoeba (deu-Latn_eng-Latn)": 97.07,
+                    "Tatoeba (fin-Latn_eng-Latn)": 86.15,
+                    "Tatoeba (csb-Latn_eng-Latn)": 24.29,
+                    "Tatoeba (urd-Arab_eng-Latn)": 86.2,
+                    "Tatoeba (est-Latn_eng-Latn)": 70.64,
+                    "Tatoeba (wuu-Hans_eng-Latn)": 78.65,
+                    "Tatoeba (tha-Thai_eng-Latn)": 94.22,
+                    "Tatoeba (spa-Latn_eng-Latn)": 96.97,
+                    "Tatoeba (ukr-Cyrl_eng-Latn)": 88.29,
+                    "Tatoeba (awa-Deva_eng-Latn)": 68.39,
+                    "Tatoeba (mal-Mlym_eng-Latn)": 96.72,
+                    "Tatoeba (cbk-Latn_eng-Latn)": 60.66,
+                    "Tatoeba (hsb-Latn_eng-Latn)": 40.36,
+                    "Tatoeba (tzl-Latn_eng-Latn)": 34.44,
+                    "Tatoeba (gle-Latn_eng-Latn)": 58.62,
+                    "Tatoeba (orv-Cyrl_eng-Latn)": 16.0,
+                    "Tatoeba (isl-Latn_eng-Latn)": 76.9,
+                    "Tatoeba (jav-Latn_eng-Latn)": 61.25,
+                    "Tatoeba (fao-Latn_eng-Latn)": 64.72,
+                    "Tatoeba (pol-Latn_eng-Latn)": 94.57,
+                    "Tatoeba (max-Deva_eng-Latn)": 52.4,
+                    "Tatoeba (bos-Latn_eng-Latn)": 88.86,
+                    "Tatoeba (hun-Latn_eng-Latn)": 84.41,
+                    "Tatoeba (rus-Cyrl_eng-Latn)": 91.78,
+                    "Tatoeba (arq-Arab_eng-Latn)": 26.61,
+                    "Tatoeba (kor-Hang_eng-Latn)": 83.37,
+                    "Tatoeba (uzb-Latn_eng-Latn)": 62.63,
+                    "Tatoeba (pms-Latn_eng-Latn)": 44.61,
+                    "Tatoeba (ell-Grek_eng-Latn)": 89.96,
+                    "Tatoeba (swh-Latn_eng-Latn)": 66.81,
+                    "Tatoeba (epo-Latn_eng-Latn)": 92.07,
+                    "Tatoeba (jpn-Jpan_eng-Latn)": 90.3,
+                    "Tatoeba (tel-Telu_eng-Latn)": 88.49,
+                    "Tatoeba (srp-Cyrl_eng-Latn)": 89.08,
+                    "Tatoeba (nov-Latn_eng-Latn)": 66.96,
+                    "Tatoeba (cmn-Hans_eng-Latn)": 93.35,
+                    "Tatoeba (tgl-Latn_eng-Latn)": 83.78,
+                    "Tatoeba (ber-Tfng_eng-Latn)": 23.59,
+                    "Tatoeba (sqi-Latn_eng-Latn)": 90.06,
+                    "Tatoeba (ang-Latn_eng-Latn)": 29.87,
+                    "Tatoeba (ita-Latn_eng-Latn)": 90.61,
+                    "Tatoeba (por-Latn_eng-Latn)": 92.74,
+                    "Tatoeba (mon-Cyrl_eng-Latn)": 78.37,
+                    "Tatoeba (fra-Latn_eng-Latn)": 92.76,
+                    "Tatoeba (lat-Latn_eng-Latn)": 39.62,
+                    "Tatoeba (nno-Latn_eng-Latn)": 82.67,
+                    "Tatoeba (arz-Arab_eng-Latn)": 66.79,
+                    "Tatoeba (hin-Deva_eng-Latn)": 93.13,
+                    "Tatoeba (nld-Latn_eng-Latn)": 93.2,
+                    "Tatoeba (kat-Geor_eng-Latn)": 77.83,
+                    "Tatoeba (lfn-Latn_eng-Latn)": 52.85,
+                    "Tatoeba (lvs-Latn_eng-Latn)": 76.76,
+                    "Tatoeba (tur-Latn_eng-Latn)": 92.54,
+                    "Tatoeba (ron-Latn_eng-Latn)": 91.27,
+                    "Tatoeba (vie-Latn_eng-Latn)": 94.55
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "LLM2Vec-Meta-Llama-3-supervised",
-                    "AmazonCounterfactualClassification (en)": 79.94,
-                    "AmazonPolarityClassification": 86.07,
-                    "AmazonReviewsClassification (en)": 46.84,
-                    "Banking77Classification": 88.05,
-                    "EmotionClassification": 51.2,
-                    "ImdbClassification": 82.94,
-                    "MTOPDomainClassification (en)": 96.14,
-                    "MTOPIntentClassification (en)": 86.11,
-                    "MassiveIntentClassification (en)": 79.8,
-                    "MassiveScenarioClassification (en)": 81.52,
-                    "ToxicConversationsClassification": 70.59,
-                    "TweetSentimentExtractionClassification": 61.9
+                    "Model": "multilingual-e5-base",
+                    "AllegroReviews (pol-Latn)": 40.78,
+                    "AllegroReviews": 40.85,
+                    "AmazonCounterfactualClassification (en-ext)": 76.91,
+                    "AmazonCounterfactualClassification (en)": 77.36,
+                    "AmazonCounterfactualClassification (deu-Latn)": 70.81,
+                    "AmazonCounterfactualClassification (jpn-Jpan)": 72.02,
+                    "AmazonPolarityClassification": 91.76,
+                    "AmazonReviewsClassification (en)": 47.54,
+                    "AmazonReviewsClassification (deu-Latn)": 44.37,
+                    "AmazonReviewsClassification (spa-Latn)": 43.38,
+                    "AmazonReviewsClassification (fra-Latn)": 41.55,
+                    "AmazonReviewsClassification (jpn-Jpan)": 39.57,
+                    "AmazonReviewsClassification (cmn-Hans)": 38.34,
+                    "AmazonReviewsClassification (fr)": 40.94,
+                    "AngryTweetsClassification (dan-Latn)": 56.28,
+                    "AngryTweetsClassification": 54.65,
+                    "Banking77Classification": 73.53,
+                    "CBD (pol-Latn)": 62.6,
+                    "CBD": 62.66,
+                    "DKHateClassification": 63.53,
+                    "DanishPoliticalCommentsClassification (dan-Latn)": 36.41,
+                    "DanishPoliticalCommentsClassification": 36.69,
+                    "EmotionClassification": 45.68,
+                    "GeoreviewClassification (rus-Cyrl)": 46.05,
+                    "HeadlineClassification (rus-Cyrl)": 75.64,
+                    "IFlyTek (cmn-Hans)": 40.81,
+                    "IFlyTek": 44.93,
+                    "ImdbClassification": 84.29,
+                    "InappropriatenessClassification (rus-Cyrl)": 58.78,
+                    "JDReview (cmn-Hans)": 75.72,
+                    "JDReview": 76.21,
+                    "KinopoiskClassification (rus-Cyrl)": 50.89,
+                    "LccSentimentClassification (dan-Latn)": 60.13,
+                    "LccSentimentClassification": 59.67,
+                    "MTOPDomainClassification (en)": 90.9,
+                    "MTOPDomainClassification (deu-Latn)": 87.94,
+                    "MTOPDomainClassification (spa-Latn)": 85.96,
+                    "MTOPDomainClassification (fra-Latn)": 82.88,
+                    "MTOPDomainClassification (hin-Deva)": 83.92,
+                    "MTOPDomainClassification (tha-Thai)": 83.94,
+                    "MTOPDomainClassification (fr)": 84.79,
+                    "MTOPIntentClassification (en)": 61.6,
+                    "MTOPIntentClassification (deu-Latn)": 61.05,
+                    "MTOPIntentClassification (spa-Latn)": 55.36,
+                    "MTOPIntentClassification (fra-Latn)": 52.23,
+                    "MTOPIntentClassification (hin-Deva)": 53.93,
+                    "MTOPIntentClassification (tha-Thai)": 58.69,
+                    "MTOPIntentClassification (fr)": 55.51,
+                    "MasakhaNEWSClassification (amh-Ethi)": 83.8,
+                    "MasakhaNEWSClassification (eng)": 76.49,
+                    "MasakhaNEWSClassification (fra-Latn)": 76.35,
+                    "MasakhaNEWSClassification (hau-Latn)": 74.63,
+                    "MasakhaNEWSClassification (ibo-Latn)": 64.59,
+                    "MasakhaNEWSClassification (lin-Latn)": 70.57,
+                    "MasakhaNEWSClassification (lug-Latn)": 68.12,
+                    "MasakhaNEWSClassification (orm-Ethi)": 71.75,
+                    "MasakhaNEWSClassification (pcm-Latn)": 91.05,
+                    "MasakhaNEWSClassification (run-Latn)": 73.35,
+                    "MasakhaNEWSClassification (sna-Latn)": 84.17,
+                    "MasakhaNEWSClassification (som-Latn)": 60.1,
+                    "MasakhaNEWSClassification (swa-Latn)": 70.74,
+                    "MasakhaNEWSClassification (tir-Ethi)": 67.1,
+                    "MasakhaNEWSClassification (xho-Latn)": 76.03,
+                    "MasakhaNEWSClassification (yor-Latn)": 72.75,
+                    "MasakhaNEWSClassification (fra)": 79.69,
+                    "MassiveIntentClassification (tha-Thai)": 59.63,
+                    "MassiveIntentClassification (tam-Taml)": 48.93,
+                    "MassiveIntentClassification (fin-Latn)": 58.91,
+                    "MassiveIntentClassification (rus-Cyrl)": 62.78,
+                    "MassiveIntentClassification (afr-Latn)": 49.82,
+                    "MassiveIntentClassification (heb-Hebr)": 55.3,
+                    "MassiveIntentClassification (sqi-Latn)": 51.07,
+                    "MassiveIntentClassification (por-Latn)": 62.12,
+                    "MassiveIntentClassification (hye-Armn)": 48.77,
+                    "MassiveIntentClassification (cym-Latn)": 37.05,
+                    "MassiveIntentClassification (deu-Latn)": 59.82,
+                    "MassiveIntentClassification (fas-Arab)": 59.51,
+                    "MassiveIntentClassification (hun-Latn)": 57.69,
+                    "MassiveIntentClassification (urd-Arab)": 51.3,
+                    "MassiveIntentClassification (cmo-Hant)": 56.4,
+                    "MassiveIntentClassification (khm-Khmr)": 32.14,
+                    "MassiveIntentClassification (tel-Telu)": 50.09,
+                    "MassiveIntentClassification (vie-Latn)": 59.61,
+                    "MassiveIntentClassification (kan-Knda)": 48.63,
+                    "MassiveIntentClassification (ara-Arab)": 50.2,
+                    "MassiveIntentClassification (mya-Mymr)": 46.67,
+                    "MassiveIntentClassification (slv-Latn)": 53.84,
+                    "MassiveIntentClassification (jpn-Jpan)": 62.3,
+                    "MassiveIntentClassification (mon-Cyrl)": 46.8,
+                    "MassiveIntentClassification (jav-Latn)": 43.23,
+                    "MassiveIntentClassification (lav-Latn)": 51.17,
+                    "MassiveIntentClassification (ron-Latn)": 56.83,
+                    "MassiveIntentClassification (dan-Latn)": 60.69,
+                    "MassiveIntentClassification (nob-Latn)": 60.06,
+                    "MassiveIntentClassification (tgl-Latn)": 48.99,
+                    "MassiveIntentClassification (aze-Latn)": 51.36,
+                    "MassiveIntentClassification (ind-Latn)": 58.7,
+                    "MassiveIntentClassification (amh-Ethi)": 42.4,
+                    "MassiveIntentClassification (ben-Beng)": 51.69,
+                    "MassiveIntentClassification (ell-Grek)": 58.07,
+                    "MassiveIntentClassification (hin-Deva)": 56.75,
+                    "MassiveIntentClassification (nld-Latn)": 61.23,
+                    "MassiveIntentClassification (pol-Latn)": 60.98,
+                    "MassiveIntentClassification (swe-Latn)": 62.43,
+                    "MassiveIntentClassification (isl-Latn)": 44.52,
+                    "MassiveIntentClassification (mal-Mlym)": 53.75,
+                    "MassiveIntentClassification (msa-Latn)": 52.84,
+                    "MassiveIntentClassification (kat-Geor)": 37.56,
+                    "MassiveIntentClassification (tur-Latn)": 60.69,
+                    "MassiveIntentClassification (kor-Kore)": 59.97,
+                    "MassiveIntentClassification (ita-Latn)": 61.29,
+                    "MassiveIntentClassification (cmo-Hans)": 63.22,
+                    "MassiveIntentClassification (en)": 65.71,
+                    "MassiveIntentClassification (fra-Latn)": 61.32,
+                    "MassiveIntentClassification (swa-Latn)": 45.24,
+                    "MassiveIntentClassification (spa-Latn)": 61.13,
+                    "MassiveIntentClassification (da)": 60.16,
+                    "MassiveIntentClassification (nb)": 59.83,
+                    "MassiveIntentClassification (sv)": 61.78,
+                    "MassiveIntentClassification (pl)": 61.04,
+                    "MassiveScenarioClassification (ind-Latn)": 63.6,
+                    "MassiveScenarioClassification (tha-Thai)": 67.37,
+                    "MassiveScenarioClassification (cmo-Hans)": 70.24,
+                    "MassiveScenarioClassification (ben-Beng)": 57.0,
+                    "MassiveScenarioClassification (kan-Knda)": 53.49,
+                    "MassiveScenarioClassification (tel-Telu)": 54.24,
+                    "MassiveScenarioClassification (aze-Latn)": 55.15,
+                    "MassiveScenarioClassification (ell-Grek)": 65.38,
+                    "MassiveScenarioClassification (swa-Latn)": 52.64,
+                    "MassiveScenarioClassification (hin-Deva)": 62.91,
+                    "MassiveScenarioClassification (tur-Latn)": 65.18,
+                    "MassiveScenarioClassification (dan-Latn)": 67.97,
+                    "MassiveScenarioClassification (msa-Latn)": 58.35,
+                    "MassiveScenarioClassification (mya-Mymr)": 50.77,
+                    "MassiveScenarioClassification (mon-Cyrl)": 51.87,
+                    "MassiveScenarioClassification (tgl-Latn)": 54.36,
+                    "MassiveScenarioClassification (cmo-Hant)": 63.73,
+                    "MassiveScenarioClassification (ara-Arab)": 58.0,
+                    "MassiveScenarioClassification (slv-Latn)": 58.3,
+                    "MassiveScenarioClassification (spa-Latn)": 66.47,
+                    "MassiveScenarioClassification (urd-Arab)": 56.74,
+                    "MassiveScenarioClassification (fin-Latn)": 64.94,
+                    "MassiveScenarioClassification (tam-Taml)": 53.86,
+                    "MassiveScenarioClassification (ron-Latn)": 63.5,
+                    "MassiveScenarioClassification (hye-Armn)": 53.63,
+                    "MassiveScenarioClassification (vie-Latn)": 66.35,
+                    "MassiveScenarioClassification (deu-Latn)": 68.4,
+                    "MassiveScenarioClassification (afr-Latn)": 58.95,
+                    "MassiveScenarioClassification (en)": 71.57,
+                    "MassiveScenarioClassification (fra-Latn)": 67.37,
+                    "MassiveScenarioClassification (jpn-Jpan)": 69.89,
+                    "MassiveScenarioClassification (nld-Latn)": 68.62,
+                    "MassiveScenarioClassification (cym-Latn)": 43.84,
+                    "MassiveScenarioClassification (heb-Hebr)": 62.53,
+                    "MassiveScenarioClassification (pol-Latn)": 66.12,
+                    "MassiveScenarioClassification (fas-Arab)": 63.92,
+                    "MassiveScenarioClassification (lav-Latn)": 56.42,
+                    "MassiveScenarioClassification (por-Latn)": 65.49,
+                    "MassiveScenarioClassification (rus-Cyrl)": 68.21,
+                    "MassiveScenarioClassification (mal-Mlym)": 59.89,
+                    "MassiveScenarioClassification (hun-Latn)": 65.75,
+                    "MassiveScenarioClassification (nob-Latn)": 66.57,
+                    "MassiveScenarioClassification (kor-Kore)": 67.9,
+                    "MassiveScenarioClassification (isl-Latn)": 53.28,
+                    "MassiveScenarioClassification (khm-Khmr)": 38.45,
+                    "MassiveScenarioClassification (sqi-Latn)": 57.92,
+                    "MassiveScenarioClassification (jav-Latn)": 51.94,
+                    "MassiveScenarioClassification (amh-Ethi)": 50.33,
+                    "MassiveScenarioClassification (ita-Latn)": 66.17,
+                    "MassiveScenarioClassification (kat-Geor)": 43.38,
+                    "MassiveScenarioClassification (swe-Latn)": 69.35,
+                    "MassiveScenarioClassification (da)": 67.46,
+                    "MassiveScenarioClassification (nb)": 66.18,
+                    "MassiveScenarioClassification (sv)": 69.15,
+                    "MassiveScenarioClassification (pl)": 66.11,
+                    "MultilingualSentiment (cmn-Hans)": 67.56,
+                    "MultilingualSentiment": 65.28,
+                    "NoRecClassification (nob-Latn)": 53.74,
+                    "NoRecClassification": 57.58,
+                    "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 75.85,
+                    "NordicLangClassification": 75.94,
+                    "NorwegianParliament": 59.94,
+                    "OnlineShopping (cmn-Hans)": 88.66,
+                    "OnlineShopping": 88.4,
+                    "PAC (pol-Latn)": 70.87,
+                    "PAC": 70.87,
+                    "PolEmo2.0-IN (pol-Latn)": 67.59,
+                    "PolEmo2.0-IN": 67.66,
+                    "PolEmo2.0-OUT (pol-Latn)": 43.93,
+                    "PolEmo2.0-OUT": 43.91,
+                    "RuReviewsClassification (rus-Cyrl)": 62.99,
+                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 56.28,
+                    "RuSciBenchOECDClassification (rus-Cyrl)": 42.69,
+                    "ScalaDaClassification": 50.79,
+                    "ScalaNbClassification": 50.32,
+                    "TNews (cmn-Hans)": 47.52,
+                    "TNews": 47.06,
+                    "ToxicConversationsClassification": 64.33,
+                    "TweetSentimentExtractionClassification": 62.8,
+                    "Waimai (cmn-Hans)": 85.98,
+                    "Waimai": 84.42
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "LLM2Vec-Meta-Llama-3-supervised",
-                    "ArxivClusteringP2P": 44.27,
-                    "ArxivClusteringS2S": 46.85,
-                    "BiorxivClusteringP2P": 32.35,
-                    "BiorxivClusteringS2S": 36.7,
-                    "MedrxivClusteringP2P": 30.71,
-                    "MedrxivClusteringS2S": 32.96,
-                    "RedditClustering": 61.72,
-                    "RedditClusteringP2P": 63.98,
-                    "StackExchangeClustering": 72.74,
-                    "StackExchangeClusteringP2P": 32.26,
-                    "TwentyNewsgroupsClustering": 56.41
+                    "Model": "multilingual-e5-base",
+                    "8TagsClustering": 24.97,
+                    "AlloProfClusteringP2P": 62.09,
+                    "AlloProfClusteringS2S": 32.98,
+                    "ArxivClusteringP2P": 43.35,
+                    "ArxivClusteringS2S": 36.0,
+                    "BiorxivClusteringP2P": 37.55,
+                    "BiorxivClusteringS2S": 30.33,
+                    "CLSClusteringP2P": 32.41,
+                    "CLSClusteringS2S": 36.99,
+                    "GeoreviewClusteringP2P (rus-Cyrl)": 54.46,
+                    "HALClusteringS2S": 22.48,
+                    "MLSUMClusteringP2P (rus-Cyrl)": 43.47,
+                    "MLSUMClusteringP2P": 43.48,
+                    "MLSUMClusteringS2S (rus-Cyrl)": 40.87,
+                    "MLSUMClusteringS2S": 38.53,
+                    "MasakhaNEWSClusteringP2P (amh-Ethi)": 58.05,
+                    "MasakhaNEWSClusteringP2P (eng)": 43.8,
+                    "MasakhaNEWSClusteringP2P (fra-Latn)": 58.28,
+                    "MasakhaNEWSClusteringP2P (hau-Latn)": 44.78,
+                    "MasakhaNEWSClusteringP2P (ibo-Latn)": 44.97,
+                    "MasakhaNEWSClusteringP2P (lin-Latn)": 48.08,
+                    "MasakhaNEWSClusteringP2P (lug-Latn)": 50.15,
+                    "MasakhaNEWSClusteringP2P (orm-Ethi)": 38.02,
+                    "MasakhaNEWSClusteringP2P (pcm-Latn)": 71.03,
+                    "MasakhaNEWSClusteringP2P (run-Latn)": 58.28,
+                    "MasakhaNEWSClusteringP2P (sna-Latn)": 59.25,
+                    "MasakhaNEWSClusteringP2P (som-Latn)": 37.27,
+                    "MasakhaNEWSClusteringP2P (swa-Latn)": 34.54,
+                    "MasakhaNEWSClusteringP2P (tir-Ethi)": 53.44,
+                    "MasakhaNEWSClusteringP2P (xho-Latn)": 40.32,
+                    "MasakhaNEWSClusteringP2P (yor-Latn)": 37.97,
+                    "MasakhaNEWSClusteringP2P (fra)": 47.91,
+                    "MasakhaNEWSClusteringS2S (amh-Ethi)": 49.38,
+                    "MasakhaNEWSClusteringS2S (eng)": 45.76,
+                    "MasakhaNEWSClusteringS2S (fra-Latn)": 55.43,
+                    "MasakhaNEWSClusteringS2S (hau-Latn)": 16.11,
+                    "MasakhaNEWSClusteringS2S (ibo-Latn)": 24.38,
+                    "MasakhaNEWSClusteringS2S (lin-Latn)": 44.8,
+                    "MasakhaNEWSClusteringS2S (lug-Latn)": 45.67,
+                    "MasakhaNEWSClusteringS2S (orm-Ethi)": 26.41,
+                    "MasakhaNEWSClusteringS2S (pcm-Latn)": 83.26,
+                    "MasakhaNEWSClusteringS2S (run-Latn)": 48.77,
+                    "MasakhaNEWSClusteringS2S (sna-Latn)": 43.9,
+                    "MasakhaNEWSClusteringS2S (som-Latn)": 25.43,
+                    "MasakhaNEWSClusteringS2S (swa-Latn)": 9.87,
+                    "MasakhaNEWSClusteringS2S (tir-Ethi)": 51.66,
+                    "MasakhaNEWSClusteringS2S (xho-Latn)": 29.65,
+                    "MasakhaNEWSClusteringS2S (yor-Latn)": 30.12,
+                    "MasakhaNEWSClusteringS2S (fra)": 51.16,
+                    "MedrxivClusteringP2P": 30.6,
+                    "MedrxivClusteringS2S": 28.73,
+                    "RedditClustering": 43.15,
+                    "RedditClusteringP2P": 61.69,
+                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 51.56,
+                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 44.78,
+                    "StackExchangeClustering": 55.31,
+                    "StackExchangeClusteringP2P": 33.51,
+                    "ThuNewsClusteringP2P": 40.98,
+                    "ThuNewsClusteringS2S": 52.36,
+                    "TwentyNewsgroupsClustering": 35.55
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "LLM2Vec-Meta-Llama-3-supervised",
-                    "SprintDuplicateQuestions": 95.09,
-                    "TwitterSemEval2015": 81.73,
-                    "TwitterURLCorpus": 86.56
+                    "Model": "multilingual-e5-base",
+                    "CDSC-E (pol-Latn)": 72.7,
+                    "CDSC-E": 72.67,
+                    "Cmnli": 74.51,
+                    "Ocnli": 59.63,
+                    "OpusparcusPC (deu-Latn)": 95.83,
+                    "OpusparcusPC (en)": 98.71,
+                    "OpusparcusPC (fin-Latn)": 90.3,
+                    "OpusparcusPC (fra-Latn)": 92.12,
+                    "OpusparcusPC (rus-Cyrl)": 86.82,
+                    "OpusparcusPC (swe-Latn)": 93.05,
+                    "OpusparcusPC (fr)": 92.72,
+                    "PPC": 88.01,
+                    "PSC (pol-Latn)": 99.14,
+                    "PSC": 99.14,
+                    "PawsXPairClassification (deu-Latn)": 54.11,
+                    "PawsXPairClassification (en)": 55.79,
+                    "PawsXPairClassification (spa-Latn)": 54.13,
+                    "PawsXPairClassification (fra-Latn)": 56.01,
+                    "PawsXPairClassification (jpn-Hira)": 49.02,
+                    "PawsXPairClassification (kor-Hang)": 51.01,
+                    "PawsXPairClassification (cmn-Hans)": 55.13,
+                    "PawsXPairClassification (fr)": 56.93,
+                    "SICK-E-PL (pol-Latn)": 68.76,
+                    "SICK-E-PL": 68.77,
+                    "SprintDuplicateQuestions": 93.02,
+                    "TERRa (rus-Cyrl)": 54.96,
+                    "TwitterSemEval2015": 72.21,
+                    "TwitterURLCorpus": 85.48
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "LLM2Vec-Meta-Llama-3-supervised",
-                    "AskUbuntuDupQuestions": 65.19,
-                    "MindSmallReranking": 32.67,
-                    "SciDocsRR": 86.05,
-                    "StackOverflowDupQuestions": 54.82
+                    "Model": "multilingual-e5-base",
+                    "AlloprofReranking (fra-Latn)": 65.9,
+                    "AlloprofReranking": 58.1,
+                    "AskUbuntuDupQuestions": 59.28,
+                    "CMedQAv1": 65.21,
+                    "CMedQAv2": 66.06,
+                    "MIRACLReranking (rus-Cyrl)": 60.47,
+                    "MMarcoReranking (cmn-Hans)": 30.52,
+                    "MMarcoReranking": 21.76,
+                    "MindSmallReranking": 29.28,
+                    "RuBQReranking (rus-Cyrl)": 72.01,
+                    "SciDocsRR": 81.81,
+                    "StackOverflowDupQuestions": 49.75,
+                    "SyntecReranking (fra-Latn)": 85.31,
+                    "SyntecReranking": 85.43,
+                    "T2Reranking (cmn-Hans)": 64.86,
+                    "T2Reranking": 64.39
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "LLM2Vec-Meta-Llama-3-supervised",
-                    "ArguAna": 62.78,
-                    "CQADupstackRetrieval": 48.25,
-                    "ClimateFEVER": 34.27,
-                    "DBPedia": 48.34,
-                    "FEVER": 90.2,
-                    "FiQA2018": 55.33,
-                    "HotpotQA": 71.76,
-                    "MSMARCO": 43.24,
-                    "NFCorpus": 41.83,
-                    "NQ": 64.21,
-                    "QuoraRetrieval": 87.16,
-                    "SCIDOCS": 22.96,
-                    "SciFact": 78.22,
-                    "TRECCOVID": 80.34,
-                    "Touche2020": 20.5
+                    "Model": "multilingual-e5-base",
+                    "AILACasedocs": 26.05,
+                    "AILAStatutes": 20.37,
+                    "ARCChallenge": 9.61,
+                    "AlloprofRetrieval (fra-Latn)": 34.45,
+                    "AlloprofRetrieval": 36.21,
+                    "AlphaNLI": 16.44,
+                    "ArguAna": 44.21,
+                    "ArguAna-PL (pol-Latn)": 42.86,
+                    "ArguAna-PL": 42.81,
+                    "BSARDRetrieval (fra-Latn)": 18.83,
+                    "BSARDRetrieval": 0.0,
+                    "CmedqaRetrieval (cmn-Hans)": 27.2,
+                    "CmedqaRetrieval": 27.2,
+                    "CovidRetrieval (cmn-Hans)": 73.48,
+                    "CovidRetrieval": 73.45,
+                    "DBPedia-PL": 30.23,
+                    "DuRetrieval (cmn-Hans)": 81.66,
+                    "DuRetrieval": 81.64,
+                    "EcomRetrieval (cmn-Hans)": 54.01,
+                    "EcomRetrieval": 54.17,
+                    "FiQA-PL (pol-Latn)": 25.59,
+                    "FiQA-PL": 25.52,
+                    "FiQA2018": 38.15,
+                    "GerDaLIRSmall (deu-Latn)": 15.3,
+                    "HellaSwag": 24.79,
+                    "HotpotQA-PL": 63.52,
+                    "LEMBNarrativeQARetrieval": 23.6,
+                    "LEMBNeedleRetrieval": 32.0,
+                    "LEMBPasskeyRetrieval": 38.25,
+                    "LEMBQMSumRetrieval": 25.16,
+                    "LEMBSummScreenFDRetrieval": 68.21,
+                    "LEMBWikimQARetrieval": 56.04,
+                    "LeCaRDv2 (zho-Hans)": 59.0,
+                    "LegalBenchConsumerContractsQA": 69.02,
+                    "LegalBenchCorporateLobbying": 88.97,
+                    "LegalQuAD (deu-Latn)": 47.85,
+                    "LegalSummarization": 61.69,
+                    "MIRACLRetrieval (rus-Cyrl)": 61.6,
+                    "MMarcoRetrieval (cmn-Hans)": 76.01,
+                    "MMarcoRetrieval": 76.04,
+                    "MSMARCO-PL": 29.52,
+                    "MedicalRetrieval (cmn-Hans)": 48.33,
+                    "MedicalRetrieval": 48.35,
+                    "MintakaRetrieval (ara-Arab)": 23.06,
+                    "MintakaRetrieval (deu-Latn)": 29.8,
+                    "MintakaRetrieval (spa-Latn)": 29.88,
+                    "MintakaRetrieval (fra-Latn)": 30.96,
+                    "MintakaRetrieval (hin-Deva)": 22.68,
+                    "MintakaRetrieval (ita-Latn)": 29.77,
+                    "MintakaRetrieval (jpn-Hira)": 22.98,
+                    "MintakaRetrieval (por-Latn)": 30.62,
+                    "MintakaRetrieval (fr)": 23.46,
+                    "NFCorpus": 32.49,
+                    "NFCorpus-PL (pol-Latn)": 25.99,
+                    "NFCorpus-PL": 25.98,
+                    "NQ-PL": 44.8,
+                    "PIQA": 25.09,
+                    "Quail": 3.52,
+                    "Quora-PL": 81.22,
+                    "RARbCode": 52.16,
+                    "RARbMath": 65.35,
+                    "RiaNewsRetrieval (rus-Cyrl)": 70.24,
+                    "RuBQRetrieval (rus-Cyrl)": 69.58,
+                    "SCIDOCS": 17.17,
+                    "SCIDOCS-PL (pol-Latn)": 12.36,
+                    "SCIDOCS-PL": 12.35,
+                    "SIQA": 3.72,
+                    "SciFact": 69.39,
+                    "SciFact-PL (pol-Latn)": 62.26,
+                    "SciFact-PL": 62.11,
+                    "SpartQA": 7.91,
+                    "SyntecRetrieval (fra-Latn)": 82.86,
+                    "SyntecRetrieval": 80.49,
+                    "T2Retrieval (cmn-Hans)": 70.77,
+                    "T2Retrieval": 70.86,
+                    "TRECCOVID": 69.5,
+                    "TRECCOVID-PL (pol-Latn)": 65.94,
+                    "TRECCOVID-PL": 66.06,
+                    "TempReasonL1": 0.72,
+                    "TempReasonL2Fact": 38.76,
+                    "TempReasonL2Pure": 1.63,
+                    "TempReasonL3Fact": 35.85,
+                    "TempReasonL3Pure": 7.11,
+                    "Touche2020": 21.5,
+                    "VideoRetrieval (cmn-Hans)": 61.26,
+                    "VideoRetrieval": 61.3,
+                    "WinoGrande": 56.18,
+                    "XPQARetrieval (ara-Arab_ara-Arab)": 39.97,
+                    "XPQARetrieval (eng-Latn_ara-Arab)": 17.23,
+                    "XPQARetrieval (ara-Arab_eng-Latn)": 34.35,
+                    "XPQARetrieval (deu-Latn_deu-Latn)": 72.11,
+                    "XPQARetrieval (eng-Latn_deu-Latn)": 28.91,
+                    "XPQARetrieval (deu-Latn_eng-Latn)": 61.46,
+                    "XPQARetrieval (spa-Latn_spa-Latn)": 58.35,
+                    "XPQARetrieval (eng-Latn_spa-Latn)": 25.27,
+                    "XPQARetrieval (spa-Latn_eng-Latn)": 51.07,
+                    "XPQARetrieval (fra-Latn_fra-Latn)": 59.56,
+                    "XPQARetrieval (eng-Latn_fra-Latn)": 23.69,
+                    "XPQARetrieval (fra-Latn_eng-Latn)": 53.9,
+                    "XPQARetrieval (hin-Deva_hin-Deva)": 70.56,
+                    "XPQARetrieval (eng-Latn_hin-Deva)": 27.57,
+                    "XPQARetrieval (hin-Deva_eng-Latn)": 63.68,
+                    "XPQARetrieval (ita-Latn_ita-Latn)": 70.38,
+                    "XPQARetrieval (eng-Latn_ita-Latn)": 26.06,
+                    "XPQARetrieval (ita-Latn_eng-Latn)": 56.2,
+                    "XPQARetrieval (jpn-Hira_jpn-Hira)": 71.97,
+                    "XPQARetrieval (eng-Latn_jpn-Hira)": 17.63,
+                    "XPQARetrieval (jpn-Hira_eng-Latn)": 61.03,
+                    "XPQARetrieval (kor-Hang_kor-Hang)": 36.12,
+                    "XPQARetrieval (eng-Latn_kor-Hang)": 20.27,
+                    "XPQARetrieval (kor-Hang_eng-Latn)": 29.26,
+                    "XPQARetrieval (pol-Latn_pol-Latn)": 48.1,
+                    "XPQARetrieval (eng-Latn_pol-Latn)": 19.48,
+                    "XPQARetrieval (pol-Latn_eng-Latn)": 40.18,
+                    "XPQARetrieval (por-Latn_por-Latn)": 44.76,
+                    "XPQARetrieval (eng-Latn_por-Latn)": 17.66,
+                    "XPQARetrieval (por-Latn_eng-Latn)": 40.52,
+                    "XPQARetrieval (tam-Taml_tam-Taml)": 35.25,
+                    "XPQARetrieval (eng-Latn_tam-Taml)": 12.64,
+                    "XPQARetrieval (tam-Taml_eng-Latn)": 26.73,
+                    "XPQARetrieval (cmn-Hans_cmn-Hans)": 67.06,
+                    "XPQARetrieval (eng-Latn_cmn-Hans)": 12.72,
+                    "XPQARetrieval (cmn-Hans_eng-Latn)": 53.53,
+                    "XPQARetrieval (fr)": 65.81
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "multilingual-e5-base",
+                    "AFQMC (cmn-Hans)": 29.66,
+                    "AFQMC": 29.67,
+                    "ATEC (cmn-Hans)": 37.01,
+                    "ATEC": 37.01,
+                    "BIOSSES": 85.05,
+                    "BQ (cmn-Hans)": 45.45,
+                    "BQ": 45.45,
+                    "CDSC-R (pol-Latn)": 90.09,
+                    "CDSC-R": 90.08,
+                    "LCQMC (cmn-Hans)": 74.15,
+                    "LCQMC": 74.15,
+                    "PAWSX (cmn-Hans)": 12.13,
+                    "PAWSX": 12.14,
+                    "QBQTC": 28.81,
+                    "RUParaPhraserSTS (rus-Cyrl)": 70.17,
+                    "RuSTSBenchmarkSTS (rus-Cyrl)": 79.64,
+                    "SICK-R": 78.51,
+                    "SICK-R-PL (pol-Latn)": 71.23,
+                    "SICK-R-PL": 71.23,
+                    "SICKFr (fra-Latn)": 75.76,
+                    "SICKFr": 76.23,
+                    "STS12": 76.7,
+                    "STS13": 78.02,
+                    "STS14": 76.6,
+                    "STS15": 88.16,
+                    "STS16": 84.28,
+                    "STS17 (fra-Latn_eng-Latn)": 80.18,
+                    "STS17 (ita-Latn_eng-Latn)": 80.16,
+                    "STS17 (eng-Latn_ara-Arab)": 71.27,
+                    "STS17 (kor-Hang)": 79.95,
+                    "STS17 (eng-Latn_tur-Latn)": 63.3,
+                    "STS17 (spa-Latn_eng-Latn)": 76.56,
+                    "STS17 (spa-Latn)": 86.74,
+                    "STS17 (en-en)": 87.84,
+                    "STS17 (ara-Arab)": 74.48,
+                    "STS17 (nld-Latn_eng-Latn)": 79.29,
+                    "STS17 (eng-Latn_deu-Latn)": 82.08,
+                    "STS22 (fra-Latn)": 75.04,
+                    "STS22 (cmn-Hans_eng-Latn)": 69.8,
+                    "STS22 (ara-Arab)": 57.82,
+                    "STS22 (spa-Latn_ita-Latn)": 66.43,
+                    "STS22 (ita-Latn)": 77.76,
+                    "STS22 (fra-Latn_pol-Latn)": 73.25,
+                    "STS22 (deu-Latn_eng-Latn)": 54.89,
+                    "STS22 (pol-Latn_eng-Latn)": 70.37,
+                    "STS22 (cmn-Hans)": 65.63,
+                    "STS22 (deu-Latn)": 55.95,
+                    "STS22 (pol-Latn)": 34.08,
+                    "STS22 (deu-Latn_fra-Latn)": 59.68,
+                    "STS22 (spa-Latn)": 66.67,
+                    "STS22 (rus-Cyrl)": 60.67,
+                    "STS22 (spa-Latn_eng-Latn)": 74.0,
+                    "STS22 (en)": 62.26,
+                    "STS22 (tur-Latn)": 63.71,
+                    "STS22 (deu-Latn_pol-Latn)": 39.35,
+                    "STS22 (zh)": 65.64,
+                    "STS22 (pl)": 34.07,
+                    "STSB (cmn-Hans)": 79.04,
+                    "STSB": 79.05,
+                    "STSBenchmark": 85.64,
+                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 79.33,
+                    "STSBenchmarkMultilingualSTS (pol-Latn)": 74.93,
+                    "STSBenchmarkMultilingualSTS (spa-Latn)": 81.75,
+                    "STSBenchmarkMultilingualSTS (en)": 85.64,
+                    "STSBenchmarkMultilingualSTS (cmn-Hans)": 79.87,
+                    "STSBenchmarkMultilingualSTS (fra-Latn)": 80.85,
+                    "STSBenchmarkMultilingualSTS (deu-Latn)": 79.68,
+                    "STSBenchmarkMultilingualSTS (nld-Latn)": 75.96,
+                    "STSBenchmarkMultilingualSTS (por-Latn)": 67.16,
+                    "STSBenchmarkMultilingualSTS (ita-Latn)": 78.09,
+                    "STSBenchmarkMultilingualSTS (fr)": 80.62
                 }
             ]
         },
-        "STS": {
+        "Summarization": {
             "spearman": [
                 {
-                    "Model": "LLM2Vec-Meta-Llama-3-supervised",
-                    "BIOSSES": 84.92,
-                    "SICK-R": 83.94,
-                    "STS12": 79.27,
-                    "STS13": 84.83,
-                    "STS14": 82.94,
-                    "STS15": 88.09,
-                    "STS16": 86.54,
-                    "STS17 (en-en)": 89.58,
-                    "STS22 (en)": 67.67,
-                    "STSBenchmark": 88.05
+                    "Model": "multilingual-e5-base",
+                    "SummEval": 30.23,
+                    "SummEvalFr (fra-Latn)": 32.96,
+                    "SummEvalFr": 30.76
                 }
             ]
         },
-        "Summarization": {
-            "spearman": [
+        "MultilabelClassification": {
+            "accuracy": [
                 {
-                    "Model": "LLM2Vec-Meta-Llama-3-supervised",
-                    "SummEval": 30.94
+                    "Model": "multilingual-e5-base",
+                    "CEDRClassification (rus-Cyrl)": 42.32,
+                    "SensitiveTopicsClassification (rus-Cyrl)": 24.98
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "LLM2Vec-Meta-Llama-3-supervised"
+                    "Model": "multilingual-e5-base"
                 }
             ]
         }
     },
-    "LLM2Vec-Llama-2-unsupervised": {
+    "xlm-roberta-base": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "LLM2Vec-Llama-2-unsupervised"
+                    "Model": "xlm-roberta-base",
+                    "BornholmBitextMining": 4.42
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "LLM2Vec-Llama-2-unsupervised",
-                    "AmazonCounterfactualClassification (en)": 76.91,
-                    "AmazonPolarityClassification": 79.05,
-                    "AmazonReviewsClassification (en)": 40.08,
-                    "Banking77Classification": 84.65,
-                    "EmotionClassification": 46.58,
-                    "ImdbClassification": 75.68,
-                    "MTOPDomainClassification (en)": 94.33,
-                    "MTOPIntentClassification (en)": 79.54,
-                    "MassiveIntentClassification (en)": 73.84,
-                    "MassiveScenarioClassification (en)": 79.17,
-                    "ToxicConversationsClassification": 71.81,
-                    "TweetSentimentExtractionClassification": 57.17
+                    "Model": "xlm-roberta-base",
+                    "AmazonReviewsClassification (fr)": 26.75,
+                    "AngryTweetsClassification": 52.41,
+                    "DKHateClassification": 56.78,
+                    "DanishPoliticalCommentsClassification": 34.03,
+                    "LccSentimentClassification": 52.27,
+                    "MTOPDomainClassification (fr)": 43.83,
+                    "MTOPIntentClassification (fr)": 19.38,
+                    "MasakhaNEWSClassification (fra)": 60.5,
+                    "MassiveIntentClassification (da)": 41.06,
+                    "MassiveIntentClassification (nb)": 40.46,
+                    "MassiveIntentClassification (sv)": 45.12,
+                    "MassiveIntentClassification (fr)": 13.58,
+                    "MassiveScenarioClassification (da)": 43.91,
+                    "MassiveScenarioClassification (nb)": 44.83,
+                    "MassiveScenarioClassification (sv)": 47.35,
+                    "MassiveScenarioClassification (fr)": 23.21,
+                    "NoRecClassification": 46.28,
+                    "NordicLangClassification": 79.39,
+                    "NorwegianParliament": 56.75,
+                    "ScalaDaClassification": 57.3,
+                    "ScalaNbClassification": 58.33
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "LLM2Vec-Llama-2-unsupervised",
-                    "ArxivClusteringP2P": 47.81,
-                    "ArxivClusteringS2S": 40.53,
-                    "BiorxivClusteringP2P": 38.12,
-                    "BiorxivClusteringS2S": 31.25,
-                    "MedrxivClusteringP2P": 30.94,
-                    "MedrxivClusteringS2S": 28.04,
-                    "RedditClustering": 42.84,
-                    "RedditClusteringP2P": 60.1,
-                    "StackExchangeClustering": 65.12,
-                    "StackExchangeClusteringP2P": 33.61,
-                    "TwentyNewsgroupsClustering": 30.76
+                    "Model": "xlm-roberta-base",
+                    "AlloProfClusteringP2P": 52.24,
+                    "AlloProfClusteringS2S": 20.37,
+                    "HALClusteringS2S": 8.68,
+                    "MLSUMClusteringP2P": 40.44,
+                    "MLSUMClusteringS2S": 24.14,
+                    "MasakhaNEWSClusteringP2P (fra)": 29.29,
+                    "MasakhaNEWSClusteringS2S (fra)": 23.76
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "LLM2Vec-Llama-2-unsupervised",
-                    "SprintDuplicateQuestions": 87.57,
-                    "TwitterSemEval2015": 65.14,
-                    "TwitterURLCorpus": 80.94
+                    "Model": "xlm-roberta-base",
+                    "OpusparcusPC (fr)": 85.45,
+                    "PawsXPairClassification (fr)": 51.35
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "LLM2Vec-Llama-2-unsupervised",
-                    "AskUbuntuDupQuestions": 55.56,
-                    "MindSmallReranking": 30.86,
-                    "SciDocsRR": 77.62,
-                    "StackOverflowDupQuestions": 47.77
+                    "Model": "xlm-roberta-base",
+                    "AlloprofReranking": 25.58,
+                    "SyntecReranking": 43.75
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "LLM2Vec-Llama-2-unsupervised",
-                    "ArguAna": 47.09,
-                    "CQADupstackRetrieval": 30.78,
-                    "ClimateFEVER": 20.67,
-                    "DBPedia": 25.81,
-                    "FEVER": 43.48,
-                    "FiQA2018": 24.62,
-                    "HotpotQA": 48.46,
-                    "MSMARCO": 18.81,
-                    "NFCorpus": 26.81,
-                    "NQ": 33.21,
-                    "QuoraRetrieval": 86.15,
-                    "SCIDOCS": 10.0,
-                    "SciFact": 64.48,
-                    "TRECCOVID": 60.67,
-                    "Touche2020": 10.18
+                    "Model": "xlm-roberta-base",
+                    "AlloprofRetrieval": 0.16,
+                    "BSARDRetrieval": 0.0,
+                    "MintakaRetrieval (fr)": 0.88,
+                    "SyntecRetrieval": 3.33,
+                    "XPQARetrieval (fr)": 11.65
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "LLM2Vec-Llama-2-unsupervised",
-                    "BIOSSES": 82.41,
-                    "SICK-R": 71.77,
-                    "STS12": 65.39,
-                    "STS13": 79.26,
-                    "STS14": 72.98,
-                    "STS15": 82.72,
-                    "STS16": 81.02,
-                    "STS17 (en-en)": 86.7,
-                    "STS22 (en)": 63.47,
-                    "STSBenchmark": 78.32
+                    "Model": "xlm-roberta-base",
+                    "SICKFr": 48.62,
+                    "STS22 (fr)": 56.72,
+                    "STSBenchmarkMultilingualSTS (fr)": 46.23
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "LLM2Vec-Llama-2-unsupervised",
-                    "SummEval": 31.38
+                    "Model": "xlm-roberta-base",
+                    "SummEvalFr": 29.14
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "xlm-roberta-base"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "LLM2Vec-Llama-2-unsupervised"
+                    "Model": "xlm-roberta-base"
                 }
             ]
         }
     },
-    "luotuo-bert-medium": {
+    "sentence-camembert-base": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "luotuo-bert-medium"
+                    "Model": "sentence-camembert-base"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "luotuo-bert-medium",
-                    "AmazonReviewsClassification (zh)": 34.46,
-                    "IFlyTek": 41.75,
-                    "JDReview": 79.68,
-                    "MassiveIntentClassification (zh-CN)": 57.47,
-                    "MassiveScenarioClassification (zh-CN)": 65.32,
-                    "MultilingualSentiment": 61.21,
-                    "OnlineShopping": 84.3,
-                    "TNews": 45.22,
-                    "Waimai": 79.57
+                    "Model": "sentence-camembert-base",
+                    "AmazonReviewsClassification (fr)": 36.03,
+                    "MTOPDomainClassification (fr)": 77.1,
+                    "MTOPIntentClassification (fr)": 43.44,
+                    "MasakhaNEWSClassification (fra)": 70.36,
+                    "MassiveIntentClassification (fr)": 51.59,
+                    "MassiveScenarioClassification (fr)": 61.28
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "luotuo-bert-medium",
-                    "CLSClusteringP2P": 37.01,
-                    "CLSClusteringS2S": 33.46,
-                    "ThuNewsClusteringP2P": 58.83,
-                    "ThuNewsClusteringS2S": 48.26
+                    "Model": "sentence-camembert-base",
+                    "AlloProfClusteringP2P": 59.09,
+                    "AlloProfClusteringS2S": 38.92,
+                    "HALClusteringS2S": 20.22,
+                    "MLSUMClusteringP2P": 35.98,
+                    "MLSUMClusteringS2S": 27.05,
+                    "MasakhaNEWSClusteringP2P (fra)": 36.03,
+                    "MasakhaNEWSClusteringS2S (fra)": 30.77
                 }
             ]
         },
         "PairClassification": {
-            "ap": [
-                {
-                    "Model": "luotuo-bert-medium",
-                    "Cmnli": 72.55,
-                    "Ocnli": 60.7
+            "ap": [
+                {
+                    "Model": "sentence-camembert-base",
+                    "OpusparcusPC (fr)": 92.05,
+                    "PawsXPairClassification (fr)": 57.44
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "luotuo-bert-medium",
-                    "CMedQAv1": 57.82,
-                    "CMedQAv2": 58.88,
-                    "MMarcoReranking": 14.55,
-                    "T2Reranking": 65.76
+                    "Model": "sentence-camembert-base",
+                    "AlloprofReranking": 48.68,
+                    "SyntecReranking": 79.75
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "luotuo-bert-medium",
-                    "CmedqaRetrieval": 18.04,
-                    "CovidRetrieval": 55.48,
-                    "DuRetrieval": 59.36,
-                    "EcomRetrieval": 40.48,
-                    "MMarcoRetrieval": 55.31,
-                    "MedicalRetrieval": 29.8,
-                    "T2Retrieval": 58.67,
-                    "VideoRetrieval": 38.04
+                    "Model": "sentence-camembert-base",
+                    "AlloprofRetrieval": 21.94,
+                    "BSARDRetrieval": 0.0,
+                    "MintakaRetrieval (fr)": 13.36,
+                    "SyntecRetrieval": 68.62,
+                    "XPQARetrieval (fr)": 57.92
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "luotuo-bert-medium",
-                    "AFQMC": 22.24,
-                    "ATEC": 30.84,
-                    "BQ": 43.33,
-                    "LCQMC": 66.74,
-                    "PAWSX": 12.31,
-                    "QBQTC": 27.2,
-                    "STS22 (zh)": 66.4,
-                    "STSB": 73.22
+                    "Model": "sentence-camembert-base",
+                    "SICKFr": 74.18,
+                    "STS22 (fr)": 77.54,
+                    "STSBenchmarkMultilingualSTS (fr)": 81.64
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "luotuo-bert-medium"
+                    "Model": "sentence-camembert-base",
+                    "SummEvalFr": 28.77
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "sentence-camembert-base"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "luotuo-bert-medium"
+                    "Model": "sentence-camembert-base"
                 }
             ]
         }
     },
-    "sup-simcse-bert-base-uncased": {
+    "sbert_large_mt_nlu_ru": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "sup-simcse-bert-base-uncased"
+                    "Model": "sbert_large_mt_nlu_ru"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "sup-simcse-bert-base-uncased",
-                    "AmazonCounterfactualClassification (en)": 75.75,
-                    "AmazonPolarityClassification": 82.47,
-                    "AmazonReviewsClassification (en)": 39.6,
-                    "Banking77Classification": 75.76,
-                    "EmotionClassification": 44.81,
-                    "ImdbClassification": 73.53,
-                    "MTOPDomainClassification (en)": 84.29,
-                    "MTOPIntentClassification (en)": 63.14,
-                    "MassiveIntentClassification (en)": 65.95,
-                    "MassiveScenarioClassification (en)": 70.78,
-                    "ToxicConversationsClassification": 72.04,
-                    "TweetSentimentExtractionClassification": 59.73
+                    "Model": "sbert_large_mt_nlu_ru",
+                    "GeoreviewClassification (rus-Cyrl)": 39.67,
+                    "HeadlineClassification (rus-Cyrl)": 77.19,
+                    "InappropriatenessClassification (rus-Cyrl)": 64.64,
+                    "KinopoiskClassification (rus-Cyrl)": 50.33,
+                    "MassiveIntentClassification (rus-Cyrl)": 61.42,
+                    "MassiveScenarioClassification (rus-Cyrl)": 68.13,
+                    "RuReviewsClassification (rus-Cyrl)": 58.29,
+                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 54.19,
+                    "RuSciBenchOECDClassification (rus-Cyrl)": 43.8
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "sup-simcse-bert-base-uncased",
-                    "ArxivClusteringP2P": 35.18,
-                    "ArxivClusteringS2S": 27.54,
-                    "BiorxivClusteringP2P": 30.15,
-                    "BiorxivClusteringS2S": 24.67,
-                    "MedrxivClusteringP2P": 26.25,
-                    "MedrxivClusteringS2S": 24.12,
-                    "RedditClustering": 40.23,
-                    "RedditClusteringP2P": 47.74,
-                    "StackExchangeClustering": 47.55,
-                    "StackExchangeClusteringP2P": 29.45,
-                    "TwentyNewsgroupsClustering": 34.86
+                    "Model": "sbert_large_mt_nlu_ru",
+                    "GeoreviewClusteringP2P (rus-Cyrl)": 58.45,
+                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 52.2,
+                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 47.29
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "sup-simcse-bert-base-uncased",
-                    "SprintDuplicateQuestions": 69.39,
-                    "TwitterSemEval2015": 67.75,
-                    "TwitterURLCorpus": 83.89
+                    "Model": "sbert_large_mt_nlu_ru",
+                    "TERRa (rus-Cyrl)": 51.97
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "sup-simcse-bert-base-uncased",
-                    "AskUbuntuDupQuestions": 51.8,
-                    "MindSmallReranking": 29.3,
-                    "SciDocsRR": 70.14,
-                    "StackOverflowDupQuestions": 38.9
+                    "Model": "sbert_large_mt_nlu_ru",
+                    "MIRACLReranking (rus-Cyrl)": 24.99,
+                    "RuBQReranking (rus-Cyrl)": 56.13
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "sup-simcse-bert-base-uncased",
-                    "ArguAna": 38.33,
-                    "CQADupstackRetrieval": 14.5,
-                    "ClimateFEVER": 11.98,
-                    "DBPedia": 19.73,
-                    "FEVER": 20.41,
-                    "FiQA2018": 10.41,
-                    "HotpotQA": 22.9,
-                    "MSMARCO": 11.0,
-                    "NFCorpus": 12.42,
-                    "NQ": 16.08,
-                    "QuoraRetrieval": 79.62,
-                    "SCIDOCS": 7.53,
-                    "SciFact": 29.59,
-                    "TRECCOVID": 22.93,
-                    "Touche2020": 9.9
+                    "Model": "sbert_large_mt_nlu_ru",
+                    "MIRACLRetrieval (rus-Cyrl)": 6.2,
+                    "RiaNewsRetrieval (rus-Cyrl)": 21.4,
+                    "RuBQRetrieval (rus-Cyrl)": 29.8
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "sup-simcse-bert-base-uncased",
-                    "BIOSSES": 68.38,
-                    "SICK-R": 80.77,
-                    "STS12": 75.3,
-                    "STS13": 84.67,
-                    "STS14": 80.19,
-                    "STS15": 85.4,
-                    "STS16": 80.82,
-                    "STS17 (en-en)": 89.44,
-                    "STS22 (en)": 61.96,
-                    "STSBenchmark": 84.25
+                    "Model": "sbert_large_mt_nlu_ru",
+                    "RUParaPhraserSTS (rus-Cyrl)": 65.17,
+                    "RuSTSBenchmarkSTS (rus-Cyrl)": 71.22,
+                    "STS22 (rus-Cyrl)": 56.82
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "sup-simcse-bert-base-uncased",
-                    "SummEval": 31.17
+                    "Model": "sbert_large_mt_nlu_ru"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "sbert_large_mt_nlu_ru",
+                    "CEDRClassification (rus-Cyrl)": 36.81,
+                    "SensitiveTopicsClassification (rus-Cyrl)": 28.47
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "sup-simcse-bert-base-uncased"
+                    "Model": "sbert_large_mt_nlu_ru"
                 }
             ]
         }
     },
-    "e5-base": {
+    "text-embedding-3-large": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "e5-base",
-                    "BornholmBitextMining": 40.09
+                    "Model": "text-embedding-3-large"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "e5-base",
-                    "AngryTweetsClassification": 45.06,
-                    "DKHateClassification": 58.51,
-                    "DanishPoliticalCommentsClassification": 28.43,
-                    "LccSentimentClassification": 37.47,
-                    "MassiveIntentClassification (da)": 44.25,
-                    "MassiveIntentClassification (nb)": 41.57,
-                    "MassiveIntentClassification (sv)": 41.34,
-                    "MassiveScenarioClassification (da)": 52.99,
-                    "MassiveScenarioClassification (nb)": 50.33,
-                    "MassiveScenarioClassification (sv)": 50.0,
-                    "NoRecClassification": 42.0,
-                    "NordicLangClassification": 59.34,
-                    "NorwegianParliament": 57.42,
-                    "ScalaDaClassification": 50.08,
-                    "ScalaNbClassification": 50.18
+                    "Model": "text-embedding-3-large",
+                    "AmazonCounterfactualClassification (en)": 78.93,
+                    "AmazonPolarityClassification": 92.85,
+                    "AmazonReviewsClassification (en)": 48.7,
+                    "Banking77Classification": 85.69,
+                    "EmotionClassification": 51.58,
+                    "ImdbClassification": 87.67,
+                    "MTOPDomainClassification (en)": 95.36,
+                    "MTOPIntentClassification (en)": 75.07,
+                    "MassiveIntentClassification (en)": 74.64,
+                    "MassiveScenarioClassification (en)": 79.79,
+                    "ToxicConversationsClassification": 72.92,
+                    "TweetSentimentExtractionClassification": 62.22
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "e5-base"
+                    "Model": "text-embedding-3-large",
+                    "ArxivClusteringP2P": 49.01,
+                    "ArxivClusteringS2S": 44.45,
+                    "BiorxivClusteringP2P": 38.03,
+                    "BiorxivClusteringS2S": 36.53,
+                    "MedrxivClusteringP2P": 32.7,
+                    "MedrxivClusteringS2S": 31.27,
+                    "RedditClustering": 67.84,
+                    "RedditClusteringP2P": 67.96,
+                    "StackExchangeClustering": 76.26,
+                    "StackExchangeClusteringP2P": 36.88,
+                    "TwentyNewsgroupsClustering": 58.14
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "e5-base"
+                    "Model": "text-embedding-3-large",
+                    "SprintDuplicateQuestions": 92.25,
+                    "TwitterSemEval2015": 77.13,
+                    "TwitterURLCorpus": 87.78
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "e5-base"
+                    "Model": "text-embedding-3-large",
+                    "AskUbuntuDupQuestions": 65.03,
+                    "MindSmallReranking": 29.86,
+                    "SciDocsRR": 86.66,
+                    "StackOverflowDupQuestions": 55.08
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "text-embedding-3-large",
+                    "AILACasedocs": 39.0,
+                    "AILAStatutes": 41.31,
+                    "ARCChallenge": 23.98,
+                    "AlphaNLI": 37.27,
+                    "ArguAna": 58.05,
+                    "BrightRetrieval (theoremqa_questions)": 22.22,
+                    "BrightRetrieval (leetcode)": 23.65,
+                    "BrightRetrieval (earth_science)": 26.27,
+                    "BrightRetrieval (psychology)": 27.52,
+                    "BrightRetrieval (robotics)": 12.93,
+                    "BrightRetrieval (economics)": 19.98,
+                    "BrightRetrieval (stackoverflow)": 12.49,
+                    "BrightRetrieval (biology)": 23.67,
+                    "BrightRetrieval (theoremqa_theorems)": 9.25,
+                    "BrightRetrieval (pony)": 2.45,
+                    "BrightRetrieval (sustainable_living)": 20.32,
+                    "BrightRetrieval (aops)": 8.45,
+                    "CQADupstackRetrieval": 47.54,
+                    "ClimateFEVER": 30.27,
+                    "DBPedia": 44.76,
+                    "FEVER": 87.94,
+                    "FiQA2018": 55.0,
+                    "GerDaLIRSmall": 32.77,
+                    "HellaSwag": 34.12,
+                    "HotpotQA": 71.58,
+                    "LEMBNarrativeQARetrieval": 44.09,
+                    "LEMBNeedleRetrieval": 29.25,
+                    "LEMBPasskeyRetrieval": 63.0,
+                    "LEMBQMSumRetrieval": 32.49,
+                    "LEMBSummScreenFDRetrieval": 84.8,
+                    "LEMBWikimQARetrieval": 54.16,
+                    "LeCaRDv2": 57.2,
+                    "LegalBenchConsumerContractsQA": 79.39,
+                    "LegalBenchCorporateLobbying": 95.09,
+                    "LegalQuAD": 57.47,
+                    "LegalSummarization": 71.55,
+                    "MSMARCO": 40.24,
+                    "NFCorpus": 42.07,
+                    "NQ": 61.27,
+                    "PIQA": 41.96,
+                    "Quail": 10.15,
+                    "QuoraRetrieval": 89.05,
+                    "RARbCode": 89.64,
+                    "RARbMath": 90.08,
+                    "SCIDOCS": 23.11,
+                    "SIQA": 3.44,
+                    "SciFact": 77.77,
+                    "SpartQA": 7.51,
+                    "TRECCOVID": 79.56,
+                    "TempReasonL1": 2.13,
+                    "TempReasonL2Fact": 28.65,
+                    "TempReasonL2Pure": 10.34,
+                    "TempReasonL3Fact": 25.52,
+                    "TempReasonL3Pure": 15.28,
+                    "Touche2020": 23.35,
+                    "WinoGrande": 29.11
                 }
             ]
         },
-        "Retrieval": {
-            "ndcg_at_10": [
+        "STS": {
+            "spearman": [
                 {
-                    "Model": "e5-base",
-                    "LEMBNarrativeQARetrieval": 25.31,
-                    "LEMBNeedleRetrieval": 28.5,
-                    "LEMBPasskeyRetrieval": 33.25,
-                    "LEMBQMSumRetrieval": 23.83,
-                    "LEMBSummScreenFDRetrieval": 74.67,
-                    "LEMBWikimQARetrieval": 55.85
+                    "Model": "text-embedding-3-large",
+                    "BIOSSES": 84.68,
+                    "SICK-R": 79.0,
+                    "STS12": 72.84,
+                    "STS13": 86.1,
+                    "STS14": 81.15,
+                    "STS15": 88.49,
+                    "STS16": 85.08,
+                    "STS17 (en-en)": 90.22,
+                    "STS22 (en)": 66.14,
+                    "STSBenchmark": 83.56
                 }
             ]
         },
-        "STS": {
+        "Summarization": {
             "spearman": [
                 {
-                    "Model": "e5-base"
+                    "Model": "text-embedding-3-large",
+                    "SummEval": 29.92
                 }
             ]
         },
-        "Summarization": {
-            "spearman": [
+        "MultilabelClassification": {
+            "accuracy": [
                 {
-                    "Model": "e5-base"
+                    "Model": "text-embedding-3-large"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "e5-base"
+                    "Model": "text-embedding-3-large",
+                    "Core17InstructionRetrieval": -0.2,
+                    "News21InstructionRetrieval": -2.03,
+                    "Robust04InstructionRetrieval": -5.81
                 }
             ]
         }
     },
-    "udever-bloom-1b1": {
+    "komninos": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "udever-bloom-1b1"
+                    "Model": "komninos",
+                    "BUCC (de-en)": 0.18,
+                    "BUCC (fr-en)": 0.08,
+                    "BUCC (ru-en)": 0.15,
+                    "BUCC (zh-en)": 0.05,
+                    "Tatoeba (afr-eng)": 4.82,
+                    "Tatoeba (amh-eng)": 1.18,
+                    "Tatoeba (ang-eng)": 8.54,
+                    "Tatoeba (ara-eng)": 0.63,
+                    "Tatoeba (arq-eng)": 0.4,
+                    "Tatoeba (arz-eng)": 0.63,
+                    "Tatoeba (ast-eng)": 11.69,
+                    "Tatoeba (awa-eng)": 0.0,
+                    "Tatoeba (aze-eng)": 3.22,
+                    "Tatoeba (bel-eng)": 1.75,
+                    "Tatoeba (ben-eng)": 0.2,
+                    "Tatoeba (ber-eng)": 7.0,
+                    "Tatoeba (bos-eng)": 9.31,
+                    "Tatoeba (bre-eng)": 4.17,
+                    "Tatoeba (bul-eng)": 1.29,
+                    "Tatoeba (cat-eng)": 7.73,
+                    "Tatoeba (cbk-eng)": 5.61,
+                    "Tatoeba (ceb-eng)": 4.88,
+                    "Tatoeba (ces-eng)": 3.55,
+                    "Tatoeba (cha-eng)": 19.29,
+                    "Tatoeba (cmn-eng)": 0.5,
+                    "Tatoeba (cor-eng)": 4.15,
+                    "Tatoeba (csb-eng)": 5.69,
+                    "Tatoeba (cym-eng)": 8.4,
+                    "Tatoeba (dan-eng)": 6.99,
+                    "Tatoeba (deu-eng)": 3.67,
+                    "Tatoeba (dsb-eng)": 5.33,
+                    "Tatoeba (dtp-eng)": 4.25,
+                    "Tatoeba (ell-eng)": 0.63,
+                    "Tatoeba (epo-eng)": 2.45,
+                    "Tatoeba (est-eng)": 2.69,
+                    "Tatoeba (eus-eng)": 4.69,
+                    "Tatoeba (fao-eng)": 7.61,
+                    "Tatoeba (fin-eng)": 3.36,
+                    "Tatoeba (fra-eng)": 7.0,
+                    "Tatoeba (fry-eng)": 12.36,
+                    "Tatoeba (gla-eng)": 3.07,
+                    "Tatoeba (gle-eng)": 4.81,
+                    "Tatoeba (glg-eng)": 8.12,
+                    "Tatoeba (gsw-eng)": 18.87,
+                    "Tatoeba (heb-eng)": 0.68,
+                    "Tatoeba (hin-eng)": 0.1,
+                    "Tatoeba (hrv-eng)": 5.41,
+                    "Tatoeba (hsb-eng)": 6.32,
+                    "Tatoeba (hun-eng)": 3.42,
+                    "Tatoeba (hye-eng)": 0.97,
+                    "Tatoeba (ido-eng)": 7.1,
+                    "Tatoeba (ile-eng)": 13.61,
+                    "Tatoeba (ina-eng)": 8.57,
+                    "Tatoeba (ind-eng)": 7.26,
+                    "Tatoeba (isl-eng)": 4.09,
+                    "Tatoeba (ita-eng)": 5.54,
+                    "Tatoeba (jav-eng)": 11.43,
+                    "Tatoeba (jpn-eng)": 0.2,
+                    "Tatoeba (kab-eng)": 2.71,
+                    "Tatoeba (kat-eng)": 1.11,
+                    "Tatoeba (kaz-eng)": 1.17,
+                    "Tatoeba (khm-eng)": 0.55,
+                    "Tatoeba (kor-eng)": 0.5,
+                    "Tatoeba (kur-eng)": 8.55,
+                    "Tatoeba (kzj-eng)": 4.61,
+                    "Tatoeba (lat-eng)": 4.07,
+                    "Tatoeba (lfn-eng)": 2.83,
+                    "Tatoeba (lit-eng)": 0.95,
+                    "Tatoeba (lvs-eng)": 3.25,
+                    "Tatoeba (mal-eng)": 0.29,
+                    "Tatoeba (mar-eng)": 0.2,
+                    "Tatoeba (max-eng)": 14.53,
+                    "Tatoeba (mhr-eng)": 0.2,
+                    "Tatoeba (mkd-eng)": 0.2,
+                    "Tatoeba (mon-eng)": 1.1,
+                    "Tatoeba (nds-eng)": 10.37,
+                    "Tatoeba (nld-eng)": 9.5,
+                    "Tatoeba (nno-eng)": 4.49,
+                    "Tatoeba (nob-eng)": 4.95,
+                    "Tatoeba (nov-eng)": 14.53,
+                    "Tatoeba (oci-eng)": 5.8,
+                    "Tatoeba (orv-eng)": 0.24,
+                    "Tatoeba (pam-eng)": 6.65,
+                    "Tatoeba (pes-eng)": 0.5,
+                    "Tatoeba (pms-eng)": 8.05,
+                    "Tatoeba (pol-eng)": 5.13,
+                    "Tatoeba (por-eng)": 5.87,
+                    "Tatoeba (ron-eng)": 6.76,
+                    "Tatoeba (rus-eng)": 0.2,
+                    "Tatoeba (slk-eng)": 4.23,
+                    "Tatoeba (slv-eng)": 6.05,
+                    "Tatoeba (spa-eng)": 5.03,
+                    "Tatoeba (sqi-eng)": 4.36,
+                    "Tatoeba (srp-eng)": 1.77,
+                    "Tatoeba (swe-eng)": 6.72,
+                    "Tatoeba (swg-eng)": 8.54,
+                    "Tatoeba (swh-eng)": 11.49,
+                    "Tatoeba (tam-eng)": 1.3,
+                    "Tatoeba (tat-eng)": 0.77,
+                    "Tatoeba (tel-eng)": 0.85,
+                    "Tatoeba (tgl-eng)": 2.61,
+                    "Tatoeba (tha-eng)": 0.69,
+                    "Tatoeba (tuk-eng)": 5.76,
+                    "Tatoeba (tur-eng)": 5.24,
+                    "Tatoeba (tzl-eng)": 15.51,
+                    "Tatoeba (uig-eng)": 0.6,
+                    "Tatoeba (ukr-eng)": 1.23,
+                    "Tatoeba (urd-eng)": 0.4,
+                    "Tatoeba (uzb-eng)": 4.73,
+                    "Tatoeba (vie-eng)": 6.55,
+                    "Tatoeba (war-eng)": 4.12,
+                    "Tatoeba (wuu-eng)": 0.2,
+                    "Tatoeba (xho-eng)": 4.33,
+                    "Tatoeba (yid-eng)": 0.59,
+                    "Tatoeba (yue-eng)": 0.5,
+                    "Tatoeba (zsm-eng)": 7.27
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "udever-bloom-1b1",
-                    "AmazonReviewsClassification (fr)": 35.12,
-                    "MTOPDomainClassification (fr)": 69.24,
-                    "MTOPIntentClassification (fr)": 51.25,
-                    "MasakhaNEWSClassification (fra)": 80.83,
-                    "MassiveIntentClassification (fr)": 43.21,
-                    "MassiveScenarioClassification (fr)": 49.78
+                    "Model": "komninos",
+                    "AmazonCounterfactualClassification (en)": 60.54,
+                    "AmazonPolarityClassification": 59.59,
+                    "AmazonReviewsClassification (en)": 31.01,
+                    "Banking77Classification": 67.05,
+                    "EmotionClassification": 33.18,
+                    "ImdbClassification": 63.98,
+                    "MTOPDomainClassification (en)": 78.57,
+                    "MTOPIntentClassification (en)": 57.07,
+                    "MassiveIntentClassification (en)": 57.21,
+                    "MassiveScenarioClassification (en)": 66.11,
+                    "ToxicConversationsClassification": 67.76,
+                    "TweetSentimentExtractionClassification": 49.68
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "udever-bloom-1b1",
-                    "AlloProfClusteringP2P": 62.22,
-                    "AlloProfClusteringS2S": 27.06,
-                    "HALClusteringS2S": 13.86,
-                    "MLSUMClusteringP2P": 44.11,
-                    "MLSUMClusteringS2S": 30.47,
-                    "MasakhaNEWSClusteringP2P (fra)": 40.2,
-                    "MasakhaNEWSClusteringS2S (fra)": 27.35
+                    "Model": "komninos",
+                    "ArxivClusteringP2P": 34.73,
+                    "ArxivClusteringS2S": 26.01,
+                    "BiorxivClusteringP2P": 29.76,
+                    "BiorxivClusteringS2S": 20.71,
+                    "BlurbsClusteringP2P": 11.37,
+                    "BlurbsClusteringS2S": 8.01,
+                    "MedrxivClusteringP2P": 26.65,
+                    "MedrxivClusteringS2S": 21.5,
+                    "RedditClustering": 28.84,
+                    "RedditClusteringP2P": 7.37,
+                    "StackExchangeClustering": 39.04,
+                    "StackExchangeClusteringP2P": 30.23,
+                    "TenKGnadClusteringP2P": 15.89,
+                    "TenKGnadClusteringS2S": 4.84,
+                    "TwentyNewsgroupsClustering": 27.42
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "udever-bloom-1b1",
-                    "OpusparcusPC (fr)": 85.54,
-                    "PawsXPairClassification (fr)": 61.99
+                    "Model": "komninos",
+                    "SprintDuplicateQuestions": 85.55,
+                    "TwitterSemEval2015": 53.85,
+                    "TwitterURLCorpus": 79.41
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "udever-bloom-1b1",
-                    "AlloprofReranking": 39.13,
-                    "SyntecReranking": 62.58
+                    "Model": "komninos",
+                    "AskUbuntuDupQuestions": 50.88,
+                    "MindSmallReranking": 28.92,
+                    "SciDocsRR": 63.55,
+                    "StackOverflowDupQuestions": 35.65
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "udever-bloom-1b1",
-                    "AlloprofRetrieval": 12.37,
-                    "BSARDRetrieval": 0.0,
-                    "MintakaRetrieval (fr)": 2.78,
-                    "SyntecRetrieval": 40.57,
-                    "XPQARetrieval (fr)": 33.82
+                    "Model": "komninos",
+                    "ArguAna": 30.96,
+                    "CQADupstackRetrieval": 16.79,
+                    "ClimateFEVER": 14.87,
+                    "DBPedia": 15.88,
+                    "FEVER": 15.56,
+                    "FiQA2018": 10.49,
+                    "HotpotQA": 20.77,
+                    "MSMARCO": 9.75,
+                    "NFCorpus": 11.79,
+                    "NQ": 12.75,
+                    "QuoraRetrieval": 71.57,
+                    "SCIDOCS": 8.47,
+                    "SciFact": 29.53,
+                    "TRECCOVID": 35.92,
+                    "Touche2020": 13.17
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "udever-bloom-1b1",
-                    "SICKFr": 59.94,
-                    "STS22 (fr)": 77.1,
-                    "STSBenchmarkMultilingualSTS (fr)": 49.97
+                    "Model": "komninos",
+                    "BIOSSES": 50.25,
+                    "SICK-R": 55.49,
+                    "STS12": 53.51,
+                    "STS13": 70.8,
+                    "STS14": 63.56,
+                    "STS15": 74.08,
+                    "STS16": 64.6,
+                    "STS17 (ar-ar)": 13.78,
+                    "STS17 (en-ar)": 9.08,
+                    "STS17 (en-de)": -3.11,
+                    "STS17 (en-en)": 76.91,
+                    "STS17 (en-tr)": -0.45,
+                    "STS17 (es-en)": -8.18,
+                    "STS17 (es-es)": 48.23,
+                    "STS17 (fr-en)": 5.81,
+                    "STS17 (it-en)": 3.64,
+                    "STS17 (ko-ko)": 2.54,
+                    "STS17 (nl-en)": 0.44,
+                    "STS22 (ar)": 32.42,
+                    "STS22 (de)": 33.04,
+                    "STS22 (de-en)": 28.65,
+                    "STS22 (de-fr)": 14.77,
+                    "STS22 (de-pl)": 11.21,
+                    "STS22 (en)": 53.89,
+                    "STS22 (es)": 48.53,
+                    "STS22 (es-en)": 26.97,
+                    "STS22 (es-it)": 41.1,
+                    "STS22 (fr)": 49.43,
+                    "STS22 (fr-pl)": 39.44,
+                    "STS22 (it)": 57.77,
+                    "STS22 (pl)": 12.47,
+                    "STS22 (pl-en)": 45.55,
+                    "STS22 (ru)": 19.44,
+                    "STS22 (tr)": 47.38,
+                    "STS22 (zh)": 4.78,
+                    "STS22 (zh-en)": 14.05,
+                    "STSBenchmark": 61.55
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "udever-bloom-1b1",
-                    "SummEvalFr": 29.48
+                    "Model": "komninos",
+                    "SummEval": 30.49
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "komninos"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "udever-bloom-1b1"
+                    "Model": "komninos"
                 }
             ]
         }
     },
-    "nomic-embed-text-v1.5-512": {
+    "OpenSearch-text-hybrid": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "nomic-embed-text-v1.5-512"
+                    "Model": "OpenSearch-text-hybrid"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "nomic-embed-text-v1.5-512",
-                    "AmazonCounterfactualClassification (en)": 74.27,
-                    "AmazonPolarityClassification": 91.89,
-                    "AmazonReviewsClassification (en)": 46.97,
-                    "Banking77Classification": 84.15,
-                    "EmotionClassification": 47.73,
-                    "ImdbClassification": 85.47,
-                    "MTOPDomainClassification (en)": 92.62,
-                    "MTOPIntentClassification (en)": 74.27,
-                    "MassiveIntentClassification (en)": 73.07,
-                    "MassiveScenarioClassification (en)": 76.82,
-                    "ToxicConversationsClassification": 71.25,
-                    "TweetSentimentExtractionClassification": 60.4
+                    "Model": "OpenSearch-text-hybrid",
+                    "AmazonReviewsClassification (zh)": 46.18,
+                    "IFlyTek": 51.8,
+                    "JDReview": 86.02,
+                    "MassiveIntentClassification (zh-CN)": 73.85,
+                    "MassiveScenarioClassification (zh-CN)": 77.13,
+                    "MultilingualSentiment": 76.35,
+                    "OnlineShopping": 93.2,
+                    "TNews": 53.06,
+                    "Waimai": 88.1
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "nomic-embed-text-v1.5-512",
-                    "ArxivClusteringP2P": 45.45,
-                    "ArxivClusteringS2S": 36.19,
-                    "BiorxivClusteringP2P": 38.41,
-                    "BiorxivClusteringS2S": 32.28,
-                    "MedrxivClusteringP2P": 34.47,
-                    "MedrxivClusteringS2S": 31.43,
-                    "RedditClustering": 55.9,
-                    "RedditClusteringP2P": 60.58,
-                    "StackExchangeClustering": 62.94,
-                    "StackExchangeClusteringP2P": 33.81,
-                    "TwentyNewsgroupsClustering": 49.36
+                    "Model": "OpenSearch-text-hybrid",
+                    "CLSClusteringP2P": 41.64,
+                    "CLSClusteringS2S": 40.33,
+                    "ThuNewsClusteringP2P": 69.28,
+                    "ThuNewsClusteringS2S": 63.75
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "nomic-embed-text-v1.5-512",
-                    "SprintDuplicateQuestions": 92.91,
-                    "TwitterSemEval2015": 74.3,
-                    "TwitterURLCorpus": 86.57
+                    "Model": "OpenSearch-text-hybrid",
+                    "Cmnli": 90.77,
+                    "Ocnli": 85.44
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "nomic-embed-text-v1.5-512",
-                    "AskUbuntuDupQuestions": 61.6,
-                    "MindSmallReranking": 30.34,
-                    "SciDocsRR": 80.33,
-                    "StackOverflowDupQuestions": 50.32
+                    "Model": "OpenSearch-text-hybrid",
+                    "CMedQAv1": 88.99,
+                    "CMedQAv2": 89.6,
+                    "MMarcoReranking": 28.12,
+                    "T2Reranking": 66.38
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "nomic-embed-text-v1.5-512",
-                    "ArguAna": 47.45,
-                    "CQADupstackRetrieval": 39.06,
-                    "ClimateFEVER": 40.7,
-                    "DBPedia": 42.96,
-                    "FEVER": 85.7,
-                    "FiQA2018": 36.92,
-                    "HotpotQA": 71.48,
-                    "MSMARCO": 42.29,
-                    "NFCorpus": 33.31,
-                    "NQ": 58.83,
-                    "QuoraRetrieval": 87.87,
-                    "SCIDOCS": 17.88,
-                    "SciFact": 70.12,
-                    "TRECCOVID": 82.12,
-                    "Touche2020": 29.24
+                    "Model": "OpenSearch-text-hybrid",
+                    "CmedqaRetrieval": 46.56,
+                    "CovidRetrieval": 84.03,
+                    "DuRetrieval": 87.85,
+                    "EcomRetrieval": 68.79,
+                    "MMarcoRetrieval": 79.93,
+                    "MedicalRetrieval": 65.92,
+                    "T2Retrieval": 86.76,
+                    "VideoRetrieval": 75.43
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "nomic-embed-text-v1.5-512",
-                    "BIOSSES": 83.3,
-                    "SICK-R": 79.27,
-                    "STS12": 78.3,
-                    "STS13": 85.81,
-                    "STS14": 81.38,
-                    "STS15": 86.79,
-                    "STS16": 84.56,
-                    "STS17 (en-en)": 87.25,
-                    "STS22 (en)": 65.24,
-                    "STSBenchmark": 85.14
+                    "Model": "OpenSearch-text-hybrid",
+                    "AFQMC": 59.11,
+                    "ATEC": 58.19,
+                    "BQ": 71.07,
+                    "LCQMC": 78.27,
+                    "PAWSX": 44.98,
+                    "QBQTC": 38.69,
+                    "STS22 (zh)": 66.53,
+                    "STSB": 82.8
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "nomic-embed-text-v1.5-512",
-                    "SummEval": 30.47
+                    "Model": "OpenSearch-text-hybrid"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "OpenSearch-text-hybrid"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "nomic-embed-text-v1.5-512"
+                    "Model": "OpenSearch-text-hybrid"
                 }
             ]
         }
     },
-    "text-embedding-3-large-instruct": {
+    "sentence-t5-xxl": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "text-embedding-3-large-instruct"
+                    "Model": "sentence-t5-xxl"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "text-embedding-3-large-instruct"
+                    "Model": "sentence-t5-xxl",
+                    "AmazonCounterfactualClassification (en)": 77.07,
+                    "AmazonPolarityClassification": 92.79,
+                    "AmazonReviewsClassification (en)": 48.93,
+                    "AmazonReviewsClassification (fr)": 46.09,
+                    "Banking77Classification": 82.31,
+                    "EmotionClassification": 48.57,
+                    "ImdbClassification": 90.23,
+                    "MTOPDomainClassification (en)": 92.49,
+                    "MTOPDomainClassification (fr)": 86.2,
+                    "MTOPIntentClassification (en)": 68.33,
+                    "MTOPIntentClassification (fr)": 58.33,
+                    "MasakhaNEWSClassification (fra)": 79.1,
+                    "MassiveIntentClassification (en)": 73.44,
+                    "MassiveIntentClassification (fr)": 65.91,
+                    "MassiveScenarioClassification (en)": 74.82,
+                    "MassiveScenarioClassification (fr)": 68.53,
+                    "ToxicConversationsClassification": 70.04,
+                    "TweetSentimentExtractionClassification": 62.01
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "text-embedding-3-large-instruct"
+                    "Model": "sentence-t5-xxl",
+                    "AlloProfClusteringP2P": 60.98,
+                    "AlloProfClusteringS2S": 43.5,
+                    "ArxivClusteringP2P": 42.89,
+                    "ArxivClusteringS2S": 33.47,
+                    "BiorxivClusteringP2P": 36.53,
+                    "BiorxivClusteringS2S": 28.66,
+                    "BlurbsClusteringP2P": 39.91,
+                    "BlurbsClusteringS2S": 15.94,
+                    "HALClusteringS2S": 21.4,
+                    "MLSUMClusteringP2P": 42.24,
+                    "MLSUMClusteringS2S": 35.25,
+                    "MasakhaNEWSClusteringP2P (fra)": 61.15,
+                    "MasakhaNEWSClusteringS2S (fra)": 38.24,
+                    "MedrxivClusteringP2P": 32.09,
+                    "MedrxivClusteringS2S": 26.82,
+                    "RedditClustering": 58.99,
+                    "RedditClusteringP2P": 64.46,
+                    "StackExchangeClustering": 70.78,
+                    "StackExchangeClusteringP2P": 35.25,
+                    "TenKGnadClusteringP2P": 43.43,
+                    "TenKGnadClusteringS2S": 19.69,
+                    "TwentyNewsgroupsClustering": 50.93
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "text-embedding-3-large-instruct"
+                    "Model": "sentence-t5-xxl",
+                    "OpusparcusPC (fr)": 93.94,
+                    "PawsXPairClassification (fr)": 63.98,
+                    "SprintDuplicateQuestions": 88.89,
+                    "TwitterSemEval2015": 80.28,
+                    "TwitterURLCorpus": 86.01
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "text-embedding-3-large-instruct"
+                    "Model": "sentence-t5-xxl",
+                    "AlloprofReranking": 68.36,
+                    "AskUbuntuDupQuestions": 66.16,
+                    "MindSmallReranking": 30.6,
+                    "SciDocsRR": 76.09,
+                    "StackOverflowDupQuestions": 52.85,
+                    "SyntecReranking": 85.15
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "sentence-t5-xxl",
+                    "AlloprofRetrieval": 45.75,
+                    "ArguAna": 39.85,
+                    "BSARDRetrieval": 3.33,
+                    "CQADupstackRetrieval": 44.65,
+                    "ClimateFEVER": 14.63,
+                    "DBPedia": 39.19,
+                    "FEVER": 51.2,
+                    "FiQA2018": 46.68,
+                    "HotpotQA": 42.14,
+                    "MSMARCO": 27.67,
+                    "MintakaRetrieval (fr)": 34.93,
+                    "NFCorpus": 35.08,
+                    "NQ": 52.87,
+                    "QuoraRetrieval": 85.96,
+                    "SCIDOCS": 17.17,
+                    "SciFact": 55.38,
+                    "SyntecRetrieval": 78.97,
+                    "TRECCOVID": 59.48,
+                    "Touche2020": 21.65,
+                    "XPQARetrieval (fr)": 56.2
                 }
             ]
         },
-        "Retrieval": {
-            "ndcg_at_10": [
+        "STS": {
+            "spearman": [
                 {
-                    "Model": "text-embedding-3-large-instruct",
-                    "ARCChallenge": 21.22,
-                    "AlphaNLI": 34.23,
-                    "HellaSwag": 31.4,
-                    "PIQA": 37.52,
-                    "Quail": 13.6,
-                    "RARbCode": 89.41,
-                    "RARbMath": 87.73,
-                    "SIQA": 4.99,
-                    "SpartQA": 7.45,
-                    "TempReasonL1": 2.07,
-                    "TempReasonL2Fact": 39.77,
-                    "TempReasonL2Pure": 11.04,
-                    "TempReasonL3Fact": 37.04,
-                    "TempReasonL3Pure": 15.51,
-                    "WinoGrande": 33.92
+                    "Model": "sentence-t5-xxl",
+                    "BIOSSES": 80.43,
+                    "SICK-R": 80.47,
+                    "SICKFr": 77.07,
+                    "STS12": 78.85,
+                    "STS13": 88.94,
+                    "STS14": 84.86,
+                    "STS15": 89.32,
+                    "STS16": 84.67,
+                    "STS17 (en-en)": 89.46,
+                    "STS22 (en)": 65.33,
+                    "STS22 (fr)": 76.8,
+                    "STSBenchmark": 84.01,
+                    "STSBenchmarkMultilingualSTS (fr)": 81.24
                 }
             ]
         },
-        "STS": {
+        "Summarization": {
             "spearman": [
                 {
-                    "Model": "text-embedding-3-large-instruct"
+                    "Model": "sentence-t5-xxl",
+                    "SummEval": 30.08,
+                    "SummEvalFr": 30.39
                 }
             ]
         },
-        "Summarization": {
-            "spearman": [
+        "MultilabelClassification": {
+            "accuracy": [
                 {
-                    "Model": "text-embedding-3-large-instruct"
+                    "Model": "sentence-t5-xxl"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "text-embedding-3-large-instruct"
+                    "Model": "sentence-t5-xxl"
                 }
             ]
         }
     },
-    "sentence-croissant-llm-base": {
+    "gelectra-large": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "sentence-croissant-llm-base"
+                    "Model": "gelectra-large"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "sentence-croissant-llm-base",
-                    "AmazonReviewsClassification (fr)": 34.79,
-                    "MTOPDomainClassification (fr)": 85.52,
-                    "MTOPIntentClassification (fr)": 63.12,
-                    "MasakhaNEWSClassification (fra)": 79.29,
-                    "MassiveIntentClassification (fr)": 59.41,
-                    "MassiveScenarioClassification (fr)": 65.29
+                    "Model": "gelectra-large"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "sentence-croissant-llm-base",
-                    "AlloProfClusteringP2P": 64.12,
-                    "AlloProfClusteringS2S": 32.52,
-                    "HALClusteringS2S": 23.4,
-                    "MLSUMClusteringP2P": 42.94,
-                    "MLSUMClusteringS2S": 33.91,
-                    "MasakhaNEWSClusteringP2P (fra)": 53.94,
-                    "MasakhaNEWSClusteringS2S (fra)": 41.05
+                    "Model": "gelectra-large",
+                    "BlurbsClusteringP2P": 13.96,
+                    "BlurbsClusteringS2S": 7.57,
+                    "TenKGnadClusteringP2P": 11.49,
+                    "TenKGnadClusteringS2S": 3.91
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "sentence-croissant-llm-base",
-                    "OpusparcusPC (fr)": 91.42,
-                    "PawsXPairClassification (fr)": 63.13
+                    "Model": "gelectra-large"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "sentence-croissant-llm-base",
-                    "AlloprofReranking": 53.0,
-                    "SyntecReranking": 82.9
+                    "Model": "gelectra-large"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "sentence-croissant-llm-base",
-                    "AlloprofRetrieval": 29.97,
-                    "BSARDRetrieval": 0.0,
-                    "MintakaRetrieval (fr)": 21.31,
-                    "SyntecRetrieval": 74.2,
-                    "XPQARetrieval (fr)": 58.57
+                    "Model": "gelectra-large"
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "sentence-croissant-llm-base",
-                    "SICKFr": 69.6,
-                    "STS22 (fr)": 78.77,
-                    "STSBenchmarkMultilingualSTS (fr)": 79.23
+                    "Model": "gelectra-large"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "sentence-croissant-llm-base",
-                    "SummEvalFr": 29.04
+                    "Model": "gelectra-large"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "gelectra-large"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "sentence-croissant-llm-base"
+                    "Model": "gelectra-large"
                 }
             ]
         }
     },
-    "titan-embed-text-v1": {
+    "nomic-embed-text-v1.5-128": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "titan-embed-text-v1"
+                    "Model": "nomic-embed-text-v1.5-128"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "titan-embed-text-v1",
-                    "AmazonCounterfactualClassification (en)": 61.85,
-                    "Banking77Classification": 83.21
+                    "Model": "nomic-embed-text-v1.5-128",
+                    "AmazonCounterfactualClassification (en)": 69.78,
+                    "AmazonPolarityClassification": 88.74,
+                    "AmazonReviewsClassification (en)": 43.11,
+                    "Banking77Classification": 82.78,
+                    "EmotionClassification": 42.92,
+                    "ImdbClassification": 80.87,
+                    "MTOPDomainClassification (en)": 89.61,
+                    "MTOPIntentClassification (en)": 68.9,
+                    "MassiveIntentClassification (en)": 69.34,
+                    "MassiveScenarioClassification (en)": 74.21,
+                    "ToxicConversationsClassification": 68.16,
+                    "TweetSentimentExtractionClassification": 57.99
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "titan-embed-text-v1"
+                    "Model": "nomic-embed-text-v1.5-128",
+                    "ArxivClusteringP2P": 43.87,
+                    "ArxivClusteringS2S": 34.57,
+                    "BiorxivClusteringP2P": 36.79,
+                    "BiorxivClusteringS2S": 30.68,
+                    "MedrxivClusteringP2P": 34.09,
+                    "MedrxivClusteringS2S": 31.3,
+                    "RedditClustering": 53.31,
+                    "RedditClusteringP2P": 58.96,
+                    "StackExchangeClustering": 59.92,
+                    "StackExchangeClusteringP2P": 33.88,
+                    "TwentyNewsgroupsClustering": 47.29
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "titan-embed-text-v1"
+                    "Model": "nomic-embed-text-v1.5-128",
+                    "SprintDuplicateQuestions": 91.45,
+                    "TwitterSemEval2015": 73.23,
+                    "TwitterURLCorpus": 85.93
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "titan-embed-text-v1",
-                    "SciDocsRR": 88.87
+                    "Model": "nomic-embed-text-v1.5-128",
+                    "AskUbuntuDupQuestions": 61.16,
+                    "MindSmallReranking": 30.02,
+                    "SciDocsRR": 78.05,
+                    "StackOverflowDupQuestions": 49.0
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "titan-embed-text-v1",
-                    "ArguAna": 48.83,
-                    "FiQA2018": 40.38,
-                    "MSMARCO": 35.19,
-                    "NQ": 51.08,
-                    "SciFact": 73.5,
-                    "TRECCOVID": 54.74
+                    "Model": "nomic-embed-text-v1.5-128",
+                    "ArguAna": 43.4,
+                    "CQADupstackRetrieval": 34.67,
+                    "ClimateFEVER": 36.52,
+                    "DBPedia": 36.22,
+                    "FEVER": 80.48,
+                    "FiQA2018": 32.08,
+                    "HotpotQA": 60.09,
+                    "MSMARCO": 39.99,
+                    "NFCorpus": 30.72,
+                    "NQ": 53.62,
+                    "QuoraRetrieval": 87.07,
+                    "SCIDOCS": 15.56,
+                    "SciFact": 64.28,
+                    "TRECCOVID": 74.58,
+                    "Touche2020": 26.99
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "titan-embed-text-v1",
-                    "BIOSSES": 84.17,
-                    "SICK-R": 73.05,
-                    "STS12": 66.59,
-                    "STS13": 83.24,
-                    "STS14": 73.71,
-                    "STS15": 82.4,
-                    "STS16": NaN,
-                    "STS17 (en-en)": 80.9,
-                    "STSBenchmark": 74.85
+                    "Model": "nomic-embed-text-v1.5-128",
+                    "BIOSSES": 80.19,
+                    "SICK-R": 79.09,
+                    "STS12": 77.49,
+                    "STS13": 85.62,
+                    "STS14": 80.5,
+                    "STS15": 85.84,
+                    "STS16": 83.9,
+                    "STS17 (en-en)": 86.27,
+                    "STS22 (en)": 64.24,
+                    "STSBenchmark": 84.28
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "titan-embed-text-v1"
+                    "Model": "nomic-embed-text-v1.5-128",
+                    "SummEval": 29.59
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "nomic-embed-text-v1.5-128"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "titan-embed-text-v1"
+                    "Model": "nomic-embed-text-v1.5-128"
                 }
             ]
         }
     },
-    "dragon-plus-instruct": {
+    "universal-sentence-encoder-multilingual-large-3": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "dragon-plus-instruct"
+                    "Model": "universal-sentence-encoder-multilingual-large-3"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "dragon-plus-instruct"
+                    "Model": "universal-sentence-encoder-multilingual-large-3",
+                    "AmazonReviewsClassification (fr)": 35.09,
+                    "MTOPDomainClassification (fr)": 88.19,
+                    "MTOPIntentClassification (fr)": 63.64,
+                    "MasakhaNEWSClassification (fra)": 72.04,
+                    "MassiveIntentClassification (fr)": 65.8,
+                    "MassiveScenarioClassification (fr)": 73.47
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "dragon-plus-instruct"
+                    "Model": "universal-sentence-encoder-multilingual-large-3",
+                    "AlloProfClusteringP2P": 54.21,
+                    "AlloProfClusteringS2S": 37.95,
+                    "HALClusteringS2S": 18.94,
+                    "MLSUMClusteringP2P": 41.02,
+                    "MLSUMClusteringS2S": 37.97,
+                    "MasakhaNEWSClusteringP2P (fra)": 24.09,
+                    "MasakhaNEWSClusteringS2S (fra)": 40.24
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "dragon-plus-instruct"
+                    "Model": "universal-sentence-encoder-multilingual-large-3",
+                    "OpusparcusPC (fr)": 93.38,
+                    "PawsXPairClassification (fr)": 53.62
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "dragon-plus-instruct"
+                    "Model": "universal-sentence-encoder-multilingual-large-3",
+                    "AlloprofReranking": 55.39,
+                    "SyntecReranking": 77.13
                 }
             ]
         },
-        "Retrieval": {
-            "ndcg_at_10": [
-                {
-                    "Model": "dragon-plus-instruct",
-                    "ARCChallenge": 8.24,
-                    "AlphaNLI": 25.18,
-                    "HellaSwag": 24.06,
-                    "PIQA": 26.35,
-                    "Quail": 4.2,
-                    "RARbCode": 12.84,
-                    "RARbMath": 36.15,
-                    "SIQA": 1.75,
-                    "SpartQA": 10.82,
-                    "TempReasonL1": 1.54,
-                    "TempReasonL2Fact": 16.11,
-                    "TempReasonL2Pure": 0.57,
-                    "TempReasonL3Fact": 14.81,
-                    "TempReasonL3Pure": 7.46,
-                    "WinoGrande": 60.84
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "universal-sentence-encoder-multilingual-large-3",
+                    "AlloprofRetrieval": 33.78,
+                    "BSARDRetrieval": 0.0,
+                    "MintakaRetrieval (fr)": 26.21,
+                    "SyntecRetrieval": 63.69,
+                    "XPQARetrieval (fr)": 65.21
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "dragon-plus-instruct"
+                    "Model": "universal-sentence-encoder-multilingual-large-3",
+                    "SICKFr": 74.39,
+                    "STS22 (fr)": 71.11,
+                    "STSBenchmarkMultilingualSTS (fr)": 78.16
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "dragon-plus-instruct"
+                    "Model": "universal-sentence-encoder-multilingual-large-3",
+                    "SummEvalFr": 28.56
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "universal-sentence-encoder-multilingual-large-3"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "dragon-plus-instruct"
+                    "Model": "universal-sentence-encoder-multilingual-large-3"
                 }
             ]
         }
     },
-    "norbert3-large": {
+    "bm25": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "norbert3-large",
-                    "BornholmBitextMining": 2.9
+                    "Model": "bm25"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "norbert3-large",
-                    "AngryTweetsClassification": 49.04,
-                    "DKHateClassification": 62.71,
-                    "DanishPoliticalCommentsClassification": 33.53,
-                    "LccSentimentClassification": 46.93,
-                    "MassiveIntentClassification (da)": 45.98,
-                    "MassiveIntentClassification (nb)": 47.42,
-                    "MassiveIntentClassification (sv)": 48.47,
-                    "MassiveScenarioClassification (da)": 50.51,
-                    "MassiveScenarioClassification (nb)": 54.25,
-                    "MassiveScenarioClassification (sv)": 50.6,
-                    "NoRecClassification": 50.46,
-                    "NordicLangClassification": 84.25,
-                    "NorwegianParliament": 58.85,
-                    "ScalaDaClassification": 60.72,
-                    "ScalaNbClassification": 66.79
+                    "Model": "bm25"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "norbert3-large"
+                    "Model": "bm25"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "norbert3-large"
+                    "Model": "bm25"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "norbert3-large"
+                    "Model": "bm25"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "norbert3-large"
+                    "Model": "bm25",
+                    "BrightRetrieval (robotics)": 13.53,
+                    "BrightRetrieval (pony)": 7.93,
+                    "BrightRetrieval (leetcode)": 24.37,
+                    "BrightRetrieval (earth_science)": 27.06,
+                    "BrightRetrieval (stackoverflow)": 16.55,
+                    "BrightRetrieval (economics)": 14.87,
+                    "BrightRetrieval (theoremqa_questions)": 9.78,
+                    "BrightRetrieval (theoremqa_theorems)": 4.25,
+                    "BrightRetrieval (psychology)": 12.51,
+                    "BrightRetrieval (sustainable_living)": 15.22,
+                    "BrightRetrieval (biology)": 19.19,
+                    "BrightRetrieval (aops)": 6.2
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "norbert3-large"
+                    "Model": "bm25"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "norbert3-large"
+                    "Model": "bm25"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "bm25"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "norbert3-large"
+                    "Model": "bm25",
+                    "Core17InstructionRetrieval": -1.06,
+                    "News21InstructionRetrieval": -2.15,
+                    "Robust04InstructionRetrieval": -3.06
                 }
             ]
         }
     },
-    "bert-base-15lang-cased": {
+    "LLM2Vec-Llama-2-unsupervised": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "bert-base-15lang-cased"
+                    "Model": "LLM2Vec-Llama-2-unsupervised"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "bert-base-15lang-cased",
-                    "AmazonReviewsClassification (fr)": 29.35,
-                    "MTOPDomainClassification (fr)": 63.7,
-                    "MTOPIntentClassification (fr)": 37.85,
-                    "MasakhaNEWSClassification (fra)": 63.89,
-                    "MassiveIntentClassification (fr)": 37.28,
-                    "MassiveScenarioClassification (fr)": 44.47
+                    "Model": "LLM2Vec-Llama-2-unsupervised",
+                    "AmazonCounterfactualClassification (en)": 76.91,
+                    "AmazonPolarityClassification": 79.05,
+                    "AmazonReviewsClassification (en)": 40.08,
+                    "Banking77Classification": 84.65,
+                    "EmotionClassification": 46.58,
+                    "ImdbClassification": 75.68,
+                    "MTOPDomainClassification (en)": 94.33,
+                    "MTOPIntentClassification (en)": 79.54,
+                    "MassiveIntentClassification (en)": 73.84,
+                    "MassiveScenarioClassification (en)": 79.17,
+                    "ToxicConversationsClassification": 71.81,
+                    "TweetSentimentExtractionClassification": 57.17
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "bert-base-15lang-cased",
-                    "AlloProfClusteringP2P": 53.16,
-                    "AlloProfClusteringS2S": 43.43,
-                    "HALClusteringS2S": 20.26,
-                    "MLSUMClusteringP2P": 41.22,
-                    "MLSUMClusteringS2S": 31.88,
-                    "MasakhaNEWSClusteringP2P (fra)": 24.23,
-                    "MasakhaNEWSClusteringS2S (fra)": 24.46
+                    "Model": "LLM2Vec-Llama-2-unsupervised",
+                    "ArxivClusteringP2P": 47.81,
+                    "ArxivClusteringS2S": 40.53,
+                    "BiorxivClusteringP2P": 38.12,
+                    "BiorxivClusteringS2S": 31.25,
+                    "MedrxivClusteringP2P": 30.94,
+                    "MedrxivClusteringS2S": 28.04,
+                    "RedditClustering": 42.84,
+                    "RedditClusteringP2P": 60.1,
+                    "StackExchangeClustering": 65.12,
+                    "StackExchangeClusteringP2P": 33.61,
+                    "TwentyNewsgroupsClustering": 30.76
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "bert-base-15lang-cased",
-                    "OpusparcusPC (fr)": 86.78,
-                    "PawsXPairClassification (fr)": 53.38
+                    "Model": "LLM2Vec-Llama-2-unsupervised",
+                    "SprintDuplicateQuestions": 87.57,
+                    "TwitterSemEval2015": 65.14,
+                    "TwitterURLCorpus": 80.94
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "bert-base-15lang-cased",
-                    "AlloprofReranking": 36.21,
-                    "SyntecReranking": 53.25
+                    "Model": "LLM2Vec-Llama-2-unsupervised",
+                    "AskUbuntuDupQuestions": 55.56,
+                    "MindSmallReranking": 30.86,
+                    "SciDocsRR": 77.62,
+                    "StackOverflowDupQuestions": 47.77
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "bert-base-15lang-cased",
-                    "AlloprofRetrieval": 1.61,
-                    "BSARDRetrieval": 0.0,
-                    "MintakaRetrieval (fr)": 3.55,
-                    "SyntecRetrieval": 18.95,
-                    "XPQARetrieval (fr)": 18.35
+                    "Model": "LLM2Vec-Llama-2-unsupervised",
+                    "ArguAna": 47.09,
+                    "CQADupstackRetrieval": 30.78,
+                    "ClimateFEVER": 20.67,
+                    "DBPedia": 25.81,
+                    "FEVER": 43.48,
+                    "FiQA2018": 24.62,
+                    "HotpotQA": 48.46,
+                    "MSMARCO": 18.81,
+                    "NFCorpus": 26.81,
+                    "NQ": 33.21,
+                    "QuoraRetrieval": 86.15,
+                    "SCIDOCS": 10.0,
+                    "SciFact": 64.48,
+                    "TRECCOVID": 60.67,
+                    "Touche2020": 10.18
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "bert-base-15lang-cased",
-                    "SICKFr": 58.77,
-                    "STS22 (fr)": 40.4,
-                    "STSBenchmarkMultilingualSTS (fr)": 52.25
+                    "Model": "LLM2Vec-Llama-2-unsupervised",
+                    "BIOSSES": 82.41,
+                    "SICK-R": 71.77,
+                    "STS12": 65.39,
+                    "STS13": 79.26,
+                    "STS14": 72.98,
+                    "STS15": 82.72,
+                    "STS16": 81.02,
+                    "STS17 (en-en)": 86.7,
+                    "STS22 (en)": 63.47,
+                    "STSBenchmark": 78.32
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "bert-base-15lang-cased",
-                    "SummEvalFr": 29.13
+                    "Model": "LLM2Vec-Llama-2-unsupervised",
+                    "SummEval": 31.38
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "LLM2Vec-Llama-2-unsupervised"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "bert-base-15lang-cased"
+                    "Model": "LLM2Vec-Llama-2-unsupervised"
                 }
             ]
         }
     },
-    "gbert-base": {
+    "electra-small-swedish-cased-discriminator": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "gbert-base"
+                    "Model": "electra-small-swedish-cased-discriminator",
+                    "BornholmBitextMining": 0.85
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "gbert-base"
+                    "Model": "electra-small-swedish-cased-discriminator",
+                    "AngryTweetsClassification": 40.52,
+                    "DKHateClassification": 52.28,
+                    "DanishPoliticalCommentsClassification": 25.17,
+                    "LccSentimentClassification": 36.67,
+                    "MassiveIntentClassification (da)": 6.51,
+                    "MassiveIntentClassification (nb)": 5.66,
+                    "MassiveIntentClassification (sv)": 6.6,
+                    "MassiveScenarioClassification (da)": 11.5,
+                    "MassiveScenarioClassification (nb)": 11.26,
+                    "MassiveScenarioClassification (sv)": 12.16,
+                    "NoRecClassification": 39.72,
+                    "NordicLangClassification": 44.53,
+                    "NorwegianParliament": 52.44,
+                    "ScalaDaClassification": 51.66,
+                    "ScalaNbClassification": 52.41
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "gbert-base",
-                    "BlurbsClusteringP2P": 35.36,
-                    "BlurbsClusteringS2S": 11.27,
-                    "TenKGnadClusteringP2P": 37.16,
-                    "TenKGnadClusteringS2S": 24.23
+                    "Model": "electra-small-swedish-cased-discriminator"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "gbert-base"
+                    "Model": "electra-small-swedish-cased-discriminator"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "gbert-base"
+                    "Model": "electra-small-swedish-cased-discriminator"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "gbert-base"
+                    "Model": "electra-small-swedish-cased-discriminator"
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "gbert-base"
+                    "Model": "electra-small-swedish-cased-discriminator"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "gbert-base"
+                    "Model": "electra-small-swedish-cased-discriminator"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "electra-small-swedish-cased-discriminator"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "gbert-base"
+                    "Model": "electra-small-swedish-cased-discriminator"
                 }
             ]
         }
     },
-    "gelectra-base": {
+    "text2vec-base-chinese": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "gelectra-base"
+                    "Model": "text2vec-base-chinese"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "gelectra-base"
+                    "Model": "text2vec-base-chinese",
+                    "AmazonReviewsClassification (zh)": 34.12,
+                    "IFlyTek": 42.05,
+                    "JDReview": 82.14,
+                    "MassiveIntentClassification (zh-CN)": 63.98,
+                    "MassiveScenarioClassification (zh-CN)": 70.52,
+                    "MultilingualSentiment": 60.98,
+                    "OnlineShopping": 85.69,
+                    "TNews": 43.01,
+                    "Waimai": 77.22
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "gelectra-base",
-                    "BlurbsClusteringP2P": 10.06,
-                    "BlurbsClusteringS2S": 7.74,
-                    "TenKGnadClusteringP2P": 9.02,
-                    "TenKGnadClusteringS2S": 4.11
+                    "Model": "text2vec-base-chinese",
+                    "CLSClusteringP2P": 35.27,
+                    "CLSClusteringS2S": 32.42,
+                    "ThuNewsClusteringP2P": 42.92,
+                    "ThuNewsClusteringS2S": 40.01
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "gelectra-base"
+                    "Model": "text2vec-base-chinese",
+                    "Cmnli": 73.87,
+                    "Ocnli": 60.95
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "gelectra-base"
+                    "Model": "text2vec-base-chinese",
+                    "CMedQAv1": 59.26,
+                    "CMedQAv2": 59.82,
+                    "MMarcoReranking": 12.76,
+                    "T2Reranking": 65.95
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "gelectra-base"
+                    "Model": "text2vec-base-chinese",
+                    "CmedqaRetrieval": 15.91,
+                    "CovidRetrieval": 44.81,
+                    "DuRetrieval": 52.23,
+                    "EcomRetrieval": 34.6,
+                    "MMarcoRetrieval": 44.06,
+                    "MedicalRetrieval": 27.56,
+                    "T2Retrieval": 51.67,
+                    "VideoRetrieval": 39.52
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "gelectra-base"
+                    "Model": "text2vec-base-chinese",
+                    "AFQMC": 26.06,
+                    "ATEC": 31.93,
+                    "BQ": 42.67,
+                    "LCQMC": 70.16,
+                    "PAWSX": 17.21,
+                    "QBQTC": 24.62,
+                    "STS22 (zh)": 55.35,
+                    "STSB": 79.3
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "gelectra-base"
+                    "Model": "text2vec-base-chinese"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "text2vec-base-chinese"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "gelectra-base"
+                    "Model": "text2vec-base-chinese"
                 }
             ]
         }
     },
-    "xlm-roberta-base": {
+    "bm25s": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "xlm-roberta-base",
-                    "BornholmBitextMining": 4.42
+                    "Model": "bm25s"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "xlm-roberta-base",
-                    "AmazonReviewsClassification (fr)": 26.75,
-                    "AngryTweetsClassification": 52.41,
-                    "DKHateClassification": 56.78,
-                    "DanishPoliticalCommentsClassification": 34.03,
-                    "LccSentimentClassification": 52.27,
-                    "MTOPDomainClassification (fr)": 43.83,
-                    "MTOPIntentClassification (fr)": 19.38,
-                    "MasakhaNEWSClassification (fra)": 60.5,
-                    "MassiveIntentClassification (da)": 41.06,
-                    "MassiveIntentClassification (nb)": 40.46,
-                    "MassiveIntentClassification (sv)": 45.12,
-                    "MassiveIntentClassification (fr)": 13.58,
-                    "MassiveScenarioClassification (da)": 43.91,
-                    "MassiveScenarioClassification (nb)": 44.83,
-                    "MassiveScenarioClassification (sv)": 47.35,
-                    "MassiveScenarioClassification (fr)": 23.21,
-                    "NoRecClassification": 46.28,
-                    "NordicLangClassification": 79.39,
-                    "NorwegianParliament": 56.75,
-                    "ScalaDaClassification": 57.3,
-                    "ScalaNbClassification": 58.33
+                    "Model": "bm25s"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "xlm-roberta-base",
-                    "AlloProfClusteringP2P": 52.24,
-                    "AlloProfClusteringS2S": 20.37,
-                    "HALClusteringS2S": 8.68,
-                    "MLSUMClusteringP2P": 40.44,
-                    "MLSUMClusteringS2S": 24.14,
-                    "MasakhaNEWSClusteringP2P (fra)": 29.29,
-                    "MasakhaNEWSClusteringS2S (fra)": 23.76
+                    "Model": "bm25s"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "xlm-roberta-base",
-                    "OpusparcusPC (fr)": 85.45,
-                    "PawsXPairClassification (fr)": 51.35
+                    "Model": "bm25s"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "xlm-roberta-base",
-                    "AlloprofReranking": 25.58,
-                    "SyntecReranking": 43.75
+                    "Model": "bm25s"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "xlm-roberta-base",
-                    "AlloprofRetrieval": 0.16,
-                    "BSARDRetrieval": 0.0,
-                    "MintakaRetrieval (fr)": 0.88,
-                    "SyntecRetrieval": 3.33,
-                    "XPQARetrieval (fr)": 11.65
-                }
-            ]
-        },
-        "STS": {
-            "spearman": [
-                {
-                    "Model": "xlm-roberta-base",
-                    "SICKFr": 48.62,
-                    "STS22 (fr)": 56.72,
-                    "STSBenchmarkMultilingualSTS (fr)": 46.23
-                }
-            ]
-        },
-        "Summarization": {
-            "spearman": [
-                {
-                    "Model": "xlm-roberta-base",
-                    "SummEvalFr": 29.14
-                }
-            ]
-        },
-        "InstructionRetrieval": {
-            "p-MRR": [
-                {
-                    "Model": "xlm-roberta-base"
-                }
-            ]
-        }
-    },
-    "all-mpnet-base-v2": {
-        "BitextMining": {
-            "f1": [
-                {
-                    "Model": "all-mpnet-base-v2",
-                    "BornholmBitextMining (dan-Latn)": 27.44,
-                    "Tatoeba (pol-Latn_eng-Latn)": 4.09,
-                    "Tatoeba (ita-Latn_eng-Latn)": 11.1,
-                    "Tatoeba (cat-Latn_eng-Latn)": 9.44,
-                    "Tatoeba (aze-Latn_eng-Latn)": 1.49,
-                    "Tatoeba (eus-Latn_eng-Latn)": 3.94,
-                    "Tatoeba (epo-Latn_eng-Latn)": 7.15,
-                    "Tatoeba (lit-Latn_eng-Latn)": 1.02,
-                    "Tatoeba (ast-Latn_eng-Latn)": 9.78,
-                    "Tatoeba (bul-Cyrl_eng-Latn)": 0.35,
-                    "Tatoeba (ceb-Latn_eng-Latn)": 4.41,
-                    "Tatoeba (mkd-Cyrl_eng-Latn)": 0.0,
-                    "Tatoeba (tzl-Latn_eng-Latn)": 3.55,
-                    "Tatoeba (zsm-Latn_eng-Latn)": 4.75,
-                    "Tatoeba (mhr-Cyrl_eng-Latn)": 0.17,
-                    "Tatoeba (pam-Latn_eng-Latn)": 4.32,
-                    "Tatoeba (amh-Ethi_eng-Latn)": 0.0,
-                    "Tatoeba (slv-Latn_eng-Latn)": 3.73,
-                    "Tatoeba (lvs-Latn_eng-Latn)": 2.98,
-                    "Tatoeba (sqi-Latn_eng-Latn)": 3.45,
-                    "Tatoeba (orv-Cyrl_eng-Latn)": 0.0,
-                    "Tatoeba (vie-Latn_eng-Latn)": 4.96,
-                    "Tatoeba (pes-Arab_eng-Latn)": 0.2,
-                    "Tatoeba (por-Latn_eng-Latn)": 10.48,
-                    "Tatoeba (dtp-Latn_eng-Latn)": 3.54,
-                    "Tatoeba (yid-Hebr_eng-Latn)": 0.08,
-                    "Tatoeba (isl-Latn_eng-Latn)": 3.86,
-                    "Tatoeba (cha-Latn_eng-Latn)": 12.2,
-                    "Tatoeba (ron-Latn_eng-Latn)": 7.34,
-                    "Tatoeba (hye-Armn_eng-Latn)": 0.14,
-                    "Tatoeba (mar-Deva_eng-Latn)": 0.11,
-                    "Tatoeba (hin-Deva_eng-Latn)": 0.02,
-                    "Tatoeba (kor-Hang_eng-Latn)": 0.32,
-                    "Tatoeba (srp-Cyrl_eng-Latn)": 1.89,
-                    "Tatoeba (csb-Latn_eng-Latn)": 4.19,
-                    "Tatoeba (jpn-Jpan_eng-Latn)": 1.71,
-                    "Tatoeba (ber-Tfng_eng-Latn)": 4.56,
-                    "Tatoeba (wuu-Hans_eng-Latn)": 0.91,
-                    "Tatoeba (jav-Latn_eng-Latn)": 3.17,
-                    "Tatoeba (nob-Latn_eng-Latn)": 4.37,
-                    "Tatoeba (bre-Latn_eng-Latn)": 3.65,
-                    "Tatoeba (kzj-Latn_eng-Latn)": 3.62,
-                    "Tatoeba (urd-Arab_eng-Latn)": 0.0,
-                    "Tatoeba (ces-Latn_eng-Latn)": 3.56,
-                    "Tatoeba (cbk-Latn_eng-Latn)": 9.33,
-                    "Tatoeba (gla-Latn_eng-Latn)": 2.04,
-                    "Tatoeba (war-Latn_eng-Latn)": 5.14,
-                    "Tatoeba (swh-Latn_eng-Latn)": 6.01,
-                    "Tatoeba (swg-Latn_eng-Latn)": 7.86,
-                    "Tatoeba (glg-Latn_eng-Latn)": 12.0,
-                    "Tatoeba (fao-Latn_eng-Latn)": 7.08,
-                    "Tatoeba (gsw-Latn_eng-Latn)": 10.67,
-                    "Tatoeba (rus-Cyrl_eng-Latn)": 0.14,
-                    "Tatoeba (kaz-Cyrl_eng-Latn)": 0.52,
-                    "Tatoeba (gle-Latn_eng-Latn)": 2.19,
-                    "Tatoeba (slk-Latn_eng-Latn)": 3.4,
-                    "Tatoeba (nno-Latn_eng-Latn)": 5.75,
-                    "Tatoeba (cor-Latn_eng-Latn)": 2.42,
-                    "Tatoeba (nov-Latn_eng-Latn)": 16.61,
-                    "Tatoeba (swe-Latn_eng-Latn)": 6.55,
-                    "Tatoeba (max-Deva_eng-Latn)": 6.46,
-                    "Tatoeba (oci-Latn_eng-Latn)": 8.57,
-                    "Tatoeba (lfn-Latn_eng-Latn)": 6.1,
-                    "Tatoeba (fra-Latn_eng-Latn)": 16.9,
-                    "Tatoeba (ben-Beng_eng-Latn)": 0.0,
-                    "Tatoeba (bel-Cyrl_eng-Latn)": 0.65,
-                    "Tatoeba (lat-Latn_eng-Latn)": 5.78,
-                    "Tatoeba (cmn-Hans_eng-Latn)": 2.22,
-                    "Tatoeba (kat-Geor_eng-Latn)": 0.43,
-                    "Tatoeba (bos-Latn_eng-Latn)": 4.6,
-                    "Tatoeba (xho-Latn_eng-Latn)": 3.3,
-                    "Tatoeba (tha-Thai_eng-Latn)": 0.0,
-                    "Tatoeba (cym-Latn_eng-Latn)": 4.88,
-                    "Tatoeba (deu-Latn_eng-Latn)": 11.46,
-                    "Tatoeba (awa-Deva_eng-Latn)": 0.44,
-                    "Tatoeba (ido-Latn_eng-Latn)": 9.84,
-                    "Tatoeba (tat-Cyrl_eng-Latn)": 0.24,
-                    "Tatoeba (kab-Latn_eng-Latn)": 1.31,
-                    "Tatoeba (uzb-Latn_eng-Latn)": 1.98,
-                    "Tatoeba (heb-Hebr_eng-Latn)": 0.28,
-                    "Tatoeba (ara-Arab_eng-Latn)": 0.1,
-                    "Tatoeba (fry-Latn_eng-Latn)": 12.43,
-                    "Tatoeba (afr-Latn_eng-Latn)": 6.08,
-                    "Tatoeba (kur-Latn_eng-Latn)": 3.65,
-                    "Tatoeba (pms-Latn_eng-Latn)": 7.63,
-                    "Tatoeba (ell-Grek_eng-Latn)": 0.0,
-                    "Tatoeba (spa-Latn_eng-Latn)": 10.12,
-                    "Tatoeba (dsb-Latn_eng-Latn)": 2.96,
-                    "Tatoeba (uig-Arab_eng-Latn)": 0.33,
-                    "Tatoeba (nld-Latn_eng-Latn)": 9.29,
-                    "Tatoeba (tel-Telu_eng-Latn)": 0.73,
-                    "Tatoeba (hrv-Latn_eng-Latn)": 3.77,
-                    "Tatoeba (nds-Latn_eng-Latn)": 10.96,
-                    "Tatoeba (hun-Latn_eng-Latn)": 3.23,
-                    "Tatoeba (est-Latn_eng-Latn)": 2.35,
-                    "Tatoeba (mal-Mlym_eng-Latn)": 0.15,
-                    "Tatoeba (khm-Khmr_eng-Latn)": 0.28,
-                    "Tatoeba (hsb-Latn_eng-Latn)": 3.12,
-                    "Tatoeba (tgl-Latn_eng-Latn)": 4.06,
-                    "Tatoeba (ang-Latn_eng-Latn)": 9.77,
-                    "Tatoeba (tur-Latn_eng-Latn)": 3.16,
-                    "Tatoeba (tuk-Latn_eng-Latn)": 2.23,
-                    "Tatoeba (ile-Latn_eng-Latn)": 17.84,
-                    "Tatoeba (mon-Cyrl_eng-Latn)": 0.81,
-                    "Tatoeba (yue-Hant_eng-Latn)": 1.16,
-                    "Tatoeba (ina-Latn_eng-Latn)": 22.55,
-                    "Tatoeba (tam-Taml_eng-Latn)": 0.73,
-                    "Tatoeba (ukr-Cyrl_eng-Latn)": 0.5,
-                    "Tatoeba (dan-Latn_eng-Latn)": 10.01,
-                    "Tatoeba (arq-Arab_eng-Latn)": 0.33,
-                    "Tatoeba (arz-Arab_eng-Latn)": 0.0,
-                    "Tatoeba (fin-Latn_eng-Latn)": 3.82,
-                    "Tatoeba (ind-Latn_eng-Latn)": 4.88
+                    "Model": "bm25s",
+                    "ArguAna": 49.28,
+                    "CQADupstackRetrieval": 31.86,
+                    "ClimateFEVER": 13.62,
+                    "DBPedia": 29.91,
+                    "FEVER": 48.09,
+                    "FiQA2018": 25.14,
+                    "HotpotQA": 56.91,
+                    "MSMARCO": 21.89,
+                    "NFCorpus": 32.08,
+                    "NQ": 28.5,
+                    "QuoraRetrieval": 80.42,
+                    "SCIDOCS": 15.78,
+                    "SciFact": 68.7,
+                    "TRECCOVID": 62.31,
+                    "Touche2020": 33.05
                 }
             ]
         },
-        "Classification": {
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "bm25s"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "bm25s"
+                }
+            ]
+        },
+        "MultilabelClassification": {
             "accuracy": [
                 {
-                    "Model": "all-mpnet-base-v2",
-                    "AllegroReviews (pol-Latn)": 22.99,
-                    "AmazonCounterfactualClassification (en-ext)": 67.5,
-                    "AmazonCounterfactualClassification (en)": 65.03,
-                    "AmazonCounterfactualClassification (deu-Latn)": 55.66,
-                    "AmazonCounterfactualClassification (jpn-Jpan)": 60.69,
-                    "AmazonPolarityClassification": 67.14,
-                    "AmazonReviewsClassification (en)": 31.44,
-                    "AmazonReviewsClassification (deu-Latn)": 26.05,
-                    "AmazonReviewsClassification (spa-Latn)": 27.73,
-                    "AmazonReviewsClassification (fra-Latn)": 28.49,
-                    "AmazonReviewsClassification (jpn-Jpan)": 23.65,
-                    "AmazonReviewsClassification (cmn-Hans)": 23.62,
-                    "AngryTweetsClassification (dan-Latn)": 44.13,
-                    "Banking77Classification": 81.7,
-                    "CBD (pol-Latn)": 50.25,
-                    "DanishPoliticalCommentsClassification (dan-Latn)": 28.31,
-                    "EmotionClassification": 42.22,
-                    "GeoreviewClassification (rus-Cyrl)": 25.93,
-                    "HeadlineClassification (rus-Cyrl)": 28.53,
-                    "IFlyTek (cmn-Hans)": 17.18,
-                    "ImdbClassification": 71.17,
-                    "InappropriatenessClassification (rus-Cyrl)": 51.82,
-                    "JDReview (cmn-Hans)": 60.19,
-                    "KinopoiskClassification (rus-Cyrl)": 34.18,
-                    "LccSentimentClassification (dan-Latn)": 39.27,
-                    "MTOPDomainClassification (en)": 91.89,
-                    "MTOPDomainClassification (deu-Latn)": 71.86,
-                    "MTOPDomainClassification (spa-Latn)": 71.3,
-                    "MTOPDomainClassification (fra-Latn)": 74.88,
-                    "MTOPDomainClassification (hin-Deva)": 39.93,
-                    "MTOPDomainClassification (tha-Thai)": 17.54,
-                    "MTOPIntentClassification (en)": 68.27,
-                    "MTOPIntentClassification (deu-Latn)": 44.36,
-                    "MTOPIntentClassification (spa-Latn)": 39.48,
-                    "MTOPIntentClassification (fra-Latn)": 37.57,
-                    "MTOPIntentClassification (hin-Deva)": 18.63,
-                    "MTOPIntentClassification (tha-Thai)": 5.42,
-                    "MasakhaNEWSClassification (amh-Ethi)": 36.49,
-                    "MasakhaNEWSClassification (eng)": 79.75,
-                    "MasakhaNEWSClassification (fra-Latn)": 77.77,
-                    "MasakhaNEWSClassification (hau-Latn)": 59.22,
-                    "MasakhaNEWSClassification (ibo-Latn)": 61.64,
-                    "MasakhaNEWSClassification (lin-Latn)": 74.0,
-                    "MasakhaNEWSClassification (lug-Latn)": 58.43,
-                    "MasakhaNEWSClassification (orm-Ethi)": 48.15,
-                    "MasakhaNEWSClassification (pcm-Latn)": 92.2,
-                    "MasakhaNEWSClassification (run-Latn)": 64.72,
-                    "MasakhaNEWSClassification (sna-Latn)": 73.69,
-                    "MasakhaNEWSClassification (som-Latn)": 49.97,
-                    "MasakhaNEWSClassification (swa-Latn)": 55.15,
-                    "MasakhaNEWSClassification (tir-Ethi)": 27.46,
-                    "MasakhaNEWSClassification (xho-Latn)": 60.98,
-                    "MasakhaNEWSClassification (yor-Latn)": 63.33,
-                    "MassiveIntentClassification (en)": 69.76,
-                    "MassiveIntentClassification (jav-Latn)": 31.75,
-                    "MassiveIntentClassification (fra-Latn)": 44.27,
-                    "MassiveIntentClassification (msa-Latn)": 30.53,
-                    "MassiveIntentClassification (hun-Latn)": 34.38,
-                    "MassiveIntentClassification (pol-Latn)": 34.26,
-                    "MassiveIntentClassification (nld-Latn)": 38.49,
-                    "MassiveIntentClassification (tha-Thai)": 8.51,
-                    "MassiveIntentClassification (tur-Latn)": 32.02,
-                    "MassiveIntentClassification (tam-Taml)": 9.25,
-                    "MassiveIntentClassification (hye-Armn)": 10.11,
-                    "MassiveIntentClassification (khm-Khmr)": 4.74,
-                    "MassiveIntentClassification (lav-Latn)": 35.08,
-                    "MassiveIntentClassification (deu-Latn)": 44.54,
-                    "MassiveIntentClassification (spa-Latn)": 39.75,
-                    "MassiveIntentClassification (ben-Beng)": 12.35,
-                    "MassiveIntentClassification (por-Latn)": 42.83,
-                    "MassiveIntentClassification (ara-Arab)": 20.42,
-                    "MassiveIntentClassification (cym-Latn)": 30.82,
-                    "MassiveIntentClassification (dan-Latn)": 42.36,
-                    "MassiveIntentClassification (mya-Mymr)": 4.6,
-                    "MassiveIntentClassification (heb-Hebr)": 23.6,
-                    "MassiveIntentClassification (kan-Knda)": 3.76,
-                    "MassiveIntentClassification (swa-Latn)": 31.82,
-                    "MassiveIntentClassification (fas-Arab)": 22.45,
-                    "MassiveIntentClassification (hin-Deva)": 17.68,
-                    "MassiveIntentClassification (kat-Geor)": 7.66,
-                    "MassiveIntentClassification (mal-Mlym)": 2.64,
-                    "MassiveIntentClassification (fin-Latn)": 34.58,
-                    "MassiveIntentClassification (slv-Latn)": 34.49,
-                    "MassiveIntentClassification (afr-Latn)": 36.49,
-                    "MassiveIntentClassification (urd-Arab)": 12.86,
-                    "MassiveIntentClassification (ron-Latn)": 38.07,
-                    "MassiveIntentClassification (sqi-Latn)": 37.26,
-                    "MassiveIntentClassification (cmo-Hant)": 22.43,
-                    "MassiveIntentClassification (ita-Latn)": 40.29,
-                    "MassiveIntentClassification (ind-Latn)": 36.31,
-                    "MassiveIntentClassification (nob-Latn)": 39.3,
-                    "MassiveIntentClassification (jpn-Jpan)": 33.13,
-                    "MassiveIntentClassification (aze-Latn)": 28.92,
-                    "MassiveIntentClassification (mon-Cyrl)": 19.65,
-                    "MassiveIntentClassification (ell-Grek)": 24.52,
-                    "MassiveIntentClassification (rus-Cyrl)": 23.98,
-                    "MassiveIntentClassification (kor-Kore)": 13.35,
-                    "MassiveIntentClassification (cmo-Hans)": 24.36,
-                    "MassiveIntentClassification (isl-Latn)": 31.46,
-                    "MassiveIntentClassification (swe-Latn)": 39.02,
-                    "MassiveIntentClassification (tel-Telu)": 2.26,
-                    "MassiveIntentClassification (vie-Latn)": 31.47,
-                    "MassiveIntentClassification (tgl-Latn)": 36.33,
-                    "MassiveIntentClassification (amh-Ethi)": 2.39,
-                    "MassiveScenarioClassification (en)": 75.67,
-                    "MassiveScenarioClassification (tur-Latn)": 39.11,
-                    "MassiveScenarioClassification (kat-Geor)": 13.45,
-                    "MassiveScenarioClassification (jpn-Jpan)": 40.57,
-                    "MassiveScenarioClassification (spa-Latn)": 50.92,
-                    "MassiveScenarioClassification (fas-Arab)": 27.8,
-                    "MassiveScenarioClassification (hun-Latn)": 41.01,
-                    "MassiveScenarioClassification (jav-Latn)": 40.0,
-                    "MassiveScenarioClassification (por-Latn)": 52.06,
-                    "MassiveScenarioClassification (sqi-Latn)": 44.67,
-                    "MassiveScenarioClassification (lav-Latn)": 39.28,
-                    "MassiveScenarioClassification (deu-Latn)": 54.09,
-                    "MassiveScenarioClassification (nld-Latn)": 47.79,
-                    "MassiveScenarioClassification (mon-Cyrl)": 25.58,
-                    "MassiveScenarioClassification (swa-Latn)": 40.34,
-                    "MassiveScenarioClassification (ben-Beng)": 17.49,
-                    "MassiveScenarioClassification (cym-Latn)": 34.82,
-                    "MassiveScenarioClassification (swe-Latn)": 44.53,
-                    "MassiveScenarioClassification (rus-Cyrl)": 28.71,
-                    "MassiveScenarioClassification (fra-Latn)": 54.26,
-                    "MassiveScenarioClassification (dan-Latn)": 49.45,
-                    "MassiveScenarioClassification (mya-Mymr)": 10.8,
-                    "MassiveScenarioClassification (ron-Latn)": 47.86,
-                    "MassiveScenarioClassification (cmo-Hans)": 35.33,
-                    "MassiveScenarioClassification (hin-Deva)": 23.13,
-                    "MassiveScenarioClassification (cmo-Hant)": 31.7,
-                    "MassiveScenarioClassification (afr-Latn)": 43.63,
-                    "MassiveScenarioClassification (aze-Latn)": 36.42,
-                    "MassiveScenarioClassification (msa-Latn)": 37.28,
-                    "MassiveScenarioClassification (ell-Grek)": 33.85,
-                    "MassiveScenarioClassification (isl-Latn)": 39.36,
-                    "MassiveScenarioClassification (fin-Latn)": 38.41,
-                    "MassiveScenarioClassification (ind-Latn)": 43.05,
-                    "MassiveScenarioClassification (pol-Latn)": 42.66,
-                    "MassiveScenarioClassification (tam-Taml)": 14.55,
-                    "MassiveScenarioClassification (ita-Latn)": 51.37,
-                    "MassiveScenarioClassification (urd-Arab)": 20.0,
-                    "MassiveScenarioClassification (kan-Knda)": 8.34,
-                    "MassiveScenarioClassification (tel-Telu)": 7.81,
-                    "MassiveScenarioClassification (mal-Mlym)": 7.69,
-                    "MassiveScenarioClassification (ara-Arab)": 27.8,
-                    "MassiveScenarioClassification (kor-Kore)": 17.28,
-                    "MassiveScenarioClassification (vie-Latn)": 35.9,
-                    "MassiveScenarioClassification (amh-Ethi)": 7.43,
-                    "MassiveScenarioClassification (heb-Hebr)": 25.49,
-                    "MassiveScenarioClassification (hye-Armn)": 16.86,
-                    "MassiveScenarioClassification (khm-Khmr)": 9.63,
-                    "MassiveScenarioClassification (slv-Latn)": 39.88,
-                    "MassiveScenarioClassification (tgl-Latn)": 47.04,
-                    "MassiveScenarioClassification (nob-Latn)": 45.75,
-                    "MassiveScenarioClassification (tha-Thai)": 17.01,
-                    "MultilingualSentiment (cmn-Hans)": 41.2,
-                    "NoRecClassification (nob-Latn)": 38.34,
-                    "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 50.15,
-                    "OnlineShopping (cmn-Hans)": 56.94,
-                    "PAC (pol-Latn)": 62.1,
-                    "PolEmo2.0-IN (pol-Latn)": 41.63,
-                    "PolEmo2.0-OUT (pol-Latn)": 25.0,
-                    "RuReviewsClassification (rus-Cyrl)": 42.33,
-                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 13.29,
-                    "RuSciBenchOECDClassification (rus-Cyrl)": 10.62,
-                    "TNews (cmn-Hans)": 21.05,
-                    "ToxicConversationsClassification": 61.05,
-                    "TweetSentimentExtractionClassification": 55.05,
-                    "Waimai (cmn-Hans)": 63.31
+                    "Model": "bm25s"
                 }
             ]
         },
-        "Clustering": {
-            "v_measure": [
+        "InstructionRetrieval": {
+            "p-MRR": [
                 {
-                    "Model": "all-mpnet-base-v2",
-                    "ArxivClusteringP2P": 48.38,
-                    "ArxivClusteringS2S": 39.72,
-                    "BiorxivClusteringP2P": 39.62,
-                    "BiorxivClusteringS2S": 35.02,
-                    "GeoreviewClusteringP2P (rus-Cyrl)": 20.33,
-                    "MasakhaNEWSClusteringP2P (amh-Ethi)": 42.49,
-                    "MasakhaNEWSClusteringP2P (eng)": 67.24,
-                    "MasakhaNEWSClusteringP2P (fra-Latn)": 61.99,
-                    "MasakhaNEWSClusteringP2P (hau-Latn)": 37.17,
-                    "MasakhaNEWSClusteringP2P (ibo-Latn)": 52.0,
-                    "MasakhaNEWSClusteringP2P (lin-Latn)": 69.68,
-                    "MasakhaNEWSClusteringP2P (lug-Latn)": 50.96,
-                    "MasakhaNEWSClusteringP2P (orm-Ethi)": 28.42,
-                    "MasakhaNEWSClusteringP2P (pcm-Latn)": 64.01,
-                    "MasakhaNEWSClusteringP2P (run-Latn)": 57.6,
-                    "MasakhaNEWSClusteringP2P (sna-Latn)": 54.99,
-                    "MasakhaNEWSClusteringP2P (som-Latn)": 31.16,
-                    "MasakhaNEWSClusteringP2P (swa-Latn)": 28.29,
-                    "MasakhaNEWSClusteringP2P (tir-Ethi)": 41.85,
-                    "MasakhaNEWSClusteringP2P (xho-Latn)": 35.24,
-                    "MasakhaNEWSClusteringP2P (yor-Latn)": 42.15,
-                    "MasakhaNEWSClusteringS2S (amh-Ethi)": 44.48,
-                    "MasakhaNEWSClusteringS2S (eng)": 35.69,
-                    "MasakhaNEWSClusteringS2S (fra-Latn)": 41.05,
-                    "MasakhaNEWSClusteringS2S (hau-Latn)": 16.64,
-                    "MasakhaNEWSClusteringS2S (ibo-Latn)": 38.63,
-                    "MasakhaNEWSClusteringS2S (lin-Latn)": 70.72,
-                    "MasakhaNEWSClusteringS2S (lug-Latn)": 46.97,
-                    "MasakhaNEWSClusteringS2S (orm-Ethi)": 23.85,
-                    "MasakhaNEWSClusteringS2S (pcm-Latn)": 68.7,
-                    "MasakhaNEWSClusteringS2S (run-Latn)": 52.27,
-                    "MasakhaNEWSClusteringS2S (sna-Latn)": 47.64,
-                    "MasakhaNEWSClusteringS2S (som-Latn)": 30.94,
-                    "MasakhaNEWSClusteringS2S (swa-Latn)": 17.12,
-                    "MasakhaNEWSClusteringS2S (tir-Ethi)": 42.01,
-                    "MasakhaNEWSClusteringS2S (xho-Latn)": 24.16,
-                    "MasakhaNEWSClusteringS2S (yor-Latn)": 35.04,
-                    "MedrxivClusteringP2P": 35.58,
-                    "MedrxivClusteringS2S": 32.87,
-                    "RedditClustering": 54.82,
-                    "RedditClusteringP2P": 56.77,
-                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 14.66,
-                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 12.49,
-                    "StackExchangeClustering": 53.8,
-                    "StackExchangeClusteringP2P": 34.28,
-                    "TwentyNewsgroupsClustering": 49.74
+                    "Model": "bm25s"
+                }
+            ]
+        }
+    },
+    "norbert3-base": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "norbert3-base",
+                    "BornholmBitextMining": 6.08
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "norbert3-base",
+                    "AngryTweetsClassification": 52.48,
+                    "DKHateClassification": 58.78,
+                    "DanishPoliticalCommentsClassification": 34.14,
+                    "LccSentimentClassification": 54.07,
+                    "MassiveIntentClassification (da)": 53.16,
+                    "MassiveIntentClassification (nb)": 54.2,
+                    "MassiveIntentClassification (sv)": 52.08,
+                    "MassiveScenarioClassification (da)": 57.17,
+                    "MassiveScenarioClassification (nb)": 60.69,
+                    "MassiveScenarioClassification (sv)": 53.53,
+                    "NoRecClassification": 53.4,
+                    "NordicLangClassification": 82.67,
+                    "NorwegianParliament": 59.33,
+                    "ScalaDaClassification": 58.25,
+                    "ScalaNbClassification": 60.19
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "norbert3-base"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "all-mpnet-base-v2",
-                    "CDSC-E (pol-Latn)": 45.37,
-                    "OpusparcusPC (deu-Latn)": 89.78,
-                    "OpusparcusPC (en)": 97.75,
-                    "OpusparcusPC (fin-Latn)": 85.82,
-                    "OpusparcusPC (fra-Latn)": 86.61,
-                    "OpusparcusPC (rus-Cyrl)": 79.85,
-                    "OpusparcusPC (swe-Latn)": 81.81,
-                    "PSC (pol-Latn)": 83.28,
-                    "PawsXPairClassification (deu-Latn)": 52.17,
-                    "PawsXPairClassification (en)": 61.99,
-                    "PawsXPairClassification (spa-Latn)": 55.06,
-                    "PawsXPairClassification (fra-Latn)": 56.42,
-                    "PawsXPairClassification (jpn-Hira)": 47.43,
-                    "PawsXPairClassification (kor-Hang)": 49.75,
-                    "PawsXPairClassification (cmn-Hans)": 52.47,
-                    "SICK-E-PL (pol-Latn)": 46.51,
-                    "SprintDuplicateQuestions": 90.15,
-                    "TERRa (rus-Cyrl)": 44.52,
-                    "TwitterSemEval2015": 73.85,
-                    "TwitterURLCorpus": 85.11
+                    "Model": "norbert3-base"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "all-mpnet-base-v2",
-                    "AlloprofReranking (fra-Latn)": 69.63,
-                    "AskUbuntuDupQuestions": 65.85,
-                    "MMarcoReranking (cmn-Hans)": 4.65,
-                    "MindSmallReranking": 30.97,
-                    "RuBQReranking (rus-Cyrl)": 30.96,
-                    "SciDocsRR": 88.65,
-                    "StackOverflowDupQuestions": 51.98,
-                    "SyntecReranking (fra-Latn)": 66.12,
-                    "T2Reranking (cmn-Hans)": 58.3
+                    "Model": "norbert3-base"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "all-mpnet-base-v2",
-                    "AILACasedocs": 22.51,
-                    "AILAStatutes": 21.27,
-                    "ARCChallenge": 11.8,
-                    "AlloprofRetrieval (fra-Latn)": 34.27,
-                    "AlphaNLI": 22.41,
-                    "ArguAna": 46.52,
-                    "ArguAna-PL (pol-Latn)": 14.72,
-                    "BSARDRetrieval (fra-Latn)": 6.98,
-                    "BrightRetrieval (robotics)": 8.36,
-                    "BrightRetrieval (psychology)": 22.63,
-                    "BrightRetrieval (leetcode)": 26.4,
-                    "BrightRetrieval (biology)": 15.52,
-                    "BrightRetrieval (theoremqa_questions)": 18.49,
-                    "BrightRetrieval (economics)": 16.64,
-                    "BrightRetrieval (stackoverflow)": 9.48,
-                    "BrightRetrieval (pony)": 6.95,
-                    "BrightRetrieval (earth_science)": 20.11,
-                    "BrightRetrieval (theoremqa_theorems)": 12.38,
-                    "BrightRetrieval (sustainable_living)": 15.34,
-                    "BrightRetrieval (aops)": 5.32,
-                    "CQADupstackRetrieval": 44.96,
-                    "ClimateFEVER": 21.97,
-                    "CmedqaRetrieval (cmn-Hans)": 2.0,
-                    "CovidRetrieval (cmn-Hans)": 3.7,
-                    "DBPedia": 32.09,
-                    "DuRetrieval (cmn-Hans)": 4.92,
-                    "EcomRetrieval (cmn-Hans)": 3.94,
-                    "FEVER": 50.86,
-                    "FiQA-PL (pol-Latn)": 3.6,
-                    "FiQA2018": 49.96,
-                    "GerDaLIRSmall (deu-Latn)": 3.78,
-                    "HellaSwag": 26.27,
-                    "HotpotQA": 39.29,
-                    "LEMBNarrativeQARetrieval": 19.34,
-                    "LEMBNeedleRetrieval": 16.0,
-                    "LEMBPasskeyRetrieval": 24.5,
-                    "LEMBQMSumRetrieval": 21.54,
-                    "LEMBSummScreenFDRetrieval": 60.43,
-                    "LEMBWikimQARetrieval": 44.92,
-                    "LeCaRDv2 (zho-Hans)": 18.09,
-                    "LegalBenchConsumerContractsQA": 75.25,
-                    "LegalBenchCorporateLobbying": 89.04,
-                    "LegalQuAD (deu-Latn)": 10.67,
-                    "LegalSummarization": 58.55,
-                    "MMarcoRetrieval (cmn-Hans)": 7.13,
-                    "MSMARCO": 39.75,
-                    "MedicalRetrieval (cmn-Hans)": 1.71,
-                    "MintakaRetrieval (ara-Arab)": 1.97,
-                    "MintakaRetrieval (deu-Latn)": 17.21,
-                    "MintakaRetrieval (spa-Latn)": 10.11,
-                    "MintakaRetrieval (fra-Latn)": 12.93,
-                    "MintakaRetrieval (hin-Deva)": 2.05,
-                    "MintakaRetrieval (ita-Latn)": 5.63,
-                    "MintakaRetrieval (jpn-Hira)": 6.72,
-                    "MintakaRetrieval (por-Latn)": 8.05,
-                    "NFCorpus": 33.29,
-                    "NFCorpus-PL (pol-Latn)": 8.77,
-                    "NQ": 50.45,
-                    "PIQA": 29.03,
-                    "Quail": 3.41,
-                    "QuoraRetrieval": 87.46,
-                    "RARbCode": 53.21,
-                    "RARbMath": 71.85,
-                    "RuBQRetrieval (rus-Cyrl)": 4.75,
-                    "SCIDOCS": 23.76,
-                    "SCIDOCS-PL (pol-Latn)": 4.02,
-                    "SIQA": 2.38,
-                    "SciFact": 65.57,
-                    "SciFact-PL (pol-Latn)": 13.31,
-                    "SpartQA": 0.22,
-                    "SyntecRetrieval (fra-Latn)": 57.39,
-                    "T2Retrieval (cmn-Hans)": 2.98,
-                    "TRECCOVID": 51.33,
-                    "TRECCOVID-PL (pol-Latn)": 12.12,
-                    "TempReasonL1": 1.77,
-                    "TempReasonL2Fact": 11.2,
-                    "TempReasonL2Pure": 1.15,
-                    "TempReasonL3Fact": 9.42,
-                    "TempReasonL3Pure": 5.59,
-                    "Touche2020": 19.93,
-                    "VideoRetrieval (cmn-Hans)": 8.48,
-                    "WinoGrande": 20.8,
-                    "XPQARetrieval (ara-Arab_ara-Arab)": 9.42,
-                    "XPQARetrieval (eng-Latn_ara-Arab)": 2.39,
-                    "XPQARetrieval (ara-Arab_eng-Latn)": 8.98,
-                    "XPQARetrieval (deu-Latn_deu-Latn)": 55.82,
-                    "XPQARetrieval (eng-Latn_deu-Latn)": 11.74,
-                    "XPQARetrieval (deu-Latn_eng-Latn)": 30.44,
-                    "XPQARetrieval (spa-Latn_spa-Latn)": 40.01,
-                    "XPQARetrieval (eng-Latn_spa-Latn)": 6.12,
-                    "XPQARetrieval (spa-Latn_eng-Latn)": 29.44,
-                    "XPQARetrieval (fra-Latn_fra-Latn)": 51.94,
-                    "XPQARetrieval (eng-Latn_fra-Latn)": 11.48,
-                    "XPQARetrieval (fra-Latn_eng-Latn)": 32.52,
-                    "XPQARetrieval (hin-Deva_hin-Deva)": 37.48,
-                    "XPQARetrieval (eng-Latn_hin-Deva)": 5.11,
-                    "XPQARetrieval (hin-Deva_eng-Latn)": 7.37,
-                    "XPQARetrieval (ita-Latn_ita-Latn)": 54.2,
-                    "XPQARetrieval (eng-Latn_ita-Latn)": 6.08,
-                    "XPQARetrieval (ita-Latn_eng-Latn)": 30.32,
-                    "XPQARetrieval (jpn-Hira_jpn-Hira)": 37.45,
-                    "XPQARetrieval (eng-Latn_jpn-Hira)": 5.79,
-                    "XPQARetrieval (jpn-Hira_eng-Latn)": 14.77,
-                    "XPQARetrieval (kor-Hang_kor-Hang)": 10.4,
-                    "XPQARetrieval (eng-Latn_kor-Hang)": 7.09,
-                    "XPQARetrieval (kor-Hang_eng-Latn)": 6.95,
-                    "XPQARetrieval (pol-Latn_pol-Latn)": 23.67,
-                    "XPQARetrieval (eng-Latn_pol-Latn)": 8.83,
-                    "XPQARetrieval (pol-Latn_eng-Latn)": 15.94,
-                    "XPQARetrieval (por-Latn_por-Latn)": 33.56,
-                    "XPQARetrieval (eng-Latn_por-Latn)": 3.76,
-                    "XPQARetrieval (por-Latn_eng-Latn)": 23.45,
-                    "XPQARetrieval (tam-Taml_tam-Taml)": 5.53,
-                    "XPQARetrieval (eng-Latn_tam-Taml)": 3.3,
-                    "XPQARetrieval (tam-Taml_eng-Latn)": 4.0,
-                    "XPQARetrieval (cmn-Hans_cmn-Hans)": 23.84,
-                    "XPQARetrieval (eng-Latn_cmn-Hans)": 7.2,
-                    "XPQARetrieval (cmn-Hans_eng-Latn)": 12.84
+                    "Model": "norbert3-base"
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "all-mpnet-base-v2",
-                    "AFQMC (cmn-Hans)": 8.01,
-                    "ATEC (cmn-Hans)": 14.03,
-                    "BIOSSES": 80.43,
-                    "BQ (cmn-Hans)": 21.39,
-                    "CDSC-R (pol-Latn)": 77.04,
-                    "LCQMC (cmn-Hans)": 22.84,
-                    "PAWSX (cmn-Hans)": 6.44,
-                    "RUParaPhraserSTS (rus-Cyrl)": 42.15,
-                    "RuSTSBenchmarkSTS (rus-Cyrl)": 55.68,
-                    "SICK-R": 80.59,
-                    "SICK-R-PL (pol-Latn)": 50.2,
-                    "SICKFr (fra-Latn)": 67.05,
-                    "STS12": 72.63,
-                    "STS13": 83.48,
-                    "STS14": 78.0,
-                    "STS15": 85.66,
-                    "STS16": 80.03,
-                    "STS17 (en-en)": 90.6,
-                    "STS17 (eng-Latn_ara-Arab)": 6.76,
-                    "STS17 (fra-Latn_eng-Latn)": 41.64,
-                    "STS17 (eng-Latn_tur-Latn)": -4.58,
-                    "STS17 (eng-Latn_deu-Latn)": 35.5,
-                    "STS17 (spa-Latn_eng-Latn)": 25.28,
-                    "STS17 (ita-Latn_eng-Latn)": 31.8,
-                    "STS17 (spa-Latn)": 78.4,
-                    "STS17 (kor-Hang)": 39.11,
-                    "STS17 (ara-Arab)": 55.42,
-                    "STS17 (nld-Latn_eng-Latn)": 32.89,
-                    "STS22 (en)": 68.39,
-                    "STS22 (spa-Latn_eng-Latn)": 55.09,
-                    "STS22 (deu-Latn_pol-Latn)": 23.53,
-                    "STS22 (cmn-Hans_eng-Latn)": 40.47,
-                    "STS22 (pol-Latn)": 24.21,
-                    "STS22 (tur-Latn)": 29.35,
-                    "STS22 (spa-Latn_ita-Latn)": 41.61,
-                    "STS22 (fra-Latn_pol-Latn)": 73.25,
-                    "STS22 (rus-Cyrl)": 15.83,
-                    "STS22 (deu-Latn)": 27.0,
-                    "STS22 (spa-Latn)": 55.98,
-                    "STS22 (pol-Latn_eng-Latn)": 51.07,
-                    "STS22 (fra-Latn)": 77.1,
-                    "STS22 (deu-Latn_eng-Latn)": 49.73,
-                    "STS22 (ara-Arab)": 38.96,
-                    "STS22 (deu-Latn_fra-Latn)": 31.39,
-                    "STS22 (ita-Latn)": 58.02,
-                    "STS22 (cmn-Hans)": 42.24,
-                    "STSB (cmn-Hans)": 37.7,
-                    "STSBenchmark": 83.42,
-                    "STSBenchmarkMultilingualSTS (nld-Latn)": 57.01,
-                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 55.54,
-                    "STSBenchmarkMultilingualSTS (fra-Latn)": 65.15,
-                    "STSBenchmarkMultilingualSTS (ita-Latn)": 62.72,
-                    "STSBenchmarkMultilingualSTS (spa-Latn)": 65.78,
-                    "STSBenchmarkMultilingualSTS (en)": 83.42,
-                    "STSBenchmarkMultilingualSTS (deu-Latn)": 61.43,
-                    "STSBenchmarkMultilingualSTS (por-Latn)": 62.12,
-                    "STSBenchmarkMultilingualSTS (cmn-Hans)": 39.43,
-                    "STSBenchmarkMultilingualSTS (pol-Latn)": 52.36
+                    "Model": "norbert3-base"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "all-mpnet-base-v2",
-                    "SummEval": 27.49,
-                    "SummEvalFr (fra-Latn)": 28.11
+                    "Model": "norbert3-base"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "norbert3-base"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "all-mpnet-base-v2"
+                    "Model": "norbert3-base"
                 }
             ]
         }
     },
-    "e5-base-v2": {
+    "GritLM-7B": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "e5-base-v2"
+                    "Model": "GritLM-7B"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "e5-base-v2"
+                    "Model": "GritLM-7B"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "e5-base-v2",
-                    "BiorxivClusteringP2P": 37.12,
-                    "BiorxivClusteringS2S": 33.41,
-                    "MedrxivClusteringP2P": 31.82,
-                    "MedrxivClusteringS2S": 29.68,
-                    "RedditClustering": 56.54,
-                    "RedditClusteringP2P": 63.23,
-                    "StackExchangeClustering": 64.6,
-                    "StackExchangeClusteringP2P": 33.02,
-                    "TwentyNewsgroupsClustering": 49.86
+                    "Model": "GritLM-7B"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "e5-base-v2"
+                    "Model": "GritLM-7B"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "e5-base-v2"
+                    "Model": "GritLM-7B"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "e5-base-v2"
+                    "Model": "GritLM-7B",
+                    "ARCChallenge": 26.68,
+                    "AlphaNLI": 34.0,
+                    "BrightRetrieval (pony)": 21.98,
+                    "BrightRetrieval (robotics)": 17.31,
+                    "BrightRetrieval (economics)": 19.0,
+                    "BrightRetrieval (theoremqa_questions)": 23.34,
+                    "BrightRetrieval (leetcode)": 29.85,
+                    "BrightRetrieval (earth_science)": 32.77,
+                    "BrightRetrieval (stackoverflow)": 11.62,
+                    "BrightRetrieval (sustainable_living)": 18.04,
+                    "BrightRetrieval (biology)": 25.04,
+                    "BrightRetrieval (psychology)": 19.92,
+                    "BrightRetrieval (theoremqa_theorems)": 17.41,
+                    "BrightRetrieval (aops)": 8.91,
+                    "HellaSwag": 39.45,
+                    "PIQA": 44.35,
+                    "Quail": 11.69,
+                    "RARbCode": 84.0,
+                    "RARbMath": 82.35,
+                    "SIQA": 7.23,
+                    "SpartQA": 9.29,
+                    "TempReasonL1": 7.15,
+                    "TempReasonL2Fact": 58.38,
+                    "TempReasonL2Pure": 11.22,
+                    "TempReasonL3Fact": 44.29,
+                    "TempReasonL3Pure": 14.15,
+                    "WinoGrande": 53.74
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "e5-base-v2"
+                    "Model": "GritLM-7B"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "e5-base-v2"
+                    "Model": "GritLM-7B"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "GritLM-7B"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "e5-base-v2",
-                    "Core17InstructionRetrieval": -2.9,
-                    "News21InstructionRetrieval": -2.0,
-                    "Robust04InstructionRetrieval": -6.73
+                    "Model": "GritLM-7B",
+                    "Core17InstructionRetrieval": 2.62,
+                    "News21InstructionRetrieval": -1.01,
+                    "Robust04InstructionRetrieval": -1.68
                 }
             ]
         }
     },
-    "text-search-babbage-001": {
+    "e5-mistral-7b-instruct-noinstruct": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "text-search-babbage-001"
+                    "Model": "e5-mistral-7b-instruct-noinstruct"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "text-search-babbage-001"
+                    "Model": "e5-mistral-7b-instruct-noinstruct"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "text-search-babbage-001"
+                    "Model": "e5-mistral-7b-instruct-noinstruct"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "text-search-babbage-001"
+                    "Model": "e5-mistral-7b-instruct-noinstruct"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "text-search-babbage-001"
+                    "Model": "e5-mistral-7b-instruct-noinstruct"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "text-search-babbage-001",
-                    "ArguAna": 49.2,
-                    "ClimateFEVER": 19.9,
-                    "FEVER": 77.0,
-                    "FiQA2018": 42.2,
-                    "HotpotQA": 63.1,
-                    "NFCorpus": 36.7,
-                    "QuoraRetrieval": 69.7,
-                    "SciFact": 70.4,
-                    "TRECCOVID": 58.5,
-                    "Touche2020": 29.7
+                    "Model": "e5-mistral-7b-instruct-noinstruct",
+                    "ARCChallenge": 20.48,
+                    "AlphaNLI": 18.88,
+                    "HellaSwag": 32.25,
+                    "PIQA": 32.8,
+                    "Quail": 6.25,
+                    "RARbCode": 79.84,
+                    "RARbMath": 76.19,
+                    "SIQA": 5.08,
+                    "SpartQA": 10.87,
+                    "TempReasonL1": 3.04,
+                    "TempReasonL2Fact": 35.63,
+                    "TempReasonL2Pure": 9.32,
+                    "TempReasonL3Fact": 30.41,
+                    "TempReasonL3Pure": 14.39,
+                    "WinoGrande": 45.18
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "text-search-babbage-001"
+                    "Model": "e5-mistral-7b-instruct-noinstruct"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "text-search-babbage-001"
+                    "Model": "e5-mistral-7b-instruct-noinstruct"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "e5-mistral-7b-instruct-noinstruct"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "text-search-babbage-001"
+                    "Model": "e5-mistral-7b-instruct-noinstruct"
                 }
             ]
         }
     },
-    "all-mpnet-base-v2-instruct": {
+    "electra-small-nordic": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "all-mpnet-base-v2-instruct"
+                    "Model": "electra-small-nordic",
+                    "BornholmBitextMining": 1.44
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "all-mpnet-base-v2-instruct"
+                    "Model": "electra-small-nordic",
+                    "AngryTweetsClassification": 47.91,
+                    "DKHateClassification": 59.45,
+                    "DanishPoliticalCommentsClassification": 31.89,
+                    "LccSentimentClassification": 47.93,
+                    "MassiveIntentClassification (da)": 26.3,
+                    "MassiveIntentClassification (nb)": 24.6,
+                    "MassiveIntentClassification (sv)": 27.58,
+                    "MassiveScenarioClassification (da)": 28.93,
+                    "MassiveScenarioClassification (nb)": 27.3,
+                    "MassiveScenarioClassification (sv)": 29.93,
+                    "NoRecClassification": 45.44,
+                    "NordicLangClassification": 57.82,
+                    "NorwegianParliament": 53.25,
+                    "ScalaDaClassification": 70.41,
+                    "ScalaNbClassification": 75.28
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "all-mpnet-base-v2-instruct"
+                    "Model": "electra-small-nordic"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "all-mpnet-base-v2-instruct"
+                    "Model": "electra-small-nordic"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "all-mpnet-base-v2-instruct"
+                    "Model": "electra-small-nordic"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "all-mpnet-base-v2-instruct",
-                    "ARCChallenge": 10.35,
-                    "AlphaNLI": 1.96,
-                    "HellaSwag": 13.01,
-                    "PIQA": 27.18,
-                    "Quail": 3.02,
-                    "RARbCode": 48.95,
-                    "RARbMath": 69.21,
-                    "SIQA": 1.29,
-                    "SpartQA": 1.01,
-                    "TempReasonL1": 1.52,
-                    "TempReasonL2Fact": 7.28,
-                    "TempReasonL2Pure": 1.03,
-                    "TempReasonL3Fact": 7.03,
-                    "TempReasonL3Pure": 5.16,
-                    "WinoGrande": 9.66
+                    "Model": "electra-small-nordic"
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "all-mpnet-base-v2-instruct"
+                    "Model": "electra-small-nordic"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "all-mpnet-base-v2-instruct"
+                    "Model": "electra-small-nordic"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "electra-small-nordic"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "all-mpnet-base-v2-instruct"
+                    "Model": "electra-small-nordic"
                 }
             ]
         }
     },
-    "bge-small-en-v1.5-instruct": {
+    "sentence-t5-xl": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "bge-small-en-v1.5-instruct"
+                    "Model": "sentence-t5-xl",
+                    "BUCC (de-en)": 95.04,
+                    "BUCC (fr-en)": 94.96,
+                    "BUCC (ru-en)": 8.33,
+                    "BUCC (zh-en)": 1.3,
+                    "Tatoeba (afr-eng)": 41.84,
+                    "Tatoeba (amh-eng)": 0.03,
+                    "Tatoeba (ang-eng)": 37.87,
+                    "Tatoeba (ara-eng)": 0.61,
+                    "Tatoeba (arq-eng)": 0.74,
+                    "Tatoeba (arz-eng)": 0.42,
+                    "Tatoeba (ast-eng)": 65.41,
+                    "Tatoeba (awa-eng)": 1.46,
+                    "Tatoeba (aze-eng)": 8.79,
+                    "Tatoeba (bel-eng)": 5.76,
+                    "Tatoeba (ben-eng)": 0.01,
+                    "Tatoeba (ber-eng)": 5.92,
+                    "Tatoeba (bos-eng)": 16.12,
+                    "Tatoeba (bre-eng)": 6.12,
+                    "Tatoeba (bul-eng)": 9.06,
+                    "Tatoeba (cat-eng)": 57.4,
+                    "Tatoeba (cbk-eng)": 57.68,
+                    "Tatoeba (ceb-eng)": 12.56,
+                    "Tatoeba (ces-eng)": 9.47,
+                    "Tatoeba (cha-eng)": 27.13,
+                    "Tatoeba (cmn-eng)": 1.82,
+                    "Tatoeba (cor-eng)": 3.87,
+                    "Tatoeba (csb-eng)": 14.41,
+                    "Tatoeba (cym-eng)": 6.69,
+                    "Tatoeba (dan-eng)": 54.87,
+                    "Tatoeba (deu-eng)": 93.72,
+                    "Tatoeba (dsb-eng)": 14.74,
+                    "Tatoeba (dtp-eng)": 5.84,
+                    "Tatoeba (ell-eng)": 0.6,
+                    "Tatoeba (epo-eng)": 30.8,
+                    "Tatoeba (est-eng)": 5.39,
+                    "Tatoeba (eus-eng)": 11.9,
+                    "Tatoeba (fao-eng)": 28.08,
+                    "Tatoeba (fin-eng)": 6.81,
+                    "Tatoeba (fra-eng)": 85.29,
+                    "Tatoeba (fry-eng)": 38.68,
+                    "Tatoeba (gla-eng)": 2.96,
+                    "Tatoeba (gle-eng)": 3.74,
+                    "Tatoeba (glg-eng)": 70.0,
+                    "Tatoeba (gsw-eng)": 30.49,
+                    "Tatoeba (heb-eng)": 0.87,
+                    "Tatoeba (hin-eng)": 0.1,
+                    "Tatoeba (hrv-eng)": 17.43,
+                    "Tatoeba (hsb-eng)": 14.69,
+                    "Tatoeba (hun-eng)": 7.28,
+                    "Tatoeba (hye-eng)": 0.77,
+                    "Tatoeba (ido-eng)": 46.65,
+                    "Tatoeba (ile-eng)": 59.43,
+                    "Tatoeba (ina-eng)": 82.71,
+                    "Tatoeba (ind-eng)": 37.26,
+                    "Tatoeba (isl-eng)": 11.21,
+                    "Tatoeba (ita-eng)": 79.77,
+                    "Tatoeba (jav-eng)": 7.81,
+                    "Tatoeba (jpn-eng)": 0.91,
+                    "Tatoeba (kab-eng)": 2.23,
+                    "Tatoeba (kat-eng)": 1.48,
+                    "Tatoeba (kaz-eng)": 1.77,
+                    "Tatoeba (khm-eng)": 0.38,
+                    "Tatoeba (kor-eng)": 1.96,
+                    "Tatoeba (kur-eng)": 12.11,
+                    "Tatoeba (kzj-eng)": 6.13,
+                    "Tatoeba (lat-eng)": 27.84,
+                    "Tatoeba (lfn-eng)": 45.89,
+                    "Tatoeba (lit-eng)": 5.94,
+                    "Tatoeba (lvs-eng)": 8.11,
+                    "Tatoeba (mal-eng)": 0.59,
+                    "Tatoeba (mar-eng)": 0.03,
+                    "Tatoeba (max-eng)": 21.7,
+                    "Tatoeba (mhr-eng)": 0.68,
+                    "Tatoeba (mkd-eng)": 5.92,
+                    "Tatoeba (mon-eng)": 2.39,
+                    "Tatoeba (nds-eng)": 45.04,
+                    "Tatoeba (nld-eng)": 64.75,
+                    "Tatoeba (nno-eng)": 36.74,
+                    "Tatoeba (nob-eng)": 54.77,
+                    "Tatoeba (nov-eng)": 57.12,
+                    "Tatoeba (oci-eng)": 34.39,
+                    "Tatoeba (orv-eng)": 2.04,
+                    "Tatoeba (pam-eng)": 8.34,
+                    "Tatoeba (pes-eng)": 0.87,
+                    "Tatoeba (pms-eng)": 38.06,
+                    "Tatoeba (pol-eng)": 28.35,
+                    "Tatoeba (por-eng)": 83.61,
+                    "Tatoeba (ron-eng)": 65.27,
+                    "Tatoeba (rus-eng)": 30.42,
+                    "Tatoeba (slk-eng)": 13.19,
+                    "Tatoeba (slv-eng)": 13.49,
+                    "Tatoeba (spa-eng)": 89.18,
+                    "Tatoeba (sqi-eng)": 14.66,
+                    "Tatoeba (srp-eng)": 13.24,
+                    "Tatoeba (swe-eng)": 60.67,
+                    "Tatoeba (swg-eng)": 34.76,
+                    "Tatoeba (swh-eng)": 8.07,
+                    "Tatoeba (tam-eng)": 0.36,
+                    "Tatoeba (tat-eng)": 1.46,
+                    "Tatoeba (tel-eng)": 0.67,
+                    "Tatoeba (tgl-eng)": 25.22,
+                    "Tatoeba (tha-eng)": 1.58,
+                    "Tatoeba (tuk-eng)": 4.99,
+                    "Tatoeba (tur-eng)": 7.72,
+                    "Tatoeba (tzl-eng)": 38.49,
+                    "Tatoeba (uig-eng)": 0.87,
+                    "Tatoeba (ukr-eng)": 9.12,
+                    "Tatoeba (urd-eng)": 0.0,
+                    "Tatoeba (uzb-eng)": 5.48,
+                    "Tatoeba (vie-eng)": 8.45,
+                    "Tatoeba (war-eng)": 13.75,
+                    "Tatoeba (wuu-eng)": 1.44,
+                    "Tatoeba (xho-eng)": 9.15,
+                    "Tatoeba (yid-eng)": 0.28,
+                    "Tatoeba (yue-eng)": 0.98,
+                    "Tatoeba (zsm-eng)": 35.71
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "bge-small-en-v1.5-instruct"
+                    "Model": "sentence-t5-xl",
+                    "AmazonCounterfactualClassification (de)": 67.01,
+                    "AmazonCounterfactualClassification (en)": 76.01,
+                    "AmazonCounterfactualClassification (en-ext)": 77.29,
+                    "AmazonCounterfactualClassification (ja)": 45.61,
+                    "AmazonPolarityClassification": 93.17,
+                    "AmazonReviewsClassification (de)": 44.05,
+                    "AmazonReviewsClassification (en)": 48.18,
+                    "AmazonReviewsClassification (es)": 45.01,
+                    "AmazonReviewsClassification (fr)": 43.52,
+                    "AmazonReviewsClassification (ja)": 22.23,
+                    "AmazonReviewsClassification (zh)": 21.88,
+                    "Banking77Classification": 80.88,
+                    "EmotionClassification": 51.95,
+                    "ImdbClassification": 87.54,
+                    "MTOPDomainClassification (de)": 83.28,
+                    "MTOPDomainClassification (en)": 90.73,
+                    "MTOPDomainClassification (es)": 85.32,
+                    "MTOPDomainClassification (fr)": 85.14,
+                    "MTOPDomainClassification (hi)": 20.85,
+                    "MTOPDomainClassification (th)": 15.62,
+                    "MTOPIntentClassification (de)": 54.65,
+                    "MTOPIntentClassification (en)": 68.15,
+                    "MTOPIntentClassification (es)": 57.38,
+                    "MTOPIntentClassification (fr)": 54.39,
+                    "MTOPIntentClassification (hi)": 3.28,
+                    "MTOPIntentClassification (th)": 5.08,
+                    "MasakhaNEWSClassification (fra)": 80.09,
+                    "MassiveIntentClassification (af)": 40.17,
+                    "MassiveIntentClassification (am)": 2.18,
+                    "MassiveIntentClassification (ar)": 4.18,
+                    "MassiveIntentClassification (az)": 30.02,
+                    "MassiveIntentClassification (bn)": 2.6,
+                    "MassiveIntentClassification (cy)": 29.15,
+                    "MassiveIntentClassification (da)": 47.69,
+                    "MassiveIntentClassification (de)": 57.43,
+                    "MassiveIntentClassification (el)": 9.96,
+                    "MassiveIntentClassification (en)": 72.09,
+                    "MassiveIntentClassification (es)": 57.97,
+                    "MassiveIntentClassification (fa)": 3.6,
+                    "MassiveIntentClassification (fi)": 34.02,
+                    "MassiveIntentClassification (fr)": 60.99,
+                    "MassiveIntentClassification (he)": 2.51,
+                    "MassiveIntentClassification (hi)": 3.02,
+                    "MassiveIntentClassification (hu)": 31.66,
+                    "MassiveIntentClassification (hy)": 3.32,
+                    "MassiveIntentClassification (id)": 41.53,
+                    "MassiveIntentClassification (is)": 30.25,
+                    "MassiveIntentClassification (it)": 56.57,
+                    "MassiveIntentClassification (ja)": 3.5,
+                    "MassiveIntentClassification (jv)": 31.67,
+                    "MassiveIntentClassification (ka)": 2.79,
+                    "MassiveIntentClassification (km)": 5.43,
+                    "MassiveIntentClassification (kn)": 2.79,
+                    "MassiveIntentClassification (ko)": 2.67,
+                    "MassiveIntentClassification (lv)": 34.25,
+                    "MassiveIntentClassification (ml)": 2.98,
+                    "MassiveIntentClassification (mn)": 20.99,
+                    "MassiveIntentClassification (ms)": 37.43,
+                    "MassiveIntentClassification (my)": 4.02,
+                    "MassiveIntentClassification (nb)": 45.91,
+                    "MassiveIntentClassification (nl)": 50.51,
+                    "MassiveIntentClassification (pl)": 43.95,
+                    "MassiveIntentClassification (pt)": 57.95,
+                    "MassiveIntentClassification (ro)": 49.37,
+                    "MassiveIntentClassification (ru)": 33.46,
+                    "MassiveIntentClassification (sl)": 36.33,
+                    "MassiveIntentClassification (sq)": 37.65,
+                    "MassiveIntentClassification (sv)": 46.35,
+                    "MassiveIntentClassification (sw)": 30.6,
+                    "MassiveIntentClassification (ta)": 1.79,
+                    "MassiveIntentClassification (te)": 2.26,
+                    "MassiveIntentClassification (th)": 4.02,
+                    "MassiveIntentClassification (tl)": 38.92,
+                    "MassiveIntentClassification (tr)": 32.05,
+                    "MassiveIntentClassification (ur)": 2.7,
+                    "MassiveIntentClassification (vi)": 21.47,
+                    "MassiveIntentClassification (zh-CN)": 0.59,
+                    "MassiveIntentClassification (zh-TW)": 3.24,
+                    "MassiveScenarioClassification (af)": 50.81,
+                    "MassiveScenarioClassification (am)": 6.95,
+                    "MassiveScenarioClassification (ar)": 12.32,
+                    "MassiveScenarioClassification (az)": 38.79,
+                    "MassiveScenarioClassification (bn)": 8.0,
+                    "MassiveScenarioClassification (cy)": 33.91,
+                    "MassiveScenarioClassification (da)": 55.79,
+                    "MassiveScenarioClassification (de)": 65.33,
+                    "MassiveScenarioClassification (el)": 16.89,
+                    "MassiveScenarioClassification (en)": 73.26,
+                    "MassiveScenarioClassification (es)": 62.52,
+                    "MassiveScenarioClassification (fa)": 6.08,
+                    "MassiveScenarioClassification (fi)": 43.34,
+                    "MassiveScenarioClassification (fr)": 66.42,
+                    "MassiveScenarioClassification (he)": 7.55,
+                    "MassiveScenarioClassification (hi)": 7.44,
+                    "MassiveScenarioClassification (hu)": 40.85,
+                    "MassiveScenarioClassification (hy)": 9.25,
+                    "MassiveScenarioClassification (id)": 51.92,
+                    "MassiveScenarioClassification (is)": 40.09,
+                    "MassiveScenarioClassification (it)": 62.94,
+                    "MassiveScenarioClassification (ja)": 7.9,
+                    "MassiveScenarioClassification (jv)": 41.33,
+                    "MassiveScenarioClassification (ka)": 7.76,
+                    "MassiveScenarioClassification (km)": 9.19,
+                    "MassiveScenarioClassification (kn)": 8.36,
+                    "MassiveScenarioClassification (ko)": 6.13,
+                    "MassiveScenarioClassification (lv)": 40.7,
+                    "MassiveScenarioClassification (ml)": 6.98,
+                    "MassiveScenarioClassification (mn)": 27.0,
+                    "MassiveScenarioClassification (ms)": 46.9,
+                    "MassiveScenarioClassification (my)": 9.55,
+                    "MassiveScenarioClassification (nb)": 53.43,
+                    "MassiveScenarioClassification (nl)": 59.65,
+                    "MassiveScenarioClassification (pl)": 49.87,
+                    "MassiveScenarioClassification (pt)": 62.18,
+                    "MassiveScenarioClassification (ro)": 58.22,
+                    "MassiveScenarioClassification (ru)": 40.73,
+                    "MassiveScenarioClassification (sl)": 43.66,
+                    "MassiveScenarioClassification (sq)": 49.25,
+                    "MassiveScenarioClassification (sv)": 57.17,
+                    "MassiveScenarioClassification (sw)": 40.55,
+                    "MassiveScenarioClassification (ta)": 7.46,
+                    "MassiveScenarioClassification (te)": 7.03,
+                    "MassiveScenarioClassification (th)": 8.52,
+                    "MassiveScenarioClassification (tl)": 51.74,
+                    "MassiveScenarioClassification (tr)": 43.01,
+                    "MassiveScenarioClassification (ur)": 9.61,
+                    "MassiveScenarioClassification (vi)": 28.91,
+                    "MassiveScenarioClassification (zh-CN)": 5.86,
+                    "MassiveScenarioClassification (zh-TW)": 7.14,
+                    "ToxicConversationsClassification": 70.95,
+                    "TweetSentimentExtractionClassification": 61.21
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "bge-small-en-v1.5-instruct"
+                    "Model": "sentence-t5-xl",
+                    "AlloProfClusteringP2P": 60.37,
+                    "AlloProfClusteringS2S": 40.76,
+                    "ArxivClusteringP2P": 41.62,
+                    "ArxivClusteringS2S": 31.17,
+                    "BiorxivClusteringP2P": 36.43,
+                    "BiorxivClusteringS2S": 26.47,
+                    "HALClusteringS2S": 20.28,
+                    "MLSUMClusteringP2P": 41.61,
+                    "MLSUMClusteringS2S": 33.6,
+                    "MasakhaNEWSClusteringP2P (fra)": 62.82,
+                    "MasakhaNEWSClusteringS2S (fra)": 31.74,
+                    "MedrxivClusteringP2P": 32.3,
+                    "MedrxivClusteringS2S": 26.93,
+                    "RedditClustering": 57.03,
+                    "RedditClusteringP2P": 62.34,
+                    "StackExchangeClustering": 67.13,
+                    "StackExchangeClusteringP2P": 34.79,
+                    "TwentyNewsgroupsClustering": 49.53
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "bge-small-en-v1.5-instruct"
+                    "Model": "sentence-t5-xl",
+                    "OpusparcusPC (fr)": 92.48,
+                    "PawsXPairClassification (fr)": 62.52,
+                    "SprintDuplicateQuestions": 91.44,
+                    "TwitterSemEval2015": 80.89,
+                    "TwitterURLCorpus": 85.86
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "bge-small-en-v1.5-instruct"
+                    "Model": "sentence-t5-xl",
+                    "AlloprofReranking": 63.3,
+                    "AskUbuntuDupQuestions": 62.86,
+                    "MindSmallReranking": 29.77,
+                    "SciDocsRR": 75.16,
+                    "StackOverflowDupQuestions": 51.05,
+                    "SyntecReranking": 83.07
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "bge-small-en-v1.5-instruct",
-                    "ARCChallenge": 7.72,
-                    "AlphaNLI": 1.26,
-                    "HellaSwag": 23.41,
-                    "PIQA": 20.79,
-                    "Quail": 2.01,
-                    "RARbCode": 41.52,
-                    "RARbMath": 46.5,
-                    "SIQA": 0.98,
-                    "SpartQA": 2.86,
-                    "TempReasonL1": 1.27,
-                    "TempReasonL2Fact": 16.72,
-                    "TempReasonL2Pure": 1.1,
-                    "TempReasonL3Fact": 12.81,
-                    "TempReasonL3Pure": 4.63,
-                    "WinoGrande": 5.35
+                    "Model": "sentence-t5-xl",
+                    "AlloprofRetrieval": 40.38,
+                    "ArguAna": 39.4,
+                    "BSARDRetrieval": 0.14,
+                    "CQADupstackRetrieval": 40.78,
+                    "ClimateFEVER": 10.61,
+                    "DBPedia": 33.65,
+                    "FEVER": 36.12,
+                    "FiQA2018": 44.71,
+                    "HotpotQA": 37.17,
+                    "MSMARCO": 25.17,
+                    "MintakaRetrieval (fr)": 31.54,
+                    "NFCorpus": 33.18,
+                    "NQ": 46.29,
+                    "QuoraRetrieval": 85.85,
+                    "SCIDOCS": 15.97,
+                    "SciFact": 50.91,
+                    "SyntecRetrieval": 74.24,
+                    "TRECCOVID": 54.77,
+                    "Touche2020": 22.51,
+                    "XPQARetrieval (fr)": 52.14
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "sentence-t5-xl",
+                    "BIOSSES": 73.12,
+                    "SICK-R": 79.98,
+                    "SICKFr": 75.08,
+                    "STS12": 79.02,
+                    "STS13": 88.8,
+                    "STS14": 84.33,
+                    "STS15": 88.89,
+                    "STS16": 85.31,
+                    "STS17 (ar-ar)": 11.13,
+                    "STS17 (en-ar)": -3.93,
+                    "STS17 (en-de)": 79.04,
+                    "STS17 (en-en)": 88.91,
+                    "STS17 (en-tr)": 13.61,
+                    "STS17 (es-en)": 71.72,
+                    "STS17 (es-es)": 83.42,
+                    "STS17 (fr-en)": 71.38,
+                    "STS17 (it-en)": 69.5,
+                    "STS17 (ko-ko)": 9.61,
+                    "STS17 (nl-en)": 66.12,
+                    "STS22 (ar)": 29.6,
+                    "STS22 (de)": 47.72,
+                    "STS22 (de-en)": 49.64,
+                    "STS22 (de-fr)": 62.21,
+                    "STS22 (de-pl)": 34.34,
+                    "STS22 (en)": 64.32,
+                    "STS22 (es)": 58.16,
+                    "STS22 (es-en)": 69.15,
+                    "STS22 (es-it)": 65.26,
+                    "STS22 (fr)": 77.49,
+                    "STS22 (fr-pl)": 50.71,
+                    "STS22 (it)": 66.91,
+                    "STS22 (pl)": 27.04,
+                    "STS22 (pl-en)": 58.85,
+                    "STS22 (ru)": 26.63,
+                    "STS22 (tr)": 43.36,
+                    "STS22 (zh)": 33.55,
+                    "STS22 (zh-en)": 29.0,
+                    "STSBenchmark": 83.93,
+                    "STSBenchmarkMultilingualSTS (fr)": 79.42
                 }
             ]
         },
-        "STS": {
+        "Summarization": {
             "spearman": [
                 {
-                    "Model": "bge-small-en-v1.5-instruct"
+                    "Model": "sentence-t5-xl",
+                    "SummEval": 29.91,
+                    "SummEvalFr": 31.59
                 }
             ]
         },
-        "Summarization": {
-            "spearman": [
+        "MultilabelClassification": {
+            "accuracy": [
                 {
-                    "Model": "bge-small-en-v1.5-instruct"
+                    "Model": "sentence-t5-xl"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "bge-small-en-v1.5-instruct"
+                    "Model": "sentence-t5-xl"
                 }
             ]
         }
     },
-    "LaBSE-en-ru": {
+    "e5-large": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "LaBSE-en-ru",
-                    "Tatoeba (rus-Cyrl_eng-Latn)": 93.62
+                    "Model": "e5-large",
+                    "BornholmBitextMining": 40.15
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "LaBSE-en-ru",
-                    "GeoreviewClassification (rus-Cyrl)": 40.89,
-                    "HeadlineClassification (rus-Cyrl)": 68.75,
-                    "InappropriatenessClassification (rus-Cyrl)": 58.48,
-                    "KinopoiskClassification (rus-Cyrl)": 49.85,
-                    "MassiveIntentClassification (swa-Latn)": 19.98,
-                    "MassiveIntentClassification (aze-Latn)": 19.52,
-                    "MassiveIntentClassification (tur-Latn)": 24.12,
-                    "MassiveIntentClassification (cmo-Hans)": 3.96,
-                    "MassiveIntentClassification (amh-Ethi)": 2.76,
-                    "MassiveIntentClassification (kan-Knda)": 2.86,
-                    "MassiveIntentClassification (hin-Deva)": 3.29,
-                    "MassiveIntentClassification (tgl-Latn)": 27.08,
-                    "MassiveIntentClassification (tha-Thai)": 4.0,
-                    "MassiveIntentClassification (swe-Latn)": 32.01,
-                    "MassiveIntentClassification (deu-Latn)": 35.14,
-                    "MassiveIntentClassification (spa-Latn)": 37.67,
-                    "MassiveIntentClassification (por-Latn)": 39.84,
-                    "MassiveIntentClassification (jpn-Jpan)": 4.78,
-                    "MassiveIntentClassification (fin-Latn)": 31.11,
-                    "MassiveIntentClassification (kat-Geor)": 2.87,
-                    "MassiveIntentClassification (slv-Latn)": 35.66,
-                    "MassiveIntentClassification (rus-Cyrl)": 60.53,
-                    "MassiveIntentClassification (ita-Latn)": 43.32,
-                    "MassiveIntentClassification (tel-Telu)": 2.72,
-                    "MassiveIntentClassification (afr-Latn)": 30.59,
-                    "MassiveIntentClassification (isl-Latn)": 25.61,
-                    "MassiveIntentClassification (fas-Arab)": 3.71,
-                    "MassiveIntentClassification (vie-Latn)": 23.0,
-                    "MassiveIntentClassification (ben-Beng)": 3.35,
-                    "MassiveIntentClassification (hye-Armn)": 2.8,
-                    "MassiveIntentClassification (pol-Latn)": 31.3,
-                    "MassiveIntentClassification (cym-Latn)": 26.59,
-                    "MassiveIntentClassification (jav-Latn)": 26.84,
-                    "MassiveIntentClassification (mon-Cyrl)": 35.97,
-                    "MassiveIntentClassification (en)": 60.48,
-                    "MassiveIntentClassification (msa-Latn)": 27.82,
-                    "MassiveIntentClassification (nob-Latn)": 35.78,
-                    "MassiveIntentClassification (heb-Hebr)": 2.33,
-                    "MassiveIntentClassification (khm-Khmr)": 4.6,
-                    "MassiveIntentClassification (nld-Latn)": 34.66,
-                    "MassiveIntentClassification (ind-Latn)": 33.31,
-                    "MassiveIntentClassification (mal-Mlym)": 2.63,
-                    "MassiveIntentClassification (tam-Taml)": 2.22,
-                    "MassiveIntentClassification (mya-Mymr)": 3.57,
-                    "MassiveIntentClassification (urd-Arab)": 3.36,
-                    "MassiveIntentClassification (dan-Latn)": 38.66,
-                    "MassiveIntentClassification (cmo-Hant)": 5.29,
-                    "MassiveIntentClassification (ron-Latn)": 37.45,
-                    "MassiveIntentClassification (lav-Latn)": 23.92,
-                    "MassiveIntentClassification (fra-Latn)": 40.29,
-                    "MassiveIntentClassification (ell-Grek)": 11.14,
-                    "MassiveIntentClassification (sqi-Latn)": 35.84,
-                    "MassiveIntentClassification (hun-Latn)": 26.74,
-                    "MassiveIntentClassification (kor-Kore)": 2.69,
-                    "MassiveIntentClassification (ara-Arab)": 5.19,
-                    "MassiveScenarioClassification (swa-Latn)": 25.61,
-                    "MassiveScenarioClassification (aze-Latn)": 24.48,
-                    "MassiveScenarioClassification (tur-Latn)": 31.38,
-                    "MassiveScenarioClassification (cmo-Hans)": 9.98,
-                    "MassiveScenarioClassification (amh-Ethi)": 7.59,
-                    "MassiveScenarioClassification (kan-Knda)": 8.73,
-                    "MassiveScenarioClassification (hin-Deva)": 8.77,
-                    "MassiveScenarioClassification (tgl-Latn)": 35.12,
-                    "MassiveScenarioClassification (tha-Thai)": 8.69,
-                    "MassiveScenarioClassification (swe-Latn)": 35.83,
-                    "MassiveScenarioClassification (deu-Latn)": 41.72,
-                    "MassiveScenarioClassification (spa-Latn)": 43.33,
-                    "MassiveScenarioClassification (por-Latn)": 44.62,
-                    "MassiveScenarioClassification (jpn-Jpan)": 9.51,
-                    "MassiveScenarioClassification (fin-Latn)": 33.79,
-                    "MassiveScenarioClassification (kat-Geor)": 7.32,
-                    "MassiveScenarioClassification (slv-Latn)": 37.6,
-                    "MassiveScenarioClassification (rus-Cyrl)": 65.15,
-                    "MassiveScenarioClassification (ita-Latn)": 47.28,
-                    "MassiveScenarioClassification (tel-Telu)": 7.53,
-                    "MassiveScenarioClassification (afr-Latn)": 37.27,
-                    "MassiveScenarioClassification (isl-Latn)": 30.32,
-                    "MassiveScenarioClassification (fas-Arab)": 6.83,
-                    "MassiveScenarioClassification (vie-Latn)": 28.92,
-                    "MassiveScenarioClassification (ben-Beng)": 8.57,
-                    "MassiveScenarioClassification (hye-Armn)": 8.91,
-                    "MassiveScenarioClassification (pol-Latn)": 33.75,
-                    "MassiveScenarioClassification (cym-Latn)": 30.38,
-                    "MassiveScenarioClassification (jav-Latn)": 33.94,
-                    "MassiveScenarioClassification (mon-Cyrl)": 41.53,
-                    "MassiveScenarioClassification (en)": 65.43,
-                    "MassiveScenarioClassification (msa-Latn)": 36.28,
-                    "MassiveScenarioClassification (nob-Latn)": 42.43,
-                    "MassiveScenarioClassification (heb-Hebr)": 8.64,
-                    "MassiveScenarioClassification (khm-Khmr)": 9.99,
-                    "MassiveScenarioClassification (nld-Latn)": 41.47,
-                    "MassiveScenarioClassification (ind-Latn)": 39.05,
-                    "MassiveScenarioClassification (mal-Mlym)": 7.24,
-                    "MassiveScenarioClassification (tam-Taml)": 7.71,
-                    "MassiveScenarioClassification (mya-Mymr)": 9.94,
-                    "MassiveScenarioClassification (urd-Arab)": 9.16,
-                    "MassiveScenarioClassification (dan-Latn)": 44.69,
-                    "MassiveScenarioClassification (cmo-Hant)": 10.48,
-                    "MassiveScenarioClassification (ron-Latn)": 44.55,
-                    "MassiveScenarioClassification (lav-Latn)": 26.26,
-                    "MassiveScenarioClassification (fra-Latn)": 45.08,
-                    "MassiveScenarioClassification (ell-Grek)": 19.46,
-                    "MassiveScenarioClassification (sqi-Latn)": 40.9,
-                    "MassiveScenarioClassification (hun-Latn)": 33.92,
-                    "MassiveScenarioClassification (kor-Kore)": 7.37,
-                    "MassiveScenarioClassification (ara-Arab)": 12.43,
-                    "RuReviewsClassification (rus-Cyrl)": 58.01,
-                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 52.8,
-                    "RuSciBenchOECDClassification (rus-Cyrl)": 40.36
+                    "Model": "e5-large",
+                    "AngryTweetsClassification": 46.14,
+                    "DKHateClassification": 58.72,
+                    "DanishPoliticalCommentsClassification": 28.67,
+                    "LccSentimentClassification": 42.13,
+                    "MassiveIntentClassification (da)": 42.29,
+                    "MassiveIntentClassification (nb)": 40.63,
+                    "MassiveIntentClassification (sv)": 40.69,
+                    "MassiveScenarioClassification (da)": 52.95,
+                    "MassiveScenarioClassification (nb)": 51.91,
+                    "MassiveScenarioClassification (sv)": 50.97,
+                    "NoRecClassification": 41.83,
+                    "NordicLangClassification": 58.3,
+                    "NorwegianParliament": 57.26,
+                    "ScalaDaClassification": 49.9,
+                    "ScalaNbClassification": 50.13
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "LaBSE-en-ru",
-                    "GeoreviewClusteringP2P (rus-Cyrl)": 51.89,
-                    "MLSUMClusteringP2P (rus-Cyrl)": 37.87,
-                    "MLSUMClusteringS2S (rus-Cyrl)": 41.24,
-                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 47.48,
-                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 41.16
+                    "Model": "e5-large"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "LaBSE-en-ru",
-                    "OpusparcusPC (rus-Cyrl)": 87.18,
-                    "TERRa (rus-Cyrl)": 55.61
+                    "Model": "e5-large"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "LaBSE-en-ru",
-                    "RuBQReranking (rus-Cyrl)": 54.83
+                    "Model": "e5-large"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "LaBSE-en-ru",
-                    "RiaNewsRetrieval (rus-Cyrl)": 34.73,
-                    "RuBQRetrieval (rus-Cyrl)": 29.03
+                    "Model": "e5-large"
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "LaBSE-en-ru",
-                    "RUParaPhraserSTS (rus-Cyrl)": 65.87,
-                    "RuSTSBenchmarkSTS (rus-Cyrl)": 73.32,
-                    "STS22 (deu-Latn)": 38.9,
-                    "STS22 (en)": 59.47,
-                    "STS22 (pol-Latn_eng-Latn)": 58.73,
-                    "STS22 (spa-Latn)": 60.85,
-                    "STS22 (fra-Latn)": 74.98,
-                    "STS22 (deu-Latn_eng-Latn)": 47.98,
-                    "STS22 (deu-Latn_fra-Latn)": 59.4,
-                    "STS22 (deu-Latn_pol-Latn)": 39.48,
-                    "STS22 (pol-Latn)": 32.74,
-                    "STS22 (tur-Latn)": 55.04,
-                    "STS22 (spa-Latn_eng-Latn)": 70.8,
-                    "STS22 (rus-Cyrl)": 58.53,
-                    "STS22 (ita-Latn)": 68.58,
-                    "STS22 (fra-Latn_pol-Latn)": 61.98,
-                    "STS22 (spa-Latn_ita-Latn)": 66.83,
-                    "STS22 (cmn-Hans_eng-Latn)": 24.98,
-                    "STS22 (ara-Arab)": 31.85,
-                    "STS22 (cmn-Hans)": 35.1,
-                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 73.02
+                    "Model": "e5-large"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "LaBSE-en-ru"
+                    "Model": "e5-large"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "e5-large"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "LaBSE-en-ru"
+                    "Model": "e5-large"
                 }
             ]
         }
     },
-    "text2vec-base-chinese": {
+    "contriever-instruct": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "text2vec-base-chinese"
+                    "Model": "contriever-instruct"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "text2vec-base-chinese",
-                    "AmazonReviewsClassification (zh)": 34.12,
-                    "IFlyTek": 42.05,
-                    "JDReview": 82.14,
-                    "MassiveIntentClassification (zh-CN)": 63.98,
-                    "MassiveScenarioClassification (zh-CN)": 70.52,
-                    "MultilingualSentiment": 60.98,
-                    "OnlineShopping": 85.69,
-                    "TNews": 43.01,
-                    "Waimai": 77.22
+                    "Model": "contriever-instruct"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "text2vec-base-chinese",
-                    "CLSClusteringP2P": 35.27,
-                    "CLSClusteringS2S": 32.42,
-                    "ThuNewsClusteringP2P": 42.92,
-                    "ThuNewsClusteringS2S": 40.01
+                    "Model": "contriever-instruct"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "text2vec-base-chinese",
-                    "Cmnli": 73.87,
-                    "Ocnli": 60.95
+                    "Model": "contriever-instruct"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "text2vec-base-chinese",
-                    "CMedQAv1": 59.26,
-                    "CMedQAv2": 59.82,
-                    "MMarcoReranking": 12.76,
-                    "T2Reranking": 65.95
+                    "Model": "contriever-instruct"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "text2vec-base-chinese",
-                    "CmedqaRetrieval": 15.91,
-                    "CovidRetrieval": 44.81,
-                    "DuRetrieval": 52.23,
-                    "EcomRetrieval": 34.6,
-                    "MMarcoRetrieval": 44.06,
-                    "MedicalRetrieval": 27.56,
-                    "T2Retrieval": 51.67,
-                    "VideoRetrieval": 39.52
+                    "Model": "contriever-instruct",
+                    "ARCChallenge": 7.63,
+                    "AlphaNLI": 27.09,
+                    "PIQA": 21.73,
+                    "Quail": 4.92,
+                    "RARbCode": 7.12,
+                    "RARbMath": 21.83,
+                    "SIQA": 0.88,
+                    "SpartQA": 10.56,
+                    "TempReasonL1": 1.8,
+                    "TempReasonL2Fact": 22.03,
+                    "TempReasonL2Pure": 0.94,
+                    "TempReasonL3Fact": 20.82,
+                    "TempReasonL3Pure": 7.15,
+                    "WinoGrande": 26.3
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "text2vec-base-chinese",
-                    "AFQMC": 26.06,
-                    "ATEC": 31.93,
-                    "BQ": 42.67,
-                    "LCQMC": 70.16,
-                    "PAWSX": 17.21,
-                    "QBQTC": 24.62,
-                    "STS22 (zh)": 55.35,
-                    "STSB": 79.3
+                    "Model": "contriever-instruct"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "text2vec-base-chinese"
+                    "Model": "contriever-instruct"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "contriever-instruct"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "text2vec-base-chinese"
+                    "Model": "contriever-instruct"
                 }
             ]
         }
     },
-    "herbert-base-retrieval-v2": {
+    "text-embedding-3-large-256": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "herbert-base-retrieval-v2"
+                    "Model": "text-embedding-3-large-256"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "herbert-base-retrieval-v2",
-                    "AllegroReviews": 34.11,
-                    "CBD": 68.35,
-                    "MassiveIntentClassification (pl)": 65.53,
-                    "MassiveScenarioClassification (pl)": 68.51,
-                    "PAC": 68.4,
-                    "PolEmo2.0-IN": 64.18,
-                    "PolEmo2.0-OUT": 45.73
+                    "Model": "text-embedding-3-large-256",
+                    "AmazonCounterfactualClassification (en)": 73.96,
+                    "AmazonPolarityClassification": 91.32,
+                    "AmazonReviewsClassification (en)": 46.03,
+                    "Banking77Classification": 83.19,
+                    "EmotionClassification": 45.8,
+                    "ImdbClassification": 85.93,
+                    "MTOPDomainClassification (en)": 92.76,
+                    "MTOPIntentClassification (en)": 70.45,
+                    "MassiveIntentClassification (en)": 71.12,
+                    "MassiveScenarioClassification (en)": 75.56,
+                    "ToxicConversationsClassification": 68.52,
+                    "TweetSentimentExtractionClassification": 58.98
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "herbert-base-retrieval-v2",
-                    "8TagsClustering": 28.15
+                    "Model": "text-embedding-3-large-256",
+                    "ArxivClusteringP2P": 47.05,
+                    "ArxivClusteringS2S": 42.59,
+                    "BiorxivClusteringP2P": 35.43,
+                    "BiorxivClusteringS2S": 33.86,
+                    "MedrxivClusteringP2P": 32.1,
+                    "MedrxivClusteringS2S": 31.15,
+                    "RedditClustering": 60.18,
+                    "RedditClusteringP2P": 64.71,
+                    "StackExchangeClustering": 71.23,
+                    "StackExchangeClusteringP2P": 35.95,
+                    "TwentyNewsgroupsClustering": 54.24
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "herbert-base-retrieval-v2",
-                    "CDSC-E": 63.31,
-                    "PPC": 84.18,
-                    "PSC": 98.87,
-                    "SICK-E-PL": 54.93
+                    "Model": "text-embedding-3-large-256",
+                    "SprintDuplicateQuestions": 89.02,
+                    "TwitterSemEval2015": 76.56,
+                    "TwitterURLCorpus": 87.09
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "herbert-base-retrieval-v2"
+                    "Model": "text-embedding-3-large-256",
+                    "AskUbuntuDupQuestions": 64.61,
+                    "MindSmallReranking": 29.63,
+                    "SciDocsRR": 84.25,
+                    "StackOverflowDupQuestions": 53.46
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "herbert-base-retrieval-v2",
-                    "ArguAna-PL": 41.97,
-                    "DBPedia-PL": 24.07,
-                    "FiQA-PL": 24.25,
-                    "HotpotQA-PL": 43.41,
-                    "MSMARCO-PL": 51.56,
-                    "NFCorpus-PL": 25.95,
-                    "NQ-PL": 35.09,
-                    "Quora-PL": 78.86,
-                    "SCIDOCS-PL": 11.0,
-                    "SciFact-PL": 51.92,
-                    "TRECCOVID-PL": 42.64
+                    "Model": "text-embedding-3-large-256",
+                    "ArguAna": 55.6,
+                    "CQADupstackRetrieval": 42.28,
+                    "ClimateFEVER": 25.8,
+                    "DBPedia": 40.8,
+                    "FEVER": 84.57,
+                    "FiQA2018": 50.33,
+                    "HotpotQA": 62.69,
+                    "MSMARCO": 37.93,
+                    "NFCorpus": 37.94,
+                    "NQ": 56.64,
+                    "QuoraRetrieval": 88.22,
+                    "SCIDOCS": 20.44,
+                    "SciFact": 73.1,
+                    "TRECCOVID": 76.24,
+                    "Touche2020": 22.31
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "herbert-base-retrieval-v2",
-                    "CDSC-R": 86.18,
-                    "SICK-R-PL": 64.67,
-                    "STS22 (pl)": 39.73
+                    "Model": "text-embedding-3-large-256",
+                    "BIOSSES": 84.87,
+                    "SICK-R": 79.18,
+                    "STS12": 71.98,
+                    "STS13": 85.52,
+                    "STS14": 80.5,
+                    "STS15": 87.51,
+                    "STS16": 84.48,
+                    "STS17 (en-en)": 88.11,
+                    "STS22 (en)": 65.92,
+                    "STSBenchmark": 82.34
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "herbert-base-retrieval-v2"
+                    "Model": "text-embedding-3-large-256",
+                    "SummEval": 29.92
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "text-embedding-3-large-256"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "herbert-base-retrieval-v2"
+                    "Model": "text-embedding-3-large-256"
                 }
             ]
         }
     },
-    "voyage-law-2": {
+    "google-gecko-256.text-embedding-preview-0409": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "voyage-law-2"
+                    "Model": "google-gecko-256.text-embedding-preview-0409"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "voyage-law-2",
-                    "AmazonReviewsClassification (fr)": 41.98,
-                    "MTOPDomainClassification (fr)": 90.12,
-                    "MTOPIntentClassification (fr)": 62.44,
-                    "MasakhaNEWSClassification (fra)": 76.42,
-                    "MassiveIntentClassification (fr)": 66.94,
-                    "MassiveScenarioClassification (fr)": 72.78
+                    "Model": "google-gecko-256.text-embedding-preview-0409",
+                    "AmazonCounterfactualClassification (en)": 70.93,
+                    "AmazonPolarityClassification": 97.34,
+                    "AmazonReviewsClassification (en)": 48.47,
+                    "Banking77Classification": 86.01,
+                    "EmotionClassification": 51.53,
+                    "ImdbClassification": 95.7,
+                    "MTOPDomainClassification (en)": 98.02,
+                    "MTOPIntentClassification (en)": 77.82,
+                    "MassiveIntentClassification (en)": 75.67,
+                    "MassiveScenarioClassification (en)": 85.16,
+                    "ToxicConversationsClassification": 88.33,
+                    "TweetSentimentExtractionClassification": 72.97
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "voyage-law-2",
-                    "AlloProfClusteringP2P": 62.5,
-                    "AlloProfClusteringS2S": 44.28,
-                    "HALClusteringS2S": 26.36,
-                    "MLSUMClusteringP2P (fr)": 44.03,
-                    "MLSUMClusteringS2S (fr)": 42.95,
-                    "MasakhaNEWSClusteringP2P (fra)": 50.68,
-                    "MasakhaNEWSClusteringS2S (fra)": 38.79
+                    "Model": "google-gecko-256.text-embedding-preview-0409",
+                    "ArxivClusteringP2P": 44.12,
+                    "ArxivClusteringS2S": 36.54,
+                    "BiorxivClusteringP2P": 36.28,
+                    "BiorxivClusteringS2S": 33.09,
+                    "MedrxivClusteringP2P": 32.08,
+                    "MedrxivClusteringS2S": 30.84,
+                    "RedditClustering": 62.24,
+                    "RedditClusteringP2P": 63.7,
+                    "StackExchangeClustering": 70.19,
+                    "StackExchangeClusteringP2P": 36.1,
+                    "TwentyNewsgroupsClustering": 50.6
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "voyage-law-2",
-                    "OpusparcusPC (fr)": 93.06,
-                    "PawsXPairClassification (fr)": 61.54
+                    "Model": "google-gecko-256.text-embedding-preview-0409",
+                    "SprintDuplicateQuestions": 96.49,
+                    "TwitterSemEval2015": 78.23,
+                    "TwitterURLCorpus": 87.04
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "voyage-law-2",
-                    "AlloprofReranking": 72.92,
-                    "SyntecReranking": 91.2
+                    "Model": "google-gecko-256.text-embedding-preview-0409",
+                    "AskUbuntuDupQuestions": 63.84,
+                    "MindSmallReranking": 31.89,
+                    "SciDocsRR": 81.62,
+                    "StackOverflowDupQuestions": 53.76
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "voyage-law-2",
-                    "AILACasedocs": 44.56,
-                    "AILAStatutes": 45.51,
-                    "AlloprofRetrieval": 57.28,
-                    "BSARDRetrieval": 11.83,
-                    "GerDaLIRSmall": 44.91,
-                    "LEMBNarrativeQARetrieval": 55.78,
-                    "LEMBNeedleRetrieval": 80.5,
-                    "LEMBPasskeyRetrieval": 93.75,
-                    "LEMBQMSumRetrieval": 57.26,
-                    "LEMBSummScreenFDRetrieval": 98.72,
-                    "LEMBWikimQARetrieval": 87.08,
-                    "LeCaRDv2": 72.75,
-                    "LegalBenchConsumerContractsQA": 83.27,
-                    "LegalBenchCorporateLobbying": 95.66,
-                    "LegalQuAD": 67.47,
-                    "LegalSummarization": 68.96,
-                    "MintakaRetrieval (fr)": 34.92,
-                    "SyntecRetrieval": 87.33,
-                    "XPQARetrieval (fr)": 73.56
+                    "Model": "google-gecko-256.text-embedding-preview-0409",
+                    "ArguAna": 56.27,
+                    "CQADupstackRetrieval": 45.41,
+                    "ClimateFEVER": 29.35,
+                    "DBPedia": 41.91,
+                    "FEVER": 82.61,
+                    "FiQA2018": 55.54,
+                    "HotpotQA": 64.65,
+                    "MSMARCO": 31.12,
+                    "NFCorpus": 37.81,
+                    "NQ": 57.37,
+                    "QuoraRetrieval": 87.89,
+                    "SCIDOCS": 18.21,
+                    "SciFact": 70.86,
+                    "TRECCOVID": 80.13,
+                    "Touche2020": 27.4
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "voyage-law-2",
-                    "SICKFr": 74.09,
-                    "STS22 (fr)": 83.75,
-                    "STSBenchmarkMultilingualSTS (fr)": 83.02
+                    "Model": "google-gecko-256.text-embedding-preview-0409",
+                    "BIOSSES": 89.42,
+                    "SICK-R": 81.67,
+                    "STS12": 78.02,
+                    "STS13": 90.1,
+                    "STS14": 85.44,
+                    "STS15": 89.64,
+                    "STS16": 87.24,
+                    "STS17 (en-en)": 90.46,
+                    "STS22 (en)": 67.99,
+                    "STSBenchmark": 89.33
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "voyage-law-2",
-                    "SummEvalFr": 30.34
+                    "Model": "google-gecko-256.text-embedding-preview-0409",
+                    "SummEval": 32.36
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "google-gecko-256.text-embedding-preview-0409"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "voyage-law-2"
+                    "Model": "google-gecko-256.text-embedding-preview-0409"
                 }
             ]
         }
     },
-    "bert-base-multilingual-cased": {
+    "sentence-bert-swedish-cased": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "bert-base-multilingual-cased"
+                    "Model": "sentence-bert-swedish-cased",
+                    "BornholmBitextMining": 14.08
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "bert-base-multilingual-cased",
-                    "AmazonReviewsClassification (fr)": 29.39,
-                    "MTOPDomainClassification (fr)": 63.61,
-                    "MTOPIntentClassification (fr)": 37.84,
-                    "MasakhaNEWSClassification (fra)": 64.0,
-                    "MassiveIntentClassification (fr)": 37.3,
-                    "MassiveScenarioClassification (fr)": 44.47
+                    "Model": "sentence-bert-swedish-cased",
+                    "AngryTweetsClassification": 44.46,
+                    "DKHateClassification": 59.36,
+                    "DanishPoliticalCommentsClassification": 28.32,
+                    "LccSentimentClassification": 47.2,
+                    "MassiveIntentClassification (da)": 42.84,
+                    "MassiveIntentClassification (nb)": 42.74,
+                    "MassiveIntentClassification (sv)": 69.11,
+                    "MassiveScenarioClassification (da)": 49.64,
+                    "MassiveScenarioClassification (nb)": 49.49,
+                    "MassiveScenarioClassification (sv)": 75.96,
+                    "NoRecClassification": 43.53,
+                    "NordicLangClassification": 51.45,
+                    "NorwegianParliament": 55.74,
+                    "ScalaDaClassification": 50.12,
+                    "ScalaNbClassification": 50.34
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "bert-base-multilingual-cased",
-                    "AlloProfClusteringP2P": 51.5,
-                    "AlloProfClusteringS2S": 43.06,
-                    "HALClusteringS2S": 20.81,
-                    "MLSUMClusteringP2P": 40.9,
-                    "MLSUMClusteringS2S": 31.8,
-                    "MasakhaNEWSClusteringP2P (fra)": 24.23,
-                    "MasakhaNEWSClusteringS2S (fra)": 24.46
+                    "Model": "sentence-bert-swedish-cased"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "bert-base-multilingual-cased",
-                    "OpusparcusPC (fr)": 86.77,
-                    "PawsXPairClassification (fr)": 53.39
+                    "Model": "sentence-bert-swedish-cased"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "bert-base-multilingual-cased",
-                    "AlloprofReranking": 36.23,
-                    "SyntecReranking": 53.25
+                    "Model": "sentence-bert-swedish-cased"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "bert-base-multilingual-cased",
-                    "AlloprofRetrieval": 1.63,
-                    "BSARDRetrieval": 0.0,
-                    "MintakaRetrieval (fr)": 3.55,
-                    "SyntecRetrieval": 18.95,
-                    "XPQARetrieval (fr)": 18.49
+                    "Model": "sentence-bert-swedish-cased"
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "bert-base-multilingual-cased",
-                    "SICKFr": 58.75,
-                    "STS22 (fr)": 39.05,
-                    "STSBenchmarkMultilingualSTS (fr)": 52.25
+                    "Model": "sentence-bert-swedish-cased"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "bert-base-multilingual-cased",
-                    "SummEvalFr": 28.81
+                    "Model": "sentence-bert-swedish-cased"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "sentence-bert-swedish-cased"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "bert-base-multilingual-cased"
+                    "Model": "sentence-bert-swedish-cased"
                 }
             ]
         }
@@ -21560,6 +22257,13 @@
                 }
             ]
         },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "LLM2Vec-Meta-Llama-3-unsupervised"
+                }
+            ]
+        },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
@@ -21568,3246 +22272,3847 @@
             ]
         }
     },
-    "text-search-davinci-001": {
+    "unsup-simcse-bert-base-uncased": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "text-search-davinci-001"
+                    "Model": "unsup-simcse-bert-base-uncased"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "text-search-davinci-001"
+                    "Model": "unsup-simcse-bert-base-uncased",
+                    "AmazonCounterfactualClassification (en)": 67.09,
+                    "AmazonPolarityClassification": 74.48,
+                    "AmazonReviewsClassification (en)": 33.85,
+                    "Banking77Classification": 73.55,
+                    "EmotionClassification": 42.22,
+                    "ImdbClassification": 69.63,
+                    "MTOPDomainClassification (en)": 81.71,
+                    "MTOPIntentClassification (en)": 59.23,
+                    "MassiveIntentClassification (en)": 59.84,
+                    "MassiveScenarioClassification (en)": 66.25,
+                    "ToxicConversationsClassification": 68.82,
+                    "TweetSentimentExtractionClassification": 53.36
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "text-search-davinci-001"
+                    "Model": "unsup-simcse-bert-base-uncased",
+                    "ArxivClusteringP2P": 32.61,
+                    "ArxivClusteringS2S": 24.68,
+                    "BiorxivClusteringP2P": 24.9,
+                    "BiorxivClusteringS2S": 19.55,
+                    "MedrxivClusteringP2P": 23.6,
+                    "MedrxivClusteringS2S": 21.97,
+                    "RedditClustering": 32.18,
+                    "RedditClusteringP2P": 45.14,
+                    "StackExchangeClustering": 43.07,
+                    "StackExchangeClusteringP2P": 28.5,
+                    "TwentyNewsgroupsClustering": 23.21
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "text-search-davinci-001"
+                    "Model": "unsup-simcse-bert-base-uncased",
+                    "SprintDuplicateQuestions": 69.41,
+                    "TwitterSemEval2015": 60.21,
+                    "TwitterURLCorpus": 81.37
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "text-search-davinci-001"
+                    "Model": "unsup-simcse-bert-base-uncased",
+                    "AskUbuntuDupQuestions": 51.57,
+                    "MindSmallReranking": 28.62,
+                    "SciDocsRR": 66.33,
+                    "StackOverflowDupQuestions": 39.35
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "text-search-davinci-001",
-                    "ArguAna": 43.5,
-                    "ClimateFEVER": 22.3,
-                    "FEVER": 77.5,
-                    "FiQA2018": 51.2,
-                    "HotpotQA": 68.8,
-                    "NFCorpus": 40.7,
-                    "QuoraRetrieval": 63.8,
-                    "SciFact": 75.4,
-                    "TRECCOVID": 64.9,
-                    "Touche2020": 29.1
+                    "Model": "unsup-simcse-bert-base-uncased",
+                    "ArguAna": 38.34,
+                    "CQADupstackRetrieval": 13.22,
+                    "ClimateFEVER": 11.8,
+                    "DBPedia": 15.04,
+                    "FEVER": 21.06,
+                    "FiQA2018": 9.84,
+                    "HotpotQA": 19.75,
+                    "MSMARCO": 9.35,
+                    "NFCorpus": 9.88,
+                    "NQ": 11.69,
+                    "QuoraRetrieval": 78.03,
+                    "SCIDOCS": 5.5,
+                    "SciFact": 25.72,
+                    "TRECCOVID": 26.2,
+                    "Touche2020": 8.9
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "text-search-davinci-001"
+                    "Model": "unsup-simcse-bert-base-uncased",
+                    "BIOSSES": 72.31,
+                    "SICK-R": 72.24,
+                    "STS12": 66.05,
+                    "STS13": 81.49,
+                    "STS14": 73.61,
+                    "STS15": 79.72,
+                    "STS16": 78.12,
+                    "STS17 (en-en)": 83.58,
+                    "STS22 (en)": 59.65,
+                    "STSBenchmark": 76.52
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "text-search-davinci-001"
+                    "Model": "unsup-simcse-bert-base-uncased",
+                    "SummEval": 31.15
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "unsup-simcse-bert-base-uncased"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "unsup-simcse-bert-base-uncased"
+                }
+            ]
+        }
+    },
+    "text-search-ada-001": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "text-search-ada-001"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "text-search-ada-001"
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "text-search-ada-001",
+                    "BiorxivClusteringS2S": 26.05,
+                    "MedrxivClusteringS2S": 25.67,
+                    "TwentyNewsgroupsClustering": 44.92
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "text-search-ada-001"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "text-search-ada-001"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "text-search-ada-001",
+                    "ArguAna": 46.91,
+                    "ClimateFEVER": 18.5,
+                    "DBPedia": 36.2,
+                    "FEVER": 72.1,
+                    "FiQA2018": 38.41,
+                    "HotpotQA": 59.39,
+                    "MSMARCO": 37.94,
+                    "NFCorpus": 33.17,
+                    "NQ": 42.81,
+                    "QuoraRetrieval": 70.57,
+                    "SCIDOCS": 14.83,
+                    "SciFact": 67.25,
+                    "TRECCOVID": 72.43,
+                    "Touche2020": 28.68
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "text-search-ada-001"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "text-search-ada-001"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "text-search-ada-001"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "text-search-davinci-001"
+                    "Model": "text-search-ada-001"
                 }
             ]
         }
     },
-    "m3e-base": {
+    "all-mpnet-base-v2-instruct": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "m3e-base"
+                    "Model": "all-mpnet-base-v2-instruct"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "m3e-base",
-                    "AmazonReviewsClassification (zh)": 43.02,
-                    "IFlyTek": 44.42,
-                    "JDReview": 85.33,
-                    "MassiveIntentClassification (zh-CN)": 68.4,
-                    "MassiveScenarioClassification (zh-CN)": 74.6,
-                    "MultilingualSentiment": 71.9,
-                    "OnlineShopping": 87.77,
-                    "TNews": 48.28,
-                    "Waimai": 83.99
+                    "Model": "all-mpnet-base-v2-instruct"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "m3e-base",
-                    "CLSClusteringP2P": 39.81,
-                    "CLSClusteringS2S": 37.34,
-                    "ThuNewsClusteringP2P": 59.77,
-                    "ThuNewsClusteringS2S": 53.78
+                    "Model": "all-mpnet-base-v2-instruct"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "m3e-base",
-                    "Cmnli": 69.98,
-                    "Ocnli": 58.0
+                    "Model": "all-mpnet-base-v2-instruct"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "m3e-base",
-                    "CMedQAv1": 77.05,
-                    "CMedQAv2": 76.76,
-                    "MMarcoReranking": 17.51,
-                    "T2Reranking": 66.03
+                    "Model": "all-mpnet-base-v2-instruct"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "m3e-base",
-                    "CmedqaRetrieval": 30.33,
-                    "CovidRetrieval": 66.42,
-                    "DuRetrieval": 75.76,
-                    "EcomRetrieval": 50.27,
-                    "MMarcoRetrieval": 65.46,
-                    "MedicalRetrieval": 42.79,
-                    "T2Retrieval": 73.14,
-                    "VideoRetrieval": 51.11
+                    "Model": "all-mpnet-base-v2-instruct",
+                    "ARCChallenge": 10.35,
+                    "AlphaNLI": 1.96,
+                    "HellaSwag": 13.01,
+                    "PIQA": 27.18,
+                    "Quail": 3.02,
+                    "RARbCode": 48.95,
+                    "RARbMath": 69.21,
+                    "SIQA": 1.29,
+                    "SpartQA": 1.01,
+                    "TempReasonL1": 1.52,
+                    "TempReasonL2Fact": 7.28,
+                    "TempReasonL2Pure": 1.03,
+                    "TempReasonL3Fact": 7.03,
+                    "TempReasonL3Pure": 5.16,
+                    "WinoGrande": 9.66
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "m3e-base",
-                    "AFQMC": 35.87,
-                    "ATEC": 41.27,
-                    "BQ": 63.81,
-                    "LCQMC": 74.88,
-                    "PAWSX": 12.19,
-                    "QBQTC": 32.07,
-                    "STS22 (zh)": 66.73,
-                    "STSB": 76.97
+                    "Model": "all-mpnet-base-v2-instruct"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "m3e-base"
+                    "Model": "all-mpnet-base-v2-instruct"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "all-mpnet-base-v2-instruct"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "m3e-base"
+                    "Model": "all-mpnet-base-v2-instruct"
                 }
             ]
         }
     },
-    "multilingual-e5-large": {
+    "m3e-large": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "multilingual-e5-large",
-                    "BornholmBitextMining (dan-Latn)": 29.61,
-                    "BornholmBitextMining": 44.16,
-                    "Tatoeba (tgl-Latn_eng-Latn)": 92.0,
-                    "Tatoeba (gsw-Latn_eng-Latn)": 51.65,
-                    "Tatoeba (tzl-Latn_eng-Latn)": 53.16,
-                    "Tatoeba (slv-Latn_eng-Latn)": 89.57,
-                    "Tatoeba (jav-Latn_eng-Latn)": 75.46,
-                    "Tatoeba (uig-Arab_eng-Latn)": 72.17,
-                    "Tatoeba (ind-Latn_eng-Latn)": 92.9,
-                    "Tatoeba (rus-Cyrl_eng-Latn)": 92.32,
-                    "Tatoeba (war-Latn_eng-Latn)": 62.02,
-                    "Tatoeba (mar-Deva_eng-Latn)": 88.58,
-                    "Tatoeba (mkd-Cyrl_eng-Latn)": 85.63,
-                    "Tatoeba (jpn-Jpan_eng-Latn)": 95.28,
-                    "Tatoeba (hun-Latn_eng-Latn)": 94.01,
-                    "Tatoeba (slk-Latn_eng-Latn)": 93.13,
-                    "Tatoeba (tha-Thai_eng-Latn)": 95.38,
-                    "Tatoeba (fra-Latn_eng-Latn)": 93.42,
-                    "Tatoeba (ukr-Cyrl_eng-Latn)": 93.32,
-                    "Tatoeba (kat-Geor_eng-Latn)": 84.09,
-                    "Tatoeba (nov-Latn_eng-Latn)": 71.62,
-                    "Tatoeba (kor-Hang_eng-Latn)": 90.65,
-                    "Tatoeba (ben-Beng_eng-Latn)": 83.02,
-                    "Tatoeba (cor-Latn_eng-Latn)": 6.28,
-                    "Tatoeba (lfn-Latn_eng-Latn)": 62.91,
-                    "Tatoeba (swh-Latn_eng-Latn)": 71.61,
-                    "Tatoeba (tur-Latn_eng-Latn)": 96.27,
-                    "Tatoeba (cbk-Latn_eng-Latn)": 69.26,
-                    "Tatoeba (kur-Latn_eng-Latn)": 66.83,
-                    "Tatoeba (arq-Arab_eng-Latn)": 41.56,
-                    "Tatoeba (ceb-Latn_eng-Latn)": 55.31,
-                    "Tatoeba (max-Deva_eng-Latn)": 63.41,
-                    "Tatoeba (ang-Latn_eng-Latn)": 40.18,
-                    "Tatoeba (nds-Latn_eng-Latn)": 69.28,
-                    "Tatoeba (epo-Latn_eng-Latn)": 96.01,
-                    "Tatoeba (heb-Hebr_eng-Latn)": 86.61,
-                    "Tatoeba (yue-Hant_eng-Latn)": 88.71,
-                    "Tatoeba (dan-Latn_eng-Latn)": 95.08,
-                    "Tatoeba (swe-Latn_eng-Latn)": 95.3,
-                    "Tatoeba (lvs-Latn_eng-Latn)": 90.06,
-                    "Tatoeba (ast-Latn_eng-Latn)": 81.76,
-                    "Tatoeba (dsb-Latn_eng-Latn)": 48.44,
-                    "Tatoeba (pes-Arab_eng-Latn)": 92.14,
-                    "Tatoeba (dtp-Latn_eng-Latn)": 7.03,
-                    "Tatoeba (tuk-Latn_eng-Latn)": 33.15,
-                    "Tatoeba (isl-Latn_eng-Latn)": 92.09,
-                    "Tatoeba (khm-Khmr_eng-Latn)": 59.96,
-                    "Tatoeba (pam-Latn_eng-Latn)": 9.32,
-                    "Tatoeba (tat-Cyrl_eng-Latn)": 73.51,
-                    "Tatoeba (bos-Latn_eng-Latn)": 92.86,
-                    "Tatoeba (spa-Latn_eng-Latn)": 97.1,
-                    "Tatoeba (kaz-Cyrl_eng-Latn)": 79.67,
-                    "Tatoeba (bel-Cyrl_eng-Latn)": 91.08,
-                    "Tatoeba (zsm-Latn_eng-Latn)": 94.53,
-                    "Tatoeba (cat-Latn_eng-Latn)": 91.03,
-                    "Tatoeba (urd-Arab_eng-Latn)": 89.21,
-                    "Tatoeba (mon-Cyrl_eng-Latn)": 87.53,
-                    "Tatoeba (tam-Taml_eng-Latn)": 88.23,
-                    "Tatoeba (fry-Latn_eng-Latn)": 63.43,
-                    "Tatoeba (nob-Latn_eng-Latn)": 97.2,
-                    "Tatoeba (tel-Telu_eng-Latn)": 91.34,
-                    "Tatoeba (hye-Armn_eng-Latn)": 90.92,
-                    "Tatoeba (awa-Deva_eng-Latn)": 72.27,
-                    "Tatoeba (hrv-Latn_eng-Latn)": 96.15,
-                    "Tatoeba (ile-Latn_eng-Latn)": 79.16,
-                    "Tatoeba (amh-Ethi_eng-Latn)": 80.69,
-                    "Tatoeba (orv-Cyrl_eng-Latn)": 39.87,
-                    "Tatoeba (ara-Arab_eng-Latn)": 85.48,
-                    "Tatoeba (ido-Latn_eng-Latn)": 83.52,
-                    "Tatoeba (hin-Deva_eng-Latn)": 94.48,
-                    "Tatoeba (por-Latn_eng-Latn)": 93.63,
-                    "Tatoeba (ron-Latn_eng-Latn)": 94.87,
-                    "Tatoeba (swg-Latn_eng-Latn)": 55.64,
-                    "Tatoeba (cmn-Hans_eng-Latn)": 95.28,
-                    "Tatoeba (pol-Latn_eng-Latn)": 96.6,
-                    "Tatoeba (bul-Cyrl_eng-Latn)": 92.93,
-                    "Tatoeba (ina-Latn_eng-Latn)": 93.47,
-                    "Tatoeba (bre-Latn_eng-Latn)": 11.1,
-                    "Tatoeba (wuu-Hans_eng-Latn)": 86.37,
-                    "Tatoeba (lit-Latn_eng-Latn)": 88.48,
-                    "Tatoeba (csb-Latn_eng-Latn)": 36.98,
-                    "Tatoeba (lat-Latn_eng-Latn)": 53.37,
-                    "Tatoeba (gle-Latn_eng-Latn)": 71.48,
-                    "Tatoeba (ita-Latn_eng-Latn)": 93.29,
-                    "Tatoeba (srp-Cyrl_eng-Latn)": 93.1,
-                    "Tatoeba (arz-Arab_eng-Latn)": 74.73,
-                    "Tatoeba (cym-Latn_eng-Latn)": 76.21,
-                    "Tatoeba (ber-Tfng_eng-Latn)": 38.9,
-                    "Tatoeba (xho-Latn_eng-Latn)": 80.87,
-                    "Tatoeba (uzb-Latn_eng-Latn)": 72.35,
-                    "Tatoeba (pms-Latn_eng-Latn)": 59.85,
-                    "Tatoeba (est-Latn_eng-Latn)": 85.03,
-                    "Tatoeba (deu-Latn_eng-Latn)": 99.07,
-                    "Tatoeba (yid-Hebr_eng-Latn)": 76.33,
-                    "Tatoeba (ell-Grek_eng-Latn)": 93.88,
-                    "Tatoeba (afr-Latn_eng-Latn)": 90.22,
-                    "Tatoeba (fao-Latn_eng-Latn)": 72.62,
-                    "Tatoeba (nld-Latn_eng-Latn)": 96.63,
-                    "Tatoeba (hsb-Latn_eng-Latn)": 58.9,
-                    "Tatoeba (aze-Latn_eng-Latn)": 87.61,
-                    "Tatoeba (kzj-Latn_eng-Latn)": 7.91,
-                    "Tatoeba (kab-Latn_eng-Latn)": 36.54,
-                    "Tatoeba (mal-Mlym_eng-Latn)": 97.7,
-                    "Tatoeba (mhr-Cyrl_eng-Latn)": 6.79,
-                    "Tatoeba (ces-Latn_eng-Latn)": 94.89,
-                    "Tatoeba (gla-Latn_eng-Latn)": 59.0,
-                    "Tatoeba (cha-Latn_eng-Latn)": 27.16,
-                    "Tatoeba (glg-Latn_eng-Latn)": 93.34,
-                    "Tatoeba (vie-Latn_eng-Latn)": 97.0,
-                    "Tatoeba (oci-Latn_eng-Latn)": 54.91,
-                    "Tatoeba (nno-Latn_eng-Latn)": 91.4,
-                    "Tatoeba (fin-Latn_eng-Latn)": 95.44,
-                    "Tatoeba (eus-Latn_eng-Latn)": 77.82,
-                    "Tatoeba (sqi-Latn_eng-Latn)": 94.7
+                    "Model": "m3e-large"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "m3e-large",
+                    "AmazonReviewsClassification (zh)": 44.44,
+                    "IFlyTek": 43.96,
+                    "JDReview": 86.92,
+                    "MassiveIntentClassification (zh-CN)": 67.23,
+                    "MassiveScenarioClassification (zh-CN)": 74.88,
+                    "MultilingualSentiment": 72.47,
+                    "OnlineShopping": 89.59,
+                    "TNews": 48.26,
+                    "Waimai": 86.08
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "m3e-large",
+                    "CLSClusteringP2P": 38.6,
+                    "CLSClusteringS2S": 38.02,
+                    "ThuNewsClusteringP2P": 60.39,
+                    "ThuNewsClusteringS2S": 58.51
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "m3e-large",
+                    "Cmnli": 69.27,
+                    "Ocnli": 59.33
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "m3e-large",
+                    "CMedQAv1": 77.76,
+                    "CMedQAv2": 78.27,
+                    "MMarcoReranking": 16.46,
+                    "T2Reranking": 66.13
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "m3e-large",
+                    "CmedqaRetrieval": 30.73,
+                    "CovidRetrieval": 61.33,
+                    "DuRetrieval": 74.69,
+                    "EcomRetrieval": 45.18,
+                    "MMarcoRetrieval": 61.06,
+                    "MedicalRetrieval": 48.66,
+                    "T2Retrieval": 72.36,
+                    "VideoRetrieval": 44.02
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "m3e-large",
+                    "AFQMC": 36.53,
+                    "ATEC": 41.8,
+                    "BQ": 65.2,
+                    "LCQMC": 74.2,
+                    "PAWSX": 15.95,
+                    "QBQTC": 32.65,
+                    "STS22 (zh)": 62.91,
+                    "STSB": 74.16
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "m3e-large"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "m3e-large"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "m3e-large"
+                }
+            ]
+        }
+    },
+    "flaubert_base_uncased": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "flaubert_base_uncased"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "multilingual-e5-large",
-                    "AllegroReviews (pol-Latn)": 41.04,
-                    "AllegroReviews": 41.14,
-                    "AmazonCounterfactualClassification (en-ext)": 78.73,
-                    "AmazonCounterfactualClassification (en)": 78.67,
-                    "AmazonCounterfactualClassification (deu-Latn)": 68.66,
-                    "AmazonCounterfactualClassification (jpn-Jpan)": 78.8,
-                    "AmazonPolarityClassification": 93.26,
-                    "AmazonReviewsClassification (en)": 49.2,
-                    "AmazonReviewsClassification (deu-Latn)": 46.5,
-                    "AmazonReviewsClassification (spa-Latn)": 44.35,
-                    "AmazonReviewsClassification (fra-Latn)": 42.55,
-                    "AmazonReviewsClassification (jpn-Jpan)": 41.71,
-                    "AmazonReviewsClassification (cmn-Hans)": 38.87,
-                    "AmazonReviewsClassification (fr)": 41.91,
-                    "AngryTweetsClassification (dan-Latn)": 57.69,
-                    "AngryTweetsClassification": 54.95,
-                    "Banking77Classification": 75.88,
-                    "CBD (pol-Latn)": 69.84,
-                    "CBD": 69.9,
-                    "DKHateClassification": 66.02,
-                    "DanishPoliticalCommentsClassification (dan-Latn)": 39.43,
-                    "DanishPoliticalCommentsClassification": 38.27,
-                    "EmotionClassification": 47.58,
-                    "GeoreviewClassification (rus-Cyrl)": 49.69,
-                    "HeadlineClassification (rus-Cyrl)": 77.19,
-                    "IFlyTek (cmn-Hans)": 41.86,
-                    "IFlyTek": 45.47,
-                    "ImdbClassification": 90.23,
-                    "InappropriatenessClassification (rus-Cyrl)": 61.6,
-                    "JDReview (cmn-Hans)": 80.54,
-                    "JDReview": 80.99,
-                    "KinopoiskClassification (rus-Cyrl)": 56.59,
-                    "LccSentimentClassification (dan-Latn)": 61.53,
-                    "LccSentimentClassification": 59.6,
-                    "MTOPDomainClassification (en)": 91.81,
-                    "MTOPDomainClassification (deu-Latn)": 90.44,
-                    "MTOPDomainClassification (spa-Latn)": 88.34,
-                    "MTOPDomainClassification (fra-Latn)": 86.23,
-                    "MTOPDomainClassification (hin-Deva)": 86.84,
-                    "MTOPDomainClassification (tha-Thai)": 86.88,
-                    "MTOPDomainClassification (fr)": 86.41,
-                    "MTOPIntentClassification (en)": 64.29,
-                    "MTOPIntentClassification (deu-Latn)": 65.97,
-                    "MTOPIntentClassification (spa-Latn)": 61.9,
-                    "MTOPIntentClassification (fra-Latn)": 56.25,
-                    "MTOPIntentClassification (hin-Deva)": 59.17,
-                    "MTOPIntentClassification (tha-Thai)": 62.59,
-                    "MTOPIntentClassification (fr)": 59.43,
-                    "MasakhaNEWSClassification (amh-Ethi)": 83.7,
-                    "MasakhaNEWSClassification (eng)": 78.26,
-                    "MasakhaNEWSClassification (fra-Latn)": 76.11,
-                    "MasakhaNEWSClassification (hau-Latn)": 76.17,
-                    "MasakhaNEWSClassification (ibo-Latn)": 70.05,
-                    "MasakhaNEWSClassification (lin-Latn)": 75.89,
-                    "MasakhaNEWSClassification (lug-Latn)": 73.63,
-                    "MasakhaNEWSClassification (orm-Ethi)": 80.31,
-                    "MasakhaNEWSClassification (pcm-Latn)": 89.15,
-                    "MasakhaNEWSClassification (run-Latn)": 76.55,
-                    "MasakhaNEWSClassification (sna-Latn)": 86.99,
-                    "MasakhaNEWSClassification (som-Latn)": 64.63,
-                    "MasakhaNEWSClassification (swa-Latn)": 73.42,
-                    "MasakhaNEWSClassification (tir-Ethi)": 72.06,
-                    "MasakhaNEWSClassification (xho-Latn)": 82.56,
-                    "MasakhaNEWSClassification (yor-Latn)": 81.09,
-                    "MasakhaNEWSClassification (fra)": 79.38,
-                    "MassiveIntentClassification (kor-Kore)": 63.92,
-                    "MassiveIntentClassification (lav-Latn)": 58.31,
-                    "MassiveIntentClassification (isl-Latn)": 53.3,
-                    "MassiveIntentClassification (tel-Telu)": 53.96,
-                    "MassiveIntentClassification (mya-Mymr)": 49.73,
-                    "MassiveIntentClassification (nob-Latn)": 64.54,
-                    "MassiveIntentClassification (en)": 68.51,
-                    "MassiveIntentClassification (spa-Latn)": 64.01,
-                    "MassiveIntentClassification (swe-Latn)": 66.52,
-                    "MassiveIntentClassification (cmo-Hant)": 58.78,
-                    "MassiveIntentClassification (pol-Latn)": 65.09,
-                    "MassiveIntentClassification (rus-Cyrl)": 65.76,
-                    "MassiveIntentClassification (aze-Latn)": 54.68,
-                    "MassiveIntentClassification (fin-Latn)": 64.28,
-                    "MassiveIntentClassification (cmo-Hans)": 66.23,
-                    "MassiveIntentClassification (urd-Arab)": 54.6,
-                    "MassiveIntentClassification (tam-Taml)": 53.41,
-                    "MassiveIntentClassification (hin-Deva)": 60.93,
-                    "MassiveIntentClassification (deu-Latn)": 63.82,
-                    "MassiveIntentClassification (ell-Grek)": 64.34,
-                    "MassiveIntentClassification (hye-Armn)": 50.89,
-                    "MassiveIntentClassification (por-Latn)": 65.6,
-                    "MassiveIntentClassification (nld-Latn)": 65.0,
-                    "MassiveIntentClassification (fas-Arab)": 63.74,
-                    "MassiveIntentClassification (ron-Latn)": 59.76,
-                    "MassiveIntentClassification (slv-Latn)": 59.38,
-                    "MassiveIntentClassification (heb-Hebr)": 62.44,
-                    "MassiveIntentClassification (vie-Latn)": 63.39,
-                    "MassiveIntentClassification (sqi-Latn)": 57.3,
-                    "MassiveIntentClassification (khm-Khmr)": 34.88,
-                    "MassiveIntentClassification (ben-Beng)": 55.6,
-                    "MassiveIntentClassification (tgl-Latn)": 54.77,
-                    "MassiveIntentClassification (jpn-Jpan)": 67.11,
-                    "MassiveIntentClassification (kat-Geor)": 41.45,
-                    "MassiveIntentClassification (afr-Latn)": 53.69,
-                    "MassiveIntentClassification (cym-Latn)": 44.22,
-                    "MassiveIntentClassification (amh-Ethi)": 45.48,
-                    "MassiveIntentClassification (ita-Latn)": 63.89,
-                    "MassiveIntentClassification (mal-Mlym)": 57.58,
-                    "MassiveIntentClassification (tha-Thai)": 62.75,
-                    "MassiveIntentClassification (ind-Latn)": 63.51,
-                    "MassiveIntentClassification (jav-Latn)": 48.96,
-                    "MassiveIntentClassification (dan-Latn)": 63.7,
-                    "MassiveIntentClassification (ara-Arab)": 54.1,
-                    "MassiveIntentClassification (kan-Knda)": 53.45,
-                    "MassiveIntentClassification (hun-Latn)": 64.0,
-                    "MassiveIntentClassification (tur-Latn)": 64.61,
-                    "MassiveIntentClassification (msa-Latn)": 58.49,
-                    "MassiveIntentClassification (mon-Cyrl)": 49.6,
-                    "MassiveIntentClassification (swa-Latn)": 47.69,
-                    "MassiveIntentClassification (fra-Latn)": 63.37,
-                    "MassiveIntentClassification (da)": 60.16,
-                    "MassiveIntentClassification (nb)": 59.83,
-                    "MassiveIntentClassification (sv)": 61.78,
-                    "MassiveIntentClassification (pl)": 65.07,
-                    "MassiveScenarioClassification (heb-Hebr)": 67.72,
-                    "MassiveScenarioClassification (vie-Latn)": 68.91,
-                    "MassiveScenarioClassification (cmo-Hant)": 64.35,
-                    "MassiveScenarioClassification (urd-Arab)": 60.89,
-                    "MassiveScenarioClassification (isl-Latn)": 60.74,
-                    "MassiveScenarioClassification (ell-Grek)": 69.74,
-                    "MassiveScenarioClassification (mon-Cyrl)": 55.37,
-                    "MassiveScenarioClassification (swa-Latn)": 56.27,
-                    "MassiveScenarioClassification (tam-Taml)": 58.76,
-                    "MassiveScenarioClassification (hye-Armn)": 55.76,
-                    "MassiveScenarioClassification (amh-Ethi)": 52.69,
-                    "MassiveScenarioClassification (ben-Beng)": 61.85,
-                    "MassiveScenarioClassification (tel-Telu)": 59.49,
-                    "MassiveScenarioClassification (dan-Latn)": 71.18,
-                    "MassiveScenarioClassification (slv-Latn)": 65.33,
-                    "MassiveScenarioClassification (en)": 73.04,
-                    "MassiveScenarioClassification (rus-Cyrl)": 70.85,
-                    "MassiveScenarioClassification (mal-Mlym)": 63.17,
-                    "MassiveScenarioClassification (sqi-Latn)": 63.79,
-                    "MassiveScenarioClassification (ita-Latn)": 69.45,
-                    "MassiveScenarioClassification (kor-Kore)": 70.54,
-                    "MassiveScenarioClassification (cmo-Hans)": 72.25,
-                    "MassiveScenarioClassification (cym-Latn)": 51.25,
-                    "MassiveScenarioClassification (pol-Latn)": 69.83,
-                    "MassiveScenarioClassification (ind-Latn)": 69.43,
-                    "MassiveScenarioClassification (tur-Latn)": 68.12,
-                    "MassiveScenarioClassification (tgl-Latn)": 60.71,
-                    "MassiveScenarioClassification (hin-Deva)": 66.85,
-                    "MassiveScenarioClassification (spa-Latn)": 69.07,
-                    "MassiveScenarioClassification (lav-Latn)": 64.28,
-                    "MassiveScenarioClassification (mya-Mymr)": 54.03,
-                    "MassiveScenarioClassification (ara-Arab)": 61.0,
-                    "MassiveScenarioClassification (kan-Knda)": 59.36,
-                    "MassiveScenarioClassification (jav-Latn)": 56.24,
-                    "MassiveScenarioClassification (por-Latn)": 68.33,
-                    "MassiveScenarioClassification (tha-Thai)": 69.06,
-                    "MassiveScenarioClassification (aze-Latn)": 58.49,
-                    "MassiveScenarioClassification (fra-Latn)": 68.74,
-                    "MassiveScenarioClassification (ron-Latn)": 66.06,
-                    "MassiveScenarioClassification (nld-Latn)": 71.11,
-                    "MassiveScenarioClassification (fas-Arab)": 67.55,
-                    "MassiveScenarioClassification (deu-Latn)": 71.25,
-                    "MassiveScenarioClassification (nob-Latn)": 70.44,
-                    "MassiveScenarioClassification (msa-Latn)": 63.55,
-                    "MassiveScenarioClassification (afr-Latn)": 62.35,
-                    "MassiveScenarioClassification (hun-Latn)": 70.53,
-                    "MassiveScenarioClassification (swe-Latn)": 72.77,
-                    "MassiveScenarioClassification (kat-Geor)": 47.82,
-                    "MassiveScenarioClassification (jpn-Jpan)": 73.16,
-                    "MassiveScenarioClassification (khm-Khmr)": 41.14,
-                    "MassiveScenarioClassification (fin-Latn)": 68.62,
-                    "MassiveScenarioClassification (da)": 67.46,
-                    "MassiveScenarioClassification (nb)": 66.18,
-                    "MassiveScenarioClassification (sv)": 69.15,
-                    "MassiveScenarioClassification (pl)": 69.82,
-                    "MultilingualSentiment (cmn-Hans)": 70.81,
-                    "MultilingualSentiment": 68.58,
-                    "NoRecClassification (nob-Latn)": 58.43,
-                    "NoRecClassification": 62.76,
-                    "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 80.15,
-                    "NordicLangClassification": 82.29,
-                    "NorwegianParliament": 60.36,
-                    "OnlineShopping (cmn-Hans)": 90.45,
-                    "OnlineShopping": 90.81,
-                    "PAC (pol-Latn)": 70.33,
-                    "PAC": 70.37,
-                    "PolEmo2.0-IN (pol-Latn)": 77.06,
-                    "PolEmo2.0-IN": 77.06,
-                    "PolEmo2.0-OUT (pol-Latn)": 53.48,
-                    "PolEmo2.0-OUT": 53.38,
-                    "RuReviewsClassification (rus-Cyrl)": 65.28,
-                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 58.2,
-                    "RuSciBenchOECDClassification (rus-Cyrl)": 43.91,
-                    "ScalaDaClassification": 50.77,
-                    "ScalaNbClassification": 50.44,
-                    "TNews (cmn-Hans)": 48.8,
-                    "TNews": 48.38,
-                    "ToxicConversationsClassification": 66.01,
-                    "TweetSentimentExtractionClassification": 62.8,
-                    "Waimai (cmn-Hans)": 86.3,
-                    "Waimai": 85.02
+                    "Model": "flaubert_base_uncased",
+                    "AmazonReviewsClassification (fr)": 23.52,
+                    "MTOPDomainClassification (fr)": 27.74,
+                    "MTOPIntentClassification (fr)": 8.61,
+                    "MasakhaNEWSClassification (fra)": 62.61,
+                    "MassiveIntentClassification (fr)": 6.24,
+                    "MassiveScenarioClassification (fr)": 10.98
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "multilingual-e5-large",
-                    "8TagsClustering": 33.88,
-                    "AlloProfClusteringP2P": 62.99,
-                    "AlloProfClusteringS2S": 32.26,
-                    "BiorxivClusteringP2P": 35.5,
-                    "BiorxivClusteringS2S": 33.3,
-                    "CLSClusteringP2P": 40.68,
-                    "CLSClusteringS2S": 38.59,
-                    "GeoreviewClusteringP2P (rus-Cyrl)": 60.51,
-                    "HALClusteringS2S": 22.44,
-                    "MLSUMClusteringP2P (rus-Cyrl)": 42.79,
-                    "MLSUMClusteringP2P": 44.04,
-                    "MLSUMClusteringS2S (rus-Cyrl)": 44.32,
-                    "MLSUMClusteringS2S": 37.65,
-                    "MasakhaNEWSClusteringP2P (amh-Ethi)": 67.16,
-                    "MasakhaNEWSClusteringP2P (eng)": 61.1,
-                    "MasakhaNEWSClusteringP2P (fra-Latn)": 41.66,
-                    "MasakhaNEWSClusteringP2P (hau-Latn)": 60.7,
-                    "MasakhaNEWSClusteringP2P (ibo-Latn)": 48.41,
-                    "MasakhaNEWSClusteringP2P (lin-Latn)": 57.69,
-                    "MasakhaNEWSClusteringP2P (lug-Latn)": 71.95,
-                    "MasakhaNEWSClusteringP2P (orm-Ethi)": 60.14,
-                    "MasakhaNEWSClusteringP2P (pcm-Latn)": 80.84,
-                    "MasakhaNEWSClusteringP2P (run-Latn)": 59.91,
-                    "MasakhaNEWSClusteringP2P (sna-Latn)": 53.3,
-                    "MasakhaNEWSClusteringP2P (som-Latn)": 34.38,
-                    "MasakhaNEWSClusteringP2P (swa-Latn)": 33.25,
-                    "MasakhaNEWSClusteringP2P (tir-Ethi)": 54.21,
-                    "MasakhaNEWSClusteringP2P (xho-Latn)": 41.12,
-                    "MasakhaNEWSClusteringP2P (yor-Latn)": 36.22,
-                    "MasakhaNEWSClusteringP2P (fra)": 40.94,
-                    "MasakhaNEWSClusteringS2S (amh-Ethi)": 47.24,
-                    "MasakhaNEWSClusteringS2S (eng)": 53.93,
-                    "MasakhaNEWSClusteringS2S (fra-Latn)": 39.84,
-                    "MasakhaNEWSClusteringS2S (hau-Latn)": 19.24,
-                    "MasakhaNEWSClusteringS2S (ibo-Latn)": 28.88,
-                    "MasakhaNEWSClusteringS2S (lin-Latn)": 42.22,
-                    "MasakhaNEWSClusteringS2S (lug-Latn)": 43.63,
-                    "MasakhaNEWSClusteringS2S (orm-Ethi)": 26.29,
-                    "MasakhaNEWSClusteringS2S (pcm-Latn)": 59.77,
-                    "MasakhaNEWSClusteringS2S (run-Latn)": 51.46,
-                    "MasakhaNEWSClusteringS2S (sna-Latn)": 48.14,
-                    "MasakhaNEWSClusteringS2S (som-Latn)": 25.14,
-                    "MasakhaNEWSClusteringS2S (swa-Latn)": 7.28,
-                    "MasakhaNEWSClusteringS2S (tir-Ethi)": 50.51,
-                    "MasakhaNEWSClusteringS2S (xho-Latn)": 30.98,
-                    "MasakhaNEWSClusteringS2S (yor-Latn)": 34.09,
-                    "MasakhaNEWSClusteringS2S (fra)": 30.56,
-                    "MedrxivClusteringP2P": 31.7,
-                    "MedrxivClusteringS2S": 29.76,
-                    "RedditClustering": 46.91,
-                    "RedditClusteringP2P": 63.0,
-                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 52.03,
-                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 45.11,
-                    "StackExchangeClustering": 58.37,
-                    "StackExchangeClusteringP2P": 32.9,
-                    "ThuNewsClusteringP2P": 58.05,
-                    "ThuNewsClusteringS2S": 55.59,
-                    "TwentyNewsgroupsClustering": 39.4
+                    "Model": "flaubert_base_uncased",
+                    "AlloProfClusteringP2P": 43.2,
+                    "AlloProfClusteringS2S": 12.94,
+                    "HALClusteringS2S": 1.8,
+                    "MLSUMClusteringP2P": 33.22,
+                    "MLSUMClusteringS2S": 14.9,
+                    "MasakhaNEWSClusteringP2P (fra)": 28.49,
+                    "MasakhaNEWSClusteringS2S (fra)": 22.58
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "flaubert_base_uncased",
+                    "OpusparcusPC (fr)": 82.0,
+                    "PawsXPairClassification (fr)": 52.78
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "flaubert_base_uncased",
+                    "AlloprofReranking": 34.55,
+                    "SyntecReranking": 57.18
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "flaubert_base_uncased",
+                    "AlloprofRetrieval": 1.72,
+                    "BSARDRetrieval": 0.0,
+                    "MintakaRetrieval (fr)": 0.51,
+                    "SyntecRetrieval": 22.33,
+                    "XPQARetrieval (fr)": 9.09
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "flaubert_base_uncased",
+                    "SICKFr": 41.9,
+                    "STS22 (fr)": 55.15,
+                    "STSBenchmarkMultilingualSTS (fr)": 33.41
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "flaubert_base_uncased",
+                    "SummEvalFr": 29.43
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "flaubert_base_uncased"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "flaubert_base_uncased"
+                }
+            ]
+        }
+    },
+    "flaubert_base_cased": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "flaubert_base_cased"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "flaubert_base_cased",
+                    "AmazonReviewsClassification (fr)": 24.9,
+                    "MTOPDomainClassification (fr)": 25.55,
+                    "MTOPIntentClassification (fr)": 9.49,
+                    "MasakhaNEWSClassification (fra)": 71.14,
+                    "MassiveIntentClassification (fr)": 6.98,
+                    "MassiveScenarioClassification (fr)": 11.41
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "flaubert_base_cased",
+                    "AlloProfClusteringP2P": 52.86,
+                    "AlloProfClusteringS2S": 14.46,
+                    "HALClusteringS2S": 3.85,
+                    "MLSUMClusteringP2P": 39.06,
+                    "MLSUMClusteringS2S": 17.13,
+                    "MasakhaNEWSClusteringP2P (fra)": 41.61,
+                    "MasakhaNEWSClusteringS2S (fra)": 21.26
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "flaubert_base_cased",
+                    "OpusparcusPC (fr)": 82.15,
+                    "PawsXPairClassification (fr)": 51.89
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "flaubert_base_cased",
+                    "AlloprofReranking": 34.81,
+                    "SyntecReranking": 55.88
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "flaubert_base_cased",
+                    "AlloprofRetrieval": 1.63,
+                    "BSARDRetrieval": 0.0,
+                    "MintakaRetrieval (fr)": 0.58,
+                    "SyntecRetrieval": 20.56,
+                    "XPQARetrieval (fr)": 6.59
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "flaubert_base_cased",
+                    "SICKFr": 53.86,
+                    "STS22 (fr)": 65.37,
+                    "STSBenchmarkMultilingualSTS (fr)": 37.14
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "flaubert_base_cased",
+                    "SummEvalFr": 31.26
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "flaubert_base_cased"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "flaubert_base_cased"
+                }
+            ]
+        }
+    },
+    "nb-bert-large": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "nb-bert-large",
+                    "BornholmBitextMining": 4.53
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "nb-bert-large",
+                    "AngryTweetsClassification": 52.14,
+                    "DKHateClassification": 62.13,
+                    "DanishPoliticalCommentsClassification": 35.04,
+                    "LccSentimentClassification": 56.27,
+                    "MassiveIntentClassification (da)": 57.03,
+                    "MassiveIntentClassification (nb)": 62.68,
+                    "MassiveIntentClassification (sv)": 55.02,
+                    "MassiveScenarioClassification (da)": 60.43,
+                    "MassiveScenarioClassification (nb)": 67.44,
+                    "MassiveScenarioClassification (sv)": 57.12,
+                    "NoRecClassification": 55.46,
+                    "NordicLangClassification": 85.27,
+                    "NorwegianParliament": 62.58,
+                    "ScalaDaClassification": 62.85,
+                    "ScalaNbClassification": 66.97
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "nb-bert-large"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "multilingual-e5-large",
-                    "CDSC-E (pol-Latn)": 74.47,
-                    "CDSC-E": 74.47,
-                    "Cmnli": 78.18,
-                    "Ocnli": 61.6,
-                    "OpusparcusPC (deu-Latn)": 97.27,
-                    "OpusparcusPC (en)": 98.74,
-                    "OpusparcusPC (fin-Latn)": 94.26,
-                    "OpusparcusPC (fra-Latn)": 93.68,
-                    "OpusparcusPC (rus-Cyrl)": 89.64,
-                    "OpusparcusPC (swe-Latn)": 94.98,
-                    "OpusparcusPC (fr)": 93.89,
-                    "PPC": 92.18,
-                    "PSC (pol-Latn)": 99.4,
-                    "PSC": 99.39,
-                    "PawsXPairClassification (deu-Latn)": 56.81,
-                    "PawsXPairClassification (en)": 62.97,
-                    "PawsXPairClassification (spa-Latn)": 56.85,
-                    "PawsXPairClassification (fra-Latn)": 58.68,
-                    "PawsXPairClassification (jpn-Hira)": 50.7,
-                    "PawsXPairClassification (kor-Hang)": 52.08,
-                    "PawsXPairClassification (cmn-Hans)": 56.82,
-                    "PawsXPairClassification (fr)": 58.5,
-                    "SICK-E-PL (pol-Latn)": 75.95,
-                    "SICK-E-PL": 75.96,
-                    "SprintDuplicateQuestions": 93.14,
-                    "TERRa (rus-Cyrl)": 58.4,
-                    "TwitterSemEval2015": 75.28,
-                    "TwitterURLCorpus": 85.83
+                    "Model": "nb-bert-large"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "multilingual-e5-large",
-                    "AlloprofReranking (fra-Latn)": 69.44,
-                    "AlloprofReranking": 57.37,
-                    "AskUbuntuDupQuestions": 59.24,
-                    "CMedQAv1": 68.25,
-                    "CMedQAv2": 68.56,
-                    "MMarcoReranking (cmn-Hans)": 29.12,
-                    "MMarcoReranking": 21.34,
-                    "MindSmallReranking": 30.24,
-                    "RuBQReranking (rus-Cyrl)": 75.58,
-                    "SciDocsRR": 84.22,
-                    "StackOverflowDupQuestions": 50.14,
-                    "SyntecReranking (fra-Latn)": 85.45,
-                    "SyntecReranking": 86.9,
-                    "T2Reranking (cmn-Hans)": 66.32,
-                    "T2Reranking": 65.83
+                    "Model": "nb-bert-large"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "multilingual-e5-large",
-                    "AILACasedocs": 26.43,
-                    "AILAStatutes": 20.84,
-                    "ARCChallenge": 10.83,
-                    "AlloprofRetrieval (fra-Latn)": 39.34,
-                    "AlloprofRetrieval": 38.15,
-                    "AlphaNLI": 13.59,
-                    "ArguAna": 54.36,
-                    "ArguAna-PL (pol-Latn)": 52.99,
-                    "ArguAna-PL": 53.02,
-                    "BSARDRetrieval (fra-Latn)": 21.28,
-                    "BSARDRetrieval": 0.27,
-                    "CmedqaRetrieval (cmn-Hans)": 28.66,
-                    "CmedqaRetrieval": 28.67,
-                    "CovidRetrieval (cmn-Hans)": 75.61,
-                    "CovidRetrieval": 75.51,
-                    "DBPedia-PL": 35.82,
-                    "DuRetrieval (cmn-Hans)": 85.3,
-                    "DuRetrieval": 85.32,
-                    "EcomRetrieval (cmn-Hans)": 54.67,
-                    "EcomRetrieval": 54.75,
-                    "FiQA-PL (pol-Latn)": 32.97,
-                    "FiQA-PL": 33.0,
-                    "FiQA2018": 43.81,
-                    "GerDaLIRSmall (deu-Latn)": 15.72,
-                    "HellaSwag": 27.35,
-                    "HotpotQA-PL": 67.41,
-                    "LEMBNarrativeQARetrieval": 24.22,
-                    "LEMBNeedleRetrieval": 28.0,
-                    "LEMBPasskeyRetrieval": 38.25,
-                    "LEMBQMSumRetrieval": 24.26,
-                    "LEMBSummScreenFDRetrieval": 71.12,
-                    "LEMBWikimQARetrieval": 56.8,
-                    "LeCaRDv2 (zho-Hans)": 55.83,
-                    "LegalBenchConsumerContractsQA": 73.3,
-                    "LegalBenchCorporateLobbying": 89.72,
-                    "LegalQuAD (deu-Latn)": 43.17,
-                    "LegalSummarization": 62.1,
-                    "MMarcoRetrieval (cmn-Hans)": 79.2,
-                    "MMarcoRetrieval": 79.2,
-                    "MSMARCO-PL": 33.38,
-                    "MedicalRetrieval (cmn-Hans)": 51.44,
-                    "MedicalRetrieval": 51.44,
-                    "MintakaRetrieval (ara-Arab)": 26.5,
-                    "MintakaRetrieval (deu-Latn)": 32.77,
-                    "MintakaRetrieval (spa-Latn)": 34.23,
-                    "MintakaRetrieval (fra-Latn)": 34.24,
-                    "MintakaRetrieval (hin-Deva)": 27.45,
-                    "MintakaRetrieval (ita-Latn)": 33.84,
-                    "MintakaRetrieval (jpn-Hira)": 26.45,
-                    "MintakaRetrieval (por-Latn)": 35.9,
-                    "MintakaRetrieval (fr)": 25.2,
-                    "NFCorpus": 33.95,
-                    "NFCorpus-PL (pol-Latn)": 30.21,
-                    "NFCorpus-PL": 30.24,
-                    "NQ-PL": 52.79,
-                    "PIQA": 28.82,
-                    "Quail": 4.85,
-                    "Quora-PL": 83.65,
-                    "RARbCode": 58.92,
-                    "RARbMath": 67.32,
-                    "RiaNewsRetrieval (rus-Cyrl)": 80.67,
-                    "RuBQRetrieval (rus-Cyrl)": 74.11,
-                    "SCIDOCS": 17.45,
-                    "SCIDOCS-PL (pol-Latn)": 13.82,
-                    "SCIDOCS-PL": 13.81,
-                    "SIQA": 5.36,
-                    "SciFact": 70.42,
-                    "SciFact-PL (pol-Latn)": 65.66,
-                    "SciFact-PL": 65.66,
-                    "SpartQA": 5.64,
-                    "SyntecRetrieval (fra-Latn)": 82.39,
-                    "SyntecRetrieval": 81.07,
-                    "T2Retrieval (cmn-Hans)": 76.07,
-                    "T2Retrieval": 76.11,
-                    "TRECCOVID": 71.21,
-                    "TRECCOVID-PL (pol-Latn)": 69.9,
-                    "TRECCOVID-PL": 70.03,
-                    "TempReasonL1": 1.14,
-                    "TempReasonL2Fact": 42.97,
-                    "TempReasonL2Pure": 2.05,
-                    "TempReasonL3Fact": 38.22,
-                    "TempReasonL3Pure": 8.31,
-                    "Touche2020": 23.13,
-                    "VideoRetrieval (cmn-Hans)": 58.28,
-                    "VideoRetrieval": 58.25,
-                    "WinoGrande": 54.99,
-                    "XPQARetrieval (ara-Arab_ara-Arab)": 43.69,
-                    "XPQARetrieval (eng-Latn_ara-Arab)": 30.86,
-                    "XPQARetrieval (ara-Arab_eng-Latn)": 39.11,
-                    "XPQARetrieval (deu-Latn_deu-Latn)": 76.83,
-                    "XPQARetrieval (eng-Latn_deu-Latn)": 42.87,
-                    "XPQARetrieval (deu-Latn_eng-Latn)": 68.25,
-                    "XPQARetrieval (spa-Latn_spa-Latn)": 61.77,
-                    "XPQARetrieval (eng-Latn_spa-Latn)": 37.55,
-                    "XPQARetrieval (spa-Latn_eng-Latn)": 52.86,
-                    "XPQARetrieval (fra-Latn_fra-Latn)": 61.38,
-                    "XPQARetrieval (eng-Latn_fra-Latn)": 39.12,
-                    "XPQARetrieval (fra-Latn_eng-Latn)": 57.93,
-                    "XPQARetrieval (hin-Deva_hin-Deva)": 71.09,
-                    "XPQARetrieval (eng-Latn_hin-Deva)": 32.39,
-                    "XPQARetrieval (hin-Deva_eng-Latn)": 68.31,
-                    "XPQARetrieval (ita-Latn_ita-Latn)": 74.32,
-                    "XPQARetrieval (eng-Latn_ita-Latn)": 37.95,
-                    "XPQARetrieval (ita-Latn_eng-Latn)": 64.54,
-                    "XPQARetrieval (jpn-Hira_jpn-Hira)": 74.11,
-                    "XPQARetrieval (eng-Latn_jpn-Hira)": 38.31,
-                    "XPQARetrieval (jpn-Hira_eng-Latn)": 65.42,
-                    "XPQARetrieval (kor-Hang_kor-Hang)": 35.72,
-                    "XPQARetrieval (eng-Latn_kor-Hang)": 31.09,
-                    "XPQARetrieval (kor-Hang_eng-Latn)": 34.06,
-                    "XPQARetrieval (pol-Latn_pol-Latn)": 51.01,
-                    "XPQARetrieval (eng-Latn_pol-Latn)": 30.49,
-                    "XPQARetrieval (pol-Latn_eng-Latn)": 44.66,
-                    "XPQARetrieval (por-Latn_por-Latn)": 41.1,
-                    "XPQARetrieval (eng-Latn_por-Latn)": 22.03,
-                    "XPQARetrieval (por-Latn_eng-Latn)": 35.15,
-                    "XPQARetrieval (tam-Taml_tam-Taml)": 39.51,
-                    "XPQARetrieval (eng-Latn_tam-Taml)": 17.33,
-                    "XPQARetrieval (tam-Taml_eng-Latn)": 33.67,
-                    "XPQARetrieval (cmn-Hans_cmn-Hans)": 66.27,
-                    "XPQARetrieval (eng-Latn_cmn-Hans)": 26.24,
-                    "XPQARetrieval (cmn-Hans_eng-Latn)": 55.15,
-                    "XPQARetrieval (fr)": 66.15
+                    "Model": "nb-bert-large"
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "multilingual-e5-large",
-                    "AFQMC (cmn-Hans)": 33.01,
-                    "AFQMC": 33.02,
-                    "ATEC (cmn-Hans)": 39.8,
-                    "ATEC": 39.81,
-                    "BIOSSES": 82.49,
-                    "BQ (cmn-Hans)": 46.44,
-                    "BQ": 46.44,
-                    "CDSC-R (pol-Latn)": 91.0,
-                    "CDSC-R": 91.0,
-                    "LCQMC (cmn-Hans)": 75.95,
-                    "LCQMC": 75.95,
-                    "PAWSX (cmn-Hans)": 14.63,
-                    "PAWSX": 14.63,
-                    "QBQTC": 29.77,
-                    "RUParaPhraserSTS (rus-Cyrl)": 71.82,
-                    "RuSTSBenchmarkSTS (rus-Cyrl)": 83.15,
-                    "SICK-R": 80.23,
-                    "SICK-R-PL (pol-Latn)": 75.08,
-                    "SICK-R-PL": 75.08,
-                    "SICKFr (fra-Latn)": 78.81,
-                    "SICKFr": 78.78,
-                    "STS12": 80.02,
-                    "STS13": 81.55,
-                    "STS14": 77.72,
-                    "STS15": 89.31,
-                    "STS16": 85.79,
-                    "STS17 (en-en)": 88.12,
-                    "STS17 (spa-Latn)": 86.71,
-                    "STS17 (spa-Latn_eng-Latn)": 80.74,
-                    "STS17 (eng-Latn_ara-Arab)": 75.03,
-                    "STS17 (fra-Latn_eng-Latn)": 85.62,
-                    "STS17 (kor-Hang)": 82.27,
-                    "STS17 (ita-Latn_eng-Latn)": 84.52,
-                    "STS17 (ara-Arab)": 77.83,
-                    "STS17 (eng-Latn_tur-Latn)": 71.22,
-                    "STS17 (eng-Latn_deu-Latn)": 86.15,
-                    "STS17 (nld-Latn_eng-Latn)": 85.29,
-                    "STS22 (spa-Latn)": 64.6,
-                    "STS22 (spa-Latn_eng-Latn)": 72.51,
-                    "STS22 (deu-Latn_eng-Latn)": 56.59,
-                    "STS22 (cmn-Hans_eng-Latn)": 65.95,
-                    "STS22 (deu-Latn_pol-Latn)": 49.58,
-                    "STS22 (fra-Latn_pol-Latn)": 50.71,
-                    "STS22 (en)": 63.66,
-                    "STS22 (ara-Arab)": 56.95,
-                    "STS22 (spa-Latn_ita-Latn)": 68.92,
-                    "STS22 (tur-Latn)": 63.56,
-                    "STS22 (deu-Latn_fra-Latn)": 67.96,
-                    "STS22 (ita-Latn)": 76.99,
-                    "STS22 (cmn-Hans)": 66.82,
-                    "STS22 (rus-Cyrl)": 59.89,
-                    "STS22 (fra-Latn)": 76.77,
-                    "STS22 (pol-Latn_eng-Latn)": 65.54,
-                    "STS22 (deu-Latn)": 56.58,
-                    "STS22 (pol-Latn)": 34.65,
-                    "STS22 (zh)": 65.64,
-                    "STS22 (pl)": 34.66,
-                    "STSB (cmn-Hans)": 81.08,
-                    "STSB": 81.08,
-                    "STSBenchmark": 87.29,
-                    "STSBenchmarkMultilingualSTS (cmn-Hans)": 81.22,
-                    "STSBenchmarkMultilingualSTS (en)": 87.29,
-                    "STSBenchmarkMultilingualSTS (pol-Latn)": 81.06,
-                    "STSBenchmarkMultilingualSTS (nld-Latn)": 81.63,
-                    "STSBenchmarkMultilingualSTS (ita-Latn)": 81.75,
-                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 83.05,
-                    "STSBenchmarkMultilingualSTS (por-Latn)": 73.31,
-                    "STSBenchmarkMultilingualSTS (spa-Latn)": 83.81,
-                    "STSBenchmarkMultilingualSTS (fra-Latn)": 83.28,
-                    "STSBenchmarkMultilingualSTS (deu-Latn)": 84.27,
-                    "STSBenchmarkMultilingualSTS (fr)": 82.53
+                    "Model": "nb-bert-large"
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "nb-bert-large"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "nb-bert-large"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "nb-bert-large"
+                }
+            ]
+        }
+    },
+    "monot5-3b-msmarco-10k": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "monot5-3b-msmarco-10k"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "monot5-3b-msmarco-10k"
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "monot5-3b-msmarco-10k"
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "monot5-3b-msmarco-10k"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "monot5-3b-msmarco-10k"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "monot5-3b-msmarco-10k"
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "monot5-3b-msmarco-10k"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "multilingual-e5-large",
-                    "SummEval": 29.65,
-                    "SummEvalFr (fra-Latn)": 30.92,
-                    "SummEvalFr": 30.92
+                    "Model": "monot5-3b-msmarco-10k"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "monot5-3b-msmarco-10k"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "multilingual-e5-large"
+                    "Model": "monot5-3b-msmarco-10k",
+                    "Core17InstructionRetrieval": 1.84,
+                    "News21InstructionRetrieval": 1.78,
+                    "Robust04InstructionRetrieval": 3.96
                 }
             ]
         }
     },
-    "komninos": {
+    "LaBSE-en-ru": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "komninos",
-                    "BUCC (de-en)": 0.18,
-                    "BUCC (fr-en)": 0.08,
-                    "BUCC (ru-en)": 0.15,
-                    "BUCC (zh-en)": 0.05,
-                    "Tatoeba (afr-eng)": 4.82,
-                    "Tatoeba (amh-eng)": 1.18,
-                    "Tatoeba (ang-eng)": 8.54,
-                    "Tatoeba (ara-eng)": 0.63,
-                    "Tatoeba (arq-eng)": 0.4,
-                    "Tatoeba (arz-eng)": 0.63,
-                    "Tatoeba (ast-eng)": 11.69,
-                    "Tatoeba (awa-eng)": 0.0,
-                    "Tatoeba (aze-eng)": 3.22,
-                    "Tatoeba (bel-eng)": 1.75,
-                    "Tatoeba (ben-eng)": 0.2,
-                    "Tatoeba (ber-eng)": 7.0,
-                    "Tatoeba (bos-eng)": 9.31,
-                    "Tatoeba (bre-eng)": 4.17,
-                    "Tatoeba (bul-eng)": 1.29,
-                    "Tatoeba (cat-eng)": 7.73,
-                    "Tatoeba (cbk-eng)": 5.61,
-                    "Tatoeba (ceb-eng)": 4.88,
-                    "Tatoeba (ces-eng)": 3.55,
-                    "Tatoeba (cha-eng)": 19.29,
-                    "Tatoeba (cmn-eng)": 0.5,
-                    "Tatoeba (cor-eng)": 4.15,
-                    "Tatoeba (csb-eng)": 5.69,
-                    "Tatoeba (cym-eng)": 8.4,
-                    "Tatoeba (dan-eng)": 6.99,
-                    "Tatoeba (deu-eng)": 3.67,
-                    "Tatoeba (dsb-eng)": 5.33,
-                    "Tatoeba (dtp-eng)": 4.25,
-                    "Tatoeba (ell-eng)": 0.63,
-                    "Tatoeba (epo-eng)": 2.45,
-                    "Tatoeba (est-eng)": 2.69,
-                    "Tatoeba (eus-eng)": 4.69,
-                    "Tatoeba (fao-eng)": 7.61,
-                    "Tatoeba (fin-eng)": 3.36,
-                    "Tatoeba (fra-eng)": 7.0,
-                    "Tatoeba (fry-eng)": 12.36,
-                    "Tatoeba (gla-eng)": 3.07,
-                    "Tatoeba (gle-eng)": 4.81,
-                    "Tatoeba (glg-eng)": 8.12,
-                    "Tatoeba (gsw-eng)": 18.87,
-                    "Tatoeba (heb-eng)": 0.68,
-                    "Tatoeba (hin-eng)": 0.1,
-                    "Tatoeba (hrv-eng)": 5.41,
-                    "Tatoeba (hsb-eng)": 6.32,
-                    "Tatoeba (hun-eng)": 3.42,
-                    "Tatoeba (hye-eng)": 0.97,
-                    "Tatoeba (ido-eng)": 7.1,
-                    "Tatoeba (ile-eng)": 13.61,
-                    "Tatoeba (ina-eng)": 8.57,
-                    "Tatoeba (ind-eng)": 7.26,
-                    "Tatoeba (isl-eng)": 4.09,
-                    "Tatoeba (ita-eng)": 5.54,
-                    "Tatoeba (jav-eng)": 11.43,
-                    "Tatoeba (jpn-eng)": 0.2,
-                    "Tatoeba (kab-eng)": 2.71,
-                    "Tatoeba (kat-eng)": 1.11,
-                    "Tatoeba (kaz-eng)": 1.17,
-                    "Tatoeba (khm-eng)": 0.55,
-                    "Tatoeba (kor-eng)": 0.5,
-                    "Tatoeba (kur-eng)": 8.55,
-                    "Tatoeba (kzj-eng)": 4.61,
-                    "Tatoeba (lat-eng)": 4.07,
-                    "Tatoeba (lfn-eng)": 2.83,
-                    "Tatoeba (lit-eng)": 0.95,
-                    "Tatoeba (lvs-eng)": 3.25,
-                    "Tatoeba (mal-eng)": 0.29,
-                    "Tatoeba (mar-eng)": 0.2,
-                    "Tatoeba (max-eng)": 14.53,
-                    "Tatoeba (mhr-eng)": 0.2,
-                    "Tatoeba (mkd-eng)": 0.2,
-                    "Tatoeba (mon-eng)": 1.1,
-                    "Tatoeba (nds-eng)": 10.37,
-                    "Tatoeba (nld-eng)": 9.5,
-                    "Tatoeba (nno-eng)": 4.49,
-                    "Tatoeba (nob-eng)": 4.95,
-                    "Tatoeba (nov-eng)": 14.53,
-                    "Tatoeba (oci-eng)": 5.8,
-                    "Tatoeba (orv-eng)": 0.24,
-                    "Tatoeba (pam-eng)": 6.65,
-                    "Tatoeba (pes-eng)": 0.5,
-                    "Tatoeba (pms-eng)": 8.05,
-                    "Tatoeba (pol-eng)": 5.13,
-                    "Tatoeba (por-eng)": 5.87,
-                    "Tatoeba (ron-eng)": 6.76,
-                    "Tatoeba (rus-eng)": 0.2,
-                    "Tatoeba (slk-eng)": 4.23,
-                    "Tatoeba (slv-eng)": 6.05,
-                    "Tatoeba (spa-eng)": 5.03,
-                    "Tatoeba (sqi-eng)": 4.36,
-                    "Tatoeba (srp-eng)": 1.77,
-                    "Tatoeba (swe-eng)": 6.72,
-                    "Tatoeba (swg-eng)": 8.54,
-                    "Tatoeba (swh-eng)": 11.49,
-                    "Tatoeba (tam-eng)": 1.3,
-                    "Tatoeba (tat-eng)": 0.77,
-                    "Tatoeba (tel-eng)": 0.85,
-                    "Tatoeba (tgl-eng)": 2.61,
-                    "Tatoeba (tha-eng)": 0.69,
-                    "Tatoeba (tuk-eng)": 5.76,
-                    "Tatoeba (tur-eng)": 5.24,
-                    "Tatoeba (tzl-eng)": 15.51,
-                    "Tatoeba (uig-eng)": 0.6,
-                    "Tatoeba (ukr-eng)": 1.23,
-                    "Tatoeba (urd-eng)": 0.4,
-                    "Tatoeba (uzb-eng)": 4.73,
-                    "Tatoeba (vie-eng)": 6.55,
-                    "Tatoeba (war-eng)": 4.12,
-                    "Tatoeba (wuu-eng)": 0.2,
-                    "Tatoeba (xho-eng)": 4.33,
-                    "Tatoeba (yid-eng)": 0.59,
-                    "Tatoeba (yue-eng)": 0.5,
-                    "Tatoeba (zsm-eng)": 7.27
+                    "Model": "LaBSE-en-ru",
+                    "Tatoeba (rus-Cyrl_eng-Latn)": 93.62
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "komninos",
-                    "AmazonCounterfactualClassification (en)": 60.54,
-                    "AmazonPolarityClassification": 59.59,
-                    "AmazonReviewsClassification (en)": 31.01,
-                    "Banking77Classification": 67.05,
-                    "EmotionClassification": 33.18,
-                    "ImdbClassification": 63.98,
-                    "MTOPDomainClassification (en)": 78.57,
-                    "MTOPIntentClassification (en)": 57.07,
-                    "MassiveIntentClassification (en)": 57.21,
-                    "MassiveScenarioClassification (en)": 66.11,
-                    "ToxicConversationsClassification": 67.76,
-                    "TweetSentimentExtractionClassification": 49.68
+                    "Model": "LaBSE-en-ru",
+                    "GeoreviewClassification (rus-Cyrl)": 40.89,
+                    "HeadlineClassification (rus-Cyrl)": 68.75,
+                    "InappropriatenessClassification (rus-Cyrl)": 58.48,
+                    "KinopoiskClassification (rus-Cyrl)": 49.85,
+                    "MassiveIntentClassification (swa-Latn)": 19.98,
+                    "MassiveIntentClassification (aze-Latn)": 19.52,
+                    "MassiveIntentClassification (tur-Latn)": 24.12,
+                    "MassiveIntentClassification (cmo-Hans)": 3.96,
+                    "MassiveIntentClassification (amh-Ethi)": 2.76,
+                    "MassiveIntentClassification (kan-Knda)": 2.86,
+                    "MassiveIntentClassification (hin-Deva)": 3.29,
+                    "MassiveIntentClassification (tgl-Latn)": 27.08,
+                    "MassiveIntentClassification (tha-Thai)": 4.0,
+                    "MassiveIntentClassification (swe-Latn)": 32.01,
+                    "MassiveIntentClassification (deu-Latn)": 35.14,
+                    "MassiveIntentClassification (spa-Latn)": 37.67,
+                    "MassiveIntentClassification (por-Latn)": 39.84,
+                    "MassiveIntentClassification (jpn-Jpan)": 4.78,
+                    "MassiveIntentClassification (fin-Latn)": 31.11,
+                    "MassiveIntentClassification (kat-Geor)": 2.87,
+                    "MassiveIntentClassification (slv-Latn)": 35.66,
+                    "MassiveIntentClassification (rus-Cyrl)": 60.53,
+                    "MassiveIntentClassification (ita-Latn)": 43.32,
+                    "MassiveIntentClassification (tel-Telu)": 2.72,
+                    "MassiveIntentClassification (afr-Latn)": 30.59,
+                    "MassiveIntentClassification (isl-Latn)": 25.61,
+                    "MassiveIntentClassification (fas-Arab)": 3.71,
+                    "MassiveIntentClassification (vie-Latn)": 23.0,
+                    "MassiveIntentClassification (ben-Beng)": 3.35,
+                    "MassiveIntentClassification (hye-Armn)": 2.8,
+                    "MassiveIntentClassification (pol-Latn)": 31.3,
+                    "MassiveIntentClassification (cym-Latn)": 26.59,
+                    "MassiveIntentClassification (jav-Latn)": 26.84,
+                    "MassiveIntentClassification (mon-Cyrl)": 35.97,
+                    "MassiveIntentClassification (en)": 60.48,
+                    "MassiveIntentClassification (msa-Latn)": 27.82,
+                    "MassiveIntentClassification (nob-Latn)": 35.78,
+                    "MassiveIntentClassification (heb-Hebr)": 2.33,
+                    "MassiveIntentClassification (khm-Khmr)": 4.6,
+                    "MassiveIntentClassification (nld-Latn)": 34.66,
+                    "MassiveIntentClassification (ind-Latn)": 33.31,
+                    "MassiveIntentClassification (mal-Mlym)": 2.63,
+                    "MassiveIntentClassification (tam-Taml)": 2.22,
+                    "MassiveIntentClassification (mya-Mymr)": 3.57,
+                    "MassiveIntentClassification (urd-Arab)": 3.36,
+                    "MassiveIntentClassification (dan-Latn)": 38.66,
+                    "MassiveIntentClassification (cmo-Hant)": 5.29,
+                    "MassiveIntentClassification (ron-Latn)": 37.45,
+                    "MassiveIntentClassification (lav-Latn)": 23.92,
+                    "MassiveIntentClassification (fra-Latn)": 40.29,
+                    "MassiveIntentClassification (ell-Grek)": 11.14,
+                    "MassiveIntentClassification (sqi-Latn)": 35.84,
+                    "MassiveIntentClassification (hun-Latn)": 26.74,
+                    "MassiveIntentClassification (kor-Kore)": 2.69,
+                    "MassiveIntentClassification (ara-Arab)": 5.19,
+                    "MassiveScenarioClassification (swa-Latn)": 25.61,
+                    "MassiveScenarioClassification (aze-Latn)": 24.48,
+                    "MassiveScenarioClassification (tur-Latn)": 31.38,
+                    "MassiveScenarioClassification (cmo-Hans)": 9.98,
+                    "MassiveScenarioClassification (amh-Ethi)": 7.59,
+                    "MassiveScenarioClassification (kan-Knda)": 8.73,
+                    "MassiveScenarioClassification (hin-Deva)": 8.77,
+                    "MassiveScenarioClassification (tgl-Latn)": 35.12,
+                    "MassiveScenarioClassification (tha-Thai)": 8.69,
+                    "MassiveScenarioClassification (swe-Latn)": 35.83,
+                    "MassiveScenarioClassification (deu-Latn)": 41.72,
+                    "MassiveScenarioClassification (spa-Latn)": 43.33,
+                    "MassiveScenarioClassification (por-Latn)": 44.62,
+                    "MassiveScenarioClassification (jpn-Jpan)": 9.51,
+                    "MassiveScenarioClassification (fin-Latn)": 33.79,
+                    "MassiveScenarioClassification (kat-Geor)": 7.32,
+                    "MassiveScenarioClassification (slv-Latn)": 37.6,
+                    "MassiveScenarioClassification (rus-Cyrl)": 65.15,
+                    "MassiveScenarioClassification (ita-Latn)": 47.28,
+                    "MassiveScenarioClassification (tel-Telu)": 7.53,
+                    "MassiveScenarioClassification (afr-Latn)": 37.27,
+                    "MassiveScenarioClassification (isl-Latn)": 30.32,
+                    "MassiveScenarioClassification (fas-Arab)": 6.83,
+                    "MassiveScenarioClassification (vie-Latn)": 28.92,
+                    "MassiveScenarioClassification (ben-Beng)": 8.57,
+                    "MassiveScenarioClassification (hye-Armn)": 8.91,
+                    "MassiveScenarioClassification (pol-Latn)": 33.75,
+                    "MassiveScenarioClassification (cym-Latn)": 30.38,
+                    "MassiveScenarioClassification (jav-Latn)": 33.94,
+                    "MassiveScenarioClassification (mon-Cyrl)": 41.53,
+                    "MassiveScenarioClassification (en)": 65.43,
+                    "MassiveScenarioClassification (msa-Latn)": 36.28,
+                    "MassiveScenarioClassification (nob-Latn)": 42.43,
+                    "MassiveScenarioClassification (heb-Hebr)": 8.64,
+                    "MassiveScenarioClassification (khm-Khmr)": 9.99,
+                    "MassiveScenarioClassification (nld-Latn)": 41.47,
+                    "MassiveScenarioClassification (ind-Latn)": 39.05,
+                    "MassiveScenarioClassification (mal-Mlym)": 7.24,
+                    "MassiveScenarioClassification (tam-Taml)": 7.71,
+                    "MassiveScenarioClassification (mya-Mymr)": 9.94,
+                    "MassiveScenarioClassification (urd-Arab)": 9.16,
+                    "MassiveScenarioClassification (dan-Latn)": 44.69,
+                    "MassiveScenarioClassification (cmo-Hant)": 10.48,
+                    "MassiveScenarioClassification (ron-Latn)": 44.55,
+                    "MassiveScenarioClassification (lav-Latn)": 26.26,
+                    "MassiveScenarioClassification (fra-Latn)": 45.08,
+                    "MassiveScenarioClassification (ell-Grek)": 19.46,
+                    "MassiveScenarioClassification (sqi-Latn)": 40.9,
+                    "MassiveScenarioClassification (hun-Latn)": 33.92,
+                    "MassiveScenarioClassification (kor-Kore)": 7.37,
+                    "MassiveScenarioClassification (ara-Arab)": 12.43,
+                    "RuReviewsClassification (rus-Cyrl)": 58.01,
+                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 52.8,
+                    "RuSciBenchOECDClassification (rus-Cyrl)": 40.36
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "komninos",
-                    "ArxivClusteringP2P": 34.73,
-                    "ArxivClusteringS2S": 26.01,
-                    "BiorxivClusteringP2P": 29.76,
-                    "BiorxivClusteringS2S": 20.71,
-                    "BlurbsClusteringP2P": 11.37,
-                    "BlurbsClusteringS2S": 8.01,
-                    "MedrxivClusteringP2P": 26.65,
-                    "MedrxivClusteringS2S": 21.5,
-                    "RedditClustering": 28.84,
-                    "RedditClusteringP2P": 7.37,
-                    "StackExchangeClustering": 39.04,
-                    "StackExchangeClusteringP2P": 30.23,
-                    "TenKGnadClusteringP2P": 15.89,
-                    "TenKGnadClusteringS2S": 4.84,
-                    "TwentyNewsgroupsClustering": 27.42
+                    "Model": "LaBSE-en-ru",
+                    "GeoreviewClusteringP2P (rus-Cyrl)": 51.89,
+                    "MLSUMClusteringP2P (rus-Cyrl)": 37.87,
+                    "MLSUMClusteringS2S (rus-Cyrl)": 41.24,
+                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 47.48,
+                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 41.16
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "komninos",
-                    "SprintDuplicateQuestions": 85.55,
-                    "TwitterSemEval2015": 53.85,
-                    "TwitterURLCorpus": 79.41
+                    "Model": "LaBSE-en-ru",
+                    "OpusparcusPC (rus-Cyrl)": 87.18,
+                    "TERRa (rus-Cyrl)": 55.61
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "komninos",
-                    "AskUbuntuDupQuestions": 50.88,
-                    "MindSmallReranking": 28.92,
-                    "SciDocsRR": 63.55,
-                    "StackOverflowDupQuestions": 35.65
+                    "Model": "LaBSE-en-ru",
+                    "RuBQReranking (rus-Cyrl)": 54.83
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "komninos",
-                    "ArguAna": 30.96,
-                    "CQADupstackRetrieval": 16.79,
-                    "ClimateFEVER": 14.87,
-                    "DBPedia": 15.88,
-                    "FEVER": 15.56,
-                    "FiQA2018": 10.49,
-                    "HotpotQA": 20.77,
-                    "MSMARCO": 9.75,
-                    "NFCorpus": 11.79,
-                    "NQ": 12.75,
-                    "QuoraRetrieval": 71.57,
-                    "SCIDOCS": 8.47,
-                    "SciFact": 29.53,
-                    "TRECCOVID": 35.92,
-                    "Touche2020": 13.17
+                    "Model": "LaBSE-en-ru",
+                    "RiaNewsRetrieval (rus-Cyrl)": 34.73,
+                    "RuBQRetrieval (rus-Cyrl)": 29.03
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "komninos",
-                    "BIOSSES": 50.25,
-                    "SICK-R": 55.49,
-                    "STS12": 53.51,
-                    "STS13": 70.8,
-                    "STS14": 63.56,
-                    "STS15": 74.08,
-                    "STS16": 64.6,
-                    "STS17 (ar-ar)": 13.78,
-                    "STS17 (en-ar)": 9.08,
-                    "STS17 (en-de)": -3.11,
-                    "STS17 (en-en)": 76.91,
-                    "STS17 (en-tr)": -0.45,
-                    "STS17 (es-en)": -8.18,
-                    "STS17 (es-es)": 48.23,
-                    "STS17 (fr-en)": 5.81,
-                    "STS17 (it-en)": 3.64,
-                    "STS17 (ko-ko)": 2.54,
-                    "STS17 (nl-en)": 0.44,
-                    "STS22 (ar)": 32.42,
-                    "STS22 (de)": 33.04,
-                    "STS22 (de-en)": 28.65,
-                    "STS22 (de-fr)": 14.77,
-                    "STS22 (de-pl)": 11.21,
-                    "STS22 (en)": 53.89,
-                    "STS22 (es)": 48.53,
-                    "STS22 (es-en)": 26.97,
-                    "STS22 (es-it)": 41.1,
-                    "STS22 (fr)": 49.43,
-                    "STS22 (fr-pl)": 39.44,
-                    "STS22 (it)": 57.77,
-                    "STS22 (pl)": 12.47,
-                    "STS22 (pl-en)": 45.55,
-                    "STS22 (ru)": 19.44,
-                    "STS22 (tr)": 47.38,
-                    "STS22 (zh)": 4.78,
-                    "STS22 (zh-en)": 14.05,
-                    "STSBenchmark": 61.55
+                    "Model": "LaBSE-en-ru",
+                    "RUParaPhraserSTS (rus-Cyrl)": 65.87,
+                    "RuSTSBenchmarkSTS (rus-Cyrl)": 73.32,
+                    "STS22 (deu-Latn)": 38.9,
+                    "STS22 (en)": 59.47,
+                    "STS22 (pol-Latn_eng-Latn)": 58.73,
+                    "STS22 (spa-Latn)": 60.85,
+                    "STS22 (fra-Latn)": 74.98,
+                    "STS22 (deu-Latn_eng-Latn)": 47.98,
+                    "STS22 (deu-Latn_fra-Latn)": 59.4,
+                    "STS22 (deu-Latn_pol-Latn)": 39.48,
+                    "STS22 (pol-Latn)": 32.74,
+                    "STS22 (tur-Latn)": 55.04,
+                    "STS22 (spa-Latn_eng-Latn)": 70.8,
+                    "STS22 (rus-Cyrl)": 58.53,
+                    "STS22 (ita-Latn)": 68.58,
+                    "STS22 (fra-Latn_pol-Latn)": 61.98,
+                    "STS22 (spa-Latn_ita-Latn)": 66.83,
+                    "STS22 (cmn-Hans_eng-Latn)": 24.98,
+                    "STS22 (ara-Arab)": 31.85,
+                    "STS22 (cmn-Hans)": 35.1,
+                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 73.02
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "komninos",
-                    "SummEval": 30.49
+                    "Model": "LaBSE-en-ru"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "LaBSE-en-ru",
+                    "CEDRClassification (rus-Cyrl)": 40.75,
+                    "SensitiveTopicsClassification (rus-Cyrl)": 21.79
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "komninos"
+                    "Model": "LaBSE-en-ru"
                 }
             ]
         }
     },
-    "voyage-lite-01-instruct": {
+    "Cohere-embed-multilingual-v3.0": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "voyage-lite-01-instruct"
+                    "Model": "Cohere-embed-multilingual-v3.0"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "voyage-lite-01-instruct",
-                    "AmazonCounterfactualClassification (en)": 71.43,
-                    "AmazonPolarityClassification": 96.41,
-                    "AmazonReviewsClassification (en)": 57.06,
-                    "Banking77Classification": 81.64,
-                    "EmotionClassification": 48.29,
-                    "ImdbClassification": 95.49,
-                    "MTOPDomainClassification (en)": 96.3,
-                    "MTOPIntentClassification (en)": 67.93,
-                    "MassiveIntentClassification (en)": 71.29,
-                    "MassiveScenarioClassification (en)": 76.74,
-                    "ToxicConversationsClassification": 75.45,
-                    "TweetSentimentExtractionClassification": 59.44
+                    "Model": "Cohere-embed-multilingual-v3.0",
+                    "AmazonReviewsClassification (fr)": 41.89,
+                    "MTOPDomainClassification (fr)": 86.23,
+                    "MTOPIntentClassification (fr)": 61.07,
+                    "MasakhaNEWSClassification (fra)": 83.06,
+                    "MassiveIntentClassification (fr)": 62.94,
+                    "MassiveScenarioClassification (fr)": 67.29
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "voyage-lite-01-instruct",
-                    "ArxivClusteringP2P": 47.92,
-                    "ArxivClusteringS2S": 42.42,
-                    "BiorxivClusteringP2P": 38.72,
-                    "BiorxivClusteringS2S": 36.6,
-                    "MedrxivClusteringP2P": 34.04,
-                    "MedrxivClusteringS2S": 32.81,
-                    "RedditClustering": 61.56,
-                    "RedditClusteringP2P": 65.35,
-                    "StackExchangeClustering": 70.16,
-                    "StackExchangeClusteringP2P": 38.23,
-                    "TwentyNewsgroupsClustering": 53.56
+                    "Model": "Cohere-embed-multilingual-v3.0",
+                    "AlloProfClusteringP2P": 63.53,
+                    "AlloProfClusteringS2S": 36.18,
+                    "HALClusteringS2S": 19.9,
+                    "MLSUMClusteringP2P": 45.08,
+                    "MLSUMClusteringS2S": 34.75,
+                    "MasakhaNEWSClusteringP2P (fra)": 53.18,
+                    "MasakhaNEWSClusteringS2S (fra)": 32.31
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "voyage-lite-01-instruct",
-                    "SprintDuplicateQuestions": 96.01,
-                    "TwitterSemEval2015": 76.87,
-                    "TwitterURLCorpus": 86.84
+                    "Model": "Cohere-embed-multilingual-v3.0",
+                    "OpusparcusPC (fr)": 94.08,
+                    "PawsXPairClassification (fr)": 61.26
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "voyage-lite-01-instruct",
-                    "AskUbuntuDupQuestions": 65.77,
-                    "MindSmallReranking": 31.69,
-                    "SciDocsRR": 87.03,
-                    "StackOverflowDupQuestions": 54.49
+                    "Model": "Cohere-embed-multilingual-v3.0",
+                    "AlloprofReranking": 51.01,
+                    "SyntecReranking": 85.72
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "voyage-lite-01-instruct",
-                    "ArguAna": 58.73,
-                    "CQADupstackRetrieval": 45.11,
-                    "ClimateFEVER": 37.47,
-                    "DBPedia": 43.42,
-                    "FEVER": 89.71,
-                    "FiQA2018": 44.79,
-                    "HotpotQA": 70.46,
-                    "MSMARCO": 39.66,
-                    "NFCorpus": 43.33,
-                    "NQ": 60.65,
-                    "QuoraRetrieval": 87.83,
-                    "SCIDOCS": 23.19,
-                    "SciFact": 73.64,
-                    "TRECCOVID": 78.92,
-                    "Touche2020": 36.83
+                    "Model": "Cohere-embed-multilingual-v3.0",
+                    "AlloprofRetrieval": 38.36,
+                    "BSARDRetrieval": 0.14,
+                    "MintakaRetrieval (fr)": 25.44,
+                    "SyntecRetrieval": 79.27,
+                    "XPQARetrieval (fr)": 58.87
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "voyage-lite-01-instruct",
-                    "BIOSSES": 84.85,
-                    "SICK-R": 79.71,
-                    "STS12": 77.09,
-                    "STS13": 88.91,
-                    "STS14": 82.08,
-                    "STS15": 89.21,
-                    "STS16": 84.74,
-                    "STS17 (en-en)": 90.73,
-                    "STS22 (en)": 62.1,
-                    "STSBenchmark": 89.86
+                    "Model": "Cohere-embed-multilingual-v3.0",
+                    "SICKFr": 79.23,
+                    "STS22 (fr)": 82.76,
+                    "STSBenchmarkMultilingualSTS (fr)": 81.84
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "voyage-lite-01-instruct",
-                    "SummEval": 30.97
+                    "Model": "Cohere-embed-multilingual-v3.0",
+                    "SummEvalFr": 31.26
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "Cohere-embed-multilingual-v3.0"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "voyage-lite-01-instruct"
+                    "Model": "Cohere-embed-multilingual-v3.0"
                 }
             ]
         }
     },
-    "bm25": {
+    "USER-base": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "bm25"
+                    "Model": "USER-base",
+                    "Tatoeba (rus-Cyrl_eng-Latn)": 90.2
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "bm25"
+                    "Model": "USER-base",
+                    "GeoreviewClassification (rus-Cyrl)": 47.23,
+                    "HeadlineClassification (rus-Cyrl)": 74.88,
+                    "InappropriatenessClassification (rus-Cyrl)": 61.94,
+                    "KinopoiskClassification (rus-Cyrl)": 55.69,
+                    "MassiveIntentClassification (rus-Cyrl)": 65.57,
+                    "MassiveScenarioClassification (rus-Cyrl)": 68.33,
+                    "RuReviewsClassification (rus-Cyrl)": 66.44,
+                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 55.55,
+                    "RuSciBenchOECDClassification (rus-Cyrl)": 43.28
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "bm25"
+                    "Model": "USER-base",
+                    "GeoreviewClusteringP2P (rus-Cyrl)": 64.16,
+                    "MLSUMClusteringP2P (rus-Cyrl)": 48.09,
+                    "MLSUMClusteringS2S (rus-Cyrl)": 45.73,
+                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 51.38,
+                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 44.73
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "bm25"
+                    "Model": "USER-base",
+                    "OpusparcusPC (rus-Cyrl)": 91.65,
+                    "TERRa (rus-Cyrl)": 60.02
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "bm25"
+                    "Model": "USER-base",
+                    "RuBQReranking (rus-Cyrl)": 64.42
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "bm25",
-                    "BrightRetrieval (robotics)": 13.53,
-                    "BrightRetrieval (pony)": 7.93,
-                    "BrightRetrieval (leetcode)": 24.37,
-                    "BrightRetrieval (earth_science)": 27.06,
-                    "BrightRetrieval (stackoverflow)": 16.55,
-                    "BrightRetrieval (economics)": 14.87,
-                    "BrightRetrieval (theoremqa_questions)": 9.78,
-                    "BrightRetrieval (theoremqa_theorems)": 4.25,
-                    "BrightRetrieval (psychology)": 12.51,
-                    "BrightRetrieval (sustainable_living)": 15.22,
-                    "BrightRetrieval (biology)": 19.19,
-                    "BrightRetrieval (aops)": 6.2
+                    "Model": "USER-base",
+                    "RiaNewsRetrieval (rus-Cyrl)": 77.83,
+                    "RuBQRetrieval (rus-Cyrl)": 56.86
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "bm25"
+                    "Model": "USER-base",
+                    "RUParaPhraserSTS (rus-Cyrl)": 73.56,
+                    "RuSTSBenchmarkSTS (rus-Cyrl)": 82.26,
+                    "STS22 (rus-Cyrl)": 63.39,
+                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 81.81
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "bm25"
+                    "Model": "USER-base"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "USER-base",
+                    "CEDRClassification (rus-Cyrl)": 46.47,
+                    "SensitiveTopicsClassification (rus-Cyrl)": 27.5
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "bm25",
-                    "Core17InstructionRetrieval": -1.06,
-                    "News21InstructionRetrieval": -2.15,
-                    "Robust04InstructionRetrieval": -3.06
+                    "Model": "USER-base"
                 }
             ]
         }
     },
-    "nomic-embed-text-v1.5-256": {
+    "gtr-t5-xxl": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "nomic-embed-text-v1.5-256"
+                    "Model": "gtr-t5-xxl"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "nomic-embed-text-v1.5-256",
-                    "AmazonCounterfactualClassification (en)": 72.94,
-                    "AmazonPolarityClassification": 91.35,
-                    "AmazonReviewsClassification (en)": 45.73,
-                    "Banking77Classification": 83.69,
-                    "EmotionClassification": 45.88,
-                    "ImdbClassification": 83.99,
-                    "MTOPDomainClassification (en)": 91.68,
-                    "MTOPIntentClassification (en)": 72.47,
-                    "MassiveIntentClassification (en)": 71.76,
-                    "MassiveScenarioClassification (en)": 75.67,
-                    "ToxicConversationsClassification": 70.87,
-                    "TweetSentimentExtractionClassification": 59.2
+                    "Model": "gtr-t5-xxl",
+                    "AmazonCounterfactualClassification (en)": 67.3,
+                    "AmazonPolarityClassification": 75.05,
+                    "AmazonReviewsClassification (en)": 37.3,
+                    "Banking77Classification": 82.32,
+                    "EmotionClassification": 43.19,
+                    "ImdbClassification": 70.8,
+                    "MTOPDomainClassification (en)": 93.84,
+                    "MTOPIntentClassification (en)": 67.71,
+                    "MassiveIntentClassification (en)": 70.61,
+                    "MassiveScenarioClassification (en)": 77.77,
+                    "ToxicConversationsClassification": 68.48,
+                    "TweetSentimentExtractionClassification": 54.54
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "nomic-embed-text-v1.5-256",
-                    "ArxivClusteringP2P": 44.82,
-                    "ArxivClusteringS2S": 35.32,
-                    "BiorxivClusteringP2P": 38.19,
-                    "BiorxivClusteringS2S": 31.83,
-                    "MedrxivClusteringP2P": 34.08,
-                    "MedrxivClusteringS2S": 30.98,
-                    "RedditClustering": 54.92,
-                    "RedditClusteringP2P": 60.23,
-                    "StackExchangeClustering": 61.81,
-                    "StackExchangeClusteringP2P": 34.03,
-                    "TwentyNewsgroupsClustering": 48.56
+                    "Model": "gtr-t5-xxl",
+                    "ArxivClusteringP2P": 37.9,
+                    "ArxivClusteringS2S": 32.39,
+                    "BiorxivClusteringP2P": 30.48,
+                    "BiorxivClusteringS2S": 27.5,
+                    "MedrxivClusteringP2P": 29.12,
+                    "MedrxivClusteringS2S": 27.56,
+                    "RedditClustering": 64.13,
+                    "RedditClusteringP2P": 62.84,
+                    "StackExchangeClustering": 71.43,
+                    "StackExchangeClusteringP2P": 32.85,
+                    "TwentyNewsgroupsClustering": 50.44
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "nomic-embed-text-v1.5-256",
-                    "SprintDuplicateQuestions": 92.31,
-                    "TwitterSemEval2015": 73.61,
-                    "TwitterURLCorpus": 86.34
+                    "Model": "gtr-t5-xxl",
+                    "SprintDuplicateQuestions": 95.68,
+                    "TwitterSemEval2015": 77.54,
+                    "TwitterURLCorpus": 85.13
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "nomic-embed-text-v1.5-256",
-                    "AskUbuntuDupQuestions": 61.34,
-                    "MindSmallReranking": 30.04,
-                    "SciDocsRR": 79.4,
-                    "StackOverflowDupQuestions": 49.95
+                    "Model": "gtr-t5-xxl",
+                    "AskUbuntuDupQuestions": 63.23,
+                    "MindSmallReranking": 31.93,
+                    "SciDocsRR": 77.96,
+                    "StackOverflowDupQuestions": 53.5
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "nomic-embed-text-v1.5-256",
-                    "ArguAna": 45.44,
-                    "CQADupstackRetrieval": 37.61,
-                    "ClimateFEVER": 39.63,
-                    "DBPedia": 39.42,
-                    "FEVER": 84.4,
-                    "FiQA2018": 35.0,
-                    "HotpotQA": 67.78,
-                    "MSMARCO": 41.38,
-                    "NFCorpus": 32.54,
-                    "NQ": 57.1,
-                    "QuoraRetrieval": 87.65,
-                    "SCIDOCS": 16.76,
-                    "SciFact": 68.24,
-                    "TRECCOVID": 80.65,
-                    "Touche2020": 28.49
+                    "Model": "gtr-t5-xxl",
+                    "ArguAna": 53.77,
+                    "CQADupstackRetrieval": 38.56,
+                    "ClimateFEVER": 27.21,
+                    "DBPedia": 41.28,
+                    "FEVER": 74.08,
+                    "FiQA2018": 46.78,
+                    "HotpotQA": 59.67,
+                    "MSMARCO": 44.05,
+                    "NFCorpus": 34.18,
+                    "NQ": 57.24,
+                    "QuoraRetrieval": 89.09,
+                    "SCIDOCS": 15.88,
+                    "SciFact": 66.77,
+                    "TRECCOVID": 51.9,
+                    "Touche2020": 26.76
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "nomic-embed-text-v1.5-256",
-                    "BIOSSES": 81.58,
-                    "SICK-R": 79.24,
-                    "STS12": 78.16,
-                    "STS13": 86.01,
-                    "STS14": 81.25,
-                    "STS15": 86.51,
-                    "STS16": 84.24,
-                    "STS17 (en-en)": 86.44,
-                    "STS22 (en)": 65.14,
-                    "STSBenchmark": 84.8
+                    "Model": "gtr-t5-xxl",
+                    "BIOSSES": 81.91,
+                    "SICK-R": 74.29,
+                    "STS12": 70.12,
+                    "STS13": 82.72,
+                    "STS14": 78.24,
+                    "STS15": 86.26,
+                    "STS16": 81.61,
+                    "STS17 (en-en)": 85.18,
+                    "STS22 (en)": 65.76,
+                    "STSBenchmark": 77.73
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "nomic-embed-text-v1.5-256",
-                    "SummEval": 30.05
+                    "Model": "gtr-t5-xxl",
+                    "SummEval": 30.64
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "gtr-t5-xxl"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "nomic-embed-text-v1.5-256"
+                    "Model": "gtr-t5-xxl"
                 }
             ]
         }
     },
-    "e5-mistral-7b-instruct-noinstruct": {
+    "nomic-embed-text-v1": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "e5-mistral-7b-instruct-noinstruct"
+                    "Model": "nomic-embed-text-v1"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "e5-mistral-7b-instruct-noinstruct"
+                    "Model": "nomic-embed-text-v1"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "e5-mistral-7b-instruct-noinstruct"
+                    "Model": "nomic-embed-text-v1"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "e5-mistral-7b-instruct-noinstruct"
+                    "Model": "nomic-embed-text-v1"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "e5-mistral-7b-instruct-noinstruct"
+                    "Model": "nomic-embed-text-v1"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "e5-mistral-7b-instruct-noinstruct",
-                    "ARCChallenge": 20.48,
-                    "AlphaNLI": 18.88,
-                    "HellaSwag": 32.25,
-                    "PIQA": 32.8,
-                    "Quail": 6.25,
-                    "RARbCode": 79.84,
-                    "RARbMath": 76.19,
-                    "SIQA": 5.08,
-                    "SpartQA": 10.87,
-                    "TempReasonL1": 3.04,
-                    "TempReasonL2Fact": 35.63,
-                    "TempReasonL2Pure": 9.32,
-                    "TempReasonL3Fact": 30.41,
-                    "TempReasonL3Pure": 14.39,
-                    "WinoGrande": 45.18
+                    "Model": "nomic-embed-text-v1",
+                    "LEMBNarrativeQARetrieval": 41.23,
+                    "LEMBNeedleRetrieval": 39.5,
+                    "LEMBPasskeyRetrieval": 44.75,
+                    "LEMBQMSumRetrieval": 36.65,
+                    "LEMBSummScreenFDRetrieval": 92.97,
+                    "LEMBWikimQARetrieval": 73.75
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "e5-mistral-7b-instruct-noinstruct"
+                    "Model": "nomic-embed-text-v1"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "e5-mistral-7b-instruct-noinstruct"
+                    "Model": "nomic-embed-text-v1"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "nomic-embed-text-v1"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "e5-mistral-7b-instruct-noinstruct"
+                    "Model": "nomic-embed-text-v1"
                 }
             ]
         }
     },
-    "e5-base-4k": {
+    "elser-v2": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "e5-base-4k"
+                    "Model": "elser-v2"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "e5-base-4k"
+                    "Model": "elser-v2",
+                    "AmazonCounterfactualClassification (en)": 74.16,
+                    "AmazonPolarityClassification": 61.91,
+                    "AmazonReviewsClassification (en)": 32.06,
+                    "Banking77Classification": 82.05,
+                    "EmotionClassification": 46.65,
+                    "ImdbClassification": 65.02,
+                    "MTOPDomainClassification (en)": 93.17,
+                    "MTOPIntentClassification (en)": 71.1,
+                    "MassiveIntentClassification (en)": 68.48,
+                    "MassiveScenarioClassification (en)": 74.98,
+                    "ToxicConversationsClassification": 68.15,
+                    "TweetSentimentExtractionClassification": 53.57
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "e5-base-4k"
+                    "Model": "elser-v2",
+                    "ArxivClusteringP2P": 35.27,
+                    "ArxivClusteringS2S": 23.18,
+                    "BiorxivClusteringP2P": 31.13,
+                    "BiorxivClusteringS2S": 26.78,
+                    "MedrxivClusteringP2P": 24.65,
+                    "MedrxivClusteringS2S": 24.21,
+                    "RedditClustering": 38.74,
+                    "RedditClusteringP2P": 51.92,
+                    "StackExchangeClustering": 42.7,
+                    "StackExchangeClusteringP2P": 28.7,
+                    "TwentyNewsgroupsClustering": 27.82
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "e5-base-4k"
+                    "Model": "elser-v2",
+                    "SprintDuplicateQuestions": 94.53,
+                    "TwitterSemEval2015": 64.41,
+                    "TwitterURLCorpus": 85.01
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "e5-base-4k"
+                    "Model": "elser-v2",
+                    "AskUbuntuDupQuestions": 58.31,
+                    "MindSmallReranking": 30.75,
+                    "SciDocsRR": 75.62,
+                    "StackOverflowDupQuestions": 48.4
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "e5-base-4k",
-                    "LEMBNarrativeQARetrieval": 30.35,
-                    "LEMBNeedleRetrieval": 41.5,
-                    "LEMBPasskeyRetrieval": 67.25,
-                    "LEMBQMSumRetrieval": 35.6,
-                    "LEMBSummScreenFDRetrieval": 95.23,
-                    "LEMBWikimQARetrieval": 69.19
+                    "Model": "elser-v2",
+                    "ArguAna": 55.98,
+                    "CQADupstackRetrieval": 34.27,
+                    "ClimateFEVER": 27.08,
+                    "DBPedia": 42.7,
+                    "FEVER": 78.55,
+                    "FiQA2018": 41.57,
+                    "HotpotQA": 67.01,
+                    "MSMARCO": 38.9,
+                    "NFCorpus": 36.66,
+                    "NQ": 55.84,
+                    "QuoraRetrieval": 84.69,
+                    "SCIDOCS": 16.24,
+                    "SciFact": 71.8,
+                    "TRECCOVID": 72.72,
+                    "Touche2020": 26.27
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "e5-base-4k"
+                    "Model": "elser-v2",
+                    "BIOSSES": 83.79,
+                    "SICK-R": 68.78,
+                    "STS12": 64.81,
+                    "STS13": 80.1,
+                    "STS14": 74.96,
+                    "STS15": 83.7,
+                    "STS16": 80.55,
+                    "STS17 (en-en)": 85.74,
+                    "STS22 (en)": 67.5,
+                    "STSBenchmark": 79.54
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "e5-base-4k"
+                    "Model": "elser-v2",
+                    "SummEval": 31.03
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "elser-v2"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "e5-base-4k"
+                    "Model": "elser-v2"
                 }
             ]
         }
     },
-    "sentence-t5-xl": {
+    "rubert-tiny": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "sentence-t5-xl",
-                    "BUCC (de-en)": 95.04,
-                    "BUCC (fr-en)": 94.96,
-                    "BUCC (ru-en)": 8.33,
-                    "BUCC (zh-en)": 1.3,
-                    "Tatoeba (afr-eng)": 41.84,
-                    "Tatoeba (amh-eng)": 0.03,
-                    "Tatoeba (ang-eng)": 37.87,
-                    "Tatoeba (ara-eng)": 0.61,
-                    "Tatoeba (arq-eng)": 0.74,
-                    "Tatoeba (arz-eng)": 0.42,
-                    "Tatoeba (ast-eng)": 65.41,
-                    "Tatoeba (awa-eng)": 1.46,
-                    "Tatoeba (aze-eng)": 8.79,
-                    "Tatoeba (bel-eng)": 5.76,
-                    "Tatoeba (ben-eng)": 0.01,
-                    "Tatoeba (ber-eng)": 5.92,
-                    "Tatoeba (bos-eng)": 16.12,
-                    "Tatoeba (bre-eng)": 6.12,
-                    "Tatoeba (bul-eng)": 9.06,
-                    "Tatoeba (cat-eng)": 57.4,
-                    "Tatoeba (cbk-eng)": 57.68,
-                    "Tatoeba (ceb-eng)": 12.56,
-                    "Tatoeba (ces-eng)": 9.47,
-                    "Tatoeba (cha-eng)": 27.13,
-                    "Tatoeba (cmn-eng)": 1.82,
-                    "Tatoeba (cor-eng)": 3.87,
-                    "Tatoeba (csb-eng)": 14.41,
-                    "Tatoeba (cym-eng)": 6.69,
-                    "Tatoeba (dan-eng)": 54.87,
-                    "Tatoeba (deu-eng)": 93.72,
-                    "Tatoeba (dsb-eng)": 14.74,
-                    "Tatoeba (dtp-eng)": 5.84,
-                    "Tatoeba (ell-eng)": 0.6,
-                    "Tatoeba (epo-eng)": 30.8,
-                    "Tatoeba (est-eng)": 5.39,
-                    "Tatoeba (eus-eng)": 11.9,
-                    "Tatoeba (fao-eng)": 28.08,
-                    "Tatoeba (fin-eng)": 6.81,
-                    "Tatoeba (fra-eng)": 85.29,
-                    "Tatoeba (fry-eng)": 38.68,
-                    "Tatoeba (gla-eng)": 2.96,
-                    "Tatoeba (gle-eng)": 3.74,
-                    "Tatoeba (glg-eng)": 70.0,
-                    "Tatoeba (gsw-eng)": 30.49,
-                    "Tatoeba (heb-eng)": 0.87,
-                    "Tatoeba (hin-eng)": 0.1,
-                    "Tatoeba (hrv-eng)": 17.43,
-                    "Tatoeba (hsb-eng)": 14.69,
-                    "Tatoeba (hun-eng)": 7.28,
-                    "Tatoeba (hye-eng)": 0.77,
-                    "Tatoeba (ido-eng)": 46.65,
-                    "Tatoeba (ile-eng)": 59.43,
-                    "Tatoeba (ina-eng)": 82.71,
-                    "Tatoeba (ind-eng)": 37.26,
-                    "Tatoeba (isl-eng)": 11.21,
-                    "Tatoeba (ita-eng)": 79.77,
-                    "Tatoeba (jav-eng)": 7.81,
-                    "Tatoeba (jpn-eng)": 0.91,
-                    "Tatoeba (kab-eng)": 2.23,
-                    "Tatoeba (kat-eng)": 1.48,
-                    "Tatoeba (kaz-eng)": 1.77,
-                    "Tatoeba (khm-eng)": 0.38,
-                    "Tatoeba (kor-eng)": 1.96,
-                    "Tatoeba (kur-eng)": 12.11,
-                    "Tatoeba (kzj-eng)": 6.13,
-                    "Tatoeba (lat-eng)": 27.84,
-                    "Tatoeba (lfn-eng)": 45.89,
-                    "Tatoeba (lit-eng)": 5.94,
-                    "Tatoeba (lvs-eng)": 8.11,
-                    "Tatoeba (mal-eng)": 0.59,
-                    "Tatoeba (mar-eng)": 0.03,
-                    "Tatoeba (max-eng)": 21.7,
-                    "Tatoeba (mhr-eng)": 0.68,
-                    "Tatoeba (mkd-eng)": 5.92,
-                    "Tatoeba (mon-eng)": 2.39,
-                    "Tatoeba (nds-eng)": 45.04,
-                    "Tatoeba (nld-eng)": 64.75,
-                    "Tatoeba (nno-eng)": 36.74,
-                    "Tatoeba (nob-eng)": 54.77,
-                    "Tatoeba (nov-eng)": 57.12,
-                    "Tatoeba (oci-eng)": 34.39,
-                    "Tatoeba (orv-eng)": 2.04,
-                    "Tatoeba (pam-eng)": 8.34,
-                    "Tatoeba (pes-eng)": 0.87,
-                    "Tatoeba (pms-eng)": 38.06,
-                    "Tatoeba (pol-eng)": 28.35,
-                    "Tatoeba (por-eng)": 83.61,
-                    "Tatoeba (ron-eng)": 65.27,
-                    "Tatoeba (rus-eng)": 30.42,
-                    "Tatoeba (slk-eng)": 13.19,
-                    "Tatoeba (slv-eng)": 13.49,
-                    "Tatoeba (spa-eng)": 89.18,
-                    "Tatoeba (sqi-eng)": 14.66,
-                    "Tatoeba (srp-eng)": 13.24,
-                    "Tatoeba (swe-eng)": 60.67,
-                    "Tatoeba (swg-eng)": 34.76,
-                    "Tatoeba (swh-eng)": 8.07,
-                    "Tatoeba (tam-eng)": 0.36,
-                    "Tatoeba (tat-eng)": 1.46,
-                    "Tatoeba (tel-eng)": 0.67,
-                    "Tatoeba (tgl-eng)": 25.22,
-                    "Tatoeba (tha-eng)": 1.58,
-                    "Tatoeba (tuk-eng)": 4.99,
-                    "Tatoeba (tur-eng)": 7.72,
-                    "Tatoeba (tzl-eng)": 38.49,
-                    "Tatoeba (uig-eng)": 0.87,
-                    "Tatoeba (ukr-eng)": 9.12,
-                    "Tatoeba (urd-eng)": 0.0,
-                    "Tatoeba (uzb-eng)": 5.48,
-                    "Tatoeba (vie-eng)": 8.45,
-                    "Tatoeba (war-eng)": 13.75,
-                    "Tatoeba (wuu-eng)": 1.44,
-                    "Tatoeba (xho-eng)": 9.15,
-                    "Tatoeba (yid-eng)": 0.28,
-                    "Tatoeba (yue-eng)": 0.98,
-                    "Tatoeba (zsm-eng)": 35.71
+                    "Model": "rubert-tiny"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "rubert-tiny",
+                    "GeoreviewClassification (rus-Cyrl)": 33.45,
+                    "HeadlineClassification (rus-Cyrl)": 57.65,
+                    "InappropriatenessClassification (rus-Cyrl)": 54.5,
+                    "KinopoiskClassification (rus-Cyrl)": 41.36,
+                    "MassiveIntentClassification (rus-Cyrl)": 50.1,
+                    "MassiveScenarioClassification (rus-Cyrl)": 52.15,
+                    "RuReviewsClassification (rus-Cyrl)": 49.56,
+                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 35.71,
+                    "RuSciBenchOECDClassification (rus-Cyrl)": 26.51
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "rubert-tiny",
+                    "GeoreviewClusteringP2P (rus-Cyrl)": 34.4,
+                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 29.89,
+                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 27.98
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "rubert-tiny",
+                    "TERRa (rus-Cyrl)": 51.06
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "rubert-tiny",
+                    "RuBQReranking (rus-Cyrl)": 35.44
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "rubert-tiny",
+                    "RiaNewsRetrieval (rus-Cyrl)": 0.79,
+                    "RuBQRetrieval (rus-Cyrl)": 3.24
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "rubert-tiny",
+                    "RUParaPhraserSTS (rus-Cyrl)": 53.41,
+                    "RuSTSBenchmarkSTS (rus-Cyrl)": 58.16,
+                    "STS22 (rus-Cyrl)": 47.88
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "rubert-tiny"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "rubert-tiny",
+                    "CEDRClassification (rus-Cyrl)": 37.39,
+                    "SensitiveTopicsClassification (rus-Cyrl)": 18.54
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "rubert-tiny"
+                }
+            ]
+        }
+    },
+    "bert-base-10lang-cased": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "bert-base-10lang-cased"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "sentence-t5-xl",
-                    "AmazonCounterfactualClassification (de)": 67.01,
-                    "AmazonCounterfactualClassification (en)": 76.01,
-                    "AmazonCounterfactualClassification (en-ext)": 77.29,
-                    "AmazonCounterfactualClassification (ja)": 45.61,
-                    "AmazonPolarityClassification": 93.17,
-                    "AmazonReviewsClassification (de)": 44.05,
-                    "AmazonReviewsClassification (en)": 48.18,
-                    "AmazonReviewsClassification (es)": 45.01,
-                    "AmazonReviewsClassification (fr)": 43.52,
-                    "AmazonReviewsClassification (ja)": 22.23,
-                    "AmazonReviewsClassification (zh)": 21.88,
-                    "Banking77Classification": 80.88,
-                    "EmotionClassification": 51.95,
-                    "ImdbClassification": 87.54,
-                    "MTOPDomainClassification (de)": 83.28,
-                    "MTOPDomainClassification (en)": 90.73,
-                    "MTOPDomainClassification (es)": 85.32,
-                    "MTOPDomainClassification (fr)": 85.14,
-                    "MTOPDomainClassification (hi)": 20.85,
-                    "MTOPDomainClassification (th)": 15.62,
-                    "MTOPIntentClassification (de)": 54.65,
-                    "MTOPIntentClassification (en)": 68.15,
-                    "MTOPIntentClassification (es)": 57.38,
-                    "MTOPIntentClassification (fr)": 54.39,
-                    "MTOPIntentClassification (hi)": 3.28,
-                    "MTOPIntentClassification (th)": 5.08,
-                    "MasakhaNEWSClassification (fra)": 80.09,
-                    "MassiveIntentClassification (af)": 40.17,
-                    "MassiveIntentClassification (am)": 2.18,
-                    "MassiveIntentClassification (ar)": 4.18,
-                    "MassiveIntentClassification (az)": 30.02,
-                    "MassiveIntentClassification (bn)": 2.6,
-                    "MassiveIntentClassification (cy)": 29.15,
-                    "MassiveIntentClassification (da)": 47.69,
-                    "MassiveIntentClassification (de)": 57.43,
-                    "MassiveIntentClassification (el)": 9.96,
-                    "MassiveIntentClassification (en)": 72.09,
-                    "MassiveIntentClassification (es)": 57.97,
-                    "MassiveIntentClassification (fa)": 3.6,
-                    "MassiveIntentClassification (fi)": 34.02,
-                    "MassiveIntentClassification (fr)": 60.99,
-                    "MassiveIntentClassification (he)": 2.51,
-                    "MassiveIntentClassification (hi)": 3.02,
-                    "MassiveIntentClassification (hu)": 31.66,
-                    "MassiveIntentClassification (hy)": 3.32,
-                    "MassiveIntentClassification (id)": 41.53,
-                    "MassiveIntentClassification (is)": 30.25,
-                    "MassiveIntentClassification (it)": 56.57,
-                    "MassiveIntentClassification (ja)": 3.5,
-                    "MassiveIntentClassification (jv)": 31.67,
-                    "MassiveIntentClassification (ka)": 2.79,
-                    "MassiveIntentClassification (km)": 5.43,
-                    "MassiveIntentClassification (kn)": 2.79,
-                    "MassiveIntentClassification (ko)": 2.67,
-                    "MassiveIntentClassification (lv)": 34.25,
-                    "MassiveIntentClassification (ml)": 2.98,
-                    "MassiveIntentClassification (mn)": 20.99,
-                    "MassiveIntentClassification (ms)": 37.43,
-                    "MassiveIntentClassification (my)": 4.02,
-                    "MassiveIntentClassification (nb)": 45.91,
-                    "MassiveIntentClassification (nl)": 50.51,
-                    "MassiveIntentClassification (pl)": 43.95,
-                    "MassiveIntentClassification (pt)": 57.95,
-                    "MassiveIntentClassification (ro)": 49.37,
-                    "MassiveIntentClassification (ru)": 33.46,
-                    "MassiveIntentClassification (sl)": 36.33,
-                    "MassiveIntentClassification (sq)": 37.65,
-                    "MassiveIntentClassification (sv)": 46.35,
-                    "MassiveIntentClassification (sw)": 30.6,
-                    "MassiveIntentClassification (ta)": 1.79,
-                    "MassiveIntentClassification (te)": 2.26,
-                    "MassiveIntentClassification (th)": 4.02,
-                    "MassiveIntentClassification (tl)": 38.92,
-                    "MassiveIntentClassification (tr)": 32.05,
-                    "MassiveIntentClassification (ur)": 2.7,
-                    "MassiveIntentClassification (vi)": 21.47,
-                    "MassiveIntentClassification (zh-CN)": 0.59,
-                    "MassiveIntentClassification (zh-TW)": 3.24,
-                    "MassiveScenarioClassification (af)": 50.81,
-                    "MassiveScenarioClassification (am)": 6.95,
-                    "MassiveScenarioClassification (ar)": 12.32,
-                    "MassiveScenarioClassification (az)": 38.79,
-                    "MassiveScenarioClassification (bn)": 8.0,
-                    "MassiveScenarioClassification (cy)": 33.91,
-                    "MassiveScenarioClassification (da)": 55.79,
-                    "MassiveScenarioClassification (de)": 65.33,
-                    "MassiveScenarioClassification (el)": 16.89,
-                    "MassiveScenarioClassification (en)": 73.26,
-                    "MassiveScenarioClassification (es)": 62.52,
-                    "MassiveScenarioClassification (fa)": 6.08,
-                    "MassiveScenarioClassification (fi)": 43.34,
-                    "MassiveScenarioClassification (fr)": 66.42,
-                    "MassiveScenarioClassification (he)": 7.55,
-                    "MassiveScenarioClassification (hi)": 7.44,
-                    "MassiveScenarioClassification (hu)": 40.85,
-                    "MassiveScenarioClassification (hy)": 9.25,
-                    "MassiveScenarioClassification (id)": 51.92,
-                    "MassiveScenarioClassification (is)": 40.09,
-                    "MassiveScenarioClassification (it)": 62.94,
-                    "MassiveScenarioClassification (ja)": 7.9,
-                    "MassiveScenarioClassification (jv)": 41.33,
-                    "MassiveScenarioClassification (ka)": 7.76,
-                    "MassiveScenarioClassification (km)": 9.19,
-                    "MassiveScenarioClassification (kn)": 8.36,
-                    "MassiveScenarioClassification (ko)": 6.13,
-                    "MassiveScenarioClassification (lv)": 40.7,
-                    "MassiveScenarioClassification (ml)": 6.98,
-                    "MassiveScenarioClassification (mn)": 27.0,
-                    "MassiveScenarioClassification (ms)": 46.9,
-                    "MassiveScenarioClassification (my)": 9.55,
-                    "MassiveScenarioClassification (nb)": 53.43,
-                    "MassiveScenarioClassification (nl)": 59.65,
-                    "MassiveScenarioClassification (pl)": 49.87,
-                    "MassiveScenarioClassification (pt)": 62.18,
-                    "MassiveScenarioClassification (ro)": 58.22,
-                    "MassiveScenarioClassification (ru)": 40.73,
-                    "MassiveScenarioClassification (sl)": 43.66,
-                    "MassiveScenarioClassification (sq)": 49.25,
-                    "MassiveScenarioClassification (sv)": 57.17,
-                    "MassiveScenarioClassification (sw)": 40.55,
-                    "MassiveScenarioClassification (ta)": 7.46,
-                    "MassiveScenarioClassification (te)": 7.03,
-                    "MassiveScenarioClassification (th)": 8.52,
-                    "MassiveScenarioClassification (tl)": 51.74,
-                    "MassiveScenarioClassification (tr)": 43.01,
-                    "MassiveScenarioClassification (ur)": 9.61,
-                    "MassiveScenarioClassification (vi)": 28.91,
-                    "MassiveScenarioClassification (zh-CN)": 5.86,
-                    "MassiveScenarioClassification (zh-TW)": 7.14,
-                    "ToxicConversationsClassification": 70.95,
-                    "TweetSentimentExtractionClassification": 61.21
+                    "Model": "bert-base-10lang-cased",
+                    "AmazonReviewsClassification (fr)": 29.38,
+                    "MTOPDomainClassification (fr)": 63.65,
+                    "MTOPIntentClassification (fr)": 37.87,
+                    "MasakhaNEWSClassification (fra)": 63.93,
+                    "MassiveIntentClassification (fr)": 37.28,
+                    "MassiveScenarioClassification (fr)": 44.5
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "bert-base-10lang-cased",
+                    "AlloProfClusteringP2P": 53.22,
+                    "AlloProfClusteringS2S": 42.92,
+                    "HALClusteringS2S": 19.94,
+                    "MLSUMClusteringP2P": 40.96,
+                    "MLSUMClusteringS2S": 31.87,
+                    "MasakhaNEWSClusteringP2P (fra)": 24.23,
+                    "MasakhaNEWSClusteringS2S (fra)": 24.46
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "bert-base-10lang-cased",
+                    "OpusparcusPC (fr)": 86.79,
+                    "PawsXPairClassification (fr)": 53.4
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "bert-base-10lang-cased",
+                    "AlloprofReranking": 36.21,
+                    "SyntecReranking": 53.25
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "bert-base-10lang-cased",
+                    "AlloprofRetrieval": 1.6,
+                    "BSARDRetrieval": 0.0,
+                    "MintakaRetrieval (fr)": 3.55,
+                    "SyntecRetrieval": 18.95,
+                    "XPQARetrieval (fr)": 18.39
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "bert-base-10lang-cased",
+                    "SICKFr": 58.76,
+                    "STS22 (fr)": 40.31,
+                    "STSBenchmarkMultilingualSTS (fr)": 52.25
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "bert-base-10lang-cased",
+                    "SummEvalFr": 29.06
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "bert-base-10lang-cased"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "bert-base-10lang-cased"
+                }
+            ]
+        }
+    },
+    "LLM2Vec-Mistral-unsupervised": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "LLM2Vec-Mistral-unsupervised"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "LLM2Vec-Mistral-unsupervised",
+                    "AmazonCounterfactualClassification (en)": 76.94,
+                    "AmazonPolarityClassification": 85.29,
+                    "AmazonReviewsClassification (en)": 47.09,
+                    "Banking77Classification": 86.16,
+                    "EmotionClassification": 48.88,
+                    "ImdbClassification": 77.95,
+                    "MTOPDomainClassification (en)": 95.48,
+                    "MTOPIntentClassification (en)": 82.84,
+                    "MassiveIntentClassification (en)": 76.65,
+                    "MassiveScenarioClassification (en)": 79.99,
+                    "ToxicConversationsClassification": 70.71,
+                    "TweetSentimentExtractionClassification": 60.9
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "sentence-t5-xl",
-                    "AlloProfClusteringP2P": 60.37,
-                    "AlloProfClusteringS2S": 40.76,
-                    "ArxivClusteringP2P": 41.62,
-                    "ArxivClusteringS2S": 31.17,
-                    "BiorxivClusteringP2P": 36.43,
-                    "BiorxivClusteringS2S": 26.47,
-                    "HALClusteringS2S": 20.28,
-                    "MLSUMClusteringP2P": 41.61,
-                    "MLSUMClusteringS2S": 33.6,
-                    "MasakhaNEWSClusteringP2P (fra)": 62.82,
-                    "MasakhaNEWSClusteringS2S (fra)": 31.74,
-                    "MedrxivClusteringP2P": 32.3,
+                    "Model": "LLM2Vec-Mistral-unsupervised",
+                    "ArxivClusteringP2P": 47.56,
+                    "ArxivClusteringS2S": 39.92,
+                    "BiorxivClusteringP2P": 36.14,
+                    "BiorxivClusteringS2S": 30.26,
+                    "MedrxivClusteringP2P": 30.11,
                     "MedrxivClusteringS2S": 26.93,
-                    "RedditClustering": 57.03,
-                    "RedditClusteringP2P": 62.34,
-                    "StackExchangeClustering": 67.13,
-                    "StackExchangeClusteringP2P": 34.79,
-                    "TwentyNewsgroupsClustering": 49.53
+                    "RedditClustering": 41.83,
+                    "RedditClusteringP2P": 62.08,
+                    "StackExchangeClustering": 67.34,
+                    "StackExchangeClusteringP2P": 34.5,
+                    "TwentyNewsgroupsClustering": 30.26
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "sentence-t5-xl",
-                    "OpusparcusPC (fr)": 92.48,
-                    "PawsXPairClassification (fr)": 62.52,
-                    "SprintDuplicateQuestions": 91.44,
-                    "TwitterSemEval2015": 80.89,
-                    "TwitterURLCorpus": 85.86
+                    "Model": "LLM2Vec-Mistral-unsupervised",
+                    "SprintDuplicateQuestions": 91.3,
+                    "TwitterSemEval2015": 68.76,
+                    "TwitterURLCorpus": 82.76
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "sentence-t5-xl",
-                    "AlloprofReranking": 63.3,
-                    "AskUbuntuDupQuestions": 62.86,
-                    "MindSmallReranking": 29.77,
-                    "SciDocsRR": 75.16,
-                    "StackOverflowDupQuestions": 51.05,
-                    "SyntecReranking": 83.07
+                    "Model": "LLM2Vec-Mistral-unsupervised",
+                    "AskUbuntuDupQuestions": 58.6,
+                    "MindSmallReranking": 29.73,
+                    "SciDocsRR": 77.81,
+                    "StackOverflowDupQuestions": 49.8
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "sentence-t5-xl",
-                    "AlloprofRetrieval": 40.38,
-                    "ArguAna": 39.4,
-                    "BSARDRetrieval": 0.14,
-                    "CQADupstackRetrieval": 40.78,
-                    "ClimateFEVER": 10.61,
-                    "DBPedia": 33.65,
-                    "FEVER": 36.12,
-                    "FiQA2018": 44.71,
-                    "HotpotQA": 37.17,
-                    "MSMARCO": 25.17,
-                    "MintakaRetrieval (fr)": 31.54,
-                    "NFCorpus": 33.18,
-                    "NQ": 46.29,
-                    "QuoraRetrieval": 85.85,
-                    "SCIDOCS": 15.97,
-                    "SciFact": 50.91,
-                    "SyntecRetrieval": 74.24,
-                    "TRECCOVID": 54.77,
-                    "Touche2020": 22.51,
-                    "XPQARetrieval (fr)": 52.14
+                    "Model": "LLM2Vec-Mistral-unsupervised",
+                    "ArguAna": 51.0,
+                    "CQADupstackRetrieval": 33.37,
+                    "ClimateFEVER": 22.97,
+                    "DBPedia": 25.48,
+                    "FEVER": 45.11,
+                    "FiQA2018": 27.24,
+                    "HotpotQA": 54.54,
+                    "MSMARCO": 19.13,
+                    "NFCorpus": 27.16,
+                    "NQ": 34.16,
+                    "QuoraRetrieval": 84.4,
+                    "SCIDOCS": 15.35,
+                    "SciFact": 68.68,
+                    "TRECCOVID": 55.67,
+                    "Touche2020": 6.54
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "sentence-t5-xl",
-                    "BIOSSES": 73.12,
-                    "SICK-R": 79.98,
-                    "SICKFr": 75.08,
-                    "STS12": 79.02,
-                    "STS13": 88.8,
-                    "STS14": 84.33,
-                    "STS15": 88.89,
-                    "STS16": 85.31,
-                    "STS17 (ar-ar)": 11.13,
-                    "STS17 (en-ar)": -3.93,
-                    "STS17 (en-de)": 79.04,
-                    "STS17 (en-en)": 88.91,
-                    "STS17 (en-tr)": 13.61,
-                    "STS17 (es-en)": 71.72,
-                    "STS17 (es-es)": 83.42,
-                    "STS17 (fr-en)": 71.38,
-                    "STS17 (it-en)": 69.5,
-                    "STS17 (ko-ko)": 9.61,
-                    "STS17 (nl-en)": 66.12,
-                    "STS22 (ar)": 29.6,
-                    "STS22 (de)": 47.72,
-                    "STS22 (de-en)": 49.64,
-                    "STS22 (de-fr)": 62.21,
-                    "STS22 (de-pl)": 34.34,
-                    "STS22 (en)": 64.32,
-                    "STS22 (es)": 58.16,
-                    "STS22 (es-en)": 69.15,
-                    "STS22 (es-it)": 65.26,
-                    "STS22 (fr)": 77.49,
-                    "STS22 (fr-pl)": 50.71,
-                    "STS22 (it)": 66.91,
-                    "STS22 (pl)": 27.04,
-                    "STS22 (pl-en)": 58.85,
-                    "STS22 (ru)": 26.63,
-                    "STS22 (tr)": 43.36,
-                    "STS22 (zh)": 33.55,
-                    "STS22 (zh-en)": 29.0,
-                    "STSBenchmark": 83.93,
-                    "STSBenchmarkMultilingualSTS (fr)": 79.42
+                    "Model": "LLM2Vec-Mistral-unsupervised",
+                    "BIOSSES": 83.29,
+                    "SICK-R": 75.55,
+                    "STS12": 67.65,
+                    "STS13": 83.9,
+                    "STS14": 76.97,
+                    "STS15": 83.8,
+                    "STS16": 81.91,
+                    "STS17 (en-en)": 85.58,
+                    "STS22 (en)": 65.93,
+                    "STSBenchmark": 80.42
+                }
+            ]
+        },
+        "Summarization": {
+            "spearman": [
+                {
+                    "Model": "LLM2Vec-Mistral-unsupervised",
+                    "SummEval": 30.19
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "LLM2Vec-Mistral-unsupervised"
+                }
+            ]
+        },
+        "InstructionRetrieval": {
+            "p-MRR": [
+                {
+                    "Model": "LLM2Vec-Mistral-unsupervised"
+                }
+            ]
+        }
+    },
+    "text-search-ada-doc-001": {
+        "BitextMining": {
+            "f1": [
+                {
+                    "Model": "text-search-ada-doc-001"
+                }
+            ]
+        },
+        "Classification": {
+            "accuracy": [
+                {
+                    "Model": "text-search-ada-doc-001"
+                }
+            ]
+        },
+        "Clustering": {
+            "v_measure": [
+                {
+                    "Model": "text-search-ada-doc-001",
+                    "TwentyNewsgroupsClustering": 32.92
+                }
+            ]
+        },
+        "PairClassification": {
+            "ap": [
+                {
+                    "Model": "text-search-ada-doc-001"
+                }
+            ]
+        },
+        "Reranking": {
+            "map": [
+                {
+                    "Model": "text-search-ada-doc-001"
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "text-search-ada-doc-001"
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "text-search-ada-doc-001"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "sentence-t5-xl",
-                    "SummEval": 29.91,
-                    "SummEvalFr": 31.59
+                    "Model": "text-search-ada-doc-001"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "text-search-ada-doc-001"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "sentence-t5-xl"
+                    "Model": "text-search-ada-doc-001"
                 }
             ]
         }
     },
-    "Cohere-embed-english-v3.0": {
+    "all-MiniLM-L12-v2": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "Cohere-embed-english-v3.0"
+                    "Model": "all-MiniLM-L12-v2",
+                    "BornholmBitextMining (dan-Latn)": 35.25,
+                    "Tatoeba (spa-Latn_eng-Latn)": 11.26,
+                    "Tatoeba (bos-Latn_eng-Latn)": 7.05,
+                    "Tatoeba (xho-Latn_eng-Latn)": 3.66,
+                    "Tatoeba (fry-Latn_eng-Latn)": 14.53,
+                    "Tatoeba (tur-Latn_eng-Latn)": 3.69,
+                    "Tatoeba (fao-Latn_eng-Latn)": 5.92,
+                    "Tatoeba (vie-Latn_eng-Latn)": 5.06,
+                    "Tatoeba (ind-Latn_eng-Latn)": 5.3,
+                    "Tatoeba (pol-Latn_eng-Latn)": 4.29,
+                    "Tatoeba (swe-Latn_eng-Latn)": 7.31,
+                    "Tatoeba (ita-Latn_eng-Latn)": 12.57,
+                    "Tatoeba (dtp-Latn_eng-Latn)": 3.31,
+                    "Tatoeba (ron-Latn_eng-Latn)": 8.77,
+                    "Tatoeba (isl-Latn_eng-Latn)": 3.44,
+                    "Tatoeba (hrv-Latn_eng-Latn)": 5.68,
+                    "Tatoeba (cha-Latn_eng-Latn)": 13.07,
+                    "Tatoeba (cor-Latn_eng-Latn)": 2.47,
+                    "Tatoeba (cym-Latn_eng-Latn)": 5.13,
+                    "Tatoeba (jpn-Jpan_eng-Latn)": 2.18,
+                    "Tatoeba (lfn-Latn_eng-Latn)": 7.52,
+                    "Tatoeba (hun-Latn_eng-Latn)": 3.93,
+                    "Tatoeba (lat-Latn_eng-Latn)": 7.14,
+                    "Tatoeba (tgl-Latn_eng-Latn)": 3.34,
+                    "Tatoeba (kur-Latn_eng-Latn)": 7.3,
+                    "Tatoeba (war-Latn_eng-Latn)": 6.18,
+                    "Tatoeba (kab-Latn_eng-Latn)": 0.91,
+                    "Tatoeba (kaz-Cyrl_eng-Latn)": 0.82,
+                    "Tatoeba (slv-Latn_eng-Latn)": 4.52,
+                    "Tatoeba (nds-Latn_eng-Latn)": 11.35,
+                    "Tatoeba (pam-Latn_eng-Latn)": 4.73,
+                    "Tatoeba (bul-Cyrl_eng-Latn)": 0.23,
+                    "Tatoeba (ces-Latn_eng-Latn)": 4.2,
+                    "Tatoeba (nno-Latn_eng-Latn)": 7.45,
+                    "Tatoeba (ben-Beng_eng-Latn)": 0.02,
+                    "Tatoeba (amh-Ethi_eng-Latn)": 0.01,
+                    "Tatoeba (lit-Latn_eng-Latn)": 1.56,
+                    "Tatoeba (pes-Arab_eng-Latn)": 0.3,
+                    "Tatoeba (jav-Latn_eng-Latn)": 3.5,
+                    "Tatoeba (mal-Mlym_eng-Latn)": 0.24,
+                    "Tatoeba (lvs-Latn_eng-Latn)": 3.45,
+                    "Tatoeba (gsw-Latn_eng-Latn)": 9.9,
+                    "Tatoeba (fra-Latn_eng-Latn)": 17.53,
+                    "Tatoeba (orv-Cyrl_eng-Latn)": 0.15,
+                    "Tatoeba (kat-Geor_eng-Latn)": 0.45,
+                    "Tatoeba (awa-Deva_eng-Latn)": 0.44,
+                    "Tatoeba (epo-Latn_eng-Latn)": 8.5,
+                    "Tatoeba (mhr-Cyrl_eng-Latn)": 0.0,
+                    "Tatoeba (dan-Latn_eng-Latn)": 10.21,
+                    "Tatoeba (bel-Cyrl_eng-Latn)": 0.85,
+                    "Tatoeba (nld-Latn_eng-Latn)": 12.56,
+                    "Tatoeba (mkd-Cyrl_eng-Latn)": 0.01,
+                    "Tatoeba (mon-Cyrl_eng-Latn)": 0.06,
+                    "Tatoeba (ast-Latn_eng-Latn)": 9.99,
+                    "Tatoeba (cat-Latn_eng-Latn)": 11.79,
+                    "Tatoeba (oci-Latn_eng-Latn)": 8.72,
+                    "Tatoeba (khm-Khmr_eng-Latn)": 0.42,
+                    "Tatoeba (urd-Arab_eng-Latn)": 0.0,
+                    "Tatoeba (tzl-Latn_eng-Latn)": 6.87,
+                    "Tatoeba (arq-Arab_eng-Latn)": 0.28,
+                    "Tatoeba (uig-Arab_eng-Latn)": 0.4,
+                    "Tatoeba (dsb-Latn_eng-Latn)": 3.06,
+                    "Tatoeba (hsb-Latn_eng-Latn)": 2.89,
+                    "Tatoeba (kzj-Latn_eng-Latn)": 3.64,
+                    "Tatoeba (cbk-Latn_eng-Latn)": 9.76,
+                    "Tatoeba (afr-Latn_eng-Latn)": 7.59,
+                    "Tatoeba (gle-Latn_eng-Latn)": 3.08,
+                    "Tatoeba (csb-Latn_eng-Latn)": 5.21,
+                    "Tatoeba (mar-Deva_eng-Latn)": 0.04,
+                    "Tatoeba (arz-Arab_eng-Latn)": 0.0,
+                    "Tatoeba (tat-Cyrl_eng-Latn)": 0.75,
+                    "Tatoeba (hin-Deva_eng-Latn)": 0.0,
+                    "Tatoeba (ang-Latn_eng-Latn)": 14.63,
+                    "Tatoeba (heb-Hebr_eng-Latn)": 0.3,
+                    "Tatoeba (tuk-Latn_eng-Latn)": 2.66,
+                    "Tatoeba (ile-Latn_eng-Latn)": 17.43,
+                    "Tatoeba (zsm-Latn_eng-Latn)": 5.99,
+                    "Tatoeba (kor-Hang_eng-Latn)": 0.9,
+                    "Tatoeba (uzb-Latn_eng-Latn)": 2.2,
+                    "Tatoeba (fin-Latn_eng-Latn)": 3.65,
+                    "Tatoeba (hye-Armn_eng-Latn)": 0.5,
+                    "Tatoeba (ukr-Cyrl_eng-Latn)": 0.57,
+                    "Tatoeba (swh-Latn_eng-Latn)": 5.82,
+                    "Tatoeba (gla-Latn_eng-Latn)": 2.58,
+                    "Tatoeba (aze-Latn_eng-Latn)": 1.47,
+                    "Tatoeba (ara-Arab_eng-Latn)": 0.43,
+                    "Tatoeba (eus-Latn_eng-Latn)": 6.58,
+                    "Tatoeba (deu-Latn_eng-Latn)": 13.89,
+                    "Tatoeba (por-Latn_eng-Latn)": 11.36,
+                    "Tatoeba (ber-Tfng_eng-Latn)": 4.72,
+                    "Tatoeba (sqi-Latn_eng-Latn)": 5.86,
+                    "Tatoeba (pms-Latn_eng-Latn)": 8.94,
+                    "Tatoeba (ina-Latn_eng-Latn)": 25.36,
+                    "Tatoeba (ido-Latn_eng-Latn)": 11.08,
+                    "Tatoeba (slk-Latn_eng-Latn)": 4.2,
+                    "Tatoeba (glg-Latn_eng-Latn)": 12.6,
+                    "Tatoeba (nov-Latn_eng-Latn)": 19.45,
+                    "Tatoeba (tel-Telu_eng-Latn)": 0.67,
+                    "Tatoeba (tam-Taml_eng-Latn)": 0.33,
+                    "Tatoeba (bre-Latn_eng-Latn)": 3.68,
+                    "Tatoeba (tha-Thai_eng-Latn)": 0.67,
+                    "Tatoeba (nob-Latn_eng-Latn)": 8.02,
+                    "Tatoeba (est-Latn_eng-Latn)": 2.6,
+                    "Tatoeba (wuu-Hans_eng-Latn)": 1.89,
+                    "Tatoeba (swg-Latn_eng-Latn)": 11.9,
+                    "Tatoeba (max-Deva_eng-Latn)": 8.4,
+                    "Tatoeba (srp-Cyrl_eng-Latn)": 2.22,
+                    "Tatoeba (yue-Hant_eng-Latn)": 1.89,
+                    "Tatoeba (rus-Cyrl_eng-Latn)": 0.07,
+                    "Tatoeba (ell-Grek_eng-Latn)": 0.2,
+                    "Tatoeba (ceb-Latn_eng-Latn)": 3.95,
+                    "Tatoeba (yid-Hebr_eng-Latn)": 0.19,
+                    "Tatoeba (cmn-Hans_eng-Latn)": 2.45
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "Cohere-embed-english-v3.0"
+                    "Model": "all-MiniLM-L12-v2",
+                    "AllegroReviews (pol-Latn)": 23.85,
+                    "AmazonCounterfactualClassification (en-ext)": 67.24,
+                    "AmazonCounterfactualClassification (en)": 65.28,
+                    "AmazonCounterfactualClassification (deu-Latn)": 57.13,
+                    "AmazonCounterfactualClassification (jpn-Jpan)": 59.94,
+                    "AmazonCounterfactualClassification (de)": 57.1,
+                    "AmazonCounterfactualClassification (ja)": 59.91,
+                    "AmazonPolarityClassification": 62.98,
+                    "AmazonReviewsClassification (en)": 30.79,
+                    "AmazonReviewsClassification (deu-Latn)": 25.92,
+                    "AmazonReviewsClassification (spa-Latn)": 27.64,
+                    "AmazonReviewsClassification (fra-Latn)": 27.53,
+                    "AmazonReviewsClassification (jpn-Jpan)": 23.57,
+                    "AmazonReviewsClassification (cmn-Hans)": 22.99,
+                    "AmazonReviewsClassification (de)": 25.91,
+                    "AmazonReviewsClassification (es)": 27.63,
+                    "AmazonReviewsClassification (fr)": 27.54,
+                    "AmazonReviewsClassification (ja)": 23.57,
+                    "AmazonReviewsClassification (zh)": 22.99,
+                    "AngryTweetsClassification (dan-Latn)": 42.87,
+                    "Banking77Classification": 80.4,
+                    "CBD (pol-Latn)": 48.46,
+                    "DanishPoliticalCommentsClassification (dan-Latn)": 27.07,
+                    "EmotionClassification": 41.17,
+                    "GeoreviewClassification (rus-Cyrl)": 23.49,
+                    "HeadlineClassification (rus-Cyrl)": 28.49,
+                    "IFlyTek (cmn-Hans)": 15.31,
+                    "ImdbClassification": 59.76,
+                    "InappropriatenessClassification (rus-Cyrl)": 50.85,
+                    "JDReview (cmn-Hans)": 59.57,
+                    "KinopoiskClassification (rus-Cyrl)": 34.17,
+                    "LccSentimentClassification (dan-Latn)": 41.93,
+                    "MTOPDomainClassification (en)": 91.9,
+                    "MTOPDomainClassification (deu-Latn)": 72.04,
+                    "MTOPDomainClassification (spa-Latn)": 72.99,
+                    "MTOPDomainClassification (fra-Latn)": 75.57,
+                    "MTOPDomainClassification (hin-Deva)": 40.4,
+                    "MTOPDomainClassification (tha-Thai)": 16.36,
+                    "MTOPDomainClassification (de)": 72.04,
+                    "MTOPDomainClassification (es)": 72.99,
+                    "MTOPDomainClassification (fr)": 75.59,
+                    "MTOPDomainClassification (hi)": 40.36,
+                    "MTOPDomainClassification (th)": 17.1,
+                    "MTOPIntentClassification (en)": 62.84,
+                    "MTOPIntentClassification (deu-Latn)": 43.42,
+                    "MTOPIntentClassification (spa-Latn)": 41.91,
+                    "MTOPIntentClassification (fra-Latn)": 38.96,
+                    "MTOPIntentClassification (hin-Deva)": 17.76,
+                    "MTOPIntentClassification (tha-Thai)": 6.13,
+                    "MTOPIntentClassification (de)": 43.41,
+                    "MTOPIntentClassification (es)": 41.88,
+                    "MTOPIntentClassification (fr)": 38.94,
+                    "MTOPIntentClassification (hi)": 17.75,
+                    "MTOPIntentClassification (th)": 5.63,
+                    "MasakhaNEWSClassification (amh-Ethi)": 30.64,
+                    "MasakhaNEWSClassification (eng)": 76.62,
+                    "MasakhaNEWSClassification (fra-Latn)": 67.18,
+                    "MasakhaNEWSClassification (hau-Latn)": 52.59,
+                    "MasakhaNEWSClassification (ibo-Latn)": 54.26,
+                    "MasakhaNEWSClassification (lin-Latn)": 62.23,
+                    "MasakhaNEWSClassification (lug-Latn)": 47.62,
+                    "MasakhaNEWSClassification (orm-Ethi)": 47.17,
+                    "MasakhaNEWSClassification (pcm-Latn)": 91.77,
+                    "MasakhaNEWSClassification (run-Latn)": 54.47,
+                    "MasakhaNEWSClassification (sna-Latn)": 66.53,
+                    "MasakhaNEWSClassification (som-Latn)": 40.27,
+                    "MasakhaNEWSClassification (swa-Latn)": 47.77,
+                    "MasakhaNEWSClassification (tir-Ethi)": 21.18,
+                    "MasakhaNEWSClassification (xho-Latn)": 54.34,
+                    "MasakhaNEWSClassification (yor-Latn)": 58.61,
+                    "MasakhaNEWSClassification (fra)": 72.2,
+                    "MassiveIntentClassification (jpn-Jpan)": 30.89,
+                    "MassiveIntentClassification (khm-Khmr)": 4.99,
+                    "MassiveIntentClassification (slv-Latn)": 38.48,
+                    "MassiveIntentClassification (hye-Armn)": 8.69,
+                    "MassiveIntentClassification (ita-Latn)": 43.16,
+                    "MassiveIntentClassification (fin-Latn)": 39.19,
+                    "MassiveIntentClassification (afr-Latn)": 38.84,
+                    "MassiveIntentClassification (kor-Kore)": 19.97,
+                    "MassiveIntentClassification (ben-Beng)": 13.7,
+                    "MassiveIntentClassification (heb-Hebr)": 23.71,
+                    "MassiveIntentClassification (dan-Latn)": 44.35,
+                    "MassiveIntentClassification (fra-Latn)": 44.75,
+                    "MassiveIntentClassification (pol-Latn)": 37.59,
+                    "MassiveIntentClassification (por-Latn)": 45.08,
+                    "MassiveIntentClassification (tha-Thai)": 10.46,
+                    "MassiveIntentClassification (nob-Latn)": 41.79,
+                    "MassiveIntentClassification (kat-Geor)": 9.17,
+                    "MassiveIntentClassification (tgl-Latn)": 38.63,
+                    "MassiveIntentClassification (swe-Latn)": 40.33,
+                    "MassiveIntentClassification (hun-Latn)": 37.95,
+                    "MassiveIntentClassification (cmo-Hant)": 22.38,
+                    "MassiveIntentClassification (hin-Deva)": 18.0,
+                    "MassiveIntentClassification (tur-Latn)": 35.93,
+                    "MassiveIntentClassification (vie-Latn)": 37.35,
+                    "MassiveIntentClassification (mal-Mlym)": 2.83,
+                    "MassiveIntentClassification (aze-Latn)": 34.3,
+                    "MassiveIntentClassification (amh-Ethi)": 2.45,
+                    "MassiveIntentClassification (kan-Knda)": 3.07,
+                    "MassiveIntentClassification (deu-Latn)": 44.12,
+                    "MassiveIntentClassification (rus-Cyrl)": 26.29,
+                    "MassiveIntentClassification (ara-Arab)": 21.02,
+                    "MassiveIntentClassification (msa-Latn)": 36.16,
+                    "MassiveIntentClassification (nld-Latn)": 41.77,
+                    "MassiveIntentClassification (fas-Arab)": 23.56,
+                    "MassiveIntentClassification (isl-Latn)": 35.17,
+                    "MassiveIntentClassification (cym-Latn)": 35.65,
+                    "MassiveIntentClassification (cmo-Hans)": 23.74,
+                    "MassiveIntentClassification (ell-Grek)": 28.68,
+                    "MassiveIntentClassification (spa-Latn)": 40.82,
+                    "MassiveIntentClassification (ind-Latn)": 39.65,
+                    "MassiveIntentClassification (jav-Latn)": 36.67,
+                    "MassiveIntentClassification (mon-Cyrl)": 23.27,
+                    "MassiveIntentClassification (mya-Mymr)": 4.36,
+                    "MassiveIntentClassification (sqi-Latn)": 41.47,
+                    "MassiveIntentClassification (tel-Telu)": 2.54,
+                    "MassiveIntentClassification (en)": 67.15,
+                    "MassiveIntentClassification (ron-Latn)": 41.64,
+                    "MassiveIntentClassification (tam-Taml)": 13.12,
+                    "MassiveIntentClassification (swa-Latn)": 35.26,
+                    "MassiveIntentClassification (urd-Arab)": 16.26,
+                    "MassiveIntentClassification (lav-Latn)": 38.54,
+                    "MassiveIntentClassification (af)": 38.94,
+                    "MassiveIntentClassification (am)": 2.45,
+                    "MassiveIntentClassification (ar)": 20.94,
+                    "MassiveIntentClassification (az)": 34.25,
+                    "MassiveIntentClassification (bn)": 13.67,
+                    "MassiveIntentClassification (cy)": 35.71,
+                    "MassiveIntentClassification (da)": 44.43,
+                    "MassiveIntentClassification (de)": 44.17,
+                    "MassiveIntentClassification (el)": 28.7,
+                    "MassiveIntentClassification (es)": 40.91,
+                    "MassiveIntentClassification (fa)": 23.52,
+                    "MassiveIntentClassification (fi)": 39.27,
+                    "MassiveIntentClassification (fr)": 44.82,
+                    "MassiveIntentClassification (he)": 23.65,
+                    "MassiveIntentClassification (hi)": 17.98,
+                    "MassiveIntentClassification (hu)": 38.0,
+                    "MassiveIntentClassification (hy)": 8.69,
+                    "MassiveIntentClassification (id)": 39.66,
+                    "MassiveIntentClassification (is)": 35.14,
+                    "MassiveIntentClassification (it)": 43.17,
+                    "MassiveIntentClassification (ja)": 30.94,
+                    "MassiveIntentClassification (jv)": 36.69,
+                    "MassiveIntentClassification (ka)": 9.17,
+                    "MassiveIntentClassification (km)": 4.99,
+                    "MassiveIntentClassification (kn)": 3.08,
+                    "MassiveIntentClassification (ko)": 19.97,
+                    "MassiveIntentClassification (lv)": 38.61,
+                    "MassiveIntentClassification (ml)": 2.85,
+                    "MassiveIntentClassification (mn)": 23.25,
+                    "MassiveIntentClassification (ms)": 36.21,
+                    "MassiveIntentClassification (my)": 4.38,
+                    "MassiveIntentClassification (nb)": 41.91,
+                    "MassiveIntentClassification (nl)": 41.85,
+                    "MassiveIntentClassification (pl)": 37.63,
+                    "MassiveIntentClassification (pt)": 45.12,
+                    "MassiveIntentClassification (ro)": 41.71,
+                    "MassiveIntentClassification (ru)": 26.33,
+                    "MassiveIntentClassification (sl)": 38.52,
+                    "MassiveIntentClassification (sq)": 41.62,
+                    "MassiveIntentClassification (sv)": 40.42,
+                    "MassiveIntentClassification (sw)": 35.28,
+                    "MassiveIntentClassification (ta)": 13.1,
+                    "MassiveIntentClassification (te)": 2.56,
+                    "MassiveIntentClassification (th)": 10.54,
+                    "MassiveIntentClassification (tl)": 38.56,
+                    "MassiveIntentClassification (tr)": 35.9,
+                    "MassiveIntentClassification (ur)": 16.18,
+                    "MassiveIntentClassification (vi)": 37.38,
+                    "MassiveIntentClassification (zh-CN)": 23.74,
+                    "MassiveIntentClassification (zh-TW)": 22.39,
+                    "MassiveScenarioClassification (jav-Latn)": 44.54,
+                    "MassiveScenarioClassification (aze-Latn)": 39.62,
+                    "MassiveScenarioClassification (cmo-Hans)": 33.19,
+                    "MassiveScenarioClassification (swa-Latn)": 43.18,
+                    "MassiveScenarioClassification (fra-Latn)": 53.77,
+                    "MassiveScenarioClassification (mon-Cyrl)": 29.01,
+                    "MassiveScenarioClassification (kat-Geor)": 14.85,
+                    "MassiveScenarioClassification (ben-Beng)": 18.98,
+                    "MassiveScenarioClassification (ind-Latn)": 44.37,
+                    "MassiveScenarioClassification (kor-Kore)": 25.72,
+                    "MassiveScenarioClassification (lav-Latn)": 42.75,
+                    "MassiveScenarioClassification (deu-Latn)": 52.08,
+                    "MassiveScenarioClassification (hun-Latn)": 44.1,
+                    "MassiveScenarioClassification (tam-Taml)": 19.4,
+                    "MassiveScenarioClassification (afr-Latn)": 45.72,
+                    "MassiveScenarioClassification (nob-Latn)": 47.35,
+                    "MassiveScenarioClassification (urd-Arab)": 24.45,
+                    "MassiveScenarioClassification (tha-Thai)": 18.32,
+                    "MassiveScenarioClassification (ita-Latn)": 51.7,
+                    "MassiveScenarioClassification (en)": 74.58,
+                    "MassiveScenarioClassification (sqi-Latn)": 49.12,
+                    "MassiveScenarioClassification (mya-Mymr)": 10.06,
+                    "MassiveScenarioClassification (ara-Arab)": 27.66,
+                    "MassiveScenarioClassification (tur-Latn)": 41.8,
+                    "MassiveScenarioClassification (khm-Khmr)": 9.75,
+                    "MassiveScenarioClassification (cym-Latn)": 41.43,
+                    "MassiveScenarioClassification (cmo-Hant)": 31.14,
+                    "MassiveScenarioClassification (hye-Armn)": 14.87,
+                    "MassiveScenarioClassification (ell-Grek)": 35.55,
+                    "MassiveScenarioClassification (ron-Latn)": 49.94,
+                    "MassiveScenarioClassification (kan-Knda)": 8.32,
+                    "MassiveScenarioClassification (jpn-Jpan)": 36.77,
+                    "MassiveScenarioClassification (fin-Latn)": 45.8,
+                    "MassiveScenarioClassification (swe-Latn)": 46.81,
+                    "MassiveScenarioClassification (dan-Latn)": 49.5,
+                    "MassiveScenarioClassification (msa-Latn)": 44.67,
+                    "MassiveScenarioClassification (hin-Deva)": 23.03,
+                    "MassiveScenarioClassification (tgl-Latn)": 48.29,
+                    "MassiveScenarioClassification (pol-Latn)": 44.74,
+                    "MassiveScenarioClassification (isl-Latn)": 43.11,
+                    "MassiveScenarioClassification (por-Latn)": 53.0,
+                    "MassiveScenarioClassification (slv-Latn)": 42.24,
+                    "MassiveScenarioClassification (rus-Cyrl)": 28.77,
+                    "MassiveScenarioClassification (tel-Telu)": 7.74,
+                    "MassiveScenarioClassification (heb-Hebr)": 25.73,
+                    "MassiveScenarioClassification (fas-Arab)": 29.0,
+                    "MassiveScenarioClassification (vie-Latn)": 40.97,
+                    "MassiveScenarioClassification (nld-Latn)": 49.14,
+                    "MassiveScenarioClassification (spa-Latn)": 50.73,
+                    "MassiveScenarioClassification (mal-Mlym)": 7.25,
+                    "MassiveScenarioClassification (amh-Ethi)": 7.41,
+                    "MassiveScenarioClassification (af)": 45.71,
+                    "MassiveScenarioClassification (am)": 7.41,
+                    "MassiveScenarioClassification (ar)": 27.62,
+                    "MassiveScenarioClassification (az)": 39.58,
+                    "MassiveScenarioClassification (bn)": 18.98,
+                    "MassiveScenarioClassification (cy)": 41.4,
+                    "MassiveScenarioClassification (da)": 49.47,
+                    "MassiveScenarioClassification (de)": 52.07,
+                    "MassiveScenarioClassification (el)": 35.51,
+                    "MassiveScenarioClassification (es)": 50.74,
+                    "MassiveScenarioClassification (fa)": 29.0,
+                    "MassiveScenarioClassification (fi)": 45.8,
+                    "MassiveScenarioClassification (fr)": 53.76,
+                    "MassiveScenarioClassification (he)": 25.68,
+                    "MassiveScenarioClassification (hi)": 23.02,
+                    "MassiveScenarioClassification (hu)": 44.09,
+                    "MassiveScenarioClassification (hy)": 14.83,
+                    "MassiveScenarioClassification (id)": 44.35,
+                    "MassiveScenarioClassification (is)": 43.08,
+                    "MassiveScenarioClassification (it)": 51.71,
+                    "MassiveScenarioClassification (ja)": 36.75,
+                    "MassiveScenarioClassification (jv)": 44.57,
+                    "MassiveScenarioClassification (ka)": 14.84,
+                    "MassiveScenarioClassification (km)": 9.75,
+                    "MassiveScenarioClassification (kn)": 8.32,
+                    "MassiveScenarioClassification (ko)": 25.72,
+                    "MassiveScenarioClassification (lv)": 42.75,
+                    "MassiveScenarioClassification (ml)": 7.25,
+                    "MassiveScenarioClassification (mn)": 29.03,
+                    "MassiveScenarioClassification (ms)": 44.65,
+                    "MassiveScenarioClassification (my)": 10.07,
+                    "MassiveScenarioClassification (nb)": 47.36,
+                    "MassiveScenarioClassification (nl)": 49.15,
+                    "MassiveScenarioClassification (pl)": 44.72,
+                    "MassiveScenarioClassification (pt)": 53.0,
+                    "MassiveScenarioClassification (ro)": 49.97,
+                    "MassiveScenarioClassification (ru)": 28.75,
+                    "MassiveScenarioClassification (sl)": 42.26,
+                    "MassiveScenarioClassification (sq)": 49.14,
+                    "MassiveScenarioClassification (sv)": 46.83,
+                    "MassiveScenarioClassification (sw)": 43.18,
+                    "MassiveScenarioClassification (ta)": 19.38,
+                    "MassiveScenarioClassification (te)": 7.74,
+                    "MassiveScenarioClassification (th)": 18.32,
+                    "MassiveScenarioClassification (tl)": 48.31,
+                    "MassiveScenarioClassification (tr)": 41.79,
+                    "MassiveScenarioClassification (ur)": 24.46,
+                    "MassiveScenarioClassification (vi)": 40.94,
+                    "MassiveScenarioClassification (zh-CN)": 33.18,
+                    "MassiveScenarioClassification (zh-TW)": 31.16,
+                    "MultilingualSentiment (cmn-Hans)": 40.52,
+                    "NoRecClassification (nob-Latn)": 37.73,
+                    "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 54.17,
+                    "OnlineShopping (cmn-Hans)": 58.65,
+                    "PAC (pol-Latn)": 59.53,
+                    "PolEmo2.0-IN (pol-Latn)": 38.32,
+                    "PolEmo2.0-OUT (pol-Latn)": 22.98,
+                    "RuReviewsClassification (rus-Cyrl)": 42.49,
+                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 10.49,
+                    "RuSciBenchOECDClassification (rus-Cyrl)": 8.31,
+                    "TNews (cmn-Hans)": 20.37,
+                    "ToxicConversationsClassification": 67.47,
+                    "TweetSentimentExtractionClassification": 54.25,
+                    "Waimai (cmn-Hans)": 63.48
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "Cohere-embed-english-v3.0"
+                    "Model": "all-MiniLM-L12-v2",
+                    "AlloProfClusteringP2P": 46.03,
+                    "AlloProfClusteringS2S": 31.83,
+                    "ArxivClusteringP2P": 46.07,
+                    "ArxivClusteringS2S": 37.5,
+                    "BiorxivClusteringP2P": 36.99,
+                    "BiorxivClusteringS2S": 33.21,
+                    "GeoreviewClusteringP2P (rus-Cyrl)": 20.76,
+                    "HALClusteringS2S": 19.58,
+                    "MLSUMClusteringP2P": 34.35,
+                    "MLSUMClusteringS2S": 29.3,
+                    "MasakhaNEWSClusteringP2P (amh-Ethi)": 40.5,
+                    "MasakhaNEWSClusteringP2P (eng)": 55.86,
+                    "MasakhaNEWSClusteringP2P (fra-Latn)": 42.72,
+                    "MasakhaNEWSClusteringP2P (hau-Latn)": 26.61,
+                    "MasakhaNEWSClusteringP2P (ibo-Latn)": 44.26,
+                    "MasakhaNEWSClusteringP2P (lin-Latn)": 54.52,
+                    "MasakhaNEWSClusteringP2P (lug-Latn)": 43.87,
+                    "MasakhaNEWSClusteringP2P (orm-Ethi)": 24.87,
+                    "MasakhaNEWSClusteringP2P (pcm-Latn)": 74.42,
+                    "MasakhaNEWSClusteringP2P (run-Latn)": 51.73,
+                    "MasakhaNEWSClusteringP2P (sna-Latn)": 46.89,
+                    "MasakhaNEWSClusteringP2P (som-Latn)": 31.17,
+                    "MasakhaNEWSClusteringP2P (swa-Latn)": 23.72,
+                    "MasakhaNEWSClusteringP2P (tir-Ethi)": 44.08,
+                    "MasakhaNEWSClusteringP2P (xho-Latn)": 26.97,
+                    "MasakhaNEWSClusteringP2P (yor-Latn)": 32.51,
+                    "MasakhaNEWSClusteringP2P (fra)": 42.72,
+                    "MasakhaNEWSClusteringS2S (amh-Ethi)": 44.11,
+                    "MasakhaNEWSClusteringS2S (eng)": 40.71,
+                    "MasakhaNEWSClusteringS2S (fra-Latn)": 32.47,
+                    "MasakhaNEWSClusteringS2S (hau-Latn)": 20.63,
+                    "MasakhaNEWSClusteringS2S (ibo-Latn)": 35.33,
+                    "MasakhaNEWSClusteringS2S (lin-Latn)": 54.52,
+                    "MasakhaNEWSClusteringS2S (lug-Latn)": 51.42,
+                    "MasakhaNEWSClusteringS2S (orm-Ethi)": 24.84,
+                    "MasakhaNEWSClusteringS2S (pcm-Latn)": 70.72,
+                    "MasakhaNEWSClusteringS2S (run-Latn)": 50.88,
+                    "MasakhaNEWSClusteringS2S (sna-Latn)": 46.6,
+                    "MasakhaNEWSClusteringS2S (som-Latn)": 29.87,
+                    "MasakhaNEWSClusteringS2S (swa-Latn)": 10.82,
+                    "MasakhaNEWSClusteringS2S (tir-Ethi)": 43.63,
+                    "MasakhaNEWSClusteringS2S (xho-Latn)": 24.55,
+                    "MasakhaNEWSClusteringS2S (yor-Latn)": 32.85,
+                    "MasakhaNEWSClusteringS2S (fra)": 32.47,
+                    "MedrxivClusteringP2P": 34.25,
+                    "MedrxivClusteringS2S": 32.24,
+                    "RedditClustering": 51.18,
+                    "RedditClusteringP2P": 54.8,
+                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 10.65,
+                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 10.19,
+                    "StackExchangeClustering": 53.05,
+                    "StackExchangeClusteringP2P": 33.13,
+                    "TwentyNewsgroupsClustering": 47.47
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "Cohere-embed-english-v3.0"
+                    "Model": "all-MiniLM-L12-v2",
+                    "CDSC-E (pol-Latn)": 49.04,
+                    "OpusparcusPC (deu-Latn)": 91.2,
+                    "OpusparcusPC (en)": 97.41,
+                    "OpusparcusPC (fin-Latn)": 85.99,
+                    "OpusparcusPC (fra-Latn)": 87.35,
+                    "OpusparcusPC (rus-Cyrl)": 79.23,
+                    "OpusparcusPC (swe-Latn)": 84.87,
+                    "PSC (pol-Latn)": 87.92,
+                    "PawsXPairClassification (deu-Latn)": 50.83,
+                    "PawsXPairClassification (en)": 58.62,
+                    "PawsXPairClassification (spa-Latn)": 52.08,
+                    "PawsXPairClassification (fra-Latn)": 55.54,
+                    "PawsXPairClassification (jpn-Hira)": 47.75,
+                    "PawsXPairClassification (kor-Hang)": 49.59,
+                    "PawsXPairClassification (cmn-Hans)": 52.8,
+                    "SICK-E-PL (pol-Latn)": 49.63,
+                    "SprintDuplicateQuestions": 92.45,
+                    "TERRa (rus-Cyrl)": 46.4,
+                    "TwitterSemEval2015": 70.02,
+                    "TwitterURLCorpus": 84.77
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "Cohere-embed-english-v3.0"
+                    "Model": "all-MiniLM-L12-v2",
+                    "AlloprofReranking (fra-Latn)": 67.01,
+                    "AskUbuntuDupQuestions": 64.06,
+                    "MMarcoReranking (cmn-Hans)": 5.27,
+                    "MindSmallReranking": 31.02,
+                    "RuBQReranking (rus-Cyrl)": 38.51,
+                    "SciDocsRR": 87.2,
+                    "StackOverflowDupQuestions": 51.47,
+                    "SyntecReranking (fra-Latn)": 69.17,
+                    "T2Reranking (cmn-Hans)": 60.32
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "Cohere-embed-english-v3.0",
-                    "AILACasedocs": 31.54,
-                    "AILAStatutes": 27.15,
-                    "ARCChallenge": 9.89,
-                    "AlphaNLI": 15.1,
-                    "BrightRetrieval (psychology)": 21.82,
-                    "BrightRetrieval (economics)": 20.18,
-                    "BrightRetrieval (robotics)": 16.21,
-                    "BrightRetrieval (biology)": 18.98,
-                    "BrightRetrieval (stackoverflow)": 16.47,
-                    "BrightRetrieval (theoremqa_theorems)": 6.04,
-                    "BrightRetrieval (pony)": 1.77,
-                    "BrightRetrieval (sustainable_living)": 17.69,
-                    "BrightRetrieval (aops)": 6.46,
-                    "BrightRetrieval (theoremqa_questions)": 15.07,
-                    "BrightRetrieval (leetcode)": 26.78,
-                    "BrightRetrieval (earth_science)": 27.45,
-                    "GerDaLIRSmall": 6.05,
-                    "HellaSwag": 26.35,
-                    "LeCaRDv2": 21.02,
-                    "LegalBenchConsumerContractsQA": 77.12,
-                    "LegalBenchCorporateLobbying": 93.68,
-                    "LegalQuAD": 26.08,
-                    "LegalSummarization": 61.7,
-                    "PIQA": 28.49,
-                    "Quail": 4.1,
-                    "RARbCode": 57.19,
-                    "RARbMath": 72.26,
-                    "SIQA": 4.26,
-                    "SpartQA": 3.75,
-                    "TempReasonL1": 1.5,
-                    "TempReasonL2Fact": 35.91,
-                    "TempReasonL2Pure": 1.89,
-                    "TempReasonL3Fact": 27.51,
-                    "TempReasonL3Pure": 8.53,
-                    "WinoGrande": 58.01
+                    "Model": "all-MiniLM-L12-v2",
+                    "AILACasedocs": 16.8,
+                    "AILAStatutes": 20.71,
+                    "ARCChallenge": 10.23,
+                    "AlloprofRetrieval (fra-Latn)": 33.2,
+                    "AlloprofRetrieval": 33.2,
+                    "AlphaNLI": 25.35,
+                    "ArguAna": 47.13,
+                    "ArguAna-PL (pol-Latn)": 13.4,
+                    "BSARDRetrieval (fra-Latn)": 6.24,
+                    "CQADupstackRetrieval": 42.53,
+                    "ClimateFEVER": 21.57,
+                    "CmedqaRetrieval (cmn-Hans)": 2.58,
+                    "CovidRetrieval (cmn-Hans)": 10.79,
+                    "DBPedia": 33.35,
+                    "DuRetrieval (cmn-Hans)": 6.62,
+                    "EcomRetrieval (cmn-Hans)": 4.01,
+                    "FEVER": 55.9,
+                    "FiQA-PL (pol-Latn)": 5.82,
+                    "FiQA2018": 37.27,
+                    "GerDaLIRSmall (deu-Latn)": 1.35,
+                    "HellaSwag": 24.08,
+                    "HotpotQA": 44.59,
+                    "LEMBNarrativeQARetrieval": 19.64,
+                    "LEMBNeedleRetrieval": 12.25,
+                    "LEMBPasskeyRetrieval": 14.75,
+                    "LEMBQMSumRetrieval": 13.08,
+                    "LEMBSummScreenFDRetrieval": 46.98,
+                    "LEMBWikimQARetrieval": 44.88,
+                    "LeCaRDv2 (zho-Hans)": 18.77,
+                    "LegalBenchConsumerContractsQA": 60.21,
+                    "LegalBenchCorporateLobbying": 88.69,
+                    "LegalQuAD (deu-Latn)": 7.44,
+                    "LegalSummarization": 57.43,
+                    "MMarcoRetrieval (cmn-Hans)": 7.46,
+                    "MSMARCO": 39.03,
+                    "MedicalRetrieval (cmn-Hans)": 2.3,
+                    "MintakaRetrieval (ara-Arab)": 2.74,
+                    "MintakaRetrieval (deu-Latn)": 20.04,
+                    "MintakaRetrieval (spa-Latn)": 11.76,
+                    "MintakaRetrieval (fra-Latn)": 16.08,
+                    "MintakaRetrieval (hin-Deva)": 3.04,
+                    "MintakaRetrieval (ita-Latn)": 11.83,
+                    "MintakaRetrieval (jpn-Hira)": 7.31,
+                    "MintakaRetrieval (por-Latn)": 13.66,
+                    "NFCorpus": 32.25,
+                    "NFCorpus-PL (pol-Latn)": 15.43,
+                    "NQ": 46.47,
+                    "PIQA": 26.44,
+                    "Quail": 3.08,
+                    "QuoraRetrieval": 87.75,
+                    "RARbCode": 42.44,
+                    "RARbMath": 66.36,
+                    "RuBQRetrieval (rus-Cyrl)": 8.84,
+                    "SCIDOCS": 21.82,
+                    "SCIDOCS-PL (pol-Latn)": 5.34,
+                    "SIQA": 2.09,
+                    "SciFact": 62.64,
+                    "SciFact-PL (pol-Latn)": 22.48,
+                    "SpartQA": 2.67,
+                    "SyntecRetrieval (fra-Latn)": 60.8,
+                    "T2Retrieval (cmn-Hans)": 4.82,
+                    "TRECCOVID": 50.82,
+                    "TRECCOVID-PL (pol-Latn)": 16.52,
+                    "TempReasonL1": 1.66,
+                    "TempReasonL2Fact": 10.31,
+                    "TempReasonL2Pure": 0.63,
+                    "TempReasonL3Fact": 11.11,
+                    "TempReasonL3Pure": 6.63,
+                    "Touche2020": 17.22,
+                    "VideoRetrieval (cmn-Hans)": 9.38,
+                    "WinoGrande": 27.2,
+                    "XPQARetrieval (ara-Arab_ara-Arab)": 7.83,
+                    "XPQARetrieval (eng-Latn_ara-Arab)": 2.52,
+                    "XPQARetrieval (ara-Arab_eng-Latn)": 8.88,
+                    "XPQARetrieval (deu-Latn_deu-Latn)": 56.77,
+                    "XPQARetrieval (eng-Latn_deu-Latn)": 18.2,
+                    "XPQARetrieval (deu-Latn_eng-Latn)": 30.06,
+                    "XPQARetrieval (spa-Latn_spa-Latn)": 42.22,
+                    "XPQARetrieval (eng-Latn_spa-Latn)": 7.53,
+                    "XPQARetrieval (spa-Latn_eng-Latn)": 26.27,
+                    "XPQARetrieval (fra-Latn_fra-Latn)": 55.9,
+                    "XPQARetrieval (eng-Latn_fra-Latn)": 14.89,
+                    "XPQARetrieval (fra-Latn_eng-Latn)": 34.2,
+                    "XPQARetrieval (hin-Deva_hin-Deva)": 33.26,
+                    "XPQARetrieval (eng-Latn_hin-Deva)": 6.44,
+                    "XPQARetrieval (hin-Deva_eng-Latn)": 6.98,
+                    "XPQARetrieval (ita-Latn_ita-Latn)": 58.68,
+                    "XPQARetrieval (eng-Latn_ita-Latn)": 8.56,
+                    "XPQARetrieval (ita-Latn_eng-Latn)": 28.71,
+                    "XPQARetrieval (jpn-Hira_jpn-Hira)": 39.53,
+                    "XPQARetrieval (eng-Latn_jpn-Hira)": 5.7,
+                    "XPQARetrieval (jpn-Hira_eng-Latn)": 13.75,
+                    "XPQARetrieval (kor-Hang_kor-Hang)": 13.48,
+                    "XPQARetrieval (eng-Latn_kor-Hang)": 7.43,
+                    "XPQARetrieval (kor-Hang_eng-Latn)": 7.34,
+                    "XPQARetrieval (pol-Latn_pol-Latn)": 28.07,
+                    "XPQARetrieval (eng-Latn_pol-Latn)": 10.03,
+                    "XPQARetrieval (pol-Latn_eng-Latn)": 16.58,
+                    "XPQARetrieval (por-Latn_por-Latn)": 34.09,
+                    "XPQARetrieval (eng-Latn_por-Latn)": 7.38,
+                    "XPQARetrieval (por-Latn_eng-Latn)": 22.59,
+                    "XPQARetrieval (tam-Taml_tam-Taml)": 9.13,
+                    "XPQARetrieval (eng-Latn_tam-Taml)": 4.15,
+                    "XPQARetrieval (tam-Taml_eng-Latn)": 3.76,
+                    "XPQARetrieval (cmn-Hans_cmn-Hans)": 21.09,
+                    "XPQARetrieval (eng-Latn_cmn-Hans)": 6.58,
+                    "XPQARetrieval (cmn-Hans_eng-Latn)": 9.39,
+                    "XPQARetrieval (fr)": 55.9
+                }
+            ]
+        },
+        "STS": {
+            "spearman": [
+                {
+                    "Model": "all-MiniLM-L12-v2",
+                    "AFQMC (cmn-Hans)": 7.94,
+                    "ATEC (cmn-Hans)": 12.97,
+                    "BIOSSES": 83.57,
+                    "BQ (cmn-Hans)": 23.31,
+                    "CDSC-R (pol-Latn)": 82.5,
+                    "LCQMC (cmn-Hans)": 21.04,
+                    "PAWSX (cmn-Hans)": 7.31,
+                    "RUParaPhraserSTS (rus-Cyrl)": 45.47,
+                    "RuSTSBenchmarkSTS (rus-Cyrl)": 56.33,
+                    "SICK-R": 79.32,
+                    "SICK-R-PL (pol-Latn)": 54.26,
+                    "SICKFr (fra-Latn)": 63.16,
+                    "STS12": 73.08,
+                    "STS13": 82.13,
+                    "STS14": 76.73,
+                    "STS15": 85.58,
+                    "STS16": 80.23,
+                    "STS17 (nld-Latn_eng-Latn)": 24.51,
+                    "STS17 (eng-Latn_ara-Arab)": 0.54,
+                    "STS17 (ara-Arab)": 58.71,
+                    "STS17 (kor-Hang)": 43.37,
+                    "STS17 (eng-Latn_tur-Latn)": 0.43,
+                    "STS17 (ita-Latn_eng-Latn)": 24.28,
+                    "STS17 (eng-Latn_deu-Latn)": 27.54,
+                    "STS17 (fra-Latn_eng-Latn)": 30.7,
+                    "STS17 (spa-Latn)": 78.37,
+                    "STS17 (en-en)": 88.63,
+                    "STS17 (spa-Latn_eng-Latn)": 22.01,
+                    "STS17 (ar-ar)": 58.71,
+                    "STS17 (en-ar)": 0.54,
+                    "STS17 (en-de)": 27.54,
+                    "STS17 (en-tr)": 0.43,
+                    "STS17 (es-en)": 22.01,
+                    "STS17 (es-es)": 78.37,
+                    "STS17 (fr-en)": 30.7,
+                    "STS17 (it-en)": 24.28,
+                    "STS17 (ko-ko)": 43.37,
+                    "STS17 (nl-en)": 24.51,
+                    "STS22 (deu-Latn_fra-Latn)": 43.52,
+                    "STS22 (tur-Latn)": 21.6,
+                    "STS22 (en)": 65.67,
+                    "STS22 (ara-Arab)": 17.54,
+                    "STS22 (pol-Latn_eng-Latn)": 42.67,
+                    "STS22 (spa-Latn_ita-Latn)": 40.71,
+                    "STS22 (pol-Latn)": 19.22,
+                    "STS22 (fra-Latn_pol-Latn)": 16.9,
+                    "STS22 (spa-Latn)": 43.98,
+                    "STS22 (rus-Cyrl)": 11.19,
+                    "STS22 (ita-Latn)": 47.48,
+                    "STS22 (deu-Latn_eng-Latn)": 42.86,
+                    "STS22 (deu-Latn)": 22.53,
+                    "STS22 (cmn-Hans_eng-Latn)": 44.39,
+                    "STS22 (deu-Latn_pol-Latn)": 1.63,
+                    "STS22 (fra-Latn)": 69.51,
+                    "STS22 (cmn-Hans)": 33.15,
+                    "STS22 (spa-Latn_eng-Latn)": 53.99,
+                    "STS22 (ar)": 17.54,
+                    "STS22 (de)": 22.53,
+                    "STS22 (de-en)": 42.86,
+                    "STS22 (de-fr)": 43.52,
+                    "STS22 (de-pl)": 1.63,
+                    "STS22 (es)": 43.98,
+                    "STS22 (es-en)": 53.99,
+                    "STS22 (es-it)": 40.71,
+                    "STS22 (fr)": 69.51,
+                    "STS22 (fr-pl)": 16.9,
+                    "STS22 (it)": 47.48,
+                    "STS22 (pl)": 19.22,
+                    "STS22 (pl-en)": 42.67,
+                    "STS22 (ru)": 11.19,
+                    "STS22 (tr)": 21.6,
+                    "STS22 (zh)": 33.15,
+                    "STS22 (zh-en)": 44.39,
+                    "STSB (cmn-Hans)": 36.66,
+                    "STSBenchmark": 83.09,
+                    "STSBenchmarkMultilingualSTS (nld-Latn)": 60.03,
+                    "STSBenchmarkMultilingualSTS (spa-Latn)": 65.33,
+                    "STSBenchmarkMultilingualSTS (ita-Latn)": 60.71,
+                    "STSBenchmarkMultilingualSTS (cmn-Hans)": 38.93,
+                    "STSBenchmarkMultilingualSTS (en)": 83.09,
+                    "STSBenchmarkMultilingualSTS (por-Latn)": 63.85,
+                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 56.09,
+                    "STSBenchmarkMultilingualSTS (fra-Latn)": 66.68,
+                    "STSBenchmarkMultilingualSTS (pol-Latn)": 60.2,
+                    "STSBenchmarkMultilingualSTS (deu-Latn)": 63.28
                 }
             ]
         },
-        "STS": {
+        "Summarization": {
             "spearman": [
                 {
-                    "Model": "Cohere-embed-english-v3.0"
+                    "Model": "all-MiniLM-L12-v2",
+                    "SummEval": 27.9,
+                    "SummEvalFr (fra-Latn)": 26.63
                 }
             ]
         },
-        "Summarization": {
-            "spearman": [
+        "MultilabelClassification": {
+            "accuracy": [
                 {
-                    "Model": "Cohere-embed-english-v3.0"
+                    "Model": "all-MiniLM-L12-v2",
+                    "CEDRClassification (rus-Cyrl)": 33.86,
+                    "SensitiveTopicsClassification (rus-Cyrl)": 18.05
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "Cohere-embed-english-v3.0",
-                    "Core17InstructionRetrieval": 2.8,
-                    "News21InstructionRetrieval": 0.2,
-                    "Robust04InstructionRetrieval": -3.63
+                    "Model": "all-MiniLM-L12-v2"
                 }
             ]
         }
     },
-    "sentence-camembert-base": {
+    "LLM2Vec-Llama-2-supervised": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "sentence-camembert-base"
+                    "Model": "LLM2Vec-Llama-2-supervised"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "sentence-camembert-base",
-                    "AmazonReviewsClassification (fr)": 36.03,
-                    "MTOPDomainClassification (fr)": 77.1,
-                    "MTOPIntentClassification (fr)": 43.44,
-                    "MasakhaNEWSClassification (fra)": 70.36,
-                    "MassiveIntentClassification (fr)": 51.59,
-                    "MassiveScenarioClassification (fr)": 61.28
+                    "Model": "LLM2Vec-Llama-2-supervised",
+                    "AmazonCounterfactualClassification (en)": 82.22,
+                    "AmazonPolarityClassification": 89.69,
+                    "AmazonReviewsClassification (en)": 48.47,
+                    "Banking77Classification": 88.17,
+                    "EmotionClassification": 51.71,
+                    "ImdbClassification": 85.78,
+                    "MTOPDomainClassification (en)": 95.57,
+                    "MTOPIntentClassification (en)": 82.81,
+                    "MassiveIntentClassification (en)": 78.06,
+                    "MassiveScenarioClassification (en)": 81.35,
+                    "ToxicConversationsClassification": 71.01,
+                    "TweetSentimentExtractionClassification": 61.11
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "sentence-camembert-base",
-                    "AlloProfClusteringP2P": 59.09,
-                    "AlloProfClusteringS2S": 38.92,
-                    "HALClusteringS2S": 20.22,
-                    "MLSUMClusteringP2P": 35.98,
-                    "MLSUMClusteringS2S": 27.05,
-                    "MasakhaNEWSClusteringP2P (fra)": 36.03,
-                    "MasakhaNEWSClusteringS2S (fra)": 30.77
+                    "Model": "LLM2Vec-Llama-2-supervised",
+                    "ArxivClusteringP2P": 43.14,
+                    "ArxivClusteringS2S": 42.38,
+                    "BiorxivClusteringP2P": 35.88,
+                    "BiorxivClusteringS2S": 34.81,
+                    "MedrxivClusteringP2P": 32.23,
+                    "MedrxivClusteringS2S": 31.37,
+                    "RedditClustering": 61.1,
+                    "RedditClusteringP2P": 64.52,
+                    "StackExchangeClustering": 67.98,
+                    "StackExchangeClusteringP2P": 33.2,
+                    "TwentyNewsgroupsClustering": 51.04
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "sentence-camembert-base",
-                    "OpusparcusPC (fr)": 92.05,
-                    "PawsXPairClassification (fr)": 57.44
+                    "Model": "LLM2Vec-Llama-2-supervised",
+                    "SprintDuplicateQuestions": 96.83,
+                    "TwitterSemEval2015": 80.7,
+                    "TwitterURLCorpus": 86.56
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "sentence-camembert-base",
-                    "AlloprofReranking": 48.68,
-                    "SyntecReranking": 79.75
+                    "Model": "LLM2Vec-Llama-2-supervised",
+                    "AskUbuntuDupQuestions": 63.13,
+                    "MindSmallReranking": 31.34,
+                    "SciDocsRR": 84.03,
+                    "StackOverflowDupQuestions": 51.02
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "sentence-camembert-base",
-                    "AlloprofRetrieval": 21.94,
-                    "BSARDRetrieval": 0.0,
-                    "MintakaRetrieval (fr)": 13.36,
-                    "SyntecRetrieval": 68.62,
-                    "XPQARetrieval (fr)": 57.92
+                    "Model": "LLM2Vec-Llama-2-supervised",
+                    "ArguAna": 56.53,
+                    "CQADupstackRetrieval": 45.94,
+                    "ClimateFEVER": 30.7,
+                    "DBPedia": 48.42,
+                    "FEVER": 89.93,
+                    "FiQA2018": 51.28,
+                    "HotpotQA": 72.99,
+                    "MSMARCO": 41.46,
+                    "NFCorpus": 40.33,
+                    "NQ": 61.24,
+                    "QuoraRetrieval": 85.59,
+                    "SCIDOCS": 21.05,
+                    "SciFact": 77.3,
+                    "TRECCOVID": 79.25,
+                    "Touche2020": 16.92
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "sentence-camembert-base",
-                    "SICKFr": 74.18,
-                    "STS22 (fr)": 77.54,
-                    "STSBenchmarkMultilingualSTS (fr)": 81.64
+                    "Model": "LLM2Vec-Llama-2-supervised",
+                    "BIOSSES": 82.13,
+                    "SICK-R": 83.01,
+                    "STS12": 78.85,
+                    "STS13": 86.84,
+                    "STS14": 84.04,
+                    "STS15": 88.72,
+                    "STS16": 86.79,
+                    "STS17 (en-en)": 90.63,
+                    "STS22 (en)": 67.55,
+                    "STSBenchmark": 88.72
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "sentence-camembert-base",
-                    "SummEvalFr": 28.77
+                    "Model": "LLM2Vec-Llama-2-supervised",
+                    "SummEval": 28.49
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "LLM2Vec-Llama-2-supervised"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "sentence-camembert-base"
+                    "Model": "LLM2Vec-Llama-2-supervised"
                 }
             ]
         }
     },
-    "Cohere-embed-multilingual-v3.0": {
+    "monot5-base-msmarco-10k": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "Cohere-embed-multilingual-v3.0"
+                    "Model": "monot5-base-msmarco-10k"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "Cohere-embed-multilingual-v3.0",
-                    "AmazonReviewsClassification (fr)": 41.89,
-                    "MTOPDomainClassification (fr)": 86.23,
-                    "MTOPIntentClassification (fr)": 61.07,
-                    "MasakhaNEWSClassification (fra)": 83.06,
-                    "MassiveIntentClassification (fr)": 62.94,
-                    "MassiveScenarioClassification (fr)": 67.29
+                    "Model": "monot5-base-msmarco-10k"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "Cohere-embed-multilingual-v3.0",
-                    "AlloProfClusteringP2P": 63.53,
-                    "AlloProfClusteringS2S": 36.18,
-                    "HALClusteringS2S": 19.9,
-                    "MLSUMClusteringP2P": 45.08,
-                    "MLSUMClusteringS2S": 34.75,
-                    "MasakhaNEWSClusteringP2P (fra)": 53.18,
-                    "MasakhaNEWSClusteringS2S (fra)": 32.31
+                    "Model": "monot5-base-msmarco-10k"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "Cohere-embed-multilingual-v3.0",
-                    "OpusparcusPC (fr)": 94.08,
-                    "PawsXPairClassification (fr)": 61.26
+                    "Model": "monot5-base-msmarco-10k"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "Cohere-embed-multilingual-v3.0",
-                    "AlloprofReranking": 51.01,
-                    "SyntecReranking": 85.72
+                    "Model": "monot5-base-msmarco-10k"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "Cohere-embed-multilingual-v3.0",
-                    "AlloprofRetrieval": 38.36,
-                    "BSARDRetrieval": 0.14,
-                    "MintakaRetrieval (fr)": 25.44,
-                    "SyntecRetrieval": 79.27,
-                    "XPQARetrieval (fr)": 58.87
+                    "Model": "monot5-base-msmarco-10k"
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "Cohere-embed-multilingual-v3.0",
-                    "SICKFr": 79.23,
-                    "STS22 (fr)": 82.76,
-                    "STSBenchmarkMultilingualSTS (fr)": 81.84
+                    "Model": "monot5-base-msmarco-10k"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "Cohere-embed-multilingual-v3.0",
-                    "SummEvalFr": 31.26
+                    "Model": "monot5-base-msmarco-10k"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "monot5-base-msmarco-10k"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "Cohere-embed-multilingual-v3.0"
+                    "Model": "monot5-base-msmarco-10k",
+                    "Core17InstructionRetrieval": -4.06,
+                    "News21InstructionRetrieval": 5.02,
+                    "Robust04InstructionRetrieval": -6.2
                 }
             ]
         }
     },
-    "text2vec-large-chinese": {
+    "nomic-embed-text-v1.5-256": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "text2vec-large-chinese"
+                    "Model": "nomic-embed-text-v1.5-256"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "text2vec-large-chinese",
-                    "AmazonReviewsClassification (zh)": 33.77,
-                    "IFlyTek": 41.54,
-                    "JDReview": 81.56,
-                    "MassiveIntentClassification (zh-CN)": 63.23,
-                    "MassiveScenarioClassification (zh-CN)": 68.45,
-                    "MultilingualSentiment": 58.97,
-                    "OnlineShopping": 83.51,
-                    "TNews": 38.92,
-                    "Waimai": 76.01
+                    "Model": "nomic-embed-text-v1.5-256",
+                    "AmazonCounterfactualClassification (en)": 72.94,
+                    "AmazonPolarityClassification": 91.35,
+                    "AmazonReviewsClassification (en)": 45.73,
+                    "Banking77Classification": 83.69,
+                    "EmotionClassification": 45.88,
+                    "ImdbClassification": 83.99,
+                    "MTOPDomainClassification (en)": 91.68,
+                    "MTOPIntentClassification (en)": 72.47,
+                    "MassiveIntentClassification (en)": 71.76,
+                    "MassiveScenarioClassification (en)": 75.67,
+                    "ToxicConversationsClassification": 70.87,
+                    "TweetSentimentExtractionClassification": 59.2
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "text2vec-large-chinese",
-                    "CLSClusteringP2P": 30.13,
-                    "CLSClusteringS2S": 28.77,
-                    "ThuNewsClusteringP2P": 35.05,
-                    "ThuNewsClusteringS2S": 26.14
+                    "Model": "nomic-embed-text-v1.5-256",
+                    "ArxivClusteringP2P": 44.82,
+                    "ArxivClusteringS2S": 35.32,
+                    "BiorxivClusteringP2P": 38.19,
+                    "BiorxivClusteringS2S": 31.83,
+                    "MedrxivClusteringP2P": 34.08,
+                    "MedrxivClusteringS2S": 30.98,
+                    "RedditClustering": 54.92,
+                    "RedditClusteringP2P": 60.23,
+                    "StackExchangeClustering": 61.81,
+                    "StackExchangeClusteringP2P": 34.03,
+                    "TwentyNewsgroupsClustering": 48.56
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "text2vec-large-chinese",
-                    "Cmnli": 77.67,
-                    "Ocnli": 64.04
+                    "Model": "nomic-embed-text-v1.5-256",
+                    "SprintDuplicateQuestions": 92.31,
+                    "TwitterSemEval2015": 73.61,
+                    "TwitterURLCorpus": 86.34
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "text2vec-large-chinese",
-                    "CMedQAv1": 58.92,
-                    "CMedQAv2": 60.41,
-                    "MMarcoReranking": 12.48,
-                    "T2Reranking": 64.82
+                    "Model": "nomic-embed-text-v1.5-256",
+                    "AskUbuntuDupQuestions": 61.34,
+                    "MindSmallReranking": 30.04,
+                    "SciDocsRR": 79.4,
+                    "StackOverflowDupQuestions": 49.95
+                }
+            ]
+        },
+        "Retrieval": {
+            "ndcg_at_10": [
+                {
+                    "Model": "nomic-embed-text-v1.5-256",
+                    "ArguAna": 45.44,
+                    "CQADupstackRetrieval": 37.61,
+                    "ClimateFEVER": 39.63,
+                    "DBPedia": 39.42,
+                    "FEVER": 84.4,
+                    "FiQA2018": 35.0,
+                    "HotpotQA": 67.78,
+                    "MSMARCO": 41.38,
+                    "NFCorpus": 32.54,
+                    "NQ": 57.1,
+                    "QuoraRetrieval": 87.65,
+                    "SCIDOCS": 16.76,
+                    "SciFact": 68.24,
+                    "TRECCOVID": 80.65,
+                    "Touche2020": 28.49
                 }
             ]
         },
-        "Retrieval": {
-            "ndcg_at_10": [
+        "STS": {
+            "spearman": [
                 {
-                    "Model": "text2vec-large-chinese",
-                    "CmedqaRetrieval": 15.53,
-                    "CovidRetrieval": 60.48,
-                    "DuRetrieval": 51.87,
-                    "EcomRetrieval": 37.58,
-                    "MMarcoRetrieval": 45.96,
-                    "MedicalRetrieval": 30.93,
-                    "T2Retrieval": 50.52,
-                    "VideoRetrieval": 42.65
+                    "Model": "nomic-embed-text-v1.5-256",
+                    "BIOSSES": 81.58,
+                    "SICK-R": 79.24,
+                    "STS12": 78.16,
+                    "STS13": 86.01,
+                    "STS14": 81.25,
+                    "STS15": 86.51,
+                    "STS16": 84.24,
+                    "STS17 (en-en)": 86.44,
+                    "STS22 (en)": 65.14,
+                    "STSBenchmark": 84.8
                 }
             ]
         },
-        "STS": {
+        "Summarization": {
             "spearman": [
                 {
-                    "Model": "text2vec-large-chinese",
-                    "AFQMC": 24.51,
-                    "ATEC": 32.45,
-                    "BQ": 44.22,
-                    "LCQMC": 69.16,
-                    "PAWSX": 14.55,
-                    "QBQTC": 29.51,
-                    "STS22 (zh)": 65.94,
-                    "STSB": 79.45
+                    "Model": "nomic-embed-text-v1.5-256",
+                    "SummEval": 30.05
                 }
             ]
         },
-        "Summarization": {
-            "spearman": [
+        "MultilabelClassification": {
+            "accuracy": [
                 {
-                    "Model": "text2vec-large-chinese"
+                    "Model": "nomic-embed-text-v1.5-256"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "text2vec-large-chinese"
+                    "Model": "nomic-embed-text-v1.5-256"
                 }
             ]
         }
     },
-    "dragon-plus": {
+    "e5-large-v2": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "dragon-plus"
+                    "Model": "e5-large-v2"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "dragon-plus"
+                    "Model": "e5-large-v2"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "dragon-plus"
+                    "Model": "e5-large-v2",
+                    "BiorxivClusteringP2P": 36.72,
+                    "BiorxivClusteringS2S": 35.47,
+                    "MedrxivClusteringP2P": 31.45,
+                    "MedrxivClusteringS2S": 29.91,
+                    "RedditClustering": 55.5,
+                    "RedditClusteringP2P": 63.71,
+                    "StackExchangeClustering": 65.23,
+                    "StackExchangeClusteringP2P": 33.62,
+                    "TwentyNewsgroupsClustering": 48.73
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "dragon-plus"
+                    "Model": "e5-large-v2"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "dragon-plus"
+                    "Model": "e5-large-v2"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "dragon-plus",
-                    "ARCChallenge": 8.91,
-                    "AlphaNLI": 32.1,
-                    "HellaSwag": 27.69,
-                    "PIQA": 28.01,
-                    "Quail": 4.09,
-                    "RARbCode": 17.58,
-                    "RARbMath": 45.09,
-                    "SIQA": 2.0,
-                    "SpartQA": 10.34,
-                    "TempReasonL1": 1.82,
-                    "TempReasonL2Fact": 17.45,
-                    "TempReasonL2Pure": 0.55,
-                    "TempReasonL3Fact": 15.71,
-                    "TempReasonL3Pure": 7.97,
-                    "WinoGrande": 67.18
+                    "Model": "e5-large-v2"
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "dragon-plus"
+                    "Model": "e5-large-v2"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "dragon-plus"
+                    "Model": "e5-large-v2"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "e5-large-v2"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "dragon-plus"
+                    "Model": "e5-large-v2",
+                    "Core17InstructionRetrieval": 0.12,
+                    "News21InstructionRetrieval": 0.87,
+                    "Robust04InstructionRetrieval": -4.16
                 }
             ]
         }
     },
-    "bge-small-zh-v1.5": {
+    "bert-base-uncased": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "bge-small-zh-v1.5"
+                    "Model": "bert-base-uncased"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "bge-small-zh-v1.5",
-                    "AmazonReviewsClassification (zh)": 35.91,
-                    "IFlyTek": 45.49,
-                    "JDReview": 80.04,
-                    "MassiveIntentClassification (zh-CN)": 63.95,
-                    "MassiveScenarioClassification (zh-CN)": 70.8,
-                    "MultilingualSentiment": 63.06,
-                    "OnlineShopping": 85.05,
-                    "TNews": 48.15,
-                    "Waimai": 83.18
+                    "Model": "bert-base-uncased",
+                    "AmazonCounterfactualClassification (en)": 74.25,
+                    "AmazonPolarityClassification": 71.33,
+                    "AmazonReviewsClassification (en)": 33.56,
+                    "Banking77Classification": 63.41,
+                    "EmotionClassification": 35.28,
+                    "ImdbClassification": 65.35,
+                    "MTOPDomainClassification (en)": 82.63,
+                    "MTOPIntentClassification (en)": 68.14,
+                    "MassiveIntentClassification (en)": 59.88,
+                    "MassiveScenarioClassification (en)": 64.28,
+                    "ToxicConversationsClassification": 70.0,
+                    "TweetSentimentExtractionClassification": 51.81
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "bge-small-zh-v1.5",
-                    "CLSClusteringP2P": 38.14,
-                    "CLSClusteringS2S": 35.14,
-                    "ThuNewsClusteringP2P": 54.22,
-                    "ThuNewsClusteringS2S": 49.22
+                    "Model": "bert-base-uncased",
+                    "ArxivClusteringP2P": 35.19,
+                    "ArxivClusteringS2S": 27.51,
+                    "BiorxivClusteringP2P": 30.12,
+                    "BiorxivClusteringS2S": 24.77,
+                    "MedrxivClusteringP2P": 26.09,
+                    "MedrxivClusteringS2S": 23.6,
+                    "RedditClustering": 27.24,
+                    "RedditClusteringP2P": 43.32,
+                    "StackExchangeClustering": 43.58,
+                    "StackExchangeClusteringP2P": 26.55,
+                    "TwentyNewsgroupsClustering": 23.35
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "bge-small-zh-v1.5",
-                    "Cmnli": 76.24,
-                    "Ocnli": 64.57
+                    "Model": "bert-base-uncased",
+                    "SprintDuplicateQuestions": 36.81,
+                    "TwitterSemEval2015": 55.9,
+                    "TwitterURLCorpus": 76.29
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "bge-small-zh-v1.5",
-                    "CMedQAv1": 77.4,
-                    "CMedQAv2": 79.86,
-                    "MMarcoReranking": 20.5,
-                    "T2Reranking": 65.9
+                    "Model": "bert-base-uncased",
+                    "AskUbuntuDupQuestions": 45.84,
+                    "MindSmallReranking": 28.37,
+                    "SciDocsRR": 64.94,
+                    "StackOverflowDupQuestions": 34.62
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "bge-small-zh-v1.5",
-                    "CmedqaRetrieval": 35.11,
-                    "CovidRetrieval": 70.14,
-                    "DuRetrieval": 77.28,
-                    "EcomRetrieval": 55.71,
-                    "MMarcoRetrieval": 63.48,
-                    "MedicalRetrieval": 49.8,
-                    "T2Retrieval": 76.43,
-                    "VideoRetrieval": 66.19
+                    "Model": "bert-base-uncased",
+                    "ArguAna": 28.29,
+                    "CQADupstackRetrieval": 5.51,
+                    "ClimateFEVER": 5.41,
+                    "DBPedia": 4.13,
+                    "FEVER": 3.3,
+                    "FiQA2018": 2.19,
+                    "HotpotQA": 8.26,
+                    "MSMARCO": 1.91,
+                    "NFCorpus": 4.3,
+                    "NQ": 2.62,
+                    "QuoraRetrieval": 61.03,
+                    "SCIDOCS": 2.82,
+                    "SciFact": 13.34,
+                    "TRECCOVID": 14.74,
+                    "Touche2020": 0.97
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "bge-small-zh-v1.5",
-                    "AFQMC": 33.42,
-                    "ATEC": 43.01,
-                    "BQ": 55.22,
-                    "LCQMC": 72.19,
-                    "PAWSX": 9.26,
-                    "QBQTC": 35.29,
-                    "STS22 (zh)": 67.72,
-                    "STSB": 76.73
+                    "Model": "bert-base-uncased",
+                    "BIOSSES": 54.7,
+                    "SICK-R": 58.65,
+                    "STS12": 30.87,
+                    "STS13": 59.89,
+                    "STS14": 47.73,
+                    "STS15": 60.29,
+                    "STS16": 63.73,
+                    "STS17 (en-en)": 64.1,
+                    "STS22 (en)": 56.37,
+                    "STSBenchmark": 47.29
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "bge-small-zh-v1.5"
+                    "Model": "bert-base-uncased",
+                    "SummEval": 29.82
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "bert-base-uncased"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "bge-small-zh-v1.5"
+                    "Model": "bert-base-uncased"
                 }
             ]
         }
     },
-    "text-embedding-3-large": {
+    "bge-base-en-v1.5": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "text-embedding-3-large"
+                    "Model": "bge-base-en-v1.5"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "text-embedding-3-large",
-                    "AmazonCounterfactualClassification (en)": 78.93,
-                    "AmazonPolarityClassification": 92.85,
-                    "AmazonReviewsClassification (en)": 48.7,
-                    "Banking77Classification": 85.69,
-                    "EmotionClassification": 51.58,
-                    "ImdbClassification": 87.67,
-                    "MTOPDomainClassification (en)": 95.36,
-                    "MTOPIntentClassification (en)": 75.07,
-                    "MassiveIntentClassification (en)": 74.64,
-                    "MassiveScenarioClassification (en)": 79.79,
-                    "ToxicConversationsClassification": 72.92,
-                    "TweetSentimentExtractionClassification": 62.22
+                    "Model": "bge-base-en-v1.5"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "text-embedding-3-large",
-                    "ArxivClusteringP2P": 49.01,
-                    "ArxivClusteringS2S": 44.45,
-                    "BiorxivClusteringP2P": 38.03,
-                    "BiorxivClusteringS2S": 36.53,
-                    "MedrxivClusteringP2P": 32.7,
-                    "MedrxivClusteringS2S": 31.27,
-                    "RedditClustering": 67.84,
-                    "RedditClusteringP2P": 67.96,
-                    "StackExchangeClustering": 76.26,
-                    "StackExchangeClusteringP2P": 36.88,
-                    "TwentyNewsgroupsClustering": 58.14
+                    "Model": "bge-base-en-v1.5",
+                    "BiorxivClusteringP2P": 39.44,
+                    "BiorxivClusteringS2S": 36.62,
+                    "MedrxivClusteringP2P": 33.21,
+                    "MedrxivClusteringS2S": 31.68,
+                    "RedditClustering": 56.61,
+                    "RedditClusteringP2P": 62.66,
+                    "StackExchangeClustering": 66.11,
+                    "StackExchangeClusteringP2P": 35.24,
+                    "TwentyNewsgroupsClustering": 50.75
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "text-embedding-3-large",
-                    "SprintDuplicateQuestions": 92.25,
-                    "TwitterSemEval2015": 77.13,
-                    "TwitterURLCorpus": 87.78
+                    "Model": "bge-base-en-v1.5"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "text-embedding-3-large",
-                    "AskUbuntuDupQuestions": 65.03,
-                    "MindSmallReranking": 29.86,
-                    "SciDocsRR": 86.66,
-                    "StackOverflowDupQuestions": 55.08
+                    "Model": "bge-base-en-v1.5"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "text-embedding-3-large",
-                    "AILACasedocs": 39.0,
-                    "AILAStatutes": 41.31,
-                    "ARCChallenge": 23.98,
-                    "AlphaNLI": 37.27,
-                    "ArguAna": 58.05,
-                    "BrightRetrieval (theoremqa_questions)": 22.22,
-                    "BrightRetrieval (leetcode)": 23.65,
-                    "BrightRetrieval (earth_science)": 26.27,
-                    "BrightRetrieval (psychology)": 27.52,
-                    "BrightRetrieval (robotics)": 12.93,
-                    "BrightRetrieval (economics)": 19.98,
-                    "BrightRetrieval (stackoverflow)": 12.49,
-                    "BrightRetrieval (biology)": 23.67,
-                    "BrightRetrieval (theoremqa_theorems)": 9.25,
-                    "BrightRetrieval (pony)": 2.45,
-                    "BrightRetrieval (sustainable_living)": 20.32,
-                    "BrightRetrieval (aops)": 8.45,
-                    "CQADupstackRetrieval": 47.54,
-                    "ClimateFEVER": 30.27,
-                    "DBPedia": 44.76,
-                    "FEVER": 87.94,
-                    "FiQA2018": 55.0,
-                    "GerDaLIRSmall": 32.77,
-                    "HellaSwag": 34.12,
-                    "HotpotQA": 71.58,
-                    "LEMBNarrativeQARetrieval": 44.09,
-                    "LEMBNeedleRetrieval": 29.25,
-                    "LEMBPasskeyRetrieval": 63.0,
-                    "LEMBQMSumRetrieval": 32.49,
-                    "LEMBSummScreenFDRetrieval": 84.8,
-                    "LEMBWikimQARetrieval": 54.16,
-                    "LeCaRDv2": 57.2,
-                    "LegalBenchConsumerContractsQA": 79.39,
-                    "LegalBenchCorporateLobbying": 95.09,
-                    "LegalQuAD": 57.47,
-                    "LegalSummarization": 71.55,
-                    "MSMARCO": 40.24,
-                    "NFCorpus": 42.07,
-                    "NQ": 61.27,
-                    "PIQA": 41.96,
-                    "Quail": 10.15,
-                    "QuoraRetrieval": 89.05,
-                    "RARbCode": 89.64,
-                    "RARbMath": 90.08,
-                    "SCIDOCS": 23.11,
-                    "SIQA": 3.44,
-                    "SciFact": 77.77,
-                    "SpartQA": 7.51,
-                    "TRECCOVID": 79.56,
-                    "TempReasonL1": 2.13,
-                    "TempReasonL2Fact": 28.65,
-                    "TempReasonL2Pure": 10.34,
-                    "TempReasonL3Fact": 25.52,
-                    "TempReasonL3Pure": 15.28,
-                    "Touche2020": 23.35,
-                    "WinoGrande": 29.11
+                    "Model": "bge-base-en-v1.5",
+                    "ARCChallenge": 9.66,
+                    "AlphaNLI": 10.99,
+                    "HellaSwag": 26.64,
+                    "PIQA": 25.69,
+                    "Quail": 1.42,
+                    "RARbCode": 46.47,
+                    "RARbMath": 46.86,
+                    "SIQA": 0.94,
+                    "SpartQA": 3.37,
+                    "TempReasonL1": 1.07,
+                    "TempReasonL2Fact": 17.23,
+                    "TempReasonL2Pure": 1.29,
+                    "TempReasonL3Fact": 13.36,
+                    "TempReasonL3Pure": 5.2,
+                    "WinoGrande": 13.76
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "text-embedding-3-large",
-                    "BIOSSES": 84.68,
-                    "SICK-R": 79.0,
-                    "STS12": 72.84,
-                    "STS13": 86.1,
-                    "STS14": 81.15,
-                    "STS15": 88.49,
-                    "STS16": 85.08,
-                    "STS17 (en-en)": 90.22,
-                    "STS22 (en)": 66.14,
-                    "STSBenchmark": 83.56
+                    "Model": "bge-base-en-v1.5"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "text-embedding-3-large",
-                    "SummEval": 29.92
+                    "Model": "bge-base-en-v1.5"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "bge-base-en-v1.5"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "text-embedding-3-large",
-                    "Core17InstructionRetrieval": -0.2,
-                    "News21InstructionRetrieval": -2.03,
-                    "Robust04InstructionRetrieval": -5.81
+                    "Model": "bge-base-en-v1.5"
                 }
             ]
         }
     },
-    "nb-bert-large": {
+    "contriever": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "nb-bert-large",
-                    "BornholmBitextMining": 4.53
+                    "Model": "contriever"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "nb-bert-large",
-                    "AngryTweetsClassification": 52.14,
-                    "DKHateClassification": 62.13,
-                    "DanishPoliticalCommentsClassification": 35.04,
-                    "LccSentimentClassification": 56.27,
-                    "MassiveIntentClassification (da)": 57.03,
-                    "MassiveIntentClassification (nb)": 62.68,
-                    "MassiveIntentClassification (sv)": 55.02,
-                    "MassiveScenarioClassification (da)": 60.43,
-                    "MassiveScenarioClassification (nb)": 67.44,
-                    "MassiveScenarioClassification (sv)": 57.12,
-                    "NoRecClassification": 55.46,
-                    "NordicLangClassification": 85.27,
-                    "NorwegianParliament": 62.58,
-                    "ScalaDaClassification": 62.85,
-                    "ScalaNbClassification": 66.97
+                    "Model": "contriever"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "nb-bert-large"
+                    "Model": "contriever"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "nb-bert-large"
+                    "Model": "contriever"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "nb-bert-large"
+                    "Model": "contriever"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "nb-bert-large"
+                    "Model": "contriever",
+                    "ARCChallenge": 8.62,
+                    "AlphaNLI": 31.77,
+                    "HellaSwag": 17.73,
+                    "PIQA": 24.64,
+                    "Quail": 4.97,
+                    "RARbCode": 9.28,
+                    "RARbMath": 30.76,
+                    "SIQA": 1.27,
+                    "SpartQA": 10.94,
+                    "TempReasonL1": 1.93,
+                    "TempReasonL2Fact": 22.68,
+                    "TempReasonL2Pure": 1.12,
+                    "TempReasonL3Fact": 20.62,
+                    "TempReasonL3Pure": 7.8,
+                    "WinoGrande": 47.15
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "nb-bert-large"
+                    "Model": "contriever"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "nb-bert-large"
+                    "Model": "contriever"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "contriever"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "nb-bert-large"
+                    "Model": "contriever"
                 }
             ]
         }
     },
-    "e5-large-v2": {
+    "text-similarity-babbage-001": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "e5-large-v2"
+                    "Model": "text-similarity-babbage-001"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "e5-large-v2"
+                    "Model": "text-similarity-babbage-001"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "e5-large-v2",
-                    "BiorxivClusteringP2P": 36.72,
-                    "BiorxivClusteringS2S": 35.47,
-                    "MedrxivClusteringP2P": 31.45,
-                    "MedrxivClusteringS2S": 29.91,
-                    "RedditClustering": 55.5,
-                    "RedditClusteringP2P": 63.71,
-                    "StackExchangeClustering": 65.23,
-                    "StackExchangeClusteringP2P": 33.62,
-                    "TwentyNewsgroupsClustering": 48.73
+                    "Model": "text-similarity-babbage-001",
+                    "RedditClustering": 45.64,
+                    "StackExchangeClustering": 53.01,
+                    "TwentyNewsgroupsClustering": 42.01
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "e5-large-v2"
+                    "Model": "text-similarity-babbage-001",
+                    "SprintDuplicateQuestions": 76.46,
+                    "TwitterSemEval2015": 70.85,
+                    "TwitterURLCorpus": 85.08
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "e5-large-v2"
+                    "Model": "text-similarity-babbage-001",
+                    "AskUbuntuDupQuestions": 54.68,
+                    "SciDocsRR": 72.78,
+                    "StackOverflowDupQuestions": 40.65
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "e5-large-v2"
+                    "Model": "text-similarity-babbage-001"
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "e5-large-v2"
+                    "Model": "text-similarity-babbage-001",
+                    "BIOSSES": 78.12,
+                    "SICK-R": 77.02,
+                    "STSBenchmark": 84.32
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "e5-large-v2"
+                    "Model": "text-similarity-babbage-001"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "text-similarity-babbage-001"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "e5-large-v2",
-                    "Core17InstructionRetrieval": 0.12,
-                    "News21InstructionRetrieval": 0.87,
-                    "Robust04InstructionRetrieval": -4.16
+                    "Model": "text-similarity-babbage-001"
                 }
             ]
         }
     },
-    "universal-sentence-encoder-multilingual-3": {
+    "gbert-base": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "universal-sentence-encoder-multilingual-3"
+                    "Model": "gbert-base"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "universal-sentence-encoder-multilingual-3",
-                    "AmazonReviewsClassification (fr)": 33.51,
-                    "MTOPDomainClassification (fr)": 85.5,
-                    "MTOPIntentClassification (fr)": 53.98,
-                    "MasakhaNEWSClassification (fra)": 82.06,
-                    "MassiveIntentClassification (fr)": 61.19,
-                    "MassiveScenarioClassification (fr)": 70.22
+                    "Model": "gbert-base"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "universal-sentence-encoder-multilingual-3",
-                    "AlloProfClusteringP2P": 56.9,
-                    "AlloProfClusteringS2S": 37.84,
-                    "HALClusteringS2S": 18.95,
-                    "MLSUMClusteringP2P": 43.9,
-                    "MLSUMClusteringS2S": 35.5,
-                    "MasakhaNEWSClusteringP2P (fra)": 60.57,
-                    "MasakhaNEWSClusteringS2S (fra)": 40.31
+                    "Model": "gbert-base",
+                    "BlurbsClusteringP2P": 35.36,
+                    "BlurbsClusteringS2S": 11.27,
+                    "TenKGnadClusteringP2P": 37.16,
+                    "TenKGnadClusteringS2S": 24.23
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "universal-sentence-encoder-multilingual-3",
-                    "OpusparcusPC (fr)": 91.46,
-                    "PawsXPairClassification (fr)": 52.39
+                    "Model": "gbert-base"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "universal-sentence-encoder-multilingual-3",
-                    "AlloprofReranking": 56.23,
-                    "SyntecReranking": 73.85
+                    "Model": "gbert-base"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "universal-sentence-encoder-multilingual-3",
-                    "AlloprofRetrieval": 35.27,
-                    "BSARDRetrieval": 0.0,
-                    "MintakaRetrieval (fr)": 26.12,
-                    "SyntecRetrieval": 69.82,
-                    "XPQARetrieval (fr)": 59.59
+                    "Model": "gbert-base"
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "universal-sentence-encoder-multilingual-3",
-                    "SICKFr": 71.37,
-                    "STS22 (fr)": 77.91,
-                    "STSBenchmarkMultilingualSTS (fr)": 75.48
+                    "Model": "gbert-base"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "universal-sentence-encoder-multilingual-3",
-                    "SummEvalFr": 28.21
+                    "Model": "gbert-base"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "gbert-base"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "universal-sentence-encoder-multilingual-3"
+                    "Model": "gbert-base"
                 }
             ]
         }
     },
-    "LaBSE-ru-turbo": {
+    "mistral-7b-instruct-v0.2": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "LaBSE-ru-turbo",
-                    "Tatoeba (rus-Cyrl_eng-Latn)": 93.22
+                    "Model": "mistral-7b-instruct-v0.2"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "LaBSE-ru-turbo",
-                    "GeoreviewClassification (rus-Cyrl)": 46.04,
-                    "HeadlineClassification (rus-Cyrl)": 69.98,
-                    "InappropriatenessClassification (rus-Cyrl)": 61.39,
-                    "KinopoiskClassification (rus-Cyrl)": 53.59,
-                    "MassiveIntentClassification (rus-Cyrl)": 66.08,
-                    "MassiveScenarioClassification (rus-Cyrl)": 71.13,
-                    "RuReviewsClassification (rus-Cyrl)": 64.58,
-                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 56.67,
-                    "RuSciBenchOECDClassification (rus-Cyrl)": 43.58
+                    "Model": "mistral-7b-instruct-v0.2"
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "LaBSE-ru-turbo",
-                    "GeoreviewClusteringP2P (rus-Cyrl)": 64.55,
-                    "MLSUMClusteringP2P (rus-Cyrl)": 45.7,
-                    "MLSUMClusteringS2S (rus-Cyrl)": 42.93,
-                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 50.64,
-                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 44.48
+                    "Model": "mistral-7b-instruct-v0.2"
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "LaBSE-ru-turbo",
-                    "OpusparcusPC (rus-Cyrl)": 89.32,
-                    "TERRa (rus-Cyrl)": 57.81
+                    "Model": "mistral-7b-instruct-v0.2"
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "LaBSE-ru-turbo",
-                    "RuBQReranking (rus-Cyrl)": 68.65
+                    "Model": "mistral-7b-instruct-v0.2"
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "LaBSE-ru-turbo",
-                    "RiaNewsRetrieval (rus-Cyrl)": 69.36,
-                    "RuBQRetrieval (rus-Cyrl)": 65.71
+                    "Model": "mistral-7b-instruct-v0.2"
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "LaBSE-ru-turbo",
-                    "RUParaPhraserSTS (rus-Cyrl)": 72.97,
-                    "RuSTSBenchmarkSTS (rus-Cyrl)": 81.77,
-                    "STS22 (rus-Cyrl)": 62.89,
-                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 81.81
+                    "Model": "mistral-7b-instruct-v0.2"
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "LaBSE-ru-turbo"
+                    "Model": "mistral-7b-instruct-v0.2"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "mistral-7b-instruct-v0.2"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "LaBSE-ru-turbo"
+                    "Model": "mistral-7b-instruct-v0.2",
+                    "Core17InstructionRetrieval": 13.03,
+                    "News21InstructionRetrieval": 4.81,
+                    "Robust04InstructionRetrieval": 12.61
                 }
             ]
         }
     },
-    "bge-large-zh-noinstruct": {
+    "bge-small-zh-v1.5": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "bge-large-zh-noinstruct"
+                    "Model": "bge-small-zh-v1.5"
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "bge-large-zh-noinstruct",
-                    "AmazonReviewsClassification (zh)": 41.94,
-                    "IFlyTek": 45.32,
-                    "JDReview": 85.38,
-                    "MassiveIntentClassification (zh-CN)": 66.96,
-                    "MassiveScenarioClassification (zh-CN)": 73.39,
-                    "MultilingualSentiment": 73.7,
-                    "OnlineShopping": 91.66,
-                    "TNews": 52.05,
-                    "Waimai": 86.83
+                    "Model": "bge-small-zh-v1.5",
+                    "AmazonReviewsClassification (zh)": 35.91,
+                    "IFlyTek": 45.49,
+                    "JDReview": 80.04,
+                    "MassiveIntentClassification (zh-CN)": 63.95,
+                    "MassiveScenarioClassification (zh-CN)": 70.8,
+                    "MultilingualSentiment": 63.06,
+                    "OnlineShopping": 85.05,
+                    "TNews": 48.15,
+                    "Waimai": 83.18
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "bge-large-zh-noinstruct",
-                    "CLSClusteringP2P": 41.23,
-                    "CLSClusteringS2S": 40.04,
-                    "ThuNewsClusteringP2P": 62.03,
-                    "ThuNewsClusteringS2S": 56.75
+                    "Model": "bge-small-zh-v1.5",
+                    "CLSClusteringP2P": 38.14,
+                    "CLSClusteringS2S": 35.14,
+                    "ThuNewsClusteringP2P": 54.22,
+                    "ThuNewsClusteringS2S": 49.22
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "bge-large-zh-noinstruct",
-                    "Cmnli": 82.17,
-                    "Ocnli": 71.37
+                    "Model": "bge-small-zh-v1.5",
+                    "Cmnli": 76.24,
+                    "Ocnli": 64.57
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "bge-large-zh-noinstruct",
-                    "CMedQAv1": 81.72,
-                    "CMedQAv2": 84.64,
-                    "MMarcoReranking": 27.1,
-                    "T2Reranking": 66.16
+                    "Model": "bge-small-zh-v1.5",
+                    "CMedQAv1": 77.4,
+                    "CMedQAv2": 79.86,
+                    "MMarcoReranking": 20.5,
+                    "T2Reranking": 65.9
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "bge-large-zh-noinstruct",
-                    "CmedqaRetrieval": 41.03,
-                    "CovidRetrieval": 75.07,
-                    "DuRetrieval": 84.68,
-                    "EcomRetrieval": 65.6,
-                    "MMarcoRetrieval": 81.38,
-                    "MedicalRetrieval": 58.28,
-                    "T2Retrieval": 84.39,
-                    "VideoRetrieval": 73.93
+                    "Model": "bge-small-zh-v1.5",
+                    "CmedqaRetrieval": 35.11,
+                    "CovidRetrieval": 70.14,
+                    "DuRetrieval": 77.28,
+                    "EcomRetrieval": 55.71,
+                    "MMarcoRetrieval": 63.48,
+                    "MedicalRetrieval": 49.8,
+                    "T2Retrieval": 76.43,
+                    "VideoRetrieval": 66.19
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "bge-large-zh-noinstruct",
-                    "AFQMC": 43.06,
-                    "ATEC": 48.29,
-                    "BQ": 60.53,
-                    "LCQMC": 74.71,
-                    "PAWSX": 16.64,
-                    "QBQTC": 35.2,
-                    "STS22 (zh)": 67.19,
-                    "STSB": 78.41
+                    "Model": "bge-small-zh-v1.5",
+                    "AFQMC": 33.42,
+                    "ATEC": 43.01,
+                    "BQ": 55.22,
+                    "LCQMC": 72.19,
+                    "PAWSX": 9.26,
+                    "QBQTC": 35.29,
+                    "STS22 (zh)": 67.72,
+                    "STSB": 76.73
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "bge-large-zh-noinstruct"
+                    "Model": "bge-small-zh-v1.5"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "bge-small-zh-v1.5"
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "bge-large-zh-noinstruct"
+                    "Model": "bge-small-zh-v1.5"
                 }
             ]
         }
     },
-    "bm25s": {
+    "bge-m3": {
         "BitextMining": {
             "f1": [
                 {
-                    "Model": "bm25s"
+                    "Model": "bge-m3",
+                    "Tatoeba (rus-Cyrl_eng-Latn)": 93.42
                 }
             ]
         },
         "Classification": {
             "accuracy": [
                 {
-                    "Model": "bm25s"
+                    "Model": "bge-m3",
+                    "GeoreviewClassification (rus-Cyrl)": 48.27,
+                    "HeadlineClassification (rus-Cyrl)": 70.32,
+                    "InappropriatenessClassification (rus-Cyrl)": 59.87,
+                    "KinopoiskClassification (rus-Cyrl)": 58.23,
+                    "MassiveIntentClassification (rus-Cyrl)": 68.75,
+                    "MassiveScenarioClassification (rus-Cyrl)": 73.42,
+                    "RuReviewsClassification (rus-Cyrl)": 66.91,
+                    "RuSciBenchGRNTIClassification (rus-Cyrl)": 55.81,
+                    "RuSciBenchOECDClassification (rus-Cyrl)": 42.57
                 }
             ]
         },
         "Clustering": {
             "v_measure": [
                 {
-                    "Model": "bm25s"
+                    "Model": "bge-m3",
+                    "GeoreviewClusteringP2P (rus-Cyrl)": 63.75,
+                    "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 50.57,
+                    "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 43.21
                 }
             ]
         },
         "PairClassification": {
             "ap": [
                 {
-                    "Model": "bm25s"
+                    "Model": "bge-m3",
+                    "OpusparcusPC (rus-Cyrl)": 89.64,
+                    "TERRa (rus-Cyrl)": 60.6
                 }
             ]
         },
         "Reranking": {
             "map": [
                 {
-                    "Model": "bm25s"
+                    "Model": "bge-m3",
+                    "MIRACLReranking (rus-Cyrl)": 65.38,
+                    "RuBQReranking (rus-Cyrl)": 74.02
                 }
             ]
         },
         "Retrieval": {
             "ndcg_at_10": [
                 {
-                    "Model": "bm25s",
-                    "ArguAna": 49.28,
-                    "CQADupstackRetrieval": 31.86,
-                    "ClimateFEVER": 13.62,
-                    "DBPedia": 29.91,
-                    "FEVER": 48.09,
-                    "FiQA2018": 25.14,
-                    "HotpotQA": 56.91,
-                    "MSMARCO": 21.89,
-                    "NFCorpus": 32.08,
-                    "NQ": 28.5,
-                    "QuoraRetrieval": 80.42,
-                    "SCIDOCS": 15.78,
-                    "SciFact": 68.7,
-                    "TRECCOVID": 62.31,
-                    "Touche2020": 33.05
+                    "Model": "bge-m3",
+                    "ARCChallenge": 9.02,
+                    "AlphaNLI": 24.73,
+                    "HellaSwag": 25.67,
+                    "LEMBNarrativeQARetrieval": 45.76,
+                    "LEMBNeedleRetrieval": 40.25,
+                    "LEMBPasskeyRetrieval": 46.0,
+                    "LEMBQMSumRetrieval": 35.54,
+                    "LEMBSummScreenFDRetrieval": 94.09,
+                    "LEMBWikimQARetrieval": 77.73,
+                    "MIRACLRetrieval (rus-Cyrl)": 70.11,
+                    "PIQA": 22.93,
+                    "Quail": 7.51,
+                    "RARbCode": 38.8,
+                    "RARbMath": 69.19,
+                    "RiaNewsRetrieval (rus-Cyrl)": 82.98,
+                    "RuBQRetrieval (rus-Cyrl)": 71.21,
+                    "SIQA": 4.89,
+                    "SpartQA": 7.49,
+                    "TempReasonL1": 0.99,
+                    "TempReasonL2Fact": 33.23,
+                    "TempReasonL2Pure": 0.68,
+                    "TempReasonL3Fact": 30.05,
+                    "TempReasonL3Pure": 5.28,
+                    "WinoGrande": 41.72
                 }
             ]
         },
         "STS": {
             "spearman": [
                 {
-                    "Model": "bm25s"
+                    "Model": "bge-m3",
+                    "RUParaPhraserSTS (rus-Cyrl)": 74.9,
+                    "RuSTSBenchmarkSTS (rus-Cyrl)": 79.87,
+                    "STS22 (rus-Cyrl)": 66.26,
+                    "STSBenchmarkMultilingualSTS (rus-Cyrl)": 79.27
                 }
             ]
         },
         "Summarization": {
             "spearman": [
                 {
-                    "Model": "bm25s"
+                    "Model": "bge-m3"
+                }
+            ]
+        },
+        "MultilabelClassification": {
+            "accuracy": [
+                {
+                    "Model": "bge-m3",
+                    "CEDRClassification (rus-Cyrl)": 43.47,
+                    "SensitiveTopicsClassification (rus-Cyrl)": 25.03
                 }
             ]
         },
         "InstructionRetrieval": {
             "p-MRR": [
                 {
-                    "Model": "bm25s"
+                    "Model": "bge-m3"
                 }
             ]
         }
diff --git a/all_data_tasks/0/default.jsonl b/all_data_tasks/0/default.jsonl
index 76e7093fa08cbae86587f5c3ce9ad475cc060bd8..afc224fb9c48b6cd64517260dcaa0590d8262551 100644
--- a/all_data_tasks/0/default.jsonl
+++ b/all_data_tasks/0/default.jsonl
@@ -205,7 +205,7 @@
 {"index":107,"Rank":238,"Model":"LLaMA-embeeding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":84.82,"AmazonPolarityClassification":76.88,"AmazonReviewsClassification (en)":36.72,"Banking77Classification":"","EmotionClassification":41.93,"ImdbClassification":"","MassiveIntentClassification (en)":65.91,"MassiveScenarioClassification (en)":67.62,"MTOPDomainClassification (en)":87.95,"MTOPIntentClassification (en)":78.43,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":56.28}
 {"index":108,"Rank":239,"Model":"yiyouliao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":74.78,"AmazonPolarityClassification":71.89,"AmazonReviewsClassification (en)":36.7,"Banking77Classification":81.37,"EmotionClassification":42.6,"ImdbClassification":63.96,"MassiveIntentClassification (en)":68.56,"MassiveScenarioClassification (en)":74.15,"MTOPDomainClassification (en)":90.19,"MTOPIntentClassification (en)":69.5,"ToxicConversationsClassification":69.85,"TweetSentimentExtractionClassification":""}
 {"index":112,"Rank":240,"Model":"snowflake-arctic-embed-m-v1.5<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","AmazonCounterfactualClassification (en)":68.3,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":46.27,"Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":66.69,"MassiveScenarioClassification (en)":73.06,"MTOPDomainClassification (en)":91.36,"MTOPIntentClassification (en)":60.64,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
-{"index":121,"Rank":243,"Model":"EminEmbed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":62.97,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":48.48,"Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":66.09,"MassiveScenarioClassification (en)":72.55,"MTOPDomainClassification (en)":90.15,"MTOPIntentClassification (en)":57.92,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
+{"index":121,"Rank":243,"Model":"EminEmbed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":67.79,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":48.48,"Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":66.09,"MassiveScenarioClassification (en)":72.55,"MTOPDomainClassification (en)":90.15,"MTOPIntentClassification (en)":57.92,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
 {"index":140,"Rank":248,"Model":"slx-v0.1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.08,"Average":"","AmazonCounterfactualClassification (en)":61.46,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":30.3,"Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":67.94,"MassiveScenarioClassification (en)":73.91,"MTOPDomainClassification (en)":91.97,"MTOPIntentClassification (en)":63.3,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
 {"index":142,"Rank":249,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":"","AmazonCounterfactualClassification (en)":"","AmazonPolarityClassification":"","AmazonReviewsClassification (en)":"","Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":60.48,"MassiveScenarioClassification (en)":65.43,"MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
 {"index":212,"Rank":272,"Model":"fin-mpnet-base<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","AmazonCounterfactualClassification (en)":"","AmazonPolarityClassification":"","AmazonReviewsClassification (en)":29.13,"Banking77Classification":80.25,"EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":"","MassiveScenarioClassification (en)":"","MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
diff --git a/all_data_tasks/33/default.jsonl b/all_data_tasks/33/default.jsonl
index 346574ffc075324f912aac6963c0502e7c699ab1..7dc310344150232bb0ea8382a118fb9520db31fe 100644
--- a/all_data_tasks/33/default.jsonl
+++ b/all_data_tasks/33/default.jsonl
@@ -1,25 +1,25 @@
-{"index":13,"Rank":1,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":64.57,"GeoreviewClassification (rus-Cyrl)":50.25,"HeadlineClassification (rus-Cyrl)":85.68,"InappropriatenessClassification (rus-Cyrl)":67.19,"KinopoiskClassification (rus-Cyrl)":65.49,"RuReviewsClassification (rus-Cyrl)":67.68,"RuSciBenchGRNTIClassification (rus-Cyrl)":64.59,"RuSciBenchOECDClassification (rus-Cyrl)":51.13}
-{"index":4,"Rank":2,"Model":"ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":60.84,"GeoreviewClassification (rus-Cyrl)":49.7,"HeadlineClassification (rus-Cyrl)":78.0,"InappropriatenessClassification (rus-Cyrl)":61.32,"KinopoiskClassification (rus-Cyrl)":63.27,"RuReviewsClassification (rus-Cyrl)":67.96,"RuSciBenchGRNTIClassification (rus-Cyrl)":59.33,"RuSciBenchOECDClassification (rus-Cyrl)":46.33}
-{"index":11,"Rank":3,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":59.36,"GeoreviewClassification (rus-Cyrl)":50.98,"HeadlineClassification (rus-Cyrl)":70.09,"InappropriatenessClassification (rus-Cyrl)":60.76,"KinopoiskClassification (rus-Cyrl)":63.33,"RuReviewsClassification (rus-Cyrl)":68.52,"RuSciBenchGRNTIClassification (rus-Cyrl)":57.67,"RuSciBenchOECDClassification (rus-Cyrl)":44.2}
-{"index":15,"Rank":4,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":58.92,"GeoreviewClassification (rus-Cyrl)":49.69,"HeadlineClassification (rus-Cyrl)":77.19,"InappropriatenessClassification (rus-Cyrl)":61.6,"KinopoiskClassification (rus-Cyrl)":56.59,"RuReviewsClassification (rus-Cyrl)":65.28,"RuSciBenchGRNTIClassification (rus-Cyrl)":58.2,"RuSciBenchOECDClassification (rus-Cyrl)":43.91}
-{"index":10,"Rank":5,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":57.86,"GeoreviewClassification (rus-Cyrl)":47.23,"HeadlineClassification (rus-Cyrl)":74.88,"InappropriatenessClassification (rus-Cyrl)":61.94,"KinopoiskClassification (rus-Cyrl)":55.69,"RuReviewsClassification (rus-Cyrl)":66.44,"RuSciBenchGRNTIClassification (rus-Cyrl)":55.55,"RuSciBenchOECDClassification (rus-Cyrl)":43.28}
-{"index":0,"Rank":6,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":57.43,"GeoreviewClassification (rus-Cyrl)":48.27,"HeadlineClassification (rus-Cyrl)":70.32,"InappropriatenessClassification (rus-Cyrl)":59.87,"KinopoiskClassification (rus-Cyrl)":58.23,"RuReviewsClassification (rus-Cyrl)":66.91,"RuSciBenchGRNTIClassification (rus-Cyrl)":55.81,"RuSciBenchOECDClassification (rus-Cyrl)":42.57}
-{"index":23,"Rank":7,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":56.55,"GeoreviewClassification (rus-Cyrl)":46.04,"HeadlineClassification (rus-Cyrl)":69.98,"InappropriatenessClassification (rus-Cyrl)":61.39,"KinopoiskClassification (rus-Cyrl)":53.59,"RuReviewsClassification (rus-Cyrl)":64.58,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.67,"RuSciBenchOECDClassification (rus-Cyrl)":43.58}
-{"index":14,"Rank":8,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":56.19,"GeoreviewClassification (rus-Cyrl)":46.05,"HeadlineClassification (rus-Cyrl)":75.64,"InappropriatenessClassification (rus-Cyrl)":58.78,"KinopoiskClassification (rus-Cyrl)":50.89,"RuReviewsClassification (rus-Cyrl)":62.99,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.28,"RuSciBenchOECDClassification (rus-Cyrl)":42.69}
-{"index":5,"Rank":9,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":55.44,"GeoreviewClassification (rus-Cyrl)":39.67,"HeadlineClassification (rus-Cyrl)":77.19,"InappropriatenessClassification (rus-Cyrl)":64.64,"KinopoiskClassification (rus-Cyrl)":50.33,"RuReviewsClassification (rus-Cyrl)":58.29,"RuSciBenchGRNTIClassification (rus-Cyrl)":54.19,"RuSciBenchOECDClassification (rus-Cyrl)":43.8}
-{"index":6,"Rank":10,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":55.21,"GeoreviewClassification (rus-Cyrl)":39.97,"HeadlineClassification (rus-Cyrl)":79.26,"InappropriatenessClassification (rus-Cyrl)":62.52,"KinopoiskClassification (rus-Cyrl)":49.51,"RuReviewsClassification (rus-Cyrl)":58.27,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.9,"RuSciBenchOECDClassification (rus-Cyrl)":43.04}
-{"index":16,"Rank":11,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":55.09,"GeoreviewClassification (rus-Cyrl)":44.66,"HeadlineClassification (rus-Cyrl)":73.94,"InappropriatenessClassification (rus-Cyrl)":59.16,"KinopoiskClassification (rus-Cyrl)":49.96,"RuReviewsClassification (rus-Cyrl)":61.18,"RuSciBenchGRNTIClassification (rus-Cyrl)":54.99,"RuSciBenchOECDClassification (rus-Cyrl)":41.72}
-{"index":12,"Rank":12,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":54.23,"GeoreviewClassification (rus-Cyrl)":40.19,"HeadlineClassification (rus-Cyrl)":78.75,"InappropriatenessClassification (rus-Cyrl)":61.33,"KinopoiskClassification (rus-Cyrl)":48.78,"RuReviewsClassification (rus-Cyrl)":55.66,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.53,"RuSciBenchOECDClassification (rus-Cyrl)":41.34}
-{"index":22,"Rank":13,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":54.11,"GeoreviewClassification (rus-Cyrl)":42.33,"HeadlineClassification (rus-Cyrl)":70.35,"InappropriatenessClassification (rus-Cyrl)":59.32,"KinopoiskClassification (rus-Cyrl)":44.31,"RuReviewsClassification (rus-Cyrl)":62.33,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.01,"RuSciBenchOECDClassification (rus-Cyrl)":44.14}
-{"index":24,"Rank":14,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":53.46,"GeoreviewClassification (rus-Cyrl)":41.36,"HeadlineClassification (rus-Cyrl)":68.9,"InappropriatenessClassification (rus-Cyrl)":59.11,"KinopoiskClassification (rus-Cyrl)":50.47,"RuReviewsClassification (rus-Cyrl)":60.66,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.93,"RuSciBenchOECDClassification (rus-Cyrl)":40.79}
-{"index":7,"Rank":15,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":52.73,"GeoreviewClassification (rus-Cyrl)":40.89,"HeadlineClassification (rus-Cyrl)":68.75,"InappropriatenessClassification (rus-Cyrl)":58.48,"KinopoiskClassification (rus-Cyrl)":49.85,"RuReviewsClassification (rus-Cyrl)":58.01,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.8,"RuSciBenchOECDClassification (rus-Cyrl)":40.36}
-{"index":17,"Rank":16,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":52.35,"GeoreviewClassification (rus-Cyrl)":40.86,"HeadlineClassification (rus-Cyrl)":68.75,"InappropriatenessClassification (rus-Cyrl)":58.52,"KinopoiskClassification (rus-Cyrl)":46.77,"RuReviewsClassification (rus-Cyrl)":58.01,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.04,"RuSciBenchOECDClassification (rus-Cyrl)":40.48}
-{"index":1,"Rank":17,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":52.16,"GeoreviewClassification (rus-Cyrl)":38.95,"HeadlineClassification (rus-Cyrl)":75.59,"InappropriatenessClassification (rus-Cyrl)":60.68,"KinopoiskClassification (rus-Cyrl)":49.67,"RuReviewsClassification (rus-Cyrl)":54.05,"RuSciBenchGRNTIClassification (rus-Cyrl)":48.53,"RuSciBenchOECDClassification (rus-Cyrl)":37.65}
-{"index":3,"Rank":18,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":51.49,"GeoreviewClassification (rus-Cyrl)":38.05,"HeadlineClassification (rus-Cyrl)":67.64,"InappropriatenessClassification (rus-Cyrl)":58.27,"KinopoiskClassification (rus-Cyrl)":45.86,"RuReviewsClassification (rus-Cyrl)":58.34,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.18,"RuSciBenchOECDClassification (rus-Cyrl)":40.11}
-{"index":21,"Rank":19,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":51.38,"GeoreviewClassification (rus-Cyrl)":38.24,"HeadlineClassification (rus-Cyrl)":68.3,"InappropriatenessClassification (rus-Cyrl)":58.18,"KinopoiskClassification (rus-Cyrl)":41.45,"RuReviewsClassification (rus-Cyrl)":58.88,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.19,"RuSciBenchOECDClassification (rus-Cyrl)":41.41}
-{"index":9,"Rank":20,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":51.37,"GeoreviewClassification (rus-Cyrl)":39.64,"HeadlineClassification (rus-Cyrl)":74.19,"InappropriatenessClassification (rus-Cyrl)":58.57,"KinopoiskClassification (rus-Cyrl)":49.06,"RuReviewsClassification (rus-Cyrl)":56.99,"RuSciBenchGRNTIClassification (rus-Cyrl)":45.63,"RuSciBenchOECDClassification (rus-Cyrl)":35.48}
-{"index":2,"Rank":21,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":50.66,"GeoreviewClassification (rus-Cyrl)":37.22,"HeadlineClassification (rus-Cyrl)":75.23,"InappropriatenessClassification (rus-Cyrl)":57.34,"KinopoiskClassification (rus-Cyrl)":49.91,"RuReviewsClassification (rus-Cyrl)":50.74,"RuSciBenchGRNTIClassification (rus-Cyrl)":48.03,"RuSciBenchOECDClassification (rus-Cyrl)":36.13}
-{"index":8,"Rank":22,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":42.68,"GeoreviewClassification (rus-Cyrl)":33.45,"HeadlineClassification (rus-Cyrl)":57.65,"InappropriatenessClassification (rus-Cyrl)":54.5,"KinopoiskClassification (rus-Cyrl)":41.36,"RuReviewsClassification (rus-Cyrl)":49.56,"RuSciBenchGRNTIClassification (rus-Cyrl)":35.71,"RuSciBenchOECDClassification (rus-Cyrl)":26.51}
-{"index":20,"Rank":23,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":29.53,"GeoreviewClassification (rus-Cyrl)":25.93,"HeadlineClassification (rus-Cyrl)":28.53,"InappropriatenessClassification (rus-Cyrl)":51.82,"KinopoiskClassification (rus-Cyrl)":34.18,"RuReviewsClassification (rus-Cyrl)":42.33,"RuSciBenchGRNTIClassification (rus-Cyrl)":13.29,"RuSciBenchOECDClassification (rus-Cyrl)":10.62}
-{"index":19,"Rank":24,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":28.67,"GeoreviewClassification (rus-Cyrl)":27.08,"HeadlineClassification (rus-Cyrl)":27.77,"InappropriatenessClassification (rus-Cyrl)":51.73,"KinopoiskClassification (rus-Cyrl)":33.93,"RuReviewsClassification (rus-Cyrl)":41.79,"RuSciBenchGRNTIClassification (rus-Cyrl)":10.08,"RuSciBenchOECDClassification (rus-Cyrl)":8.3}
-{"index":18,"Rank":25,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":28.33,"GeoreviewClassification (rus-Cyrl)":23.49,"HeadlineClassification (rus-Cyrl)":28.49,"InappropriatenessClassification (rus-Cyrl)":50.85,"KinopoiskClassification (rus-Cyrl)":34.17,"RuReviewsClassification (rus-Cyrl)":42.49,"RuSciBenchGRNTIClassification (rus-Cyrl)":10.49,"RuSciBenchOECDClassification (rus-Cyrl)":8.31}
+{"index":13,"Rank":1,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":67.52,"GeoreviewClassification (rus-Cyrl)":50.25,"HeadlineClassification (rus-Cyrl)":85.68,"InappropriatenessClassification (rus-Cyrl)":67.19,"KinopoiskClassification (rus-Cyrl)":65.49,"RuReviewsClassification (rus-Cyrl)":67.68,"RuSciBenchGRNTIClassification (rus-Cyrl)":64.59,"RuSciBenchOECDClassification (rus-Cyrl)":51.13,"MassiveIntentClassification (rus-Cyrl)":76.08,"MassiveScenarioClassification (rus-Cyrl)":79.61}
+{"index":11,"Rank":2,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":61.92,"GeoreviewClassification (rus-Cyrl)":50.98,"HeadlineClassification (rus-Cyrl)":70.09,"InappropriatenessClassification (rus-Cyrl)":60.76,"KinopoiskClassification (rus-Cyrl)":63.33,"RuReviewsClassification (rus-Cyrl)":68.52,"RuSciBenchGRNTIClassification (rus-Cyrl)":57.67,"RuSciBenchOECDClassification (rus-Cyrl)":44.2,"MassiveIntentClassification (rus-Cyrl)":68.85,"MassiveScenarioClassification (rus-Cyrl)":72.9}
+{"index":15,"Rank":3,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":61.01,"GeoreviewClassification (rus-Cyrl)":49.69,"HeadlineClassification (rus-Cyrl)":77.19,"InappropriatenessClassification (rus-Cyrl)":61.6,"KinopoiskClassification (rus-Cyrl)":56.59,"RuReviewsClassification (rus-Cyrl)":65.28,"RuSciBenchGRNTIClassification (rus-Cyrl)":58.2,"RuSciBenchOECDClassification (rus-Cyrl)":43.91,"MassiveIntentClassification (rus-Cyrl)":65.76,"MassiveScenarioClassification (rus-Cyrl)":70.85}
+{"index":0,"Rank":4,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":60.46,"GeoreviewClassification (rus-Cyrl)":48.27,"HeadlineClassification (rus-Cyrl)":70.32,"InappropriatenessClassification (rus-Cyrl)":59.87,"KinopoiskClassification (rus-Cyrl)":58.23,"RuReviewsClassification (rus-Cyrl)":66.91,"RuSciBenchGRNTIClassification (rus-Cyrl)":55.81,"RuSciBenchOECDClassification (rus-Cyrl)":42.57,"MassiveIntentClassification (rus-Cyrl)":68.75,"MassiveScenarioClassification (rus-Cyrl)":73.42}
+{"index":10,"Rank":5,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":59.88,"GeoreviewClassification (rus-Cyrl)":47.23,"HeadlineClassification (rus-Cyrl)":74.88,"InappropriatenessClassification (rus-Cyrl)":61.94,"KinopoiskClassification (rus-Cyrl)":55.69,"RuReviewsClassification (rus-Cyrl)":66.44,"RuSciBenchGRNTIClassification (rus-Cyrl)":55.55,"RuSciBenchOECDClassification (rus-Cyrl)":43.28,"MassiveIntentClassification (rus-Cyrl)":65.57,"MassiveScenarioClassification (rus-Cyrl)":68.33}
+{"index":23,"Rank":6,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":59.23,"GeoreviewClassification (rus-Cyrl)":46.04,"HeadlineClassification (rus-Cyrl)":69.98,"InappropriatenessClassification (rus-Cyrl)":61.39,"KinopoiskClassification (rus-Cyrl)":53.59,"RuReviewsClassification (rus-Cyrl)":64.58,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.67,"RuSciBenchOECDClassification (rus-Cyrl)":43.58,"MassiveIntentClassification (rus-Cyrl)":66.08,"MassiveScenarioClassification (rus-Cyrl)":71.13}
+{"index":14,"Rank":7,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":58.26,"GeoreviewClassification (rus-Cyrl)":46.05,"HeadlineClassification (rus-Cyrl)":75.64,"InappropriatenessClassification (rus-Cyrl)":58.78,"KinopoiskClassification (rus-Cyrl)":50.89,"RuReviewsClassification (rus-Cyrl)":62.99,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.28,"RuSciBenchOECDClassification (rus-Cyrl)":42.69,"MassiveIntentClassification (rus-Cyrl)":62.78,"MassiveScenarioClassification (rus-Cyrl)":68.21}
+{"index":5,"Rank":8,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":57.52,"GeoreviewClassification (rus-Cyrl)":39.67,"HeadlineClassification (rus-Cyrl)":77.19,"InappropriatenessClassification (rus-Cyrl)":64.64,"KinopoiskClassification (rus-Cyrl)":50.33,"RuReviewsClassification (rus-Cyrl)":58.29,"RuSciBenchGRNTIClassification (rus-Cyrl)":54.19,"RuSciBenchOECDClassification (rus-Cyrl)":43.8,"MassiveIntentClassification (rus-Cyrl)":61.42,"MassiveScenarioClassification (rus-Cyrl)":68.13}
+{"index":6,"Rank":9,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":57.24,"GeoreviewClassification (rus-Cyrl)":39.97,"HeadlineClassification (rus-Cyrl)":79.26,"InappropriatenessClassification (rus-Cyrl)":62.52,"KinopoiskClassification (rus-Cyrl)":49.51,"RuReviewsClassification (rus-Cyrl)":58.27,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.9,"RuSciBenchOECDClassification (rus-Cyrl)":43.04,"MassiveIntentClassification (rus-Cyrl)":61.09,"MassiveScenarioClassification (rus-Cyrl)":67.6}
+{"index":22,"Rank":10,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":56.88,"GeoreviewClassification (rus-Cyrl)":42.33,"HeadlineClassification (rus-Cyrl)":70.35,"InappropriatenessClassification (rus-Cyrl)":59.32,"KinopoiskClassification (rus-Cyrl)":44.31,"RuReviewsClassification (rus-Cyrl)":62.33,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.01,"RuSciBenchOECDClassification (rus-Cyrl)":44.14,"MassiveIntentClassification (rus-Cyrl)":63.23,"MassiveScenarioClassification (rus-Cyrl)":69.92}
+{"index":16,"Rank":11,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":56.44,"GeoreviewClassification (rus-Cyrl)":44.66,"HeadlineClassification (rus-Cyrl)":73.94,"InappropriatenessClassification (rus-Cyrl)":59.16,"KinopoiskClassification (rus-Cyrl)":49.96,"RuReviewsClassification (rus-Cyrl)":61.18,"RuSciBenchGRNTIClassification (rus-Cyrl)":54.99,"RuSciBenchOECDClassification (rus-Cyrl)":41.72,"MassiveIntentClassification (rus-Cyrl)":58.43,"MassiveScenarioClassification (rus-Cyrl)":63.89}
+{"index":12,"Rank":12,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":56.18,"GeoreviewClassification (rus-Cyrl)":40.19,"HeadlineClassification (rus-Cyrl)":78.75,"InappropriatenessClassification (rus-Cyrl)":61.33,"KinopoiskClassification (rus-Cyrl)":48.78,"RuReviewsClassification (rus-Cyrl)":55.66,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.53,"RuSciBenchOECDClassification (rus-Cyrl)":41.34,"MassiveIntentClassification (rus-Cyrl)":61.32,"MassiveScenarioClassification (rus-Cyrl)":64.71}
+{"index":1,"Rank":13,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":55.15,"GeoreviewClassification (rus-Cyrl)":38.95,"HeadlineClassification (rus-Cyrl)":75.59,"InappropriatenessClassification (rus-Cyrl)":60.68,"KinopoiskClassification (rus-Cyrl)":49.67,"RuReviewsClassification (rus-Cyrl)":54.05,"RuSciBenchGRNTIClassification (rus-Cyrl)":48.53,"RuSciBenchOECDClassification (rus-Cyrl)":37.65,"MassiveIntentClassification (rus-Cyrl)":63.12,"MassiveScenarioClassification (rus-Cyrl)":68.08}
+{"index":24,"Rank":14,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":55.01,"GeoreviewClassification (rus-Cyrl)":41.36,"HeadlineClassification (rus-Cyrl)":68.9,"InappropriatenessClassification (rus-Cyrl)":59.11,"KinopoiskClassification (rus-Cyrl)":50.47,"RuReviewsClassification (rus-Cyrl)":60.66,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.93,"RuSciBenchOECDClassification (rus-Cyrl)":40.79,"MassiveIntentClassification (rus-Cyrl)":57.98,"MassiveScenarioClassification (rus-Cyrl)":62.9}
+{"index":7,"Rank":15,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":54.98,"GeoreviewClassification (rus-Cyrl)":40.89,"HeadlineClassification (rus-Cyrl)":68.75,"InappropriatenessClassification (rus-Cyrl)":58.48,"KinopoiskClassification (rus-Cyrl)":49.85,"RuReviewsClassification (rus-Cyrl)":58.01,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.8,"RuSciBenchOECDClassification (rus-Cyrl)":40.36,"MassiveIntentClassification (rus-Cyrl)":60.53,"MassiveScenarioClassification (rus-Cyrl)":65.15}
+{"index":17,"Rank":16,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":54.7,"GeoreviewClassification (rus-Cyrl)":40.86,"HeadlineClassification (rus-Cyrl)":68.75,"InappropriatenessClassification (rus-Cyrl)":58.52,"KinopoiskClassification (rus-Cyrl)":46.77,"RuReviewsClassification (rus-Cyrl)":58.01,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.04,"RuSciBenchOECDClassification (rus-Cyrl)":40.48,"MassiveIntentClassification (rus-Cyrl)":60.64,"MassiveScenarioClassification (rus-Cyrl)":65.23}
+{"index":21,"Rank":17,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":53.77,"GeoreviewClassification (rus-Cyrl)":38.24,"HeadlineClassification (rus-Cyrl)":68.3,"InappropriatenessClassification (rus-Cyrl)":58.18,"KinopoiskClassification (rus-Cyrl)":41.45,"RuReviewsClassification (rus-Cyrl)":58.88,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.19,"RuSciBenchOECDClassification (rus-Cyrl)":41.41,"MassiveIntentClassification (rus-Cyrl)":59.06,"MassiveScenarioClassification (rus-Cyrl)":65.25}
+{"index":9,"Rank":18,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":52.17,"GeoreviewClassification (rus-Cyrl)":39.64,"HeadlineClassification (rus-Cyrl)":74.19,"InappropriatenessClassification (rus-Cyrl)":58.57,"KinopoiskClassification (rus-Cyrl)":49.06,"RuReviewsClassification (rus-Cyrl)":56.99,"RuSciBenchGRNTIClassification (rus-Cyrl)":45.63,"RuSciBenchOECDClassification (rus-Cyrl)":35.48,"MassiveIntentClassification (rus-Cyrl)":50.83,"MassiveScenarioClassification (rus-Cyrl)":59.15}
+{"index":2,"Rank":19,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":51.6,"GeoreviewClassification (rus-Cyrl)":37.22,"HeadlineClassification (rus-Cyrl)":75.23,"InappropriatenessClassification (rus-Cyrl)":57.34,"KinopoiskClassification (rus-Cyrl)":49.91,"RuReviewsClassification (rus-Cyrl)":50.74,"RuSciBenchGRNTIClassification (rus-Cyrl)":48.03,"RuSciBenchOECDClassification (rus-Cyrl)":36.13,"MassiveIntentClassification (rus-Cyrl)":53.02,"MassiveScenarioClassification (rus-Cyrl)":56.79}
+{"index":3,"Rank":20,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":51.27,"GeoreviewClassification (rus-Cyrl)":38.05,"HeadlineClassification (rus-Cyrl)":67.64,"InappropriatenessClassification (rus-Cyrl)":58.27,"KinopoiskClassification (rus-Cyrl)":45.86,"RuReviewsClassification (rus-Cyrl)":58.34,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.18,"RuSciBenchOECDClassification (rus-Cyrl)":40.11,"MassiveIntentClassification (rus-Cyrl)":49.1,"MassiveScenarioClassification (rus-Cyrl)":51.91}
+{"index":8,"Rank":21,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":44.55,"GeoreviewClassification (rus-Cyrl)":33.45,"HeadlineClassification (rus-Cyrl)":57.65,"InappropriatenessClassification (rus-Cyrl)":54.5,"KinopoiskClassification (rus-Cyrl)":41.36,"RuReviewsClassification (rus-Cyrl)":49.56,"RuSciBenchGRNTIClassification (rus-Cyrl)":35.71,"RuSciBenchOECDClassification (rus-Cyrl)":26.51,"MassiveIntentClassification (rus-Cyrl)":50.1,"MassiveScenarioClassification (rus-Cyrl)":52.15}
+{"index":20,"Rank":22,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":28.82,"GeoreviewClassification (rus-Cyrl)":25.93,"HeadlineClassification (rus-Cyrl)":28.53,"InappropriatenessClassification (rus-Cyrl)":51.82,"KinopoiskClassification (rus-Cyrl)":34.18,"RuReviewsClassification (rus-Cyrl)":42.33,"RuSciBenchGRNTIClassification (rus-Cyrl)":13.29,"RuSciBenchOECDClassification (rus-Cyrl)":10.62,"MassiveIntentClassification (rus-Cyrl)":23.98,"MassiveScenarioClassification (rus-Cyrl)":28.71}
+{"index":19,"Rank":23,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":28.75,"GeoreviewClassification (rus-Cyrl)":27.08,"HeadlineClassification (rus-Cyrl)":27.77,"InappropriatenessClassification (rus-Cyrl)":51.73,"KinopoiskClassification (rus-Cyrl)":33.93,"RuReviewsClassification (rus-Cyrl)":41.79,"RuSciBenchGRNTIClassification (rus-Cyrl)":10.08,"RuSciBenchOECDClassification (rus-Cyrl)":8.3,"MassiveIntentClassification (rus-Cyrl)":27.58,"MassiveScenarioClassification (rus-Cyrl)":30.46}
+{"index":18,"Rank":24,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":28.15,"GeoreviewClassification (rus-Cyrl)":23.49,"HeadlineClassification (rus-Cyrl)":28.49,"InappropriatenessClassification (rus-Cyrl)":50.85,"KinopoiskClassification (rus-Cyrl)":34.17,"RuReviewsClassification (rus-Cyrl)":42.49,"RuSciBenchGRNTIClassification (rus-Cyrl)":10.49,"RuSciBenchOECDClassification (rus-Cyrl)":8.31,"MassiveIntentClassification (rus-Cyrl)":26.29,"MassiveScenarioClassification (rus-Cyrl)":28.77}
+{"index":4,"Rank":25,"Model":"ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":"","GeoreviewClassification (rus-Cyrl)":49.7,"HeadlineClassification (rus-Cyrl)":78.0,"InappropriatenessClassification (rus-Cyrl)":61.32,"KinopoiskClassification (rus-Cyrl)":63.27,"RuReviewsClassification (rus-Cyrl)":67.96,"RuSciBenchGRNTIClassification (rus-Cyrl)":59.33,"RuSciBenchOECDClassification (rus-Cyrl)":46.33,"MassiveIntentClassification (rus-Cyrl)":"","MassiveScenarioClassification (rus-Cyrl)":""}
diff --git a/all_data_tasks/34/default.jsonl b/all_data_tasks/34/default.jsonl
index 639ccd83dc5ddfac5549c8f21a8d3b2614186cda..215b1424011c3b76cb70dcb565fae6d28a81c201 100644
--- a/all_data_tasks/34/default.jsonl
+++ b/all_data_tasks/34/default.jsonl
@@ -11,8 +11,8 @@
 {"index":14,"Rank":11,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":50.27,"GeoreviewClusteringP2P (rus-Cyrl)":54.46,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":51.56,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":44.78}
 {"index":24,"Rank":12,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":49.57,"GeoreviewClusteringP2P (rus-Cyrl)":59.71,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":47.55,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.44}
 {"index":22,"Rank":13,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":49.18,"GeoreviewClusteringP2P (rus-Cyrl)":56.18,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":48.47,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":42.9}
-{"index":17,"Rank":14,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":47.75,"GeoreviewClusteringP2P (rus-Cyrl)":52.19,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":49.09,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.97}
-{"index":21,"Rank":15,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":47.75,"GeoreviewClusteringP2P (rus-Cyrl)":53.35,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":48.22,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.68}
+{"index":21,"Rank":14,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":47.75,"GeoreviewClusteringP2P (rus-Cyrl)":53.35,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":48.22,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.68}
+{"index":17,"Rank":15,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":47.75,"GeoreviewClusteringP2P (rus-Cyrl)":52.19,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":49.09,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.97}
 {"index":7,"Rank":16,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":46.84,"GeoreviewClusteringP2P (rus-Cyrl)":51.89,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":47.48,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.16}
 {"index":3,"Rank":17,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":43.13,"GeoreviewClusteringP2P (rus-Cyrl)":41.82,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":46.29,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.28}
 {"index":12,"Rank":18,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":42.92,"GeoreviewClusteringP2P (rus-Cyrl)":58.79,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":36.66,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":33.31}
diff --git a/all_data_tasks/36/default.jsonl b/all_data_tasks/36/default.jsonl
index 94b26645ae88603d8786540ad94907d69e1570f6..11bca3023892126f3f107fe108c1fa324f7d3ed2 100644
--- a/all_data_tasks/36/default.jsonl
+++ b/all_data_tasks/36/default.jsonl
@@ -1,25 +1,25 @@
-{"index":15,"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"RuBQReranking (rus-Cyrl)":75.58}
-{"index":13,"Rank":2,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"RuBQReranking (rus-Cyrl)":74.61}
-{"index":0,"Rank":3,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"RuBQReranking (rus-Cyrl)":74.02}
-{"index":11,"Rank":4,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"RuBQReranking (rus-Cyrl)":73.08}
-{"index":14,"Rank":5,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"RuBQReranking (rus-Cyrl)":72.01}
-{"index":16,"Rank":6,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"RuBQReranking (rus-Cyrl)":71.46}
-{"index":4,"Rank":7,"Model":"ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"RuBQReranking (rus-Cyrl)":70.87}
-{"index":23,"Rank":8,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"RuBQReranking (rus-Cyrl)":68.65}
-{"index":10,"Rank":9,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"RuBQReranking (rus-Cyrl)":64.42}
-{"index":24,"Rank":10,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"RuBQReranking (rus-Cyrl)":62.15}
-{"index":22,"Rank":11,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"RuBQReranking (rus-Cyrl)":58.77}
-{"index":5,"Rank":12,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"RuBQReranking (rus-Cyrl)":56.13}
-{"index":17,"Rank":13,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"RuBQReranking (rus-Cyrl)":55.13}
-{"index":7,"Rank":14,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"RuBQReranking (rus-Cyrl)":54.83}
-{"index":21,"Rank":15,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"RuBQReranking (rus-Cyrl)":52.8}
-{"index":6,"Rank":16,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"RuBQReranking (rus-Cyrl)":46.81}
-{"index":9,"Rank":17,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"RuBQReranking (rus-Cyrl)":46.09}
-{"index":1,"Rank":18,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"RuBQReranking (rus-Cyrl)":42.58}
-{"index":2,"Rank":19,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"RuBQReranking (rus-Cyrl)":41.65}
-{"index":3,"Rank":20,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"RuBQReranking (rus-Cyrl)":39.89}
-{"index":18,"Rank":21,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"RuBQReranking (rus-Cyrl)":38.51}
-{"index":8,"Rank":22,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"RuBQReranking (rus-Cyrl)":35.44}
-{"index":12,"Rank":23,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"RuBQReranking (rus-Cyrl)":34.01}
-{"index":20,"Rank":24,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"RuBQReranking (rus-Cyrl)":30.96}
-{"index":19,"Rank":25,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"RuBQReranking (rus-Cyrl)":27.05}
+{"index":0,"Rank":1,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":69.7,"RuBQReranking (rus-Cyrl)":74.02,"MIRACLReranking (rus-Cyrl)":65.38}
+{"index":15,"Rank":2,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":69.64,"RuBQReranking (rus-Cyrl)":75.58,"MIRACLReranking (rus-Cyrl)":63.71}
+{"index":14,"Rank":3,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":66.24,"RuBQReranking (rus-Cyrl)":72.01,"MIRACLReranking (rus-Cyrl)":60.47}
+{"index":16,"Rank":4,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":65.29,"RuBQReranking (rus-Cyrl)":71.46,"MIRACLReranking (rus-Cyrl)":59.12}
+{"index":5,"Rank":5,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":40.56,"RuBQReranking (rus-Cyrl)":56.13,"MIRACLReranking (rus-Cyrl)":24.99}
+{"index":6,"Rank":6,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":32.8,"RuBQReranking (rus-Cyrl)":46.81,"MIRACLReranking (rus-Cyrl)":18.8}
+{"index":9,"Rank":7,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":30.95,"RuBQReranking (rus-Cyrl)":46.09,"MIRACLReranking (rus-Cyrl)":15.81}
+{"index":1,"Rank":8,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":"","RuBQReranking (rus-Cyrl)":42.58,"MIRACLReranking (rus-Cyrl)":""}
+{"index":2,"Rank":9,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":"","RuBQReranking (rus-Cyrl)":41.65,"MIRACLReranking (rus-Cyrl)":""}
+{"index":3,"Rank":10,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":"","RuBQReranking (rus-Cyrl)":39.89,"MIRACLReranking (rus-Cyrl)":""}
+{"index":4,"Rank":11,"Model":"ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":"","RuBQReranking (rus-Cyrl)":70.87,"MIRACLReranking (rus-Cyrl)":""}
+{"index":7,"Rank":12,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":"","RuBQReranking (rus-Cyrl)":54.83,"MIRACLReranking (rus-Cyrl)":""}
+{"index":8,"Rank":13,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":"","RuBQReranking (rus-Cyrl)":35.44,"MIRACLReranking (rus-Cyrl)":""}
+{"index":10,"Rank":14,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","RuBQReranking (rus-Cyrl)":64.42,"MIRACLReranking (rus-Cyrl)":""}
+{"index":11,"Rank":15,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":"","RuBQReranking (rus-Cyrl)":73.08,"MIRACLReranking (rus-Cyrl)":""}
+{"index":12,"Rank":16,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","RuBQReranking (rus-Cyrl)":34.01,"MIRACLReranking (rus-Cyrl)":""}
+{"index":13,"Rank":17,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","RuBQReranking (rus-Cyrl)":74.61,"MIRACLReranking (rus-Cyrl)":""}
+{"index":17,"Rank":18,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","RuBQReranking (rus-Cyrl)":55.13,"MIRACLReranking (rus-Cyrl)":""}
+{"index":18,"Rank":19,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","RuBQReranking (rus-Cyrl)":38.51,"MIRACLReranking (rus-Cyrl)":""}
+{"index":19,"Rank":20,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","RuBQReranking (rus-Cyrl)":27.05,"MIRACLReranking (rus-Cyrl)":""}
+{"index":20,"Rank":21,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","RuBQReranking (rus-Cyrl)":30.96,"MIRACLReranking (rus-Cyrl)":""}
+{"index":21,"Rank":22,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","RuBQReranking (rus-Cyrl)":52.8,"MIRACLReranking (rus-Cyrl)":""}
+{"index":22,"Rank":23,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","RuBQReranking (rus-Cyrl)":58.77,"MIRACLReranking (rus-Cyrl)":""}
+{"index":23,"Rank":24,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":"","RuBQReranking (rus-Cyrl)":68.65,"MIRACLReranking (rus-Cyrl)":""}
+{"index":24,"Rank":25,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","RuBQReranking (rus-Cyrl)":62.15,"MIRACLReranking (rus-Cyrl)":""}
diff --git a/all_data_tasks/37/default.jsonl b/all_data_tasks/37/default.jsonl
index d175d7f27f48e60cc35a62157c26d2d8e9c3a16a..f317b880eb804efe197d9065ce2f3655cd62ed9c 100644
--- a/all_data_tasks/37/default.jsonl
+++ b/all_data_tasks/37/default.jsonl
@@ -1,25 +1,25 @@
-{"index":13,"Rank":1,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":77.96,"RiaNewsRetrieval (rus-Cyrl)":81.94,"RuBQRetrieval (rus-Cyrl)":73.98}
-{"index":15,"Rank":2,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":77.39,"RiaNewsRetrieval (rus-Cyrl)":80.67,"RuBQRetrieval (rus-Cyrl)":74.11}
-{"index":0,"Rank":3,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":77.1,"RiaNewsRetrieval (rus-Cyrl)":82.98,"RuBQRetrieval (rus-Cyrl)":71.21}
-{"index":11,"Rank":4,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":76.78,"RiaNewsRetrieval (rus-Cyrl)":83.53,"RuBQRetrieval (rus-Cyrl)":70.03}
-{"index":4,"Rank":5,"Model":"ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":72.82,"RiaNewsRetrieval (rus-Cyrl)":78.86,"RuBQRetrieval (rus-Cyrl)":66.77}
-{"index":14,"Rank":6,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":69.91,"RiaNewsRetrieval (rus-Cyrl)":70.24,"RuBQRetrieval (rus-Cyrl)":69.58}
-{"index":16,"Rank":7,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":69.27,"RiaNewsRetrieval (rus-Cyrl)":70.01,"RuBQRetrieval (rus-Cyrl)":68.53}
-{"index":23,"Rank":8,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":67.54,"RiaNewsRetrieval (rus-Cyrl)":69.36,"RuBQRetrieval (rus-Cyrl)":65.71}
-{"index":10,"Rank":9,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":67.34,"RiaNewsRetrieval (rus-Cyrl)":77.83,"RuBQRetrieval (rus-Cyrl)":56.86}
-{"index":24,"Rank":10,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":51.5,"RiaNewsRetrieval (rus-Cyrl)":51.27,"RuBQRetrieval (rus-Cyrl)":51.73}
-{"index":22,"Rank":11,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":44.4,"RiaNewsRetrieval (rus-Cyrl)":51.75,"RuBQRetrieval (rus-Cyrl)":37.04}
-{"index":21,"Rank":12,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":37.26,"RiaNewsRetrieval (rus-Cyrl)":44.82,"RuBQRetrieval (rus-Cyrl)":29.7}
-{"index":17,"Rank":13,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":36.38,"RiaNewsRetrieval (rus-Cyrl)":42.75,"RuBQRetrieval (rus-Cyrl)":30.02}
-{"index":7,"Rank":14,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":31.88,"RiaNewsRetrieval (rus-Cyrl)":34.73,"RuBQRetrieval (rus-Cyrl)":29.03}
-{"index":5,"Rank":15,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":25.6,"RiaNewsRetrieval (rus-Cyrl)":21.4,"RuBQRetrieval (rus-Cyrl)":29.8}
-{"index":9,"Rank":16,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":12.4,"RiaNewsRetrieval (rus-Cyrl)":13.92,"RuBQRetrieval (rus-Cyrl)":10.87}
-{"index":6,"Rank":17,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":11.78,"RiaNewsRetrieval (rus-Cyrl)":11.11,"RuBQRetrieval (rus-Cyrl)":12.45}
-{"index":3,"Rank":18,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":9.68,"RiaNewsRetrieval (rus-Cyrl)":6.72,"RuBQRetrieval (rus-Cyrl)":12.63}
-{"index":2,"Rank":19,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":7.55,"RiaNewsRetrieval (rus-Cyrl)":5.58,"RuBQRetrieval (rus-Cyrl)":9.52}
-{"index":12,"Rank":20,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":7.5,"RiaNewsRetrieval (rus-Cyrl)":4.84,"RuBQRetrieval (rus-Cyrl)":10.15}
-{"index":1,"Rank":21,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":7.37,"RiaNewsRetrieval (rus-Cyrl)":4.14,"RuBQRetrieval (rus-Cyrl)":10.6}
-{"index":8,"Rank":22,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":2.02,"RiaNewsRetrieval (rus-Cyrl)":0.79,"RuBQRetrieval (rus-Cyrl)":3.24}
-{"index":19,"Rank":23,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":1.66,"RiaNewsRetrieval (rus-Cyrl)":0.67,"RuBQRetrieval (rus-Cyrl)":2.64}
-{"index":18,"Rank":24,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","RiaNewsRetrieval (rus-Cyrl)":"","RuBQRetrieval (rus-Cyrl)":8.84}
-{"index":20,"Rank":25,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","RiaNewsRetrieval (rus-Cyrl)":"","RuBQRetrieval (rus-Cyrl)":4.75}
+{"index":0,"Rank":1,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":74.77,"RiaNewsRetrieval (rus-Cyrl)":82.98,"RuBQRetrieval (rus-Cyrl)":71.21,"MIRACLRetrieval (rus-Cyrl)":70.11}
+{"index":15,"Rank":2,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":74.04,"RiaNewsRetrieval (rus-Cyrl)":80.67,"RuBQRetrieval (rus-Cyrl)":74.11,"MIRACLRetrieval (rus-Cyrl)":67.33}
+{"index":14,"Rank":3,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":67.14,"RiaNewsRetrieval (rus-Cyrl)":70.24,"RuBQRetrieval (rus-Cyrl)":69.58,"MIRACLRetrieval (rus-Cyrl)":61.6}
+{"index":16,"Rank":4,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":65.85,"RiaNewsRetrieval (rus-Cyrl)":70.01,"RuBQRetrieval (rus-Cyrl)":68.53,"MIRACLRetrieval (rus-Cyrl)":59.01}
+{"index":5,"Rank":5,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":19.13,"RiaNewsRetrieval (rus-Cyrl)":21.4,"RuBQRetrieval (rus-Cyrl)":29.8,"MIRACLRetrieval (rus-Cyrl)":6.2}
+{"index":9,"Rank":6,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":8.89,"RiaNewsRetrieval (rus-Cyrl)":13.92,"RuBQRetrieval (rus-Cyrl)":10.87,"MIRACLRetrieval (rus-Cyrl)":1.89}
+{"index":6,"Rank":7,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":8.51,"RiaNewsRetrieval (rus-Cyrl)":11.11,"RuBQRetrieval (rus-Cyrl)":12.45,"MIRACLRetrieval (rus-Cyrl)":1.98}
+{"index":19,"Rank":8,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":1.23,"RiaNewsRetrieval (rus-Cyrl)":0.67,"RuBQRetrieval (rus-Cyrl)":2.64,"MIRACLRetrieval (rus-Cyrl)":0.39}
+{"index":1,"Rank":9,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":"","RiaNewsRetrieval (rus-Cyrl)":4.14,"RuBQRetrieval (rus-Cyrl)":10.6,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":2,"Rank":10,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":"","RiaNewsRetrieval (rus-Cyrl)":5.58,"RuBQRetrieval (rus-Cyrl)":9.52,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":3,"Rank":11,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":"","RiaNewsRetrieval (rus-Cyrl)":6.72,"RuBQRetrieval (rus-Cyrl)":12.63,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":4,"Rank":12,"Model":"ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":"","RiaNewsRetrieval (rus-Cyrl)":78.86,"RuBQRetrieval (rus-Cyrl)":66.77,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":7,"Rank":13,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":"","RiaNewsRetrieval (rus-Cyrl)":34.73,"RuBQRetrieval (rus-Cyrl)":29.03,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":8,"Rank":14,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":"","RiaNewsRetrieval (rus-Cyrl)":0.79,"RuBQRetrieval (rus-Cyrl)":3.24,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":10,"Rank":15,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","RiaNewsRetrieval (rus-Cyrl)":77.83,"RuBQRetrieval (rus-Cyrl)":56.86,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":11,"Rank":16,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":"","RiaNewsRetrieval (rus-Cyrl)":83.53,"RuBQRetrieval (rus-Cyrl)":70.03,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":12,"Rank":17,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","RiaNewsRetrieval (rus-Cyrl)":4.84,"RuBQRetrieval (rus-Cyrl)":10.15,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":13,"Rank":18,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","RiaNewsRetrieval (rus-Cyrl)":81.94,"RuBQRetrieval (rus-Cyrl)":73.98,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":17,"Rank":19,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","RiaNewsRetrieval (rus-Cyrl)":42.75,"RuBQRetrieval (rus-Cyrl)":30.02,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":18,"Rank":20,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","RiaNewsRetrieval (rus-Cyrl)":"","RuBQRetrieval (rus-Cyrl)":8.84,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":20,"Rank":21,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","RiaNewsRetrieval (rus-Cyrl)":"","RuBQRetrieval (rus-Cyrl)":4.75,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":21,"Rank":22,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","RiaNewsRetrieval (rus-Cyrl)":44.82,"RuBQRetrieval (rus-Cyrl)":29.7,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":22,"Rank":23,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","RiaNewsRetrieval (rus-Cyrl)":51.75,"RuBQRetrieval (rus-Cyrl)":37.04,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":23,"Rank":24,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":"","RiaNewsRetrieval (rus-Cyrl)":69.36,"RuBQRetrieval (rus-Cyrl)":65.71,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":24,"Rank":25,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","RiaNewsRetrieval (rus-Cyrl)":51.27,"RuBQRetrieval (rus-Cyrl)":51.73,"MIRACLRetrieval (rus-Cyrl)":""}
diff --git a/all_data_tasks/38/default.jsonl b/all_data_tasks/38/default.jsonl
index dcf6aaa1f46c1df70e63da7188351aef04f4bf31..2a7e044b28007213f0bb5bc5b0200bc087ba67cc 100644
--- a/all_data_tasks/38/default.jsonl
+++ b/all_data_tasks/38/default.jsonl
@@ -1,25 +1,25 @@
-{"index":13,"Rank":1,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":80.15,"RUParaPhraserSTS (rus-Cyrl)":76.17,"RuSTSBenchmarkSTS (rus-Cyrl)":84.13}
-{"index":11,"Rank":2,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":79.85,"RUParaPhraserSTS (rus-Cyrl)":76.36,"RuSTSBenchmarkSTS (rus-Cyrl)":83.35}
-{"index":10,"Rank":3,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":77.91,"RUParaPhraserSTS (rus-Cyrl)":73.56,"RuSTSBenchmarkSTS (rus-Cyrl)":82.26}
-{"index":15,"Rank":4,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":77.48,"RUParaPhraserSTS (rus-Cyrl)":71.82,"RuSTSBenchmarkSTS (rus-Cyrl)":83.15}
-{"index":4,"Rank":5,"Model":"ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":77.42,"RUParaPhraserSTS (rus-Cyrl)":76.16,"RuSTSBenchmarkSTS (rus-Cyrl)":78.69}
-{"index":0,"Rank":6,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":77.39,"RUParaPhraserSTS (rus-Cyrl)":74.9,"RuSTSBenchmarkSTS (rus-Cyrl)":79.87}
-{"index":23,"Rank":7,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":77.37,"RUParaPhraserSTS (rus-Cyrl)":72.97,"RuSTSBenchmarkSTS (rus-Cyrl)":81.77}
-{"index":24,"Rank":8,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":75.32,"RUParaPhraserSTS (rus-Cyrl)":72.15,"RuSTSBenchmarkSTS (rus-Cyrl)":78.48}
-{"index":14,"Rank":9,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":74.9,"RUParaPhraserSTS (rus-Cyrl)":70.17,"RuSTSBenchmarkSTS (rus-Cyrl)":79.64}
-{"index":16,"Rank":10,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":74.27,"RUParaPhraserSTS (rus-Cyrl)":70.46,"RuSTSBenchmarkSTS (rus-Cyrl)":78.08}
-{"index":22,"Rank":11,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":74.1,"RUParaPhraserSTS (rus-Cyrl)":65.74,"RuSTSBenchmarkSTS (rus-Cyrl)":82.46}
-{"index":21,"Rank":12,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":70.71,"RUParaPhraserSTS (rus-Cyrl)":61.87,"RuSTSBenchmarkSTS (rus-Cyrl)":79.55}
-{"index":7,"Rank":13,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":69.6,"RUParaPhraserSTS (rus-Cyrl)":65.87,"RuSTSBenchmarkSTS (rus-Cyrl)":73.32}
-{"index":17,"Rank":14,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":69.54,"RUParaPhraserSTS (rus-Cyrl)":65.74,"RuSTSBenchmarkSTS (rus-Cyrl)":73.34}
-{"index":5,"Rank":15,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":68.19,"RUParaPhraserSTS (rus-Cyrl)":65.17,"RuSTSBenchmarkSTS (rus-Cyrl)":71.22}
-{"index":9,"Rank":16,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":67.28,"RUParaPhraserSTS (rus-Cyrl)":65.14,"RuSTSBenchmarkSTS (rus-Cyrl)":69.43}
-{"index":3,"Rank":17,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":66.13,"RUParaPhraserSTS (rus-Cyrl)":66.24,"RuSTSBenchmarkSTS (rus-Cyrl)":66.03}
-{"index":6,"Rank":18,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":60.44,"RUParaPhraserSTS (rus-Cyrl)":62.06,"RuSTSBenchmarkSTS (rus-Cyrl)":58.82}
-{"index":1,"Rank":19,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":58.36,"RUParaPhraserSTS (rus-Cyrl)":55.01,"RuSTSBenchmarkSTS (rus-Cyrl)":61.72}
-{"index":12,"Rank":20,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":56.25,"RUParaPhraserSTS (rus-Cyrl)":54.03,"RuSTSBenchmarkSTS (rus-Cyrl)":58.47}
-{"index":8,"Rank":21,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":55.78,"RUParaPhraserSTS (rus-Cyrl)":53.41,"RuSTSBenchmarkSTS (rus-Cyrl)":58.16}
-{"index":2,"Rank":22,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":51.84,"RUParaPhraserSTS (rus-Cyrl)":49.72,"RuSTSBenchmarkSTS (rus-Cyrl)":53.95}
-{"index":18,"Rank":23,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":50.9,"RUParaPhraserSTS (rus-Cyrl)":45.47,"RuSTSBenchmarkSTS (rus-Cyrl)":56.33}
-{"index":19,"Rank":24,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":49.74,"RUParaPhraserSTS (rus-Cyrl)":43.93,"RuSTSBenchmarkSTS (rus-Cyrl)":55.56}
-{"index":20,"Rank":25,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":48.92,"RUParaPhraserSTS (rus-Cyrl)":42.15,"RuSTSBenchmarkSTS (rus-Cyrl)":55.68}
+{"index":11,"Rank":1,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":75.38,"RUParaPhraserSTS (rus-Cyrl)":76.36,"RuSTSBenchmarkSTS (rus-Cyrl)":83.35,"STS22 (rus-Cyrl)":66.42}
+{"index":0,"Rank":2,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":73.68,"RUParaPhraserSTS (rus-Cyrl)":74.9,"RuSTSBenchmarkSTS (rus-Cyrl)":79.87,"STS22 (rus-Cyrl)":66.26}
+{"index":10,"Rank":3,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":73.07,"RUParaPhraserSTS (rus-Cyrl)":73.56,"RuSTSBenchmarkSTS (rus-Cyrl)":82.26,"STS22 (rus-Cyrl)":63.39}
+{"index":23,"Rank":4,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":72.54,"RUParaPhraserSTS (rus-Cyrl)":72.97,"RuSTSBenchmarkSTS (rus-Cyrl)":81.77,"STS22 (rus-Cyrl)":62.89}
+{"index":15,"Rank":5,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":71.62,"RUParaPhraserSTS (rus-Cyrl)":71.82,"RuSTSBenchmarkSTS (rus-Cyrl)":83.15,"STS22 (rus-Cyrl)":59.89}
+{"index":24,"Rank":6,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":70.23,"RUParaPhraserSTS (rus-Cyrl)":72.15,"RuSTSBenchmarkSTS (rus-Cyrl)":78.48,"STS22 (rus-Cyrl)":60.06}
+{"index":14,"Rank":7,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":70.16,"RUParaPhraserSTS (rus-Cyrl)":70.17,"RuSTSBenchmarkSTS (rus-Cyrl)":79.64,"STS22 (rus-Cyrl)":60.67}
+{"index":16,"Rank":8,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":69.48,"RUParaPhraserSTS (rus-Cyrl)":70.46,"RuSTSBenchmarkSTS (rus-Cyrl)":78.08,"STS22 (rus-Cyrl)":59.9}
+{"index":22,"Rank":9,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":68.98,"RUParaPhraserSTS (rus-Cyrl)":65.74,"RuSTSBenchmarkSTS (rus-Cyrl)":82.46,"STS22 (rus-Cyrl)":58.74}
+{"index":21,"Rank":10,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":66.17,"RUParaPhraserSTS (rus-Cyrl)":61.87,"RuSTSBenchmarkSTS (rus-Cyrl)":79.55,"STS22 (rus-Cyrl)":57.08}
+{"index":7,"Rank":11,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":65.91,"RUParaPhraserSTS (rus-Cyrl)":65.87,"RuSTSBenchmarkSTS (rus-Cyrl)":73.32,"STS22 (rus-Cyrl)":58.53}
+{"index":17,"Rank":12,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":65.52,"RUParaPhraserSTS (rus-Cyrl)":65.74,"RuSTSBenchmarkSTS (rus-Cyrl)":73.34,"STS22 (rus-Cyrl)":57.49}
+{"index":5,"Rank":13,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":64.4,"RUParaPhraserSTS (rus-Cyrl)":65.17,"RuSTSBenchmarkSTS (rus-Cyrl)":71.22,"STS22 (rus-Cyrl)":56.82}
+{"index":9,"Rank":14,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":61.6,"RUParaPhraserSTS (rus-Cyrl)":65.14,"RuSTSBenchmarkSTS (rus-Cyrl)":69.43,"STS22 (rus-Cyrl)":50.23}
+{"index":3,"Rank":15,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":61.18,"RUParaPhraserSTS (rus-Cyrl)":66.24,"RuSTSBenchmarkSTS (rus-Cyrl)":66.03,"STS22 (rus-Cyrl)":51.27}
+{"index":6,"Rank":16,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":57.21,"RUParaPhraserSTS (rus-Cyrl)":62.06,"RuSTSBenchmarkSTS (rus-Cyrl)":58.82,"STS22 (rus-Cyrl)":50.75}
+{"index":1,"Rank":17,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":56.2,"RUParaPhraserSTS (rus-Cyrl)":55.01,"RuSTSBenchmarkSTS (rus-Cyrl)":61.72,"STS22 (rus-Cyrl)":51.87}
+{"index":12,"Rank":18,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":53.39,"RUParaPhraserSTS (rus-Cyrl)":54.03,"RuSTSBenchmarkSTS (rus-Cyrl)":58.47,"STS22 (rus-Cyrl)":47.67}
+{"index":8,"Rank":19,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":53.15,"RUParaPhraserSTS (rus-Cyrl)":53.41,"RuSTSBenchmarkSTS (rus-Cyrl)":58.16,"STS22 (rus-Cyrl)":47.88}
+{"index":2,"Rank":20,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":46.22,"RUParaPhraserSTS (rus-Cyrl)":49.72,"RuSTSBenchmarkSTS (rus-Cyrl)":53.95,"STS22 (rus-Cyrl)":34.98}
+{"index":19,"Rank":21,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":38.07,"RUParaPhraserSTS (rus-Cyrl)":43.93,"RuSTSBenchmarkSTS (rus-Cyrl)":55.56,"STS22 (rus-Cyrl)":14.72}
+{"index":20,"Rank":22,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":37.89,"RUParaPhraserSTS (rus-Cyrl)":42.15,"RuSTSBenchmarkSTS (rus-Cyrl)":55.68,"STS22 (rus-Cyrl)":15.83}
+{"index":18,"Rank":23,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":37.66,"RUParaPhraserSTS (rus-Cyrl)":45.47,"RuSTSBenchmarkSTS (rus-Cyrl)":56.33,"STS22 (rus-Cyrl)":11.19}
+{"index":4,"Rank":24,"Model":"ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":"","RUParaPhraserSTS (rus-Cyrl)":76.16,"RuSTSBenchmarkSTS (rus-Cyrl)":78.69,"STS22 (rus-Cyrl)":""}
+{"index":13,"Rank":25,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","RUParaPhraserSTS (rus-Cyrl)":76.17,"RuSTSBenchmarkSTS (rus-Cyrl)":84.13,"STS22 (rus-Cyrl)":""}
diff --git a/all_data_tasks/39/default.jsonl b/all_data_tasks/39/default.jsonl
index 8cce9fa388b252e2822a8f84bba8321d20dfdbbc..1b9fdd8b44a1b6505710a548b5391a8770332ef9 100644
--- a/all_data_tasks/39/default.jsonl
+++ b/all_data_tasks/39/default.jsonl
@@ -1,52 +1,25 @@
-{"index":15,"Rank":1,"Model":"nb-bert-large<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":66.73,"MassiveIntentClassification (nb)":62.68,"MassiveScenarioClassification (nb)":67.44,"NoRecClassification":55.46,"NordicLangClassification":85.27,"NorwegianParliament":62.58,"ScalaNbClassification":66.97}
-{"index":14,"Rank":2,"Model":"nb-bert-base<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":63.94,"MassiveIntentClassification (nb)":60.67,"MassiveScenarioClassification (nb)":67.31,"NoRecClassification":51.32,"NordicLangClassification":84.69,"NorwegianParliament":57.41,"ScalaNbClassification":62.25}
-{"index":26,"Rank":3,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":63.64,"MassiveIntentClassification (nb)":59.83,"MassiveScenarioClassification (nb)":66.18,"NoRecClassification":62.76,"NordicLangClassification":82.29,"NorwegianParliament":60.36,"ScalaNbClassification":50.44}
-{"index":32,"Rank":4,"Model":"norbert3-base<\/a>","Model Size (Million Parameters)":131,"Memory Usage (GB, fp32)":0.49,"Average":61.75,"MassiveIntentClassification (nb)":54.2,"MassiveScenarioClassification (nb)":60.69,"NoRecClassification":53.4,"NordicLangClassification":82.67,"NorwegianParliament":59.33,"ScalaNbClassification":60.19}
-{"index":25,"Rank":5,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":61.63,"MassiveIntentClassification (nb)":59.83,"MassiveScenarioClassification (nb)":66.18,"NoRecClassification":57.58,"NordicLangClassification":75.94,"NorwegianParliament":59.94,"ScalaNbClassification":50.32}
-{"index":33,"Rank":6,"Model":"norbert3-large<\/a>","Model Size (Million Parameters)":368,"Memory Usage (GB, fp32)":1.37,"Average":60.34,"MassiveIntentClassification (nb)":47.42,"MassiveScenarioClassification (nb)":54.25,"NoRecClassification":50.46,"NordicLangClassification":84.25,"NorwegianParliament":58.85,"ScalaNbClassification":66.79}
-{"index":28,"Rank":7,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":58.86,"MassiveIntentClassification (nb)":53.96,"MassiveScenarioClassification (nb)":59.9,"NoRecClassification":53.96,"NordicLangClassification":75.15,"NorwegianParliament":60.15,"ScalaNbClassification":50.06}
-{"index":19,"Rank":8,"Model":"dfm-encoder-large-v1<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":58.46,"MassiveIntentClassification (nb)":52.49,"MassiveScenarioClassification (nb)":54.59,"NoRecClassification":48.3,"NordicLangClassification":77.68,"NorwegianParliament":58.78,"ScalaNbClassification":58.95}
-{"index":50,"Rank":9,"Model":"DanskBERT<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":55.0,"MassiveIntentClassification (nb)":45.38,"MassiveScenarioClassification (nb)":47.55,"NoRecClassification":46.06,"NordicLangClassification":74.25,"NorwegianParliament":56.79,"ScalaNbClassification":59.99}
-{"index":20,"Rank":10,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.92,"MassiveIntentClassification (nb)":59.9,"MassiveScenarioClassification (nb)":65.81,"NoRecClassification":48.25,"NordicLangClassification":48.4,"NorwegianParliament":55.99,"ScalaNbClassification":51.18}
-{"index":51,"Rank":11,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":54.34,"MassiveIntentClassification (nb)":40.46,"MassiveScenarioClassification (nb)":44.83,"NoRecClassification":46.28,"NordicLangClassification":79.39,"NorwegianParliament":56.75,"ScalaNbClassification":58.33}
-{"index":21,"Rank":12,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":50.14,"MassiveIntentClassification (nb)":41.57,"MassiveScenarioClassification (nb)":50.33,"NoRecClassification":42.0,"NordicLangClassification":59.34,"NorwegianParliament":57.42,"ScalaNbClassification":50.18}
-{"index":22,"Rank":13,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":50.01,"MassiveIntentClassification (nb)":40.63,"MassiveScenarioClassification (nb)":51.91,"NoRecClassification":41.83,"NordicLangClassification":58.3,"NorwegianParliament":57.26,"ScalaNbClassification":50.13}
-{"index":8,"Rank":14,"Model":"sentence-bert-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":48.88,"MassiveIntentClassification (nb)":42.74,"MassiveScenarioClassification (nb)":49.49,"NoRecClassification":43.53,"NordicLangClassification":51.45,"NorwegianParliament":55.74,"ScalaNbClassification":50.34}
-{"index":24,"Rank":15,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":48.46,"MassiveIntentClassification (nb)":40.25,"MassiveScenarioClassification (nb)":48.58,"NoRecClassification":41.84,"NordicLangClassification":53.47,"NorwegianParliament":56.57,"ScalaNbClassification":50.03}
-{"index":6,"Rank":16,"Model":"bert-base-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":48.18,"MassiveIntentClassification (nb)":35.75,"MassiveScenarioClassification (nb)":35.76,"NoRecClassification":43.91,"NordicLangClassification":62.45,"NorwegianParliament":57.56,"ScalaNbClassification":53.63}
-{"index":36,"Rank":17,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":47.28,"MassiveIntentClassification (nb)":39.34,"MassiveScenarioClassification (nb)":44.67,"NoRecClassification":40.02,"NordicLangClassification":54.71,"NorwegianParliament":54.8,"ScalaNbClassification":50.17}
-{"index":31,"Rank":18,"Model":"electra-small-nordic<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":47.28,"MassiveIntentClassification (nb)":24.6,"MassiveScenarioClassification (nb)":27.3,"NoRecClassification":45.44,"NordicLangClassification":57.82,"NorwegianParliament":53.25,"ScalaNbClassification":75.28}
-{"index":7,"Rank":19,"Model":"electra-small-swedish-cased-discriminator<\/a>","Model Size (Million Parameters)":16,"Memory Usage (GB, fp32)":0.06,"Average":34.34,"MassiveIntentClassification (nb)":5.66,"MassiveScenarioClassification (nb)":11.26,"NoRecClassification":39.72,"NordicLangClassification":44.53,"NorwegianParliament":52.44,"ScalaNbClassification":52.41}
-{"index":0,"Rank":20,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":"","MassiveIntentClassification (nb)":37.74,"MassiveScenarioClassification (nb)":43.9,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":1,"Rank":21,"Model":"gte-multilingual-base<\/a>","Model Size (Million Parameters)":305,"Memory Usage (GB, fp32)":1.14,"Average":"","MassiveIntentClassification (nb)":63.74,"MassiveScenarioClassification (nb)":71.5,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":2,"Rank":22,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":36.89,"MassiveScenarioClassification (nb)":44.27,"NoRecClassification":43.53,"NordicLangClassification":"","NorwegianParliament":54.9,"ScalaNbClassification":""}
-{"index":3,"Rank":23,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":28.65,"MassiveScenarioClassification (nb)":35.24,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":4,"Rank":24,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":31.07,"MassiveScenarioClassification (nb)":38.02,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":5,"Rank":25,"Model":"e5-dansk-test-0.1<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NordicLangClassification":61.0,"NorwegianParliament":"","ScalaNbClassification":""}
-{"index":9,"Rank":26,"Model":"multilingual-e5-large-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":69.88,"MassiveScenarioClassification (nb)":74.84,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":10,"Rank":27,"Model":"multilingual-e5-large-instruct-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":71.66,"MassiveScenarioClassification (nb)":77.21,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":11,"Rank":28,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","MassiveIntentClassification (nb)":39.67,"MassiveScenarioClassification (nb)":50.89,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":12,"Rank":29,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","MassiveIntentClassification (nb)":39.82,"MassiveScenarioClassification (nb)":39.02,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":13,"Rank":30,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","MassiveIntentClassification (nb)":39.48,"MassiveScenarioClassification (nb)":40.47,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":16,"Rank":31,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":54.41,"MassiveScenarioClassification (nb)":64.64,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":17,"Rank":32,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":7068,"Memory Usage (GB, fp32)":26.33,"Average":"","MassiveIntentClassification (nb)":49.41,"MassiveScenarioClassification (nb)":51.8,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":18,"Rank":33,"Model":"slx-v0.1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.08,"Average":"","MassiveIntentClassification (nb)":38.18,"MassiveScenarioClassification (nb)":43.39,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":23,"Rank":34,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","MassiveIntentClassification (nb)":70.93,"MassiveScenarioClassification (nb)":75.7,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":27,"Rank":35,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","MassiveIntentClassification (nb)":71.66,"MassiveScenarioClassification (nb)":77.21,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":29,"Rank":36,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":1065,"Memory Usage (GB, fp32)":3.97,"Average":"","MassiveIntentClassification (nb)":46.18,"MassiveScenarioClassification (nb)":50.32,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":30,"Rank":37,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":559,"Memory Usage (GB, fp32)":2.08,"Average":"","MassiveIntentClassification (nb)":44.12,"MassiveScenarioClassification (nb)":46.79,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":34,"Rank":38,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","MassiveIntentClassification (nb)":57.91,"MassiveScenarioClassification (nb)":64.29,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":35,"Rank":39,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","MassiveIntentClassification (nb)":41.91,"MassiveScenarioClassification (nb)":47.36,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":37,"Rank":40,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":38,"Rank":41,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":34.75,"MassiveScenarioClassification (nb)":35.71,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":39,"Rank":42,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","MassiveIntentClassification (nb)":46.01,"MassiveScenarioClassification (nb)":54.98,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":40,"Rank":43,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","MassiveIntentClassification (nb)":43.78,"MassiveScenarioClassification (nb)":52.71,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":41,"Rank":44,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","MassiveIntentClassification (nb)":45.87,"MassiveScenarioClassification (nb)":54.44,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":42,"Rank":45,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":43,"Rank":46,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":44,"Rank":47,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":38.53,"MassiveScenarioClassification (nb)":46.6,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":45,"Rank":48,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","MassiveIntentClassification (nb)":41.29,"MassiveScenarioClassification (nb)":49.92,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":46,"Rank":49,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","MassiveIntentClassification (nb)":45.91,"MassiveScenarioClassification (nb)":53.43,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":47,"Rank":50,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","MassiveIntentClassification (nb)":31.49,"MassiveScenarioClassification (nb)":38.05,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":48,"Rank":51,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","MassiveIntentClassification (nb)":54.64,"MassiveScenarioClassification (nb)":60.26,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
-{"index":49,"Rank":52,"Model":"multilingual-e5-large-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":69.88,"MassiveScenarioClassification (nb)":74.84,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":4,"Rank":1,"Model":"ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":38.88,"CEDRClassification (rus-Cyrl)":44.69,"SensitiveTopicsClassification (rus-Cyrl)":33.07}
+{"index":10,"Rank":2,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":36.98,"CEDRClassification (rus-Cyrl)":46.47,"SensitiveTopicsClassification (rus-Cyrl)":27.5}
+{"index":23,"Rank":3,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":36.32,"CEDRClassification (rus-Cyrl)":45.11,"SensitiveTopicsClassification (rus-Cyrl)":27.52}
+{"index":15,"Rank":4,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":36.01,"CEDRClassification (rus-Cyrl)":44.84,"SensitiveTopicsClassification (rus-Cyrl)":27.17}
+{"index":11,"Rank":5,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":35.88,"CEDRClassification (rus-Cyrl)":45.48,"SensitiveTopicsClassification (rus-Cyrl)":26.29}
+{"index":0,"Rank":6,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":34.25,"CEDRClassification (rus-Cyrl)":43.47,"SensitiveTopicsClassification (rus-Cyrl)":25.03}
+{"index":14,"Rank":7,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":33.65,"CEDRClassification (rus-Cyrl)":42.32,"SensitiveTopicsClassification (rus-Cyrl)":24.98}
+{"index":13,"Rank":8,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":33.37,"CEDRClassification (rus-Cyrl)":40.8,"SensitiveTopicsClassification (rus-Cyrl)":25.94}
+{"index":22,"Rank":9,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":32.9,"CEDRClassification (rus-Cyrl)":39.98,"SensitiveTopicsClassification (rus-Cyrl)":25.83}
+{"index":5,"Rank":10,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":32.64,"CEDRClassification (rus-Cyrl)":36.81,"SensitiveTopicsClassification (rus-Cyrl)":28.47}
+{"index":16,"Rank":11,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":31.99,"CEDRClassification (rus-Cyrl)":40.07,"SensitiveTopicsClassification (rus-Cyrl)":23.91}
+{"index":6,"Rank":12,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":31.9,"CEDRClassification (rus-Cyrl)":35.84,"SensitiveTopicsClassification (rus-Cyrl)":27.97}
+{"index":24,"Rank":13,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":31.7,"CEDRClassification (rus-Cyrl)":38.95,"SensitiveTopicsClassification (rus-Cyrl)":24.44}
+{"index":17,"Rank":14,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":31.42,"CEDRClassification (rus-Cyrl)":40.61,"SensitiveTopicsClassification (rus-Cyrl)":22.23}
+{"index":21,"Rank":15,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":31.3,"CEDRClassification (rus-Cyrl)":37.76,"SensitiveTopicsClassification (rus-Cyrl)":24.84}
+{"index":7,"Rank":16,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":31.27,"CEDRClassification (rus-Cyrl)":40.75,"SensitiveTopicsClassification (rus-Cyrl)":21.79}
+{"index":9,"Rank":17,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":29.44,"CEDRClassification (rus-Cyrl)":36.87,"SensitiveTopicsClassification (rus-Cyrl)":22.02}
+{"index":1,"Rank":18,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":29.32,"CEDRClassification (rus-Cyrl)":36.19,"SensitiveTopicsClassification (rus-Cyrl)":22.45}
+{"index":12,"Rank":19,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":28.9,"CEDRClassification (rus-Cyrl)":34.14,"SensitiveTopicsClassification (rus-Cyrl)":23.67}
+{"index":8,"Rank":20,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":27.96,"CEDRClassification (rus-Cyrl)":37.39,"SensitiveTopicsClassification (rus-Cyrl)":18.54}
+{"index":3,"Rank":21,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":27.8,"CEDRClassification (rus-Cyrl)":35.55,"SensitiveTopicsClassification (rus-Cyrl)":20.05}
+{"index":20,"Rank":22,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":26.9,"CEDRClassification (rus-Cyrl)":35.98,"SensitiveTopicsClassification (rus-Cyrl)":17.83}
+{"index":2,"Rank":23,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":26.2,"CEDRClassification (rus-Cyrl)":33.59,"SensitiveTopicsClassification (rus-Cyrl)":18.8}
+{"index":18,"Rank":24,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":25.96,"CEDRClassification (rus-Cyrl)":33.86,"SensitiveTopicsClassification (rus-Cyrl)":18.05}
+{"index":19,"Rank":25,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":25.27,"CEDRClassification (rus-Cyrl)":32.72,"SensitiveTopicsClassification (rus-Cyrl)":17.82}
diff --git a/all_data_tasks/40/default.jsonl b/all_data_tasks/40/default.jsonl
index 58eba42543543dd20857eb5b82bf7bcafac3b11c..8cce9fa388b252e2822a8f84bba8321d20dfdbbc 100644
--- a/all_data_tasks/40/default.jsonl
+++ b/all_data_tasks/40/default.jsonl
@@ -1,141 +1,52 @@
-{"index":38,"Rank":1,"Model":"multilingual-e5-large-instruct-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.36,"AmazonCounterfactualClassification (de)":66.71,"AmazonCounterfactualClassification (ja)":76.05,"AmazonReviewsClassification (de)":53.0,"AmazonReviewsClassification (es)":48.81,"AmazonReviewsClassification (fr)":48.51,"AmazonReviewsClassification (ja)":47.71,"AmazonReviewsClassification (zh)":44.66,"MTOPDomainClassification (de)":92.68,"MTOPDomainClassification (es)":92.27,"MTOPDomainClassification (fr)":89.97,"MTOPDomainClassification (hi)":90.03,"MTOPDomainClassification (th)":87.56,"MTOPIntentClassification (de)":79.37,"MTOPIntentClassification (es)":80.34,"MTOPIntentClassification (fr)":76.72,"MTOPIntentClassification (hi)":77.87,"MTOPIntentClassification (th)":77.74,"MassiveIntentClassification (af)":67.66,"MassiveIntentClassification (am)":60.71,"MassiveIntentClassification (ar)":63.61,"MassiveIntentClassification (az)":69.0,"MassiveIntentClassification (bn)":68.54,"MassiveIntentClassification (cy)":63.03,"MassiveIntentClassification (de)":72.47,"MassiveIntentClassification (el)":71.24,"MassiveIntentClassification (es)":72.82,"MassiveIntentClassification (fa)":74.25,"MassiveIntentClassification (fi)":72.29,"MassiveIntentClassification (fr)":73.32,"MassiveIntentClassification (he)":70.22,"MassiveIntentClassification (hi)":71.58,"MassiveIntentClassification (hu)":71.92,"MassiveIntentClassification (hy)":68.07,"MassiveIntentClassification (id)":72.62,"MassiveIntentClassification (is)":65.77,"MassiveIntentClassification (it)":73.45,"MassiveIntentClassification (ja)":74.69,"MassiveIntentClassification (jv)":63.04,"MassiveIntentClassification (ka)":58.91,"MassiveIntentClassification (km)":54.43,"MassiveIntentClassification (kn)":66.33,"MassiveIntentClassification (ko)":70.59,"MassiveIntentClassification (lv)":69.11,"MassiveIntentClassification (ml)":69.7,"MassiveIntentClassification 
(mn)":66.44,"MassiveIntentClassification (ms)":70.8,"MassiveIntentClassification (my)":64.79,"MassiveIntentClassification (nl)":74.43,"MassiveIntentClassification (pt)":73.63,"MassiveIntentClassification (ro)":71.89,"MassiveIntentClassification (ru)":74.16,"MassiveIntentClassification (sl)":69.96,"MassiveIntentClassification (sq)":69.5,"MassiveIntentClassification (sw)":63.01,"MassiveIntentClassification (ta)":66.91,"MassiveIntentClassification (te)":67.62,"MassiveIntentClassification (th)":69.51,"MassiveIntentClassification (tl)":69.31,"MassiveIntentClassification (tr)":72.24,"MassiveIntentClassification (ur)":67.5,"MassiveIntentClassification (vi)":71.29,"MassiveIntentClassification (zh-TW)":69.38,"MassiveScenarioClassification (af)":73.34,"MassiveScenarioClassification (am)":65.84,"MassiveScenarioClassification (ar)":69.76,"MassiveScenarioClassification (az)":72.02,"MassiveScenarioClassification (bn)":72.76,"MassiveScenarioClassification (cy)":68.02,"MassiveScenarioClassification (de)":77.68,"MassiveScenarioClassification (el)":76.13,"MassiveScenarioClassification (es)":76.97,"MassiveScenarioClassification (fa)":78.1,"MassiveScenarioClassification (fi)":75.21,"MassiveScenarioClassification (fr)":77.07,"MassiveScenarioClassification (he)":73.53,"MassiveScenarioClassification (hi)":75.75,"MassiveScenarioClassification (hu)":77.09,"MassiveScenarioClassification (hy)":71.08,"MassiveScenarioClassification (id)":77.1,"MassiveScenarioClassification (is)":71.26,"MassiveScenarioClassification (it)":77.08,"MassiveScenarioClassification (ja)":79.35,"MassiveScenarioClassification (jv)":68.42,"MassiveScenarioClassification (ka)":66.16,"MassiveScenarioClassification (km)":60.11,"MassiveScenarioClassification (kn)":71.25,"MassiveScenarioClassification (ko)":76.46,"MassiveScenarioClassification (lv)":73.25,"MassiveScenarioClassification (ml)":74.12,"MassiveScenarioClassification (mn)":70.02,"MassiveScenarioClassification (ms)":74.41,"MassiveScenarioClassification 
(my)":68.4,"MassiveScenarioClassification (nl)":78.52,"MassiveScenarioClassification (pt)":77.12,"MassiveScenarioClassification (ro)":75.26,"MassiveScenarioClassification (ru)":77.71,"MassiveScenarioClassification (sl)":74.84,"MassiveScenarioClassification (sq)":74.72,"MassiveScenarioClassification (sw)":67.92,"MassiveScenarioClassification (ta)":70.93,"MassiveScenarioClassification (te)":72.41,"MassiveScenarioClassification (th)":75.18,"MassiveScenarioClassification (tl)":72.86,"MassiveScenarioClassification (tr)":76.47,"MassiveScenarioClassification (ur)":71.89,"MassiveScenarioClassification (vi)":74.75,"MassiveScenarioClassification (zh-TW)":75.46}
-{"index":82,"Rank":2,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":71.36,"AmazonCounterfactualClassification (de)":66.71,"AmazonCounterfactualClassification (ja)":76.05,"AmazonReviewsClassification (de)":53.0,"AmazonReviewsClassification (es)":48.81,"AmazonReviewsClassification (fr)":48.51,"AmazonReviewsClassification (ja)":47.71,"AmazonReviewsClassification (zh)":44.66,"MTOPDomainClassification (de)":92.68,"MTOPDomainClassification (es)":92.27,"MTOPDomainClassification (fr)":89.97,"MTOPDomainClassification (hi)":90.03,"MTOPDomainClassification (th)":87.56,"MTOPIntentClassification (de)":79.37,"MTOPIntentClassification (es)":80.34,"MTOPIntentClassification (fr)":76.72,"MTOPIntentClassification (hi)":77.87,"MTOPIntentClassification (th)":77.74,"MassiveIntentClassification (af)":67.66,"MassiveIntentClassification (am)":60.71,"MassiveIntentClassification (ar)":63.61,"MassiveIntentClassification (az)":69.0,"MassiveIntentClassification (bn)":68.54,"MassiveIntentClassification (cy)":63.03,"MassiveIntentClassification (de)":72.47,"MassiveIntentClassification (el)":71.24,"MassiveIntentClassification (es)":72.82,"MassiveIntentClassification (fa)":74.25,"MassiveIntentClassification (fi)":72.29,"MassiveIntentClassification (fr)":73.32,"MassiveIntentClassification (he)":70.22,"MassiveIntentClassification (hi)":71.58,"MassiveIntentClassification (hu)":71.92,"MassiveIntentClassification (hy)":68.07,"MassiveIntentClassification (id)":72.62,"MassiveIntentClassification (is)":65.77,"MassiveIntentClassification (it)":73.45,"MassiveIntentClassification (ja)":74.69,"MassiveIntentClassification (jv)":63.04,"MassiveIntentClassification (ka)":58.91,"MassiveIntentClassification (km)":54.43,"MassiveIntentClassification (kn)":66.33,"MassiveIntentClassification (ko)":70.59,"MassiveIntentClassification (lv)":69.11,"MassiveIntentClassification (ml)":69.7,"MassiveIntentClassification 
(mn)":66.44,"MassiveIntentClassification (ms)":70.8,"MassiveIntentClassification (my)":64.79,"MassiveIntentClassification (nl)":74.43,"MassiveIntentClassification (pt)":73.63,"MassiveIntentClassification (ro)":71.89,"MassiveIntentClassification (ru)":74.16,"MassiveIntentClassification (sl)":69.96,"MassiveIntentClassification (sq)":69.5,"MassiveIntentClassification (sw)":63.01,"MassiveIntentClassification (ta)":66.91,"MassiveIntentClassification (te)":67.62,"MassiveIntentClassification (th)":69.51,"MassiveIntentClassification (tl)":69.31,"MassiveIntentClassification (tr)":72.24,"MassiveIntentClassification (ur)":67.5,"MassiveIntentClassification (vi)":71.29,"MassiveIntentClassification (zh-TW)":69.38,"MassiveScenarioClassification (af)":73.34,"MassiveScenarioClassification (am)":65.84,"MassiveScenarioClassification (ar)":69.76,"MassiveScenarioClassification (az)":72.02,"MassiveScenarioClassification (bn)":72.76,"MassiveScenarioClassification (cy)":68.02,"MassiveScenarioClassification (de)":77.68,"MassiveScenarioClassification (el)":76.13,"MassiveScenarioClassification (es)":76.97,"MassiveScenarioClassification (fa)":78.1,"MassiveScenarioClassification (fi)":75.21,"MassiveScenarioClassification (fr)":77.07,"MassiveScenarioClassification (he)":73.53,"MassiveScenarioClassification (hi)":75.75,"MassiveScenarioClassification (hu)":77.09,"MassiveScenarioClassification (hy)":71.08,"MassiveScenarioClassification (id)":77.1,"MassiveScenarioClassification (is)":71.26,"MassiveScenarioClassification (it)":77.08,"MassiveScenarioClassification (ja)":79.35,"MassiveScenarioClassification (jv)":68.42,"MassiveScenarioClassification (ka)":66.16,"MassiveScenarioClassification (km)":60.11,"MassiveScenarioClassification (kn)":71.25,"MassiveScenarioClassification (ko)":76.46,"MassiveScenarioClassification (lv)":73.25,"MassiveScenarioClassification (ml)":74.12,"MassiveScenarioClassification (mn)":70.02,"MassiveScenarioClassification (ms)":74.41,"MassiveScenarioClassification 
(my)":68.4,"MassiveScenarioClassification (nl)":78.52,"MassiveScenarioClassification (pt)":77.12,"MassiveScenarioClassification (ro)":75.26,"MassiveScenarioClassification (ru)":77.71,"MassiveScenarioClassification (sl)":74.84,"MassiveScenarioClassification (sq)":74.72,"MassiveScenarioClassification (sw)":67.92,"MassiveScenarioClassification (ta)":70.93,"MassiveScenarioClassification (te)":72.41,"MassiveScenarioClassification (th)":75.18,"MassiveScenarioClassification (tl)":72.86,"MassiveScenarioClassification (tr)":76.47,"MassiveScenarioClassification (ur)":71.89,"MassiveScenarioClassification (vi)":74.75,"MassiveScenarioClassification (zh-TW)":75.46}
-{"index":127,"Rank":3,"Model":"multilingual-e5-large-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.6,"AmazonCounterfactualClassification (de)":71.22,"AmazonCounterfactualClassification (ja)":77.84,"AmazonReviewsClassification (de)":45.4,"AmazonReviewsClassification (es)":43.07,"AmazonReviewsClassification (fr)":41.89,"AmazonReviewsClassification (ja)":40.12,"AmazonReviewsClassification (zh)":38.83,"MTOPDomainClassification (de)":91.95,"MTOPDomainClassification (es)":92.2,"MTOPDomainClassification (fr)":89.0,"MTOPDomainClassification (hi)":89.78,"MTOPDomainClassification (th)":88.75,"MTOPIntentClassification (de)":74.53,"MTOPIntentClassification (es)":75.44,"MTOPIntentClassification (fr)":70.56,"MTOPIntentClassification (hi)":73.12,"MTOPIntentClassification (th)":74.29,"MassiveIntentClassification (af)":62.38,"MassiveIntentClassification (am)":56.07,"MassiveIntentClassification (ar)":60.75,"MassiveIntentClassification (az)":64.87,"MassiveIntentClassification (bn)":63.97,"MassiveIntentClassification (cy)":54.98,"MassiveIntentClassification (de)":69.33,"MassiveIntentClassification (el)":69.09,"MassiveIntentClassification (es)":70.49,"MassiveIntentClassification (fa)":69.91,"MassiveIntentClassification (fi)":70.12,"MassiveIntentClassification (fr)":69.34,"MassiveIntentClassification (he)":67.61,"MassiveIntentClassification (hi)":67.8,"MassiveIntentClassification (hu)":69.69,"MassiveIntentClassification (hy)":62.07,"MassiveIntentClassification (id)":70.05,"MassiveIntentClassification (is)":62.42,"MassiveIntentClassification (it)":70.53,"MassiveIntentClassification (ja)":72.51,"MassiveIntentClassification (jv)":57.91,"MassiveIntentClassification (ka)":51.78,"MassiveIntentClassification (km)":47.02,"MassiveIntentClassification (kn)":62.16,"MassiveIntentClassification (ko)":69.43,"MassiveIntentClassification (lv)":67.08,"MassiveIntentClassification (ml)":65.57,"MassiveIntentClassification 
(mn)":61.71,"MassiveIntentClassification (ms)":66.04,"MassiveIntentClassification (my)":60.88,"MassiveIntentClassification (nl)":70.45,"MassiveIntentClassification (pt)":70.73,"MassiveIntentClassification (ro)":68.36,"MassiveIntentClassification (ru)":71.7,"MassiveIntentClassification (sl)":67.09,"MassiveIntentClassification (sq)":65.18,"MassiveIntentClassification (sw)":58.5,"MassiveIntentClassification (ta)":62.69,"MassiveIntentClassification (te)":63.02,"MassiveIntentClassification (th)":68.29,"MassiveIntentClassification (tl)":64.77,"MassiveIntentClassification (tr)":69.87,"MassiveIntentClassification (ur)":64.05,"MassiveIntentClassification (vi)":69.38,"MassiveIntentClassification (zh-TW)":66.2,"MassiveScenarioClassification (af)":68.74,"MassiveScenarioClassification (am)":60.59,"MassiveScenarioClassification (ar)":66.23,"MassiveScenarioClassification (az)":66.48,"MassiveScenarioClassification (bn)":67.75,"MassiveScenarioClassification (cy)":59.09,"MassiveScenarioClassification (de)":74.7,"MassiveScenarioClassification (el)":73.9,"MassiveScenarioClassification (es)":74.31,"MassiveScenarioClassification (fa)":72.95,"MassiveScenarioClassification (fi)":73.12,"MassiveScenarioClassification (fr)":73.87,"MassiveScenarioClassification (he)":71.4,"MassiveScenarioClassification (hi)":72.13,"MassiveScenarioClassification (hu)":74.52,"MassiveScenarioClassification (hy)":64.94,"MassiveScenarioClassification (id)":74.12,"MassiveScenarioClassification (is)":67.15,"MassiveScenarioClassification (it)":74.32,"MassiveScenarioClassification (ja)":77.47,"MassiveScenarioClassification (jv)":63.32,"MassiveScenarioClassification (ka)":58.35,"MassiveScenarioClassification (km)":50.88,"MassiveScenarioClassification (kn)":66.44,"MassiveScenarioClassification (ko)":75.05,"MassiveScenarioClassification (lv)":70.5,"MassiveScenarioClassification (ml)":69.94,"MassiveScenarioClassification (mn)":64.79,"MassiveScenarioClassification (ms)":69.88,"MassiveScenarioClassification 
(my)":63.25,"MassiveScenarioClassification (nl)":74.83,"MassiveScenarioClassification (pt)":73.49,"MassiveScenarioClassification (ro)":71.72,"MassiveScenarioClassification (ru)":75.14,"MassiveScenarioClassification (sl)":71.87,"MassiveScenarioClassification (sq)":70.28,"MassiveScenarioClassification (sw)":63.14,"MassiveScenarioClassification (ta)":66.28,"MassiveScenarioClassification (te)":66.69,"MassiveScenarioClassification (th)":73.45,"MassiveScenarioClassification (tl)":67.71,"MassiveScenarioClassification (tr)":73.99,"MassiveScenarioClassification (ur)":68.15,"MassiveScenarioClassification (vi)":73.11,"MassiveScenarioClassification (zh-TW)":71.81}
-{"index":37,"Rank":4,"Model":"multilingual-e5-large-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.6,"AmazonCounterfactualClassification (de)":71.22,"AmazonCounterfactualClassification (ja)":77.84,"AmazonReviewsClassification (de)":45.4,"AmazonReviewsClassification (es)":43.07,"AmazonReviewsClassification (fr)":41.89,"AmazonReviewsClassification (ja)":40.12,"AmazonReviewsClassification (zh)":38.83,"MTOPDomainClassification (de)":91.95,"MTOPDomainClassification (es)":92.2,"MTOPDomainClassification (fr)":89.0,"MTOPDomainClassification (hi)":89.78,"MTOPDomainClassification (th)":88.75,"MTOPIntentClassification (de)":74.53,"MTOPIntentClassification (es)":75.44,"MTOPIntentClassification (fr)":70.56,"MTOPIntentClassification (hi)":73.12,"MTOPIntentClassification (th)":74.29,"MassiveIntentClassification (af)":62.38,"MassiveIntentClassification (am)":56.07,"MassiveIntentClassification (ar)":60.75,"MassiveIntentClassification (az)":64.87,"MassiveIntentClassification (bn)":63.97,"MassiveIntentClassification (cy)":54.98,"MassiveIntentClassification (de)":69.33,"MassiveIntentClassification (el)":69.09,"MassiveIntentClassification (es)":70.49,"MassiveIntentClassification (fa)":69.91,"MassiveIntentClassification (fi)":70.12,"MassiveIntentClassification (fr)":69.34,"MassiveIntentClassification (he)":67.61,"MassiveIntentClassification (hi)":67.8,"MassiveIntentClassification (hu)":69.69,"MassiveIntentClassification (hy)":62.07,"MassiveIntentClassification (id)":70.05,"MassiveIntentClassification (is)":62.42,"MassiveIntentClassification (it)":70.53,"MassiveIntentClassification (ja)":72.51,"MassiveIntentClassification (jv)":57.91,"MassiveIntentClassification (ka)":51.78,"MassiveIntentClassification (km)":47.02,"MassiveIntentClassification (kn)":62.16,"MassiveIntentClassification (ko)":69.43,"MassiveIntentClassification (lv)":67.08,"MassiveIntentClassification (ml)":65.57,"MassiveIntentClassification (mn)":61.71,"MassiveIntentClassification 
(ms)":66.04,"MassiveIntentClassification (my)":60.88,"MassiveIntentClassification (nl)":70.45,"MassiveIntentClassification (pt)":70.73,"MassiveIntentClassification (ro)":68.36,"MassiveIntentClassification (ru)":71.7,"MassiveIntentClassification (sl)":67.09,"MassiveIntentClassification (sq)":65.18,"MassiveIntentClassification (sw)":58.5,"MassiveIntentClassification (ta)":62.69,"MassiveIntentClassification (te)":63.02,"MassiveIntentClassification (th)":68.29,"MassiveIntentClassification (tl)":64.77,"MassiveIntentClassification (tr)":69.87,"MassiveIntentClassification (ur)":64.05,"MassiveIntentClassification (vi)":69.38,"MassiveIntentClassification (zh-TW)":66.2,"MassiveScenarioClassification (af)":68.74,"MassiveScenarioClassification (am)":60.59,"MassiveScenarioClassification (ar)":66.23,"MassiveScenarioClassification (az)":66.48,"MassiveScenarioClassification (bn)":67.75,"MassiveScenarioClassification (cy)":59.09,"MassiveScenarioClassification (de)":74.7,"MassiveScenarioClassification (el)":73.9,"MassiveScenarioClassification (es)":74.31,"MassiveScenarioClassification (fa)":72.95,"MassiveScenarioClassification (fi)":73.12,"MassiveScenarioClassification (fr)":73.87,"MassiveScenarioClassification (he)":71.4,"MassiveScenarioClassification (hi)":72.13,"MassiveScenarioClassification (hu)":74.52,"MassiveScenarioClassification (hy)":64.94,"MassiveScenarioClassification (id)":74.12,"MassiveScenarioClassification (is)":67.15,"MassiveScenarioClassification (it)":74.32,"MassiveScenarioClassification (ja)":77.47,"MassiveScenarioClassification (jv)":63.32,"MassiveScenarioClassification (ka)":58.35,"MassiveScenarioClassification (km)":50.88,"MassiveScenarioClassification (kn)":66.44,"MassiveScenarioClassification (ko)":75.05,"MassiveScenarioClassification (lv)":70.5,"MassiveScenarioClassification (ml)":69.94,"MassiveScenarioClassification (mn)":64.79,"MassiveScenarioClassification (ms)":69.88,"MassiveScenarioClassification (my)":63.25,"MassiveScenarioClassification 
(nl)":74.83,"MassiveScenarioClassification (pt)":73.49,"MassiveScenarioClassification (ro)":71.72,"MassiveScenarioClassification (ru)":75.14,"MassiveScenarioClassification (sl)":71.87,"MassiveScenarioClassification (sq)":70.28,"MassiveScenarioClassification (sw)":63.14,"MassiveScenarioClassification (ta)":66.28,"MassiveScenarioClassification (te)":66.69,"MassiveScenarioClassification (th)":73.45,"MassiveScenarioClassification (tl)":67.71,"MassiveScenarioClassification (tr)":73.99,"MassiveScenarioClassification (ur)":68.15,"MassiveScenarioClassification (vi)":73.11,"MassiveScenarioClassification (zh-TW)":71.81}
-{"index":81,"Rank":5,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":67.48,"AmazonCounterfactualClassification (de)":71.22,"AmazonCounterfactualClassification (ja)":77.84,"AmazonReviewsClassification (de)":45.4,"AmazonReviewsClassification (es)":43.07,"AmazonReviewsClassification (fr)":41.91,"AmazonReviewsClassification (ja)":40.12,"AmazonReviewsClassification (zh)":38.83,"MTOPDomainClassification (de)":91.95,"MTOPDomainClassification (es)":92.2,"MTOPDomainClassification (fr)":86.41,"MTOPDomainClassification (hi)":89.78,"MTOPDomainClassification (th)":88.75,"MTOPIntentClassification (de)":74.53,"MTOPIntentClassification (es)":75.44,"MTOPIntentClassification (fr)":59.43,"MTOPIntentClassification (hi)":73.12,"MTOPIntentClassification (th)":74.29,"MassiveIntentClassification (af)":62.38,"MassiveIntentClassification (am)":56.07,"MassiveIntentClassification (ar)":60.75,"MassiveIntentClassification (az)":64.87,"MassiveIntentClassification (bn)":63.97,"MassiveIntentClassification (cy)":54.98,"MassiveIntentClassification (de)":69.33,"MassiveIntentClassification (el)":69.09,"MassiveIntentClassification (es)":70.49,"MassiveIntentClassification (fa)":69.91,"MassiveIntentClassification (fi)":70.12,"MassiveIntentClassification (fr)":69.34,"MassiveIntentClassification (he)":67.61,"MassiveIntentClassification (hi)":67.8,"MassiveIntentClassification (hu)":69.69,"MassiveIntentClassification (hy)":62.07,"MassiveIntentClassification (id)":70.05,"MassiveIntentClassification (is)":62.42,"MassiveIntentClassification (it)":70.53,"MassiveIntentClassification (ja)":72.51,"MassiveIntentClassification (jv)":57.91,"MassiveIntentClassification (ka)":51.78,"MassiveIntentClassification (km)":47.02,"MassiveIntentClassification (kn)":62.16,"MassiveIntentClassification (ko)":69.43,"MassiveIntentClassification (lv)":67.08,"MassiveIntentClassification (ml)":65.57,"MassiveIntentClassification (mn)":61.71,"MassiveIntentClassification 
(ms)":66.04,"MassiveIntentClassification (my)":60.88,"MassiveIntentClassification (nl)":70.45,"MassiveIntentClassification (pt)":70.73,"MassiveIntentClassification (ro)":68.36,"MassiveIntentClassification (ru)":71.7,"MassiveIntentClassification (sl)":67.09,"MassiveIntentClassification (sq)":65.18,"MassiveIntentClassification (sw)":58.5,"MassiveIntentClassification (ta)":62.69,"MassiveIntentClassification (te)":63.02,"MassiveIntentClassification (th)":68.29,"MassiveIntentClassification (tl)":64.77,"MassiveIntentClassification (tr)":69.87,"MassiveIntentClassification (ur)":64.05,"MassiveIntentClassification (vi)":69.38,"MassiveIntentClassification (zh-TW)":66.2,"MassiveScenarioClassification (af)":68.74,"MassiveScenarioClassification (am)":60.59,"MassiveScenarioClassification (ar)":66.23,"MassiveScenarioClassification (az)":66.48,"MassiveScenarioClassification (bn)":67.75,"MassiveScenarioClassification (cy)":59.09,"MassiveScenarioClassification (de)":74.7,"MassiveScenarioClassification (el)":73.9,"MassiveScenarioClassification (es)":74.31,"MassiveScenarioClassification (fa)":72.95,"MassiveScenarioClassification (fi)":73.12,"MassiveScenarioClassification (fr)":73.87,"MassiveScenarioClassification (he)":71.4,"MassiveScenarioClassification (hi)":72.13,"MassiveScenarioClassification (hu)":74.52,"MassiveScenarioClassification (hy)":64.94,"MassiveScenarioClassification (id)":74.12,"MassiveScenarioClassification (is)":67.15,"MassiveScenarioClassification (it)":74.32,"MassiveScenarioClassification (ja)":77.47,"MassiveScenarioClassification (jv)":63.32,"MassiveScenarioClassification (ka)":58.35,"MassiveScenarioClassification (km)":50.88,"MassiveScenarioClassification (kn)":66.44,"MassiveScenarioClassification (ko)":75.05,"MassiveScenarioClassification (lv)":70.5,"MassiveScenarioClassification (ml)":69.94,"MassiveScenarioClassification (mn)":64.79,"MassiveScenarioClassification (ms)":69.88,"MassiveScenarioClassification (my)":63.25,"MassiveScenarioClassification 
(nl)":74.83,"MassiveScenarioClassification (pt)":73.49,"MassiveScenarioClassification (ro)":71.72,"MassiveScenarioClassification (ru)":75.14,"MassiveScenarioClassification (sl)":71.87,"MassiveScenarioClassification (sq)":70.28,"MassiveScenarioClassification (sw)":63.14,"MassiveScenarioClassification (ta)":66.28,"MassiveScenarioClassification (te)":66.69,"MassiveScenarioClassification (th)":73.45,"MassiveScenarioClassification (tl)":67.71,"MassiveScenarioClassification (tr)":73.99,"MassiveScenarioClassification (ur)":68.15,"MassiveScenarioClassification (vi)":73.11,"MassiveScenarioClassification (zh-TW)":71.81}
-{"index":79,"Rank":6,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":66.64,"AmazonCounterfactualClassification (de)":74.05,"AmazonCounterfactualClassification (ja)":77.22,"AmazonReviewsClassification (de)":53.26,"AmazonReviewsClassification (es)":50.33,"AmazonReviewsClassification (fr)":36.71,"AmazonReviewsClassification (ja)":48.69,"AmazonReviewsClassification (zh)":46.24,"MTOPDomainClassification (de)":92.98,"MTOPDomainClassification (es)":93.37,"MTOPDomainClassification (fr)":74.8,"MTOPDomainClassification (hi)":88.81,"MTOPDomainClassification (th)":85.52,"MTOPIntentClassification (de)":77.77,"MTOPIntentClassification (es)":79.94,"MTOPIntentClassification (fr)":53.97,"MTOPIntentClassification (hi)":72.91,"MTOPIntentClassification (th)":73.24,"MassiveIntentClassification (af)":66.48,"MassiveIntentClassification (am)":44.29,"MassiveIntentClassification (ar)":63.17,"MassiveIntentClassification (az)":64.23,"MassiveIntentClassification (bn)":64.94,"MassiveIntentClassification (cy)":55.48,"MassiveIntentClassification (de)":74.09,"MassiveIntentClassification (el)":68.31,"MassiveIntentClassification (es)":75.09,"MassiveIntentClassification (fa)":72.21,"MassiveIntentClassification (fi)":68.74,"MassiveIntentClassification (fr)":46.39,"MassiveIntentClassification (he)":66.22,"MassiveIntentClassification (hi)":69.45,"MassiveIntentClassification (hu)":69.41,"MassiveIntentClassification (hy)":56.92,"MassiveIntentClassification (id)":72.71,"MassiveIntentClassification (is)":59.91,"MassiveIntentClassification (it)":75.25,"MassiveIntentClassification (ja)":76.36,"MassiveIntentClassification (jv)":57.92,"MassiveIntentClassification (ka)":52.55,"MassiveIntentClassification (km)":46.45,"MassiveIntentClassification (kn)":53.96,"MassiveIntentClassification (ko)":74.21,"MassiveIntentClassification (lv)":59.23,"MassiveIntentClassification (ml)":51.45,"MassiveIntentClassification 
(mn)":51.38,"MassiveIntentClassification (ms)":69.85,"MassiveIntentClassification (my)":49.15,"MassiveIntentClassification (nl)":74.83,"MassiveIntentClassification (pt)":75.27,"MassiveIntentClassification (ro)":69.63,"MassiveIntentClassification (ru)":76.63,"MassiveIntentClassification (sl)":67.15,"MassiveIntentClassification (sq)":58.84,"MassiveIntentClassification (sw)":57.37,"MassiveIntentClassification (ta)":53.15,"MassiveIntentClassification (te)":51.51,"MassiveIntentClassification (th)":66.91,"MassiveIntentClassification (tl)":68.73,"MassiveIntentClassification (tr)":72.07,"MassiveIntentClassification (ur)":62.09,"MassiveIntentClassification (vi)":71.17,"MassiveIntentClassification (zh-TW)":71.14,"MassiveScenarioClassification (af)":73.37,"MassiveScenarioClassification (am)":47.21,"MassiveScenarioClassification (ar)":69.84,"MassiveScenarioClassification (az)":67.0,"MassiveScenarioClassification (bn)":68.05,"MassiveScenarioClassification (cy)":61.88,"MassiveScenarioClassification (de)":79.03,"MassiveScenarioClassification (el)":72.97,"MassiveScenarioClassification (es)":78.84,"MassiveScenarioClassification (fa)":76.74,"MassiveScenarioClassification (fi)":71.22,"MassiveScenarioClassification (fr)":53.86,"MassiveScenarioClassification (he)":69.64,"MassiveScenarioClassification (hi)":73.51,"MassiveScenarioClassification (hu)":74.06,"MassiveScenarioClassification (hy)":59.55,"MassiveScenarioClassification (id)":77.41,"MassiveScenarioClassification (is)":66.58,"MassiveScenarioClassification (it)":78.39,"MassiveScenarioClassification (ja)":79.62,"MassiveScenarioClassification (jv)":64.29,"MassiveScenarioClassification (ka)":57.52,"MassiveScenarioClassification (km)":52.42,"MassiveScenarioClassification (kn)":58.55,"MassiveScenarioClassification (ko)":78.89,"MassiveScenarioClassification (lv)":63.5,"MassiveScenarioClassification (ml)":54.03,"MassiveScenarioClassification (mn)":54.24,"MassiveScenarioClassification (ms)":75.53,"MassiveScenarioClassification 
(my)":52.19,"MassiveScenarioClassification (nl)":78.48,"MassiveScenarioClassification (pt)":77.96,"MassiveScenarioClassification (ro)":73.19,"MassiveScenarioClassification (ru)":80.52,"MassiveScenarioClassification (sl)":73.66,"MassiveScenarioClassification (sq)":64.03,"MassiveScenarioClassification (sw)":64.66,"MassiveScenarioClassification (ta)":57.76,"MassiveScenarioClassification (te)":57.27,"MassiveScenarioClassification (th)":72.46,"MassiveScenarioClassification (tl)":73.71,"MassiveScenarioClassification (tr)":75.04,"MassiveScenarioClassification (ur)":67.05,"MassiveScenarioClassification (vi)":75.52,"MassiveScenarioClassification (zh-TW)":76.87}
-{"index":80,"Rank":7,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":64.32,"AmazonCounterfactualClassification (de)":71.72,"AmazonCounterfactualClassification (ja)":73.33,"AmazonReviewsClassification (de)":41.83,"AmazonReviewsClassification (es)":40.53,"AmazonReviewsClassification (fr)":40.94,"AmazonReviewsClassification (ja)":37.44,"AmazonReviewsClassification (zh)":37.23,"MTOPDomainClassification (de)":89.63,"MTOPDomainClassification (es)":90.59,"MTOPDomainClassification (fr)":84.79,"MTOPDomainClassification (hi)":87.32,"MTOPDomainClassification (th)":86.24,"MTOPIntentClassification (de)":71.23,"MTOPIntentClassification (es)":71.27,"MTOPIntentClassification (fr)":55.51,"MTOPIntentClassification (hi)":69.24,"MTOPIntentClassification (th)":71.71,"MassiveIntentClassification (af)":59.98,"MassiveIntentClassification (am)":53.66,"MassiveIntentClassification (ar)":57.46,"MassiveIntentClassification (az)":62.59,"MassiveIntentClassification (bn)":61.13,"MassiveIntentClassification (cy)":50.06,"MassiveIntentClassification (de)":66.09,"MassiveIntentClassification (el)":64.68,"MassiveIntentClassification (es)":68.4,"MassiveIntentClassification (fa)":67.25,"MassiveIntentClassification (fi)":65.78,"MassiveIntentClassification (fr)":67.95,"MassiveIntentClassification (he)":62.05,"MassiveIntentClassification (hi)":64.95,"MassiveIntentClassification (hu)":64.97,"MassiveIntentClassification (hy)":60.08,"MassiveIntentClassification (id)":66.64,"MassiveIntentClassification (is)":56.39,"MassiveIntentClassification (it)":68.93,"MassiveIntentClassification (ja)":68.94,"MassiveIntentClassification (jv)":54.26,"MassiveIntentClassification (ka)":48.99,"MassiveIntentClassification (km)":44.69,"MassiveIntentClassification (kn)":59.19,"MassiveIntentClassification (ko)":66.34,"MassiveIntentClassification (lv)":60.34,"MassiveIntentClassification (ml)":63.09,"MassiveIntentClassification (mn)":58.76,"MassiveIntentClassification 
(ms)":62.48,"MassiveIntentClassification (my)":58.56,"MassiveIntentClassification (nl)":67.3,"MassiveIntentClassification (pt)":68.98,"MassiveIntentClassification (ro)":65.54,"MassiveIntentClassification (ru)":69.02,"MassiveIntentClassification (sl)":62.35,"MassiveIntentClassification (sq)":61.23,"MassiveIntentClassification (sw)":56.0,"MassiveIntentClassification (ta)":58.71,"MassiveIntentClassification (te)":59.72,"MassiveIntentClassification (th)":65.6,"MassiveIntentClassification (tl)":60.86,"MassiveIntentClassification (tr)":67.41,"MassiveIntentClassification (ur)":61.52,"MassiveIntentClassification (vi)":66.17,"MassiveIntentClassification (zh-TW)":64.65,"MassiveScenarioClassification (af)":65.09,"MassiveScenarioClassification (am)":58.52,"MassiveScenarioClassification (ar)":62.24,"MassiveScenarioClassification (az)":63.75,"MassiveScenarioClassification (bn)":65.0,"MassiveScenarioClassification (cy)":52.84,"MassiveScenarioClassification (de)":71.95,"MassiveScenarioClassification (el)":70.18,"MassiveScenarioClassification (es)":71.5,"MassiveScenarioClassification (fa)":70.25,"MassiveScenarioClassification (fi)":69.13,"MassiveScenarioClassification (fr)":71.89,"MassiveScenarioClassification (he)":67.44,"MassiveScenarioClassification (hi)":69.16,"MassiveScenarioClassification (hu)":70.75,"MassiveScenarioClassification (hy)":63.14,"MassiveScenarioClassification (id)":70.7,"MassiveScenarioClassification (is)":60.94,"MassiveScenarioClassification (it)":72.32,"MassiveScenarioClassification (ja)":74.65,"MassiveScenarioClassification (jv)":59.69,"MassiveScenarioClassification (ka)":54.37,"MassiveScenarioClassification (km)":48.31,"MassiveScenarioClassification (kn)":62.15,"MassiveScenarioClassification (ko)":72.45,"MassiveScenarioClassification (lv)":62.81,"MassiveScenarioClassification (ml)":68.04,"MassiveScenarioClassification (mn)":61.44,"MassiveScenarioClassification (ms)":66.9,"MassiveScenarioClassification (my)":61.64,"MassiveScenarioClassification 
(nl)":72.11,"MassiveScenarioClassification (pt)":70.83,"MassiveScenarioClassification (ro)":69.19,"MassiveScenarioClassification (ru)":72.99,"MassiveScenarioClassification (sl)":65.26,"MassiveScenarioClassification (sq)":66.49,"MassiveScenarioClassification (sw)":59.89,"MassiveScenarioClassification (ta)":62.38,"MassiveScenarioClassification (te)":62.59,"MassiveScenarioClassification (th)":71.61,"MassiveScenarioClassification (tl)":62.74,"MassiveScenarioClassification (tr)":71.67,"MassiveScenarioClassification (ur)":64.64,"MassiveScenarioClassification (vi)":70.01,"MassiveScenarioClassification (zh-TW)":70.69}
-{"index":67,"Rank":8,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":63.11,"AmazonCounterfactualClassification (de)":72.02,"AmazonCounterfactualClassification (ja)":71.79,"AmazonReviewsClassification (de)":34.61,"AmazonReviewsClassification (es)":35.17,"AmazonReviewsClassification (fr)":34.91,"AmazonReviewsClassification (ja)":31.84,"AmazonReviewsClassification (zh)":31.91,"MTOPDomainClassification (de)":89.54,"MTOPDomainClassification (es)":90.62,"MTOPDomainClassification (fr)":86.19,"MTOPDomainClassification (hi)":89.42,"MTOPDomainClassification (th)":85.9,"MTOPIntentClassification (de)":73.43,"MTOPIntentClassification (es)":73.84,"MTOPIntentClassification (fr)":66.75,"MTOPIntentClassification (hi)":69.14,"MTOPIntentClassification (th)":68.47,"MassiveIntentClassification (af)":58.15,"MassiveIntentClassification (am)":57.91,"MassiveIntentClassification (ar)":57.23,"MassiveIntentClassification (az)":57.17,"MassiveIntentClassification (bn)":62.24,"MassiveIntentClassification (cy)":51.19,"MassiveIntentClassification (de)":61.31,"MassiveIntentClassification (el)":64.21,"MassiveIntentClassification (es)":63.97,"MassiveIntentClassification (fa)":66.67,"MassiveIntentClassification (fi)":62.86,"MassiveIntentClassification (fr)":58.55,"MassiveIntentClassification (he)":63.45,"MassiveIntentClassification (hi)":61.32,"MassiveIntentClassification (hu)":62.91,"MassiveIntentClassification (hy)":60.03,"MassiveIntentClassification (id)":63.29,"MassiveIntentClassification (is)":56.02,"MassiveIntentClassification (it)":64.44,"MassiveIntentClassification (ja)":63.7,"MassiveIntentClassification (jv)":52.0,"MassiveIntentClassification (ka)":54.09,"MassiveIntentClassification (km)":43.34,"MassiveIntentClassification (kn)":57.87,"MassiveIntentClassification (ko)":62.69,"MassiveIntentClassification (lv)":56.24,"MassiveIntentClassification (ml)":62.81,"MassiveIntentClassification (mn)":58.49,"MassiveIntentClassification 
(ms)":61.56,"MassiveIntentClassification (my)":59.4,"MassiveIntentClassification (nl)":64.56,"MassiveIntentClassification (pt)":63.49,"MassiveIntentClassification (ro)":62.53,"MassiveIntentClassification (ru)":62.63,"MassiveIntentClassification (sl)":63.43,"MassiveIntentClassification (sq)":61.45,"MassiveIntentClassification (sw)":56.25,"MassiveIntentClassification (ta)":59.75,"MassiveIntentClassification (te)":59.61,"MassiveIntentClassification (th)":59.42,"MassiveIntentClassification (tl)":58.12,"MassiveIntentClassification (tr)":60.91,"MassiveIntentClassification (ur)":59.49,"MassiveIntentClassification (vi)":60.48,"MassiveIntentClassification (zh-TW)":56.73,"MassiveScenarioClassification (af)":64.06,"MassiveScenarioClassification (am)":63.24,"MassiveScenarioClassification (ar)":63.69,"MassiveScenarioClassification (az)":60.86,"MassiveScenarioClassification (bn)":67.17,"MassiveScenarioClassification (cy)":56.52,"MassiveScenarioClassification (de)":67.48,"MassiveScenarioClassification (el)":70.23,"MassiveScenarioClassification (es)":69.08,"MassiveScenarioClassification (fa)":72.1,"MassiveScenarioClassification (fi)":67.16,"MassiveScenarioClassification (fr)":63.02,"MassiveScenarioClassification (he)":68.83,"MassiveScenarioClassification (hi)":66.9,"MassiveScenarioClassification (hu)":69.33,"MassiveScenarioClassification (hy)":65.82,"MassiveScenarioClassification (id)":68.98,"MassiveScenarioClassification (is)":63.14,"MassiveScenarioClassification (it)":70.04,"MassiveScenarioClassification (ja)":70.68,"MassiveScenarioClassification (jv)":59.79,"MassiveScenarioClassification (ka)":61.03,"MassiveScenarioClassification (km)":49.05,"MassiveScenarioClassification (kn)":63.78,"MassiveScenarioClassification (ko)":69.6,"MassiveScenarioClassification (lv)":59.97,"MassiveScenarioClassification (ml)":69.2,"MassiveScenarioClassification (mn)":62.72,"MassiveScenarioClassification (ms)":67.87,"MassiveScenarioClassification (my)":64.98,"MassiveScenarioClassification 
(nl)":69.8,"MassiveScenarioClassification (pt)":67.5,"MassiveScenarioClassification (ro)":67.53,"MassiveScenarioClassification (ru)":67.96,"MassiveScenarioClassification (sl)":69.57,"MassiveScenarioClassification (sq)":68.48,"MassiveScenarioClassification (sw)":63.18,"MassiveScenarioClassification (ta)":64.85,"MassiveScenarioClassification (te)":65.39,"MassiveScenarioClassification (th)":67.99,"MassiveScenarioClassification (tl)":63.4,"MassiveScenarioClassification (tr)":65.77,"MassiveScenarioClassification (ur)":65.81,"MassiveScenarioClassification (vi)":66.52,"MassiveScenarioClassification (zh-TW)":63.3}
-{"index":83,"Rank":9,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":61.51,"AmazonCounterfactualClassification (de)":71.65,"AmazonCounterfactualClassification (ja)":64.19,"AmazonReviewsClassification (de)":40.25,"AmazonReviewsClassification (es)":40.39,"AmazonReviewsClassification (fr)":39.68,"AmazonReviewsClassification (ja)":37.68,"AmazonReviewsClassification (zh)":37.5,"MTOPDomainClassification (de)":87.47,"MTOPDomainClassification (es)":89.27,"MTOPDomainClassification (fr)":81.2,"MTOPDomainClassification (hi)":86.02,"MTOPDomainClassification (th)":85.35,"MTOPIntentClassification (de)":65.86,"MTOPIntentClassification (es)":67.97,"MTOPIntentClassification (fr)":46.01,"MTOPIntentClassification (hi)":66.3,"MTOPIntentClassification (th)":67.52,"MassiveIntentClassification (af)":57.07,"MassiveIntentClassification (am)":51.96,"MassiveIntentClassification (ar)":54.43,"MassiveIntentClassification (az)":59.8,"MassiveIntentClassification (bn)":59.38,"MassiveIntentClassification (cy)":46.56,"MassiveIntentClassification (de)":62.73,"MassiveIntentClassification (el)":61.6,"MassiveIntentClassification (es)":66.31,"MassiveIntentClassification (fa)":65.54,"MassiveIntentClassification (fi)":61.46,"MassiveIntentClassification (fr)":65.47,"MassiveIntentClassification (he)":58.05,"MassiveIntentClassification (hi)":64.07,"MassiveIntentClassification (hu)":60.95,"MassiveIntentClassification (hy)":57.4,"MassiveIntentClassification (id)":64.17,"MassiveIntentClassification (is)":52.26,"MassiveIntentClassification (it)":65.54,"MassiveIntentClassification (ja)":68.23,"MassiveIntentClassification (jv)":50.85,"MassiveIntentClassification (ka)":48.45,"MassiveIntentClassification (km)":42.83,"MassiveIntentClassification (kn)":57.51,"MassiveIntentClassification (ko)":63.79,"MassiveIntentClassification (lv)":54.99,"MassiveIntentClassification (ml)":61.9,"MassiveIntentClassification (mn)":57.1,"MassiveIntentClassification 
(ms)":58.99,"MassiveIntentClassification (my)":55.9,"MassiveIntentClassification (nl)":65.64,"MassiveIntentClassification (pt)":66.85,"MassiveIntentClassification (ro)":60.81,"MassiveIntentClassification (ru)":58.65,"MassiveIntentClassification (sl)":56.52,"MassiveIntentClassification (sq)":57.99,"MassiveIntentClassification (sw)":53.57,"MassiveIntentClassification (ta)":57.26,"MassiveIntentClassification (te)":57.83,"MassiveIntentClassification (th)":64.07,"MassiveIntentClassification (tl)":58.91,"MassiveIntentClassification (tr)":63.54,"MassiveIntentClassification (ur)":59.28,"MassiveIntentClassification (vi)":64.07,"MassiveIntentClassification (zh-TW)":62.54,"MassiveScenarioClassification (af)":63.04,"MassiveScenarioClassification (am)":56.84,"MassiveScenarioClassification (ar)":59.62,"MassiveScenarioClassification (az)":60.85,"MassiveScenarioClassification (bn)":62.77,"MassiveScenarioClassification (cy)":50.18,"MassiveScenarioClassification (de)":69.19,"MassiveScenarioClassification (el)":67.07,"MassiveScenarioClassification (es)":69.83,"MassiveScenarioClassification (fa)":68.71,"MassiveScenarioClassification (fi)":65.95,"MassiveScenarioClassification (fr)":68.76,"MassiveScenarioClassification (he)":63.81,"MassiveScenarioClassification (hi)":67.69,"MassiveScenarioClassification (hu)":66.47,"MassiveScenarioClassification (hy)":59.5,"MassiveScenarioClassification (id)":67.92,"MassiveScenarioClassification (is)":56.49,"MassiveScenarioClassification (it)":69.04,"MassiveScenarioClassification (ja)":73.89,"MassiveScenarioClassification (jv)":56.63,"MassiveScenarioClassification (ka)":52.24,"MassiveScenarioClassification (km)":46.62,"MassiveScenarioClassification (kn)":59.16,"MassiveScenarioClassification (ko)":69.85,"MassiveScenarioClassification (lv)":56.66,"MassiveScenarioClassification (ml)":66.54,"MassiveScenarioClassification (mn)":59.31,"MassiveScenarioClassification (ms)":64.88,"MassiveScenarioClassification (my)":58.86,"MassiveScenarioClassification 
(nl)":70.87,"MassiveScenarioClassification (pt)":68.18,"MassiveScenarioClassification (ro)":64.65,"MassiveScenarioClassification (ru)":63.77,"MassiveScenarioClassification (sl)":60.18,"MassiveScenarioClassification (sq)":62.86,"MassiveScenarioClassification (sw)":58.15,"MassiveScenarioClassification (ta)":59.44,"MassiveScenarioClassification (te)":60.85,"MassiveScenarioClassification (th)":70.66,"MassiveScenarioClassification (tl)":60.88,"MassiveScenarioClassification (tr)":68.05,"MassiveScenarioClassification (ur)":62.11,"MassiveScenarioClassification (vi)":67.44,"MassiveScenarioClassification (zh-TW)":68.32}
-{"index":107,"Rank":10,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":60.56,"AmazonCounterfactualClassification (de)":73.17,"AmazonCounterfactualClassification (ja)":76.42,"AmazonReviewsClassification (de)":39.92,"AmazonReviewsClassification (es)":39.39,"AmazonReviewsClassification (fr)":38.52,"AmazonReviewsClassification (ja)":36.44,"AmazonReviewsClassification (zh)":36.45,"MTOPDomainClassification (de)":86.95,"MTOPDomainClassification (es)":84.07,"MTOPDomainClassification (fr)":84.14,"MTOPDomainClassification (hi)":85.11,"MTOPDomainClassification (th)":81.24,"MTOPIntentClassification (de)":63.42,"MTOPIntentClassification (es)":64.44,"MTOPIntentClassification (fr)":62.01,"MTOPIntentClassification (hi)":62.58,"MTOPIntentClassification (th)":64.61,"MassiveIntentClassification (af)":56.12,"MassiveIntentClassification (am)":55.71,"MassiveIntentClassification (ar)":50.86,"MassiveIntentClassification (az)":58.97,"MassiveIntentClassification (bn)":58.22,"MassiveIntentClassification (cy)":50.16,"MassiveIntentClassification (de)":56.21,"MassiveIntentClassification (el)":57.03,"MassiveIntentClassification (es)":58.32,"MassiveIntentClassification (fa)":62.33,"MassiveIntentClassification (fi)":60.12,"MassiveIntentClassification (fr)":60.47,"MassiveIntentClassification (he)":56.55,"MassiveIntentClassification (hi)":59.4,"MassiveIntentClassification (hu)":59.52,"MassiveIntentClassification (hy)":56.2,"MassiveIntentClassification (id)":61.12,"MassiveIntentClassification (is)":54.9,"MassiveIntentClassification (it)":59.83,"MassiveIntentClassification (ja)":63.11,"MassiveIntentClassification (jv)":50.98,"MassiveIntentClassification (ka)":48.35,"MassiveIntentClassification (km)":48.55,"MassiveIntentClassification (kn)":56.24,"MassiveIntentClassification (ko)":60.99,"MassiveIntentClassification (lv)":57.1,"MassiveIntentClassification (ml)":57.91,"MassiveIntentClassification (mn)":58.5,"MassiveIntentClassification 
(ms)":58.6,"MassiveIntentClassification (my)":57.35,"MassiveIntentClassification (nl)":59.37,"MassiveIntentClassification (pt)":60.16,"MassiveIntentClassification (ro)":57.92,"MassiveIntentClassification (ru)":60.67,"MassiveIntentClassification (sl)":59.37,"MassiveIntentClassification (sq)":58.03,"MassiveIntentClassification (sw)":51.62,"MassiveIntentClassification (ta)":55.04,"MassiveIntentClassification (te)":58.32,"MassiveIntentClassification (th)":56.58,"MassiveIntentClassification (tl)":55.28,"MassiveIntentClassification (tr)":60.91,"MassiveIntentClassification (ur)":56.7,"MassiveIntentClassification (vi)":56.67,"MassiveIntentClassification (zh-TW)":59.51,"MassiveScenarioClassification (af)":63.39,"MassiveScenarioClassification (am)":62.02,"MassiveScenarioClassification (ar)":57.72,"MassiveScenarioClassification (az)":63.48,"MassiveScenarioClassification (bn)":61.84,"MassiveScenarioClassification (cy)":56.13,"MassiveScenarioClassification (de)":62.39,"MassiveScenarioClassification (el)":64.58,"MassiveScenarioClassification (es)":63.61,"MassiveScenarioClassification (fa)":67.46,"MassiveScenarioClassification (fi)":64.58,"MassiveScenarioClassification (fr)":65.1,"MassiveScenarioClassification (he)":63.53,"MassiveScenarioClassification (hi)":64.4,"MassiveScenarioClassification (hu)":65.82,"MassiveScenarioClassification (hy)":61.25,"MassiveScenarioClassification (id)":65.84,"MassiveScenarioClassification (is)":61.94,"MassiveScenarioClassification (it)":64.09,"MassiveScenarioClassification (ja)":67.72,"MassiveScenarioClassification (jv)":58.29,"MassiveScenarioClassification (ka)":53.38,"MassiveScenarioClassification (km)":56.18,"MassiveScenarioClassification (kn)":61.74,"MassiveScenarioClassification (ko)":67.26,"MassiveScenarioClassification (lv)":61.87,"MassiveScenarioClassification (ml)":62.26,"MassiveScenarioClassification (mn)":62.6,"MassiveScenarioClassification (ms)":65.63,"MassiveScenarioClassification (my)":62.94,"MassiveScenarioClassification 
(nl)":65.16,"MassiveScenarioClassification (pt)":63.28,"MassiveScenarioClassification (ro)":62.41,"MassiveScenarioClassification (ru)":65.25,"MassiveScenarioClassification (sl)":64.25,"MassiveScenarioClassification (sq)":64.54,"MassiveScenarioClassification (sw)":58.36,"MassiveScenarioClassification (ta)":59.08,"MassiveScenarioClassification (te)":64.13,"MassiveScenarioClassification (th)":64.34,"MassiveScenarioClassification (tl)":60.23,"MassiveScenarioClassification (tr)":65.43,"MassiveScenarioClassification (ur)":61.52,"MassiveScenarioClassification (vi)":61.05,"MassiveScenarioClassification (zh-TW)":67.08}
-{"index":55,"Rank":11,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.73,"AmazonCounterfactualClassification (de)":70.94,"AmazonCounterfactualClassification (ja)":80.06,"AmazonReviewsClassification (de)":38.83,"AmazonReviewsClassification (es)":39.39,"AmazonReviewsClassification (fr)":39.47,"AmazonReviewsClassification (ja)":35.9,"AmazonReviewsClassification (zh)":36.07,"MTOPDomainClassification (de)":86.91,"MTOPDomainClassification (es)":87.74,"MTOPDomainClassification (fr)":86.22,"MTOPDomainClassification (hi)":82.92,"MTOPDomainClassification (th)":69.9,"MTOPIntentClassification (de)":63.26,"MTOPIntentClassification (es)":65.06,"MTOPIntentClassification (fr)":59.43,"MTOPIntentClassification (hi)":59.08,"MTOPIntentClassification (th)":43.75,"MassiveIntentClassification (af)":47.6,"MassiveIntentClassification (am)":31.57,"MassiveIntentClassification (ar)":52.63,"MassiveIntentClassification (az)":50.09,"MassiveIntentClassification (bn)":46.54,"MassiveIntentClassification (cy)":44.26,"MassiveIntentClassification (de)":61.87,"MassiveIntentClassification (el)":47.02,"MassiveIntentClassification (es)":62.54,"MassiveIntentClassification (fa)":55.19,"MassiveIntentClassification (fi)":48.43,"MassiveIntentClassification (fr)":64.27,"MassiveIntentClassification (he)":57.62,"MassiveIntentClassification (hi)":57.54,"MassiveIntentClassification (hu)":45.67,"MassiveIntentClassification (hy)":39.2,"MassiveIntentClassification (id)":55.0,"MassiveIntentClassification (is)":43.14,"MassiveIntentClassification (it)":61.0,"MassiveIntentClassification (ja)":64.29,"MassiveIntentClassification (jv)":43.69,"MassiveIntentClassification (ka)":38.35,"MassiveIntentClassification (km)":34.22,"MassiveIntentClassification (kn)":51.79,"MassiveIntentClassification (ko)":59.59,"MassiveIntentClassification (lv)":46.54,"MassiveIntentClassification (ml)":54.47,"MassiveIntentClassification (mn)":40.68,"MassiveIntentClassification 
(ms)":51.24,"MassiveIntentClassification (my)":31.76,"MassiveIntentClassification (nl)":60.82,"MassiveIntentClassification (pt)":62.74,"MassiveIntentClassification (ro)":49.68,"MassiveIntentClassification (ru)":60.85,"MassiveIntentClassification (sl)":48.59,"MassiveIntentClassification (sq)":47.17,"MassiveIntentClassification (sw)":45.97,"MassiveIntentClassification (ta)":53.6,"MassiveIntentClassification (te)":53.45,"MassiveIntentClassification (th)":46.17,"MassiveIntentClassification (tl)":49.48,"MassiveIntentClassification (tr)":58.03,"MassiveIntentClassification (ur)":39.26,"MassiveIntentClassification (vi)":52.16,"MassiveIntentClassification (zh-TW)":58.21,"MassiveScenarioClassification (af)":58.07,"MassiveScenarioClassification (am)":38.21,"MassiveScenarioClassification (ar)":57.47,"MassiveScenarioClassification (az)":54.37,"MassiveScenarioClassification (bn)":52.72,"MassiveScenarioClassification (cy)":49.5,"MassiveScenarioClassification (de)":71.28,"MassiveScenarioClassification (el)":52.42,"MassiveScenarioClassification (es)":67.04,"MassiveScenarioClassification (fa)":60.17,"MassiveScenarioClassification (fi)":54.05,"MassiveScenarioClassification (fr)":69.76,"MassiveScenarioClassification (he)":62.85,"MassiveScenarioClassification (hi)":62.18,"MassiveScenarioClassification (hu)":53.52,"MassiveScenarioClassification (hy)":45.95,"MassiveScenarioClassification (id)":60.33,"MassiveScenarioClassification (is)":50.1,"MassiveScenarioClassification (it)":66.49,"MassiveScenarioClassification (ja)":68.36,"MassiveScenarioClassification (jv)":50.59,"MassiveScenarioClassification (ka)":42.76,"MassiveScenarioClassification (km)":40.65,"MassiveScenarioClassification (kn)":57.25,"MassiveScenarioClassification (ko)":63.84,"MassiveScenarioClassification (lv)":53.14,"MassiveScenarioClassification (ml)":58.84,"MassiveScenarioClassification (mn)":44.82,"MassiveScenarioClassification (ms)":58.9,"MassiveScenarioClassification (my)":38.52,"MassiveScenarioClassification 
(nl)":67.54,"MassiveScenarioClassification (pt)":65.7,"MassiveScenarioClassification (ro)":57.2,"MassiveScenarioClassification (ru)":65.42,"MassiveScenarioClassification (sl)":55.15,"MassiveScenarioClassification (sq)":55.68,"MassiveScenarioClassification (sw)":52.3,"MassiveScenarioClassification (ta)":56.19,"MassiveScenarioClassification (te)":58.02,"MassiveScenarioClassification (th)":52.56,"MassiveScenarioClassification (tl)":57.43,"MassiveScenarioClassification (tr)":61.55,"MassiveScenarioClassification (ur)":47.11,"MassiveScenarioClassification (vi)":56.83,"MassiveScenarioClassification (zh-TW)":64.02}
-{"index":58,"Rank":12,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":7068,"Memory Usage (GB, fp32)":26.33,"Average":54.35,"AmazonCounterfactualClassification (de)":61.35,"AmazonCounterfactualClassification (ja)":58.23,"AmazonReviewsClassification (de)":29.7,"AmazonReviewsClassification (es)":35.97,"AmazonReviewsClassification (fr)":35.92,"AmazonReviewsClassification (ja)":27.64,"AmazonReviewsClassification (zh)":32.63,"MTOPDomainClassification (de)":82.05,"MTOPDomainClassification (es)":93.55,"MTOPDomainClassification (fr)":90.98,"MTOPDomainClassification (hi)":89.33,"MTOPDomainClassification (th)":60.49,"MTOPIntentClassification (de)":61.92,"MTOPIntentClassification (es)":74.49,"MTOPIntentClassification (fr)":69.12,"MTOPIntentClassification (hi)":64.85,"MTOPIntentClassification (th)":49.36,"MassiveIntentClassification (af)":47.85,"MassiveIntentClassification (am)":33.3,"MassiveIntentClassification (ar)":59.25,"MassiveIntentClassification (az)":45.24,"MassiveIntentClassification (bn)":61.59,"MassiveIntentClassification (cy)":44.92,"MassiveIntentClassification (de)":56.1,"MassiveIntentClassification (el)":46.13,"MassiveIntentClassification (es)":66.35,"MassiveIntentClassification (fa)":51.2,"MassiveIntentClassification (fi)":45.33,"MassiveIntentClassification (fr)":66.95,"MassiveIntentClassification (he)":43.18,"MassiveIntentClassification (hi)":63.54,"MassiveIntentClassification (hu)":44.73,"MassiveIntentClassification (hy)":38.13,"MassiveIntentClassification (id)":64.06,"MassiveIntentClassification (is)":44.35,"MassiveIntentClassification (it)":60.77,"MassiveIntentClassification (ja)":61.22,"MassiveIntentClassification (jv)":50.94,"MassiveIntentClassification (ka)":33.84,"MassiveIntentClassification (km)":37.34,"MassiveIntentClassification (kn)":53.54,"MassiveIntentClassification (ko)":53.36,"MassiveIntentClassification (lv)":46.5,"MassiveIntentClassification (ml)":58.27,"MassiveIntentClassification 
(mn)":40.28,"MassiveIntentClassification (ms)":59.65,"MassiveIntentClassification (my)":37.42,"MassiveIntentClassification (nl)":52.09,"MassiveIntentClassification (pt)":66.69,"MassiveIntentClassification (ro)":50.53,"MassiveIntentClassification (ru)":58.32,"MassiveIntentClassification (sl)":47.74,"MassiveIntentClassification (sq)":48.94,"MassiveIntentClassification (sw)":49.81,"MassiveIntentClassification (ta)":56.4,"MassiveIntentClassification (te)":54.71,"MassiveIntentClassification (th)":44.43,"MassiveIntentClassification (tl)":50.21,"MassiveIntentClassification (tr)":46.56,"MassiveIntentClassification (ur)":56.75,"MassiveIntentClassification (vi)":64.53,"MassiveIntentClassification (zh-TW)":62.89,"MassiveScenarioClassification (af)":51.47,"MassiveScenarioClassification (am)":34.87,"MassiveScenarioClassification (ar)":65.21,"MassiveScenarioClassification (az)":45.58,"MassiveScenarioClassification (bn)":67.3,"MassiveScenarioClassification (cy)":46.29,"MassiveScenarioClassification (de)":61.74,"MassiveScenarioClassification (el)":48.96,"MassiveScenarioClassification (es)":73.34,"MassiveScenarioClassification (fa)":53.17,"MassiveScenarioClassification (fi)":44.69,"MassiveScenarioClassification (fr)":72.91,"MassiveScenarioClassification (he)":43.1,"MassiveScenarioClassification (hi)":69.27,"MassiveScenarioClassification (hu)":45.16,"MassiveScenarioClassification (hy)":38.73,"MassiveScenarioClassification (id)":70.13,"MassiveScenarioClassification (is)":44.21,"MassiveScenarioClassification (it)":65.57,"MassiveScenarioClassification (ja)":65.76,"MassiveScenarioClassification (jv)":54.79,"MassiveScenarioClassification (ka)":32.99,"MassiveScenarioClassification (km)":39.34,"MassiveScenarioClassification (kn)":60.5,"MassiveScenarioClassification (ko)":55.69,"MassiveScenarioClassification (lv)":44.35,"MassiveScenarioClassification (ml)":65.53,"MassiveScenarioClassification (mn)":38.72,"MassiveScenarioClassification (ms)":64.99,"MassiveScenarioClassification 
(my)":36.84,"MassiveScenarioClassification (nl)":56.32,"MassiveScenarioClassification (pt)":71.46,"MassiveScenarioClassification (ro)":53.69,"MassiveScenarioClassification (ru)":61.6,"MassiveScenarioClassification (sl)":48.04,"MassiveScenarioClassification (sq)":50.06,"MassiveScenarioClassification (sw)":54.22,"MassiveScenarioClassification (ta)":62.77,"MassiveScenarioClassification (te)":62.59,"MassiveScenarioClassification (th)":45.18,"MassiveScenarioClassification (tl)":52.06,"MassiveScenarioClassification (tr)":47.21,"MassiveScenarioClassification (ur)":64.26,"MassiveScenarioClassification (vi)":70.61,"MassiveScenarioClassification (zh-TW)":70.3}
-{"index":124,"Rank":13,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":53.59,"AmazonCounterfactualClassification (de)":68.69,"AmazonCounterfactualClassification (ja)":61.61,"AmazonReviewsClassification (de)":33.39,"AmazonReviewsClassification (es)":34.82,"AmazonReviewsClassification (fr)":33.45,"AmazonReviewsClassification (ja)":30.05,"AmazonReviewsClassification (zh)":32.52,"MTOPDomainClassification (de)":78.59,"MTOPDomainClassification (es)":79.24,"MTOPDomainClassification (fr)":76.17,"MTOPDomainClassification (hi)":78.75,"MTOPDomainClassification (th)":77.67,"MTOPIntentClassification (de)":55.29,"MTOPIntentClassification (es)":58.68,"MTOPIntentClassification (fr)":53.26,"MTOPIntentClassification (hi)":59.62,"MTOPIntentClassification (th)":58.8,"MassiveIntentClassification (af)":45.42,"MassiveIntentClassification (am)":37.68,"MassiveIntentClassification (ar)":45.02,"MassiveIntentClassification (az)":48.71,"MassiveIntentClassification (bn)":43.79,"MassiveIntentClassification (cy)":28.76,"MassiveIntentClassification (de)":51.56,"MassiveIntentClassification (el)":56.47,"MassiveIntentClassification (es)":58.28,"MassiveIntentClassification (fa)":59.05,"MassiveIntentClassification (fi)":57.36,"MassiveIntentClassification (fr)":58.8,"MassiveIntentClassification (he)":51.18,"MassiveIntentClassification (hi)":57.06,"MassiveIntentClassification (hu)":58.36,"MassiveIntentClassification (hy)":52.11,"MassiveIntentClassification (id)":58.27,"MassiveIntentClassification (is)":35.81,"MassiveIntentClassification (it)":58.28,"MassiveIntentClassification (ja)":60.78,"MassiveIntentClassification (jv)":31.15,"MassiveIntentClassification (ka)":44.5,"MassiveIntentClassification (km)":40.99,"MassiveIntentClassification (kn)":46.96,"MassiveIntentClassification (ko)":54.73,"MassiveIntentClassification (lv)":54.87,"MassiveIntentClassification (ml)":47.89,"MassiveIntentClassification 
(mn)":52.23,"MassiveIntentClassification (ms)":54.28,"MassiveIntentClassification (my)":51.96,"MassiveIntentClassification (nl)":59.45,"MassiveIntentClassification (pt)":59.84,"MassiveIntentClassification (ro)":57.04,"MassiveIntentClassification (ru)":58.02,"MassiveIntentClassification (sl)":56.36,"MassiveIntentClassification (sq)":56.48,"MassiveIntentClassification (sw)":33.96,"MassiveIntentClassification (ta)":44.29,"MassiveIntentClassification (te)":47.14,"MassiveIntentClassification (th)":56.86,"MassiveIntentClassification (tl)":35.36,"MassiveIntentClassification (tr)":59.63,"MassiveIntentClassification (ur)":52.79,"MassiveIntentClassification (vi)":54.65,"MassiveIntentClassification (zh-TW)":57.47,"MassiveScenarioClassification (af)":50.86,"MassiveScenarioClassification (am)":41.18,"MassiveScenarioClassification (ar)":50.08,"MassiveScenarioClassification (az)":51.29,"MassiveScenarioClassification (bn)":46.53,"MassiveScenarioClassification (cy)":34.35,"MassiveScenarioClassification (de)":56.4,"MassiveScenarioClassification (el)":61.8,"MassiveScenarioClassification (es)":62.21,"MassiveScenarioClassification (fa)":62.44,"MassiveScenarioClassification (fi)":61.1,"MassiveScenarioClassification (fr)":63.39,"MassiveScenarioClassification (he)":56.29,"MassiveScenarioClassification (hi)":60.63,"MassiveScenarioClassification (hu)":63.29,"MassiveScenarioClassification (hy)":54.88,"MassiveScenarioClassification (id)":61.99,"MassiveScenarioClassification (is)":38.58,"MassiveScenarioClassification (it)":62.35,"MassiveScenarioClassification (ja)":65.17,"MassiveScenarioClassification (jv)":36.13,"MassiveScenarioClassification (ka)":50.27,"MassiveScenarioClassification (km)":44.24,"MassiveScenarioClassification (kn)":47.37,"MassiveScenarioClassification (ko)":58.89,"MassiveScenarioClassification (lv)":56.51,"MassiveScenarioClassification (ml)":50.06,"MassiveScenarioClassification (mn)":55.05,"MassiveScenarioClassification (ms)":59.77,"MassiveScenarioClassification 
(my)":55.72,"MassiveScenarioClassification (nl)":63.38,"MassiveScenarioClassification (pt)":62.41,"MassiveScenarioClassification (ro)":60.68,"MassiveScenarioClassification (ru)":62.31,"MassiveScenarioClassification (sl)":61.43,"MassiveScenarioClassification (sq)":62.23,"MassiveScenarioClassification (sw)":38.52,"MassiveScenarioClassification (ta)":47.0,"MassiveScenarioClassification (te)":51.02,"MassiveScenarioClassification (th)":63.23,"MassiveScenarioClassification (tl)":38.72,"MassiveScenarioClassification (tr)":64.49,"MassiveScenarioClassification (ur)":56.8,"MassiveScenarioClassification (vi)":57.06,"MassiveScenarioClassification (zh-TW)":63.37}
-{"index":84,"Rank":14,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.6,"AmazonCounterfactualClassification (de)":66.03,"AmazonCounterfactualClassification (ja)":58.77,"AmazonReviewsClassification (de)":30.45,"AmazonReviewsClassification (es)":40.8,"AmazonReviewsClassification (fr)":35.12,"AmazonReviewsClassification (ja)":32.07,"AmazonReviewsClassification (zh)":38.12,"MTOPDomainClassification (de)":74.64,"MTOPDomainClassification (es)":92.07,"MTOPDomainClassification (fr)":69.24,"MTOPDomainClassification (hi)":88.54,"MTOPDomainClassification (th)":55.63,"MTOPIntentClassification (de)":53.4,"MTOPIntentClassification (es)":71.33,"MTOPIntentClassification (fr)":51.25,"MTOPIntentClassification (hi)":66.73,"MTOPIntentClassification (th)":42.13,"MassiveIntentClassification (af)":44.98,"MassiveIntentClassification (am)":25.35,"MassiveIntentClassification (ar)":57.08,"MassiveIntentClassification (az)":39.11,"MassiveIntentClassification (bn)":61.37,"MassiveIntentClassification (cy)":42.97,"MassiveIntentClassification (de)":50.93,"MassiveIntentClassification (el)":40.09,"MassiveIntentClassification (es)":63.81,"MassiveIntentClassification (fa)":49.06,"MassiveIntentClassification (fi)":42.58,"MassiveIntentClassification (fr)":43.21,"MassiveIntentClassification (he)":37.15,"MassiveIntentClassification (hi)":62.89,"MassiveIntentClassification (hu)":41.62,"MassiveIntentClassification (hy)":32.98,"MassiveIntentClassification (id)":62.11,"MassiveIntentClassification (is)":41.04,"MassiveIntentClassification (it)":55.83,"MassiveIntentClassification (ja)":58.83,"MassiveIntentClassification (jv)":49.31,"MassiveIntentClassification (ka)":26.43,"MassiveIntentClassification (km)":28.77,"MassiveIntentClassification (kn)":52.33,"MassiveIntentClassification (ko)":46.93,"MassiveIntentClassification (lv)":44.26,"MassiveIntentClassification (ml)":57.75,"MassiveIntentClassification (mn)":33.31,"MassiveIntentClassification 
(ms)":55.7,"MassiveIntentClassification (my)":27.39,"MassiveIntentClassification (nl)":48.34,"MassiveIntentClassification (pt)":64.74,"MassiveIntentClassification (ro)":48.41,"MassiveIntentClassification (ru)":52.99,"MassiveIntentClassification (sl)":44.77,"MassiveIntentClassification (sq)":45.45,"MassiveIntentClassification (sw)":46.46,"MassiveIntentClassification (ta)":55.46,"MassiveIntentClassification (te)":51.41,"MassiveIntentClassification (th)":39.2,"MassiveIntentClassification (tl)":48.53,"MassiveIntentClassification (tr)":39.51,"MassiveIntentClassification (ur)":54.72,"MassiveIntentClassification (vi)":62.01,"MassiveIntentClassification (zh-TW)":62.56,"MassiveScenarioClassification (af)":50.47,"MassiveScenarioClassification (am)":27.22,"MassiveScenarioClassification (ar)":65.43,"MassiveScenarioClassification (az)":40.74,"MassiveScenarioClassification (bn)":67.65,"MassiveScenarioClassification (cy)":43.94,"MassiveScenarioClassification (de)":56.67,"MassiveScenarioClassification (el)":41.81,"MassiveScenarioClassification (es)":71.78,"MassiveScenarioClassification (fa)":49.96,"MassiveScenarioClassification (fi)":41.01,"MassiveScenarioClassification (fr)":49.78,"MassiveScenarioClassification (he)":36.69,"MassiveScenarioClassification (hi)":69.28,"MassiveScenarioClassification (hu)":44.31,"MassiveScenarioClassification (hy)":33.64,"MassiveScenarioClassification (id)":68.98,"MassiveScenarioClassification (is)":42.1,"MassiveScenarioClassification (it)":60.27,"MassiveScenarioClassification (ja)":62.48,"MassiveScenarioClassification (jv)":54.68,"MassiveScenarioClassification (ka)":27.22,"MassiveScenarioClassification (km)":32.14,"MassiveScenarioClassification (kn)":57.95,"MassiveScenarioClassification (ko)":47.95,"MassiveScenarioClassification (lv)":42.76,"MassiveScenarioClassification (ml)":62.84,"MassiveScenarioClassification (mn)":33.21,"MassiveScenarioClassification (ms)":62.57,"MassiveScenarioClassification (my)":28.84,"MassiveScenarioClassification 
(nl)":52.85,"MassiveScenarioClassification (pt)":70.24,"MassiveScenarioClassification (ro)":52.73,"MassiveScenarioClassification (ru)":54.26,"MassiveScenarioClassification (sl)":46.89,"MassiveScenarioClassification (sq)":47.16,"MassiveScenarioClassification (sw)":51.2,"MassiveScenarioClassification (ta)":61.84,"MassiveScenarioClassification (te)":59.79,"MassiveScenarioClassification (th)":41.62,"MassiveScenarioClassification (tl)":50.47,"MassiveScenarioClassification (tr)":43.41,"MassiveScenarioClassification (ur)":60.15,"MassiveScenarioClassification (vi)":68.99,"MassiveScenarioClassification (zh-TW)":71.7}
-{"index":85,"Rank":15,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.37,"AmazonCounterfactualClassification (de)":66.42,"AmazonCounterfactualClassification (ja)":56.86,"AmazonReviewsClassification (de)":26.85,"AmazonReviewsClassification (es)":38.97,"AmazonReviewsClassification (fr)":26.85,"AmazonReviewsClassification (ja)":28.31,"AmazonReviewsClassification (zh)":35.7,"MTOPDomainClassification (de)":68.42,"MTOPDomainClassification (es)":88.21,"MTOPDomainClassification (fr)":34.99,"MTOPDomainClassification (hi)":84.23,"MTOPDomainClassification (th)":53.17,"MTOPIntentClassification (de)":49.17,"MTOPIntentClassification (es)":65.72,"MTOPIntentClassification (fr)":15.76,"MTOPIntentClassification (hi)":61.88,"MTOPIntentClassification (th)":41.67,"MassiveIntentClassification (af)":43.29,"MassiveIntentClassification (am)":23.21,"MassiveIntentClassification (ar)":53.38,"MassiveIntentClassification (az)":39.56,"MassiveIntentClassification (bn)":56.74,"MassiveIntentClassification (cy)":40.0,"MassiveIntentClassification (de)":45.82,"MassiveIntentClassification (el)":37.87,"MassiveIntentClassification (es)":61.17,"MassiveIntentClassification (fa)":45.65,"MassiveIntentClassification (fi)":40.28,"MassiveIntentClassification (fr)":15.09,"MassiveIntentClassification (he)":32.23,"MassiveIntentClassification (hi)":59.46,"MassiveIntentClassification (hu)":40.91,"MassiveIntentClassification (hy)":29.94,"MassiveIntentClassification (id)":59.14,"MassiveIntentClassification (is)":39.62,"MassiveIntentClassification (it)":51.77,"MassiveIntentClassification (ja)":53.75,"MassiveIntentClassification (jv)":46.29,"MassiveIntentClassification (ka)":25.11,"MassiveIntentClassification (km)":27.22,"MassiveIntentClassification (kn)":47.97,"MassiveIntentClassification (ko)":40.54,"MassiveIntentClassification (lv)":43.14,"MassiveIntentClassification (ml)":53.69,"MassiveIntentClassification (mn)":33.37,"MassiveIntentClassification 
(ms)":51.94,"MassiveIntentClassification (my)":25.32,"MassiveIntentClassification (nl)":44.03,"MassiveIntentClassification (pt)":61.74,"MassiveIntentClassification (ro)":45.73,"MassiveIntentClassification (ru)":47.61,"MassiveIntentClassification (sl)":42.83,"MassiveIntentClassification (sq)":43.61,"MassiveIntentClassification (sw)":45.55,"MassiveIntentClassification (ta)":51.24,"MassiveIntentClassification (te)":47.43,"MassiveIntentClassification (th)":36.88,"MassiveIntentClassification (tl)":45.93,"MassiveIntentClassification (tr)":38.59,"MassiveIntentClassification (ur)":51.85,"MassiveIntentClassification (vi)":58.72,"MassiveIntentClassification (zh-TW)":59.95,"MassiveScenarioClassification (af)":47.42,"MassiveScenarioClassification (am)":24.71,"MassiveScenarioClassification (ar)":62.09,"MassiveScenarioClassification (az)":39.25,"MassiveScenarioClassification (bn)":63.37,"MassiveScenarioClassification (cy)":39.17,"MassiveScenarioClassification (de)":50.71,"MassiveScenarioClassification (el)":39.47,"MassiveScenarioClassification (es)":68.31,"MassiveScenarioClassification (fa)":45.65,"MassiveScenarioClassification (fi)":38.95,"MassiveScenarioClassification (fr)":21.67,"MassiveScenarioClassification (he)":32.13,"MassiveScenarioClassification (hi)":65.57,"MassiveScenarioClassification (hu)":42.97,"MassiveScenarioClassification (hy)":32.13,"MassiveScenarioClassification (id)":65.11,"MassiveScenarioClassification (is)":40.84,"MassiveScenarioClassification (it)":54.55,"MassiveScenarioClassification (ja)":57.15,"MassiveScenarioClassification (jv)":49.3,"MassiveScenarioClassification (ka)":25.86,"MassiveScenarioClassification (km)":31.18,"MassiveScenarioClassification (kn)":53.01,"MassiveScenarioClassification (ko)":40.25,"MassiveScenarioClassification (lv)":41.88,"MassiveScenarioClassification (ml)":59.08,"MassiveScenarioClassification (mn)":33.34,"MassiveScenarioClassification (ms)":57.45,"MassiveScenarioClassification (my)":27.2,"MassiveScenarioClassification 
(nl)":48.42,"MassiveScenarioClassification (pt)":66.41,"MassiveScenarioClassification (ro)":50.08,"MassiveScenarioClassification (ru)":49.94,"MassiveScenarioClassification (sl)":43.43,"MassiveScenarioClassification (sq)":44.08,"MassiveScenarioClassification (sw)":49.53,"MassiveScenarioClassification (ta)":56.79,"MassiveScenarioClassification (te)":54.01,"MassiveScenarioClassification (th)":38.58,"MassiveScenarioClassification (tl)":48.07,"MassiveScenarioClassification (tr)":40.65,"MassiveScenarioClassification (ur)":57.75,"MassiveScenarioClassification (vi)":65.83,"MassiveScenarioClassification (zh-TW)":69.64}
-{"index":110,"Rank":16,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":44.78,"AmazonCounterfactualClassification (de)":68.14,"AmazonCounterfactualClassification (ja)":65.39,"AmazonReviewsClassification (de)":35.03,"AmazonReviewsClassification (es)":36.24,"AmazonReviewsClassification (fr)":35.7,"AmazonReviewsClassification (ja)":31.08,"AmazonReviewsClassification (zh)":33.89,"MTOPDomainClassification (de)":86.19,"MTOPDomainClassification (es)":87.75,"MTOPDomainClassification (fr)":84.61,"MTOPDomainClassification (hi)":76.41,"MTOPDomainClassification (th)":73.62,"MTOPIntentClassification (de)":59.21,"MTOPIntentClassification (es)":57.21,"MTOPIntentClassification (fr)":53.41,"MTOPIntentClassification (hi)":45.54,"MTOPIntentClassification (th)":47.73,"MassiveIntentClassification (af)":40.02,"MassiveIntentClassification (am)":2.35,"MassiveIntentClassification (ar)":43.14,"MassiveIntentClassification (az)":25.6,"MassiveIntentClassification (bn)":4.84,"MassiveIntentClassification (cy)":15.43,"MassiveIntentClassification (de)":51.57,"MassiveIntentClassification (el)":49.65,"MassiveIntentClassification (es)":56.57,"MassiveIntentClassification (fa)":55.36,"MassiveIntentClassification (fi)":45.72,"MassiveIntentClassification (fr)":57.02,"MassiveIntentClassification (he)":46.74,"MassiveIntentClassification (hi)":48.55,"MassiveIntentClassification (hu)":50.65,"MassiveIntentClassification (hy)":40.79,"MassiveIntentClassification (id)":56.0,"MassiveIntentClassification (is)":16.08,"MassiveIntentClassification (it)":57.65,"MassiveIntentClassification (ja)":55.33,"MassiveIntentClassification (jv)":28.16,"MassiveIntentClassification (ka)":29.41,"MassiveIntentClassification (km)":4.79,"MassiveIntentClassification (kn)":3.37,"MassiveIntentClassification (ko)":49.97,"MassiveIntentClassification (lv)":44.31,"MassiveIntentClassification (ml)":3.24,"MassiveIntentClassification 
(mn)":40.37,"MassiveIntentClassification (ms)":47.97,"MassiveIntentClassification (my)":38.48,"MassiveIntentClassification (nl)":58.29,"MassiveIntentClassification (pt)":58.63,"MassiveIntentClassification (ro)":50.63,"MassiveIntentClassification (ru)":57.96,"MassiveIntentClassification (sl)":50.66,"MassiveIntentClassification (sq)":50.25,"MassiveIntentClassification (sw)":19.29,"MassiveIntentClassification (ta)":3.79,"MassiveIntentClassification (te)":3.36,"MassiveIntentClassification (th)":45.28,"MassiveIntentClassification (tl)":28.44,"MassiveIntentClassification (tr)":50.47,"MassiveIntentClassification (ur)":46.03,"MassiveIntentClassification (vi)":45.25,"MassiveIntentClassification (zh-TW)":54.96,"MassiveScenarioClassification (af)":53.67,"MassiveScenarioClassification (am)":7.72,"MassiveScenarioClassification (ar)":52.19,"MassiveScenarioClassification (az)":34.75,"MassiveScenarioClassification (bn)":10.65,"MassiveScenarioClassification (cy)":21.24,"MassiveScenarioClassification (de)":61.4,"MassiveScenarioClassification (el)":60.68,"MassiveScenarioClassification (es)":64.61,"MassiveScenarioClassification (fa)":59.24,"MassiveScenarioClassification (fi)":54.66,"MassiveScenarioClassification (fr)":65.2,"MassiveScenarioClassification (he)":54.74,"MassiveScenarioClassification (hi)":55.99,"MassiveScenarioClassification (hu)":61.2,"MassiveScenarioClassification (hy)":49.63,"MassiveScenarioClassification (id)":65.25,"MassiveScenarioClassification (is)":22.6,"MassiveScenarioClassification (it)":64.63,"MassiveScenarioClassification (ja)":62.32,"MassiveScenarioClassification (jv)":35.77,"MassiveScenarioClassification (ka)":39.08,"MassiveScenarioClassification (km)":9.24,"MassiveScenarioClassification (kn)":8.28,"MassiveScenarioClassification (ko)":57.6,"MassiveScenarioClassification (lv)":51.72,"MassiveScenarioClassification (ml)":8.25,"MassiveScenarioClassification (mn)":47.21,"MassiveScenarioClassification (ms)":55.65,"MassiveScenarioClassification 
(my)":43.31,"MassiveScenarioClassification (nl)":67.49,"MassiveScenarioClassification (pt)":64.26,"MassiveScenarioClassification (ro)":58.03,"MassiveScenarioClassification (ru)":65.41,"MassiveScenarioClassification (sl)":59.36,"MassiveScenarioClassification (sq)":62.69,"MassiveScenarioClassification (sw)":25.12,"MassiveScenarioClassification (ta)":8.67,"MassiveScenarioClassification (te)":7.82,"MassiveScenarioClassification (th)":54.65,"MassiveScenarioClassification (tl)":36.09,"MassiveScenarioClassification (tr)":60.89,"MassiveScenarioClassification (ur)":54.71,"MassiveScenarioClassification (vi)":55.15,"MassiveScenarioClassification (zh-TW)":62.89}
-{"index":5,"Rank":17,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":42.45,"AmazonCounterfactualClassification (de)":67.82,"AmazonCounterfactualClassification (ja)":68.76,"AmazonReviewsClassification (de)":31.07,"AmazonReviewsClassification (es)":32.72,"AmazonReviewsClassification (fr)":31.12,"AmazonReviewsClassification (ja)":28.94,"AmazonReviewsClassification (zh)":30.89,"MTOPDomainClassification (de)":74.08,"MTOPDomainClassification (es)":73.47,"MTOPDomainClassification (fr)":72.26,"MTOPDomainClassification (hi)":72.95,"MTOPDomainClassification (th)":72.68,"MTOPIntentClassification (de)":51.62,"MTOPIntentClassification (es)":52.75,"MTOPIntentClassification (fr)":50.12,"MTOPIntentClassification (hi)":45.55,"MTOPIntentClassification (th)":50.07,"MassiveIntentClassification (af)":38.01,"MassiveIntentClassification (am)":12.7,"MassiveIntentClassification (ar)":37.16,"MassiveIntentClassification (az)":19.98,"MassiveIntentClassification (bn)":42.51,"MassiveIntentClassification (cy)":17.33,"MassiveIntentClassification (de)":44.79,"MassiveIntentClassification (el)":46.71,"MassiveIntentClassification (es)":45.44,"MassiveIntentClassification (fa)":45.01,"MassiveIntentClassification (fi)":45.94,"MassiveIntentClassification (fr)":46.13,"MassiveIntentClassification (he)":42.55,"MassiveIntentClassification (hi)":40.2,"MassiveIntentClassification (hu)":42.77,"MassiveIntentClassification (hy)":28.07,"MassiveIntentClassification (id)":45.81,"MassiveIntentClassification (is)":39.86,"MassiveIntentClassification (it)":48.25,"MassiveIntentClassification (ja)":45.3,"MassiveIntentClassification (jv)":24.3,"MassiveIntentClassification (ka)":22.7,"MassiveIntentClassification (km)":22.48,"MassiveIntentClassification (kn)":4.32,"MassiveIntentClassification (ko)":44.26,"MassiveIntentClassification (lv)":39.75,"MassiveIntentClassification (ml)":41.33,"MassiveIntentClassification (mn)":16.2,"MassiveIntentClassification 
(ms)":43.23,"MassiveIntentClassification (my)":25.37,"MassiveIntentClassification (nl)":45.0,"MassiveIntentClassification (pt)":48.55,"MassiveIntentClassification (ro)":44.3,"MassiveIntentClassification (ru)":44.29,"MassiveIntentClassification (sl)":44.72,"MassiveIntentClassification (sq)":46.12,"MassiveIntentClassification (sw)":31.89,"MassiveIntentClassification (ta)":29.63,"MassiveIntentClassification (te)":36.03,"MassiveIntentClassification (th)":43.39,"MassiveIntentClassification (tl)":29.73,"MassiveIntentClassification (tr)":43.93,"MassiveIntentClassification (ur)":26.11,"MassiveIntentClassification (vi)":44.33,"MassiveIntentClassification (zh-TW)":32.93,"MassiveScenarioClassification (af)":47.1,"MassiveScenarioClassification (am)":17.7,"MassiveScenarioClassification (ar)":45.21,"MassiveScenarioClassification (az)":28.21,"MassiveScenarioClassification (bn)":50.52,"MassiveScenarioClassification (cy)":22.58,"MassiveScenarioClassification (de)":54.34,"MassiveScenarioClassification (el)":55.47,"MassiveScenarioClassification (es)":52.77,"MassiveScenarioClassification (fa)":52.5,"MassiveScenarioClassification (fi)":52.63,"MassiveScenarioClassification (fr)":54.32,"MassiveScenarioClassification (he)":52.41,"MassiveScenarioClassification (hi)":47.37,"MassiveScenarioClassification (hu)":53.43,"MassiveScenarioClassification (hy)":33.57,"MassiveScenarioClassification (id)":54.38,"MassiveScenarioClassification (is)":49.78,"MassiveScenarioClassification (it)":54.84,"MassiveScenarioClassification (ja)":54.12,"MassiveScenarioClassification (jv)":32.71,"MassiveScenarioClassification (ka)":26.92,"MassiveScenarioClassification (km)":27.23,"MassiveScenarioClassification (kn)":10.06,"MassiveScenarioClassification (ko)":52.01,"MassiveScenarioClassification (lv)":44.82,"MassiveScenarioClassification (ml)":49.1,"MassiveScenarioClassification (mn)":21.51,"MassiveScenarioClassification (ms)":53.6,"MassiveScenarioClassification (my)":29.72,"MassiveScenarioClassification 
(nl)":53.33,"MassiveScenarioClassification (pt)":53.41,"MassiveScenarioClassification (ro)":50.48,"MassiveScenarioClassification (ru)":51.84,"MassiveScenarioClassification (sl)":51.29,"MassiveScenarioClassification (sq)":55.65,"MassiveScenarioClassification (sw)":42.04,"MassiveScenarioClassification (ta)":36.72,"MassiveScenarioClassification (te)":42.08,"MassiveScenarioClassification (th)":52.15,"MassiveScenarioClassification (tl)":37.34,"MassiveScenarioClassification (tr)":52.56,"MassiveScenarioClassification (ur)":32.6,"MassiveScenarioClassification (vi)":50.97,"MassiveScenarioClassification (zh-TW)":42.32}
-{"index":45,"Rank":18,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":37.64,"AmazonCounterfactualClassification (de)":56.88,"AmazonCounterfactualClassification (ja)":54.65,"AmazonReviewsClassification (de)":24.79,"AmazonReviewsClassification (es)":26.64,"AmazonReviewsClassification (fr)":26.39,"AmazonReviewsClassification (ja)":22.08,"AmazonReviewsClassification (zh)":24.27,"MTOPDomainClassification (de)":62.73,"MTOPDomainClassification (es)":67.55,"MTOPDomainClassification (fr)":65.35,"MTOPDomainClassification (hi)":45.37,"MTOPDomainClassification (th)":55.28,"MTOPIntentClassification (de)":49.56,"MTOPIntentClassification (es)":49.94,"MTOPIntentClassification (fr)":46.33,"MTOPIntentClassification (hi)":32.21,"MTOPIntentClassification (th)":43.63,"MassiveIntentClassification (af)":40.55,"MassiveIntentClassification (am)":24.18,"MassiveIntentClassification (ar)":30.13,"MassiveIntentClassification (az)":35.88,"MassiveIntentClassification (bn)":29.17,"MassiveIntentClassification (cy)":41.79,"MassiveIntentClassification (de)":42.07,"MassiveIntentClassification (el)":36.25,"MassiveIntentClassification (es)":42.68,"MassiveIntentClassification (fa)":35.59,"MassiveIntentClassification (fi)":40.04,"MassiveIntentClassification (fr)":43.44,"MassiveIntentClassification (he)":31.59,"MassiveIntentClassification (hi)":27.04,"MassiveIntentClassification (hu)":38.45,"MassiveIntentClassification (hy)":27.98,"MassiveIntentClassification (id)":43.97,"MassiveIntentClassification (is)":40.3,"MassiveIntentClassification (it)":45.47,"MassiveIntentClassification (ja)":45.61,"MassiveIntentClassification (jv)":38.67,"MassiveIntentClassification (ka)":25.65,"MassiveIntentClassification (km)":28.3,"MassiveIntentClassification (kn)":23.48,"MassiveIntentClassification (ko)":36.56,"MassiveIntentClassification (lv)":41.85,"MassiveIntentClassification (ml)":24.91,"MassiveIntentClassification 
(mn)":29.86,"MassiveIntentClassification (ms)":42.42,"MassiveIntentClassification (my)":25.13,"MassiveIntentClassification (nl)":43.62,"MassiveIntentClassification (pt)":45.21,"MassiveIntentClassification (ro)":41.81,"MassiveIntentClassification (ru)":35.97,"MassiveIntentClassification (sl)":40.61,"MassiveIntentClassification (sq)":42.76,"MassiveIntentClassification (sw)":41.12,"MassiveIntentClassification (ta)":24.6,"MassiveIntentClassification (te)":25.04,"MassiveIntentClassification (th)":35.4,"MassiveIntentClassification (tl)":41.19,"MassiveIntentClassification (tr)":36.41,"MassiveIntentClassification (ur)":25.93,"MassiveIntentClassification (vi)":38.8,"MassiveIntentClassification (zh-TW)":42.31,"MassiveScenarioClassification (af)":43.25,"MassiveScenarioClassification (am)":25.3,"MassiveScenarioClassification (ar)":32.07,"MassiveScenarioClassification (az)":36.68,"MassiveScenarioClassification (bn)":29.57,"MassiveScenarioClassification (cy)":42.1,"MassiveScenarioClassification (de)":43.21,"MassiveScenarioClassification (el)":36.5,"MassiveScenarioClassification (es)":44.08,"MassiveScenarioClassification (fa)":32.61,"MassiveScenarioClassification (fi)":40.36,"MassiveScenarioClassification (fr)":45.07,"MassiveScenarioClassification (he)":32.18,"MassiveScenarioClassification (hi)":26.9,"MassiveScenarioClassification (hu)":40.38,"MassiveScenarioClassification (hy)":28.38,"MassiveScenarioClassification (id)":44.36,"MassiveScenarioClassification (is)":39.29,"MassiveScenarioClassification (it)":46.47,"MassiveScenarioClassification (ja)":46.26,"MassiveScenarioClassification (jv)":41.13,"MassiveScenarioClassification (ka)":24.73,"MassiveScenarioClassification (km)":29.74,"MassiveScenarioClassification (kn)":23.85,"MassiveScenarioClassification (ko)":36.57,"MassiveScenarioClassification (lv)":40.93,"MassiveScenarioClassification (ml)":25.53,"MassiveScenarioClassification (mn)":29.11,"MassiveScenarioClassification (ms)":43.79,"MassiveScenarioClassification 
(my)":27.27,"MassiveScenarioClassification (nl)":45.36,"MassiveScenarioClassification (pt)":45.9,"MassiveScenarioClassification (ro)":44.12,"MassiveScenarioClassification (ru)":32.76,"MassiveScenarioClassification (sl)":40.5,"MassiveScenarioClassification (sq)":42.52,"MassiveScenarioClassification (sw)":43.0,"MassiveScenarioClassification (ta)":28.33,"MassiveScenarioClassification (te)":26.59,"MassiveScenarioClassification (th)":36.79,"MassiveScenarioClassification (tl)":42.57,"MassiveScenarioClassification (tr)":37.09,"MassiveScenarioClassification (ur)":28.84,"MassiveScenarioClassification (vi)":37.36,"MassiveScenarioClassification (zh-TW)":44.42}
-{"index":46,"Rank":19,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":36.66,"AmazonCounterfactualClassification (de)":59.08,"AmazonCounterfactualClassification (ja)":56.42,"AmazonReviewsClassification (de)":24.52,"AmazonReviewsClassification (es)":29.1,"AmazonReviewsClassification (fr)":27.4,"AmazonReviewsClassification (ja)":21.72,"AmazonReviewsClassification (zh)":23.98,"MTOPDomainClassification (de)":60.37,"MTOPDomainClassification (es)":67.37,"MTOPDomainClassification (fr)":63.13,"MTOPDomainClassification (hi)":47.05,"MTOPDomainClassification (th)":52.28,"MTOPIntentClassification (de)":45.07,"MTOPIntentClassification (es)":48.81,"MTOPIntentClassification (fr)":44.34,"MTOPIntentClassification (hi)":34.2,"MTOPIntentClassification (th)":43.11,"MassiveIntentClassification (af)":37.79,"MassiveIntentClassification (am)":23.72,"MassiveIntentClassification (ar)":29.64,"MassiveIntentClassification (az)":39.48,"MassiveIntentClassification (bn)":26.55,"MassiveIntentClassification (cy)":38.78,"MassiveIntentClassification (de)":40.39,"MassiveIntentClassification (el)":37.29,"MassiveIntentClassification (es)":41.18,"MassiveIntentClassification (fa)":36.42,"MassiveIntentClassification (fi)":38.76,"MassiveIntentClassification (fr)":43.67,"MassiveIntentClassification (he)":31.98,"MassiveIntentClassification (hi)":28.04,"MassiveIntentClassification (hu)":38.14,"MassiveIntentClassification (hy)":26.05,"MassiveIntentClassification (id)":41.16,"MassiveIntentClassification (is)":38.63,"MassiveIntentClassification (it)":44.04,"MassiveIntentClassification (ja)":46.21,"MassiveIntentClassification (jv)":37.61,"MassiveIntentClassification (ka)":24.47,"MassiveIntentClassification (km)":26.24,"MassiveIntentClassification (kn)":17.83,"MassiveIntentClassification (ko)":37.27,"MassiveIntentClassification (lv)":40.93,"MassiveIntentClassification (ml)":17.89,"MassiveIntentClassification 
(mn)":32.98,"MassiveIntentClassification (ms)":40.91,"MassiveIntentClassification (my)":17.83,"MassiveIntentClassification (nl)":41.76,"MassiveIntentClassification (pt)":44.54,"MassiveIntentClassification (ro)":39.97,"MassiveIntentClassification (ru)":37.46,"MassiveIntentClassification (sl)":38.29,"MassiveIntentClassification (sq)":40.95,"MassiveIntentClassification (sw)":38.33,"MassiveIntentClassification (ta)":19.03,"MassiveIntentClassification (te)":19.38,"MassiveIntentClassification (th)":34.09,"MassiveIntentClassification (tl)":40.29,"MassiveIntentClassification (tr)":38.86,"MassiveIntentClassification (ur)":27.83,"MassiveIntentClassification (vi)":38.71,"MassiveIntentClassification (zh-TW)":42.32,"MassiveScenarioClassification (af)":40.25,"MassiveScenarioClassification (am)":25.69,"MassiveScenarioClassification (ar)":32.4,"MassiveScenarioClassification (az)":40.53,"MassiveScenarioClassification (bn)":27.23,"MassiveScenarioClassification (cy)":38.7,"MassiveScenarioClassification (de)":41.36,"MassiveScenarioClassification (el)":38.44,"MassiveScenarioClassification (es)":44.18,"MassiveScenarioClassification (fa)":34.83,"MassiveScenarioClassification (fi)":40.56,"MassiveScenarioClassification (fr)":45.92,"MassiveScenarioClassification (he)":32.08,"MassiveScenarioClassification (hi)":28.37,"MassiveScenarioClassification (hu)":39.49,"MassiveScenarioClassification (hy)":25.9,"MassiveScenarioClassification (id)":40.96,"MassiveScenarioClassification (is)":38.56,"MassiveScenarioClassification (it)":46.59,"MassiveScenarioClassification (ja)":46.25,"MassiveScenarioClassification (jv)":39.66,"MassiveScenarioClassification (ka)":25.28,"MassiveScenarioClassification (km)":28.97,"MassiveScenarioClassification (kn)":19.27,"MassiveScenarioClassification (ko)":35.73,"MassiveScenarioClassification (lv)":39.57,"MassiveScenarioClassification (ml)":19.9,"MassiveScenarioClassification (mn)":32.43,"MassiveScenarioClassification (ms)":42.32,"MassiveScenarioClassification 
(my)":20.86,"MassiveScenarioClassification (nl)":43.59,"MassiveScenarioClassification (pt)":46.31,"MassiveScenarioClassification (ro)":42.53,"MassiveScenarioClassification (ru)":35.95,"MassiveScenarioClassification (sl)":38.69,"MassiveScenarioClassification (sq)":40.47,"MassiveScenarioClassification (sw)":39.55,"MassiveScenarioClassification (ta)":22.88,"MassiveScenarioClassification (te)":20.51,"MassiveScenarioClassification (th)":34.93,"MassiveScenarioClassification (tl)":40.75,"MassiveScenarioClassification (tr)":39.07,"MassiveScenarioClassification (ur)":29.75,"MassiveScenarioClassification (vi)":38.02,"MassiveScenarioClassification (zh-TW)":45.18}
-{"index":108,"Rank":20,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":31.93,"AmazonCounterfactualClassification (de)":57.1,"AmazonCounterfactualClassification (ja)":59.91,"AmazonReviewsClassification (de)":25.91,"AmazonReviewsClassification (es)":27.63,"AmazonReviewsClassification (fr)":27.54,"AmazonReviewsClassification (ja)":23.57,"AmazonReviewsClassification (zh)":22.99,"MTOPDomainClassification (de)":72.04,"MTOPDomainClassification (es)":72.99,"MTOPDomainClassification (fr)":75.59,"MTOPDomainClassification (hi)":40.36,"MTOPDomainClassification (th)":17.1,"MTOPIntentClassification (de)":43.41,"MTOPIntentClassification (es)":41.88,"MTOPIntentClassification (fr)":38.94,"MTOPIntentClassification (hi)":17.75,"MTOPIntentClassification (th)":5.63,"MassiveIntentClassification (af)":38.94,"MassiveIntentClassification (am)":2.45,"MassiveIntentClassification (ar)":20.94,"MassiveIntentClassification (az)":34.25,"MassiveIntentClassification (bn)":13.67,"MassiveIntentClassification (cy)":35.71,"MassiveIntentClassification (de)":44.17,"MassiveIntentClassification (el)":28.7,"MassiveIntentClassification (es)":40.91,"MassiveIntentClassification (fa)":23.52,"MassiveIntentClassification (fi)":39.27,"MassiveIntentClassification (fr)":44.82,"MassiveIntentClassification (he)":23.65,"MassiveIntentClassification (hi)":17.98,"MassiveIntentClassification (hu)":38.0,"MassiveIntentClassification (hy)":8.69,"MassiveIntentClassification (id)":39.66,"MassiveIntentClassification (is)":35.14,"MassiveIntentClassification (it)":43.17,"MassiveIntentClassification (ja)":30.94,"MassiveIntentClassification (jv)":36.69,"MassiveIntentClassification (ka)":9.17,"MassiveIntentClassification (km)":4.99,"MassiveIntentClassification (kn)":3.08,"MassiveIntentClassification (ko)":19.97,"MassiveIntentClassification (lv)":38.61,"MassiveIntentClassification (ml)":2.85,"MassiveIntentClassification (mn)":23.25,"MassiveIntentClassification 
(ms)":36.21,"MassiveIntentClassification (my)":4.38,"MassiveIntentClassification (nl)":41.85,"MassiveIntentClassification (pt)":45.12,"MassiveIntentClassification (ro)":41.71,"MassiveIntentClassification (ru)":26.33,"MassiveIntentClassification (sl)":38.52,"MassiveIntentClassification (sq)":41.62,"MassiveIntentClassification (sw)":35.28,"MassiveIntentClassification (ta)":13.1,"MassiveIntentClassification (te)":2.56,"MassiveIntentClassification (th)":10.54,"MassiveIntentClassification (tl)":38.56,"MassiveIntentClassification (tr)":35.9,"MassiveIntentClassification (ur)":16.18,"MassiveIntentClassification (vi)":37.38,"MassiveIntentClassification (zh-TW)":22.39,"MassiveScenarioClassification (af)":45.71,"MassiveScenarioClassification (am)":7.41,"MassiveScenarioClassification (ar)":27.62,"MassiveScenarioClassification (az)":39.58,"MassiveScenarioClassification (bn)":18.98,"MassiveScenarioClassification (cy)":41.4,"MassiveScenarioClassification (de)":52.07,"MassiveScenarioClassification (el)":35.51,"MassiveScenarioClassification (es)":50.74,"MassiveScenarioClassification (fa)":29.0,"MassiveScenarioClassification (fi)":45.8,"MassiveScenarioClassification (fr)":53.76,"MassiveScenarioClassification (he)":25.68,"MassiveScenarioClassification (hi)":23.02,"MassiveScenarioClassification (hu)":44.09,"MassiveScenarioClassification (hy)":14.83,"MassiveScenarioClassification (id)":44.35,"MassiveScenarioClassification (is)":43.08,"MassiveScenarioClassification (it)":51.71,"MassiveScenarioClassification (ja)":36.75,"MassiveScenarioClassification (jv)":44.57,"MassiveScenarioClassification (ka)":14.84,"MassiveScenarioClassification (km)":9.75,"MassiveScenarioClassification (kn)":8.32,"MassiveScenarioClassification (ko)":25.72,"MassiveScenarioClassification (lv)":42.75,"MassiveScenarioClassification (ml)":7.25,"MassiveScenarioClassification (mn)":29.03,"MassiveScenarioClassification (ms)":44.65,"MassiveScenarioClassification (my)":10.07,"MassiveScenarioClassification 
(nl)":49.15,"MassiveScenarioClassification (pt)":53.0,"MassiveScenarioClassification (ro)":49.97,"MassiveScenarioClassification (ru)":28.75,"MassiveScenarioClassification (sl)":42.26,"MassiveScenarioClassification (sq)":49.14,"MassiveScenarioClassification (sw)":43.18,"MassiveScenarioClassification (ta)":19.38,"MassiveScenarioClassification (te)":7.74,"MassiveScenarioClassification (th)":18.32,"MassiveScenarioClassification (tl)":48.31,"MassiveScenarioClassification (tr)":41.79,"MassiveScenarioClassification (ur)":24.46,"MassiveScenarioClassification (vi)":40.94,"MassiveScenarioClassification (zh-TW)":31.16}
-{"index":39,"Rank":21,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":31.78,"AmazonCounterfactualClassification (de)":54.33,"AmazonCounterfactualClassification (ja)":56.34,"AmazonReviewsClassification (de)":27.2,"AmazonReviewsClassification (es)":34.88,"AmazonReviewsClassification (fr)":31.56,"AmazonReviewsClassification (ja)":22.71,"AmazonReviewsClassification (zh)":22.35,"MTOPDomainClassification (de)":74.86,"MTOPDomainClassification (es)":77.09,"MTOPDomainClassification (fr)":79.8,"MTOPDomainClassification (hi)":32.79,"MTOPDomainClassification (th)":16.65,"MTOPIntentClassification (de)":42.36,"MTOPIntentClassification (es)":44.73,"MTOPIntentClassification (fr)":38.96,"MTOPIntentClassification (hi)":13.58,"MTOPIntentClassification (th)":5.4,"MassiveIntentClassification (af)":37.22,"MassiveIntentClassification (am)":3.19,"MassiveIntentClassification (ar)":14.26,"MassiveIntentClassification (az)":37.22,"MassiveIntentClassification (bn)":10.76,"MassiveIntentClassification (cy)":32.5,"MassiveIntentClassification (de)":42.78,"MassiveIntentClassification (el)":33.49,"MassiveIntentClassification (es)":44.45,"MassiveIntentClassification (fa)":26.74,"MassiveIntentClassification (fi)":38.1,"MassiveIntentClassification (fr)":46.89,"MassiveIntentClassification (he)":25.2,"MassiveIntentClassification (hi)":13.94,"MassiveIntentClassification (hu)":34.71,"MassiveIntentClassification (hy)":6.71,"MassiveIntentClassification (id)":38.57,"MassiveIntentClassification (is)":32.23,"MassiveIntentClassification (it)":45.8,"MassiveIntentClassification (ja)":29.19,"MassiveIntentClassification (jv)":34.22,"MassiveIntentClassification (ka)":8.89,"MassiveIntentClassification (km)":4.62,"MassiveIntentClassification (kn)":3.17,"MassiveIntentClassification (ko)":15.03,"MassiveIntentClassification (lv)":36.1,"MassiveIntentClassification (ml)":3.0,"MassiveIntentClassification (mn)":23.3,"MassiveIntentClassification 
(ms)":36.13,"MassiveIntentClassification (my)":3.81,"MassiveIntentClassification (nl)":41.08,"MassiveIntentClassification (pt)":45.2,"MassiveIntentClassification (ro)":39.49,"MassiveIntentClassification (ru)":31.82,"MassiveIntentClassification (sl)":35.45,"MassiveIntentClassification (sq)":36.89,"MassiveIntentClassification (sw)":37.54,"MassiveIntentClassification (ta)":7.91,"MassiveIntentClassification (te)":2.85,"MassiveIntentClassification (th)":10.5,"MassiveIntentClassification (tl)":39.47,"MassiveIntentClassification (tr)":37.5,"MassiveIntentClassification (ur)":16.11,"MassiveIntentClassification (vi)":36.11,"MassiveIntentClassification (zh-TW)":17.22,"MassiveScenarioClassification (af)":47.8,"MassiveScenarioClassification (am)":7.08,"MassiveScenarioClassification (ar)":22.83,"MassiveScenarioClassification (az)":44.95,"MassiveScenarioClassification (bn)":16.59,"MassiveScenarioClassification (cy)":37.92,"MassiveScenarioClassification (de)":58.74,"MassiveScenarioClassification (el)":43.0,"MassiveScenarioClassification (es)":54.47,"MassiveScenarioClassification (fa)":30.58,"MassiveScenarioClassification (fi)":43.57,"MassiveScenarioClassification (fr)":56.99,"MassiveScenarioClassification (he)":28.08,"MassiveScenarioClassification (hi)":18.1,"MassiveScenarioClassification (hu)":41.74,"MassiveScenarioClassification (hy)":11.54,"MassiveScenarioClassification (id)":46.95,"MassiveScenarioClassification (is)":42.78,"MassiveScenarioClassification (it)":54.65,"MassiveScenarioClassification (ja)":35.9,"MassiveScenarioClassification (jv)":42.51,"MassiveScenarioClassification (ka)":13.8,"MassiveScenarioClassification (km)":9.45,"MassiveScenarioClassification (kn)":8.16,"MassiveScenarioClassification (ko)":19.91,"MassiveScenarioClassification (lv)":40.48,"MassiveScenarioClassification (ml)":6.7,"MassiveScenarioClassification (mn)":28.55,"MassiveScenarioClassification (ms)":46.62,"MassiveScenarioClassification (my)":9.98,"MassiveScenarioClassification 
(nl)":51.76,"MassiveScenarioClassification (pt)":55.6,"MassiveScenarioClassification (ro)":50.54,"MassiveScenarioClassification (ru)":37.73,"MassiveScenarioClassification (sl)":41.67,"MassiveScenarioClassification (sq)":47.38,"MassiveScenarioClassification (sw)":44.18,"MassiveScenarioClassification (ta)":12.6,"MassiveScenarioClassification (te)":7.02,"MassiveScenarioClassification (th)":19.79,"MassiveScenarioClassification (tl)":50.36,"MassiveScenarioClassification (tr)":45.48,"MassiveScenarioClassification (ur)":23.68,"MassiveScenarioClassification (vi)":41.63,"MassiveScenarioClassification (zh-TW)":27.52}
-{"index":112,"Rank":22,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":30.51,"AmazonCounterfactualClassification (de)":59.79,"AmazonCounterfactualClassification (ja)":50.59,"AmazonReviewsClassification (de)":35.06,"AmazonReviewsClassification (es)":37.18,"AmazonReviewsClassification (fr)":35.48,"AmazonReviewsClassification (ja)":22.24,"AmazonReviewsClassification (zh)":21.89,"MTOPDomainClassification (de)":85.42,"MTOPDomainClassification (es)":88.2,"MTOPDomainClassification (fr)":85.05,"MTOPDomainClassification (hi)":21.74,"MTOPDomainClassification (th)":15.87,"MTOPIntentClassification (de)":55.75,"MTOPIntentClassification (es)":57.73,"MTOPIntentClassification (fr)":51.07,"MTOPIntentClassification (hi)":3.19,"MTOPIntentClassification (th)":5.55,"MassiveIntentClassification (af)":42.6,"MassiveIntentClassification (am)":2.12,"MassiveIntentClassification (ar)":4.64,"MassiveIntentClassification (az)":35.05,"MassiveIntentClassification (bn)":2.84,"MassiveIntentClassification (cy)":36.19,"MassiveIntentClassification (de)":55.49,"MassiveIntentClassification (el)":10.14,"MassiveIntentClassification (es)":56.72,"MassiveIntentClassification (fa)":3.54,"MassiveIntentClassification (fi)":37.13,"MassiveIntentClassification (fr)":57.67,"MassiveIntentClassification (he)":2.56,"MassiveIntentClassification (hi)":3.24,"MassiveIntentClassification (hu)":34.22,"MassiveIntentClassification (hy)":3.01,"MassiveIntentClassification (id)":46.54,"MassiveIntentClassification (is)":34.77,"MassiveIntentClassification (it)":54.13,"MassiveIntentClassification (ja)":4.27,"MassiveIntentClassification (jv)":36.97,"MassiveIntentClassification (ka)":2.72,"MassiveIntentClassification (km)":5.35,"MassiveIntentClassification (kn)":3.17,"MassiveIntentClassification (ko)":2.64,"MassiveIntentClassification (lv)":36.32,"MassiveIntentClassification (ml)":3.18,"MassiveIntentClassification (mn)":22.85,"MassiveIntentClassification 
(ms)":42.87,"MassiveIntentClassification (my)":4.04,"MassiveIntentClassification (nl)":49.53,"MassiveIntentClassification (pt)":57.03,"MassiveIntentClassification (ro)":49.95,"MassiveIntentClassification (ru)":36.58,"MassiveIntentClassification (sl)":39.44,"MassiveIntentClassification (sq)":41.78,"MassiveIntentClassification (sw)":35.85,"MassiveIntentClassification (ta)":2.32,"MassiveIntentClassification (te)":2.2,"MassiveIntentClassification (th)":3.74,"MassiveIntentClassification (tl)":43.12,"MassiveIntentClassification (tr)":35.24,"MassiveIntentClassification (ur)":3.0,"MassiveIntentClassification (vi)":30.01,"MassiveIntentClassification (zh-TW)":3.35,"MassiveScenarioClassification (af)":52.54,"MassiveScenarioClassification (am)":6.3,"MassiveScenarioClassification (ar)":11.96,"MassiveScenarioClassification (az)":40.17,"MassiveScenarioClassification (bn)":8.29,"MassiveScenarioClassification (cy)":42.24,"MassiveScenarioClassification (de)":68.09,"MassiveScenarioClassification (el)":16.66,"MassiveScenarioClassification (es)":64.32,"MassiveScenarioClassification (fa)":6.9,"MassiveScenarioClassification (fi)":43.96,"MassiveScenarioClassification (fr)":66.72,"MassiveScenarioClassification (he)":7.51,"MassiveScenarioClassification (hi)":7.82,"MassiveScenarioClassification (hu)":42.16,"MassiveScenarioClassification (hy)":9.33,"MassiveScenarioClassification (id)":53.54,"MassiveScenarioClassification (is)":42.84,"MassiveScenarioClassification (it)":62.44,"MassiveScenarioClassification (ja)":7.29,"MassiveScenarioClassification (jv)":43.13,"MassiveScenarioClassification (ka)":7.63,"MassiveScenarioClassification (km)":9.08,"MassiveScenarioClassification (kn)":8.1,"MassiveScenarioClassification (ko)":6.35,"MassiveScenarioClassification (lv)":40.24,"MassiveScenarioClassification (ml)":7.65,"MassiveScenarioClassification (mn)":27.98,"MassiveScenarioClassification (ms)":52.41,"MassiveScenarioClassification (my)":9.21,"MassiveScenarioClassification 
(nl)":60.35,"MassiveScenarioClassification (pt)":62.78,"MassiveScenarioClassification (ro)":59.62,"MassiveScenarioClassification (ru)":43.44,"MassiveScenarioClassification (sl)":44.79,"MassiveScenarioClassification (sq)":50.84,"MassiveScenarioClassification (sw)":44.63,"MassiveScenarioClassification (ta)":7.95,"MassiveScenarioClassification (te)":7.5,"MassiveScenarioClassification (th)":8.79,"MassiveScenarioClassification (tl)":53.54,"MassiveScenarioClassification (tr)":42.47,"MassiveScenarioClassification (ur)":9.58,"MassiveScenarioClassification (vi)":34.68,"MassiveScenarioClassification (zh-TW)":8.77}
-{"index":59,"Rank":23,"Model":"slx-v0.1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.08,"Average":30.35,"AmazonCounterfactualClassification (de)":57.98,"AmazonCounterfactualClassification (ja)":59.38,"AmazonReviewsClassification (de)":26.73,"AmazonReviewsClassification (es)":27.23,"AmazonReviewsClassification (fr)":27.0,"AmazonReviewsClassification (ja)":23.75,"AmazonReviewsClassification (zh)":23.13,"MTOPDomainClassification (de)":70.39,"MTOPDomainClassification (es)":73.35,"MTOPDomainClassification (fr)":75.57,"MTOPDomainClassification (hi)":39.06,"MTOPDomainClassification (th)":17.09,"MTOPIntentClassification (de)":45.26,"MTOPIntentClassification (es)":44.9,"MTOPIntentClassification (fr)":41.12,"MTOPIntentClassification (hi)":18.3,"MTOPIntentClassification (th)":4.68,"MassiveIntentClassification (af)":36.53,"MassiveIntentClassification (am)":2.44,"MassiveIntentClassification (ar)":18.02,"MassiveIntentClassification (az)":28.74,"MassiveIntentClassification (bn)":12.1,"MassiveIntentClassification (cy)":33.68,"MassiveIntentClassification (de)":42.21,"MassiveIntentClassification (el)":22.5,"MassiveIntentClassification (es)":40.63,"MassiveIntentClassification (fa)":19.65,"MassiveIntentClassification (fi)":36.97,"MassiveIntentClassification (fr)":41.95,"MassiveIntentClassification (he)":20.56,"MassiveIntentClassification (hi)":17.6,"MassiveIntentClassification (hu)":33.79,"MassiveIntentClassification (hy)":7.13,"MassiveIntentClassification (id)":38.6,"MassiveIntentClassification (is)":29.51,"MassiveIntentClassification (it)":40.84,"MassiveIntentClassification (ja)":30.52,"MassiveIntentClassification (jv)":35.6,"MassiveIntentClassification (ka)":8.55,"MassiveIntentClassification (km)":4.51,"MassiveIntentClassification (kn)":3.39,"MassiveIntentClassification (ko)":16.09,"MassiveIntentClassification (lv)":36.23,"MassiveIntentClassification (ml)":2.54,"MassiveIntentClassification (mn)":18.88,"MassiveIntentClassification 
(ms)":34.14,"MassiveIntentClassification (my)":4.49,"MassiveIntentClassification (nl)":38.17,"MassiveIntentClassification (pt)":42.6,"MassiveIntentClassification (ro)":39.3,"MassiveIntentClassification (ru)":27.14,"MassiveIntentClassification (sl)":36.45,"MassiveIntentClassification (sq)":40.24,"MassiveIntentClassification (sw)":33.87,"MassiveIntentClassification (ta)":11.43,"MassiveIntentClassification (te)":2.25,"MassiveIntentClassification (th)":10.3,"MassiveIntentClassification (tl)":38.09,"MassiveIntentClassification (tr)":33.09,"MassiveIntentClassification (ur)":15.12,"MassiveIntentClassification (vi)":37.36,"MassiveIntentClassification (zh-TW)":24.11,"MassiveScenarioClassification (af)":43.56,"MassiveScenarioClassification (am)":7.37,"MassiveScenarioClassification (ar)":24.37,"MassiveScenarioClassification (az)":35.12,"MassiveScenarioClassification (bn)":19.76,"MassiveScenarioClassification (cy)":38.78,"MassiveScenarioClassification (de)":50.05,"MassiveScenarioClassification (el)":29.29,"MassiveScenarioClassification (es)":49.3,"MassiveScenarioClassification (fa)":22.91,"MassiveScenarioClassification (fi)":40.72,"MassiveScenarioClassification (fr)":50.59,"MassiveScenarioClassification (he)":23.92,"MassiveScenarioClassification (hi)":23.15,"MassiveScenarioClassification (hu)":39.28,"MassiveScenarioClassification (hy)":12.36,"MassiveScenarioClassification (id)":43.03,"MassiveScenarioClassification (is)":35.19,"MassiveScenarioClassification (it)":49.37,"MassiveScenarioClassification (ja)":37.64,"MassiveScenarioClassification (jv)":42.82,"MassiveScenarioClassification (ka)":14.13,"MassiveScenarioClassification (km)":8.84,"MassiveScenarioClassification (kn)":8.35,"MassiveScenarioClassification (ko)":20.59,"MassiveScenarioClassification (lv)":40.16,"MassiveScenarioClassification (ml)":7.48,"MassiveScenarioClassification (mn)":24.15,"MassiveScenarioClassification (ms)":42.39,"MassiveScenarioClassification (my)":11.19,"MassiveScenarioClassification 
(nl)":46.52,"MassiveScenarioClassification (pt)":49.51,"MassiveScenarioClassification (ro)":47.62,"MassiveScenarioClassification (ru)":29.95,"MassiveScenarioClassification (sl)":41.42,"MassiveScenarioClassification (sq)":45.49,"MassiveScenarioClassification (sw)":41.83,"MassiveScenarioClassification (ta)":17.58,"MassiveScenarioClassification (te)":7.58,"MassiveScenarioClassification (th)":18.84,"MassiveScenarioClassification (tl)":46.35,"MassiveScenarioClassification (tr)":38.19,"MassiveScenarioClassification (ur)":23.33,"MassiveScenarioClassification (vi)":40.89,"MassiveScenarioClassification (zh-TW)":33.27}
-{"index":118,"Rank":24,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":29.6,"AmazonCounterfactualClassification (de)":67.01,"AmazonCounterfactualClassification (ja)":45.61,"AmazonReviewsClassification (de)":44.05,"AmazonReviewsClassification (es)":45.01,"AmazonReviewsClassification (fr)":43.52,"AmazonReviewsClassification (ja)":22.23,"AmazonReviewsClassification (zh)":21.88,"MTOPDomainClassification (de)":83.28,"MTOPDomainClassification (es)":85.32,"MTOPDomainClassification (fr)":85.14,"MTOPDomainClassification (hi)":20.85,"MTOPDomainClassification (th)":15.62,"MTOPIntentClassification (de)":54.65,"MTOPIntentClassification (es)":57.38,"MTOPIntentClassification (fr)":54.39,"MTOPIntentClassification (hi)":3.28,"MTOPIntentClassification (th)":5.08,"MassiveIntentClassification (af)":40.17,"MassiveIntentClassification (am)":2.18,"MassiveIntentClassification (ar)":4.18,"MassiveIntentClassification (az)":30.02,"MassiveIntentClassification (bn)":2.6,"MassiveIntentClassification (cy)":29.15,"MassiveIntentClassification (de)":57.43,"MassiveIntentClassification (el)":9.96,"MassiveIntentClassification (es)":57.97,"MassiveIntentClassification (fa)":3.6,"MassiveIntentClassification (fi)":34.02,"MassiveIntentClassification (fr)":60.99,"MassiveIntentClassification (he)":2.51,"MassiveIntentClassification (hi)":3.02,"MassiveIntentClassification (hu)":31.66,"MassiveIntentClassification (hy)":3.32,"MassiveIntentClassification (id)":41.53,"MassiveIntentClassification (is)":30.25,"MassiveIntentClassification (it)":56.57,"MassiveIntentClassification (ja)":3.5,"MassiveIntentClassification (jv)":31.67,"MassiveIntentClassification (ka)":2.79,"MassiveIntentClassification (km)":5.43,"MassiveIntentClassification (kn)":2.79,"MassiveIntentClassification (ko)":2.67,"MassiveIntentClassification (lv)":34.25,"MassiveIntentClassification (ml)":2.98,"MassiveIntentClassification (mn)":20.99,"MassiveIntentClassification 
(ms)":37.43,"MassiveIntentClassification (my)":4.02,"MassiveIntentClassification (nl)":50.51,"MassiveIntentClassification (pt)":57.95,"MassiveIntentClassification (ro)":49.37,"MassiveIntentClassification (ru)":33.46,"MassiveIntentClassification (sl)":36.33,"MassiveIntentClassification (sq)":37.65,"MassiveIntentClassification (sw)":30.6,"MassiveIntentClassification (ta)":1.79,"MassiveIntentClassification (te)":2.26,"MassiveIntentClassification (th)":4.02,"MassiveIntentClassification (tl)":38.92,"MassiveIntentClassification (tr)":32.05,"MassiveIntentClassification (ur)":2.7,"MassiveIntentClassification (vi)":21.47,"MassiveIntentClassification (zh-TW)":3.24,"MassiveScenarioClassification (af)":50.81,"MassiveScenarioClassification (am)":6.95,"MassiveScenarioClassification (ar)":12.32,"MassiveScenarioClassification (az)":38.79,"MassiveScenarioClassification (bn)":8.0,"MassiveScenarioClassification (cy)":33.91,"MassiveScenarioClassification (de)":65.33,"MassiveScenarioClassification (el)":16.89,"MassiveScenarioClassification (es)":62.52,"MassiveScenarioClassification (fa)":6.08,"MassiveScenarioClassification (fi)":43.34,"MassiveScenarioClassification (fr)":66.42,"MassiveScenarioClassification (he)":7.55,"MassiveScenarioClassification (hi)":7.44,"MassiveScenarioClassification (hu)":40.85,"MassiveScenarioClassification (hy)":9.25,"MassiveScenarioClassification (id)":51.92,"MassiveScenarioClassification (is)":40.09,"MassiveScenarioClassification (it)":62.94,"MassiveScenarioClassification (ja)":7.9,"MassiveScenarioClassification (jv)":41.33,"MassiveScenarioClassification (ka)":7.76,"MassiveScenarioClassification (km)":9.19,"MassiveScenarioClassification (kn)":8.36,"MassiveScenarioClassification (ko)":6.13,"MassiveScenarioClassification (lv)":40.7,"MassiveScenarioClassification (ml)":6.98,"MassiveScenarioClassification (mn)":27.0,"MassiveScenarioClassification (ms)":46.9,"MassiveScenarioClassification (my)":9.55,"MassiveScenarioClassification 
(nl)":59.65,"MassiveScenarioClassification (pt)":62.18,"MassiveScenarioClassification (ro)":58.22,"MassiveScenarioClassification (ru)":40.73,"MassiveScenarioClassification (sl)":43.66,"MassiveScenarioClassification (sq)":49.25,"MassiveScenarioClassification (sw)":40.55,"MassiveScenarioClassification (ta)":7.46,"MassiveScenarioClassification (te)":7.03,"MassiveScenarioClassification (th)":8.52,"MassiveScenarioClassification (tl)":51.74,"MassiveScenarioClassification (tr)":43.01,"MassiveScenarioClassification (ur)":9.61,"MassiveScenarioClassification (vi)":28.91,"MassiveScenarioClassification (zh-TW)":7.14}
-{"index":111,"Rank":25,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":28.9,"AmazonCounterfactualClassification (de)":59.38,"AmazonCounterfactualClassification (ja)":45.87,"AmazonReviewsClassification (de)":33.06,"AmazonReviewsClassification (es)":34.0,"AmazonReviewsClassification (fr)":33.48,"AmazonReviewsClassification (ja)":21.78,"AmazonReviewsClassification (zh)":21.83,"MTOPDomainClassification (de)":81.91,"MTOPDomainClassification (es)":84.7,"MTOPDomainClassification (fr)":82.48,"MTOPDomainClassification (hi)":22.11,"MTOPDomainClassification (th)":16.36,"MTOPIntentClassification (de)":52.13,"MTOPIntentClassification (es)":52.62,"MTOPIntentClassification (fr)":46.39,"MTOPIntentClassification (hi)":3.9,"MTOPIntentClassification (th)":5.38,"MassiveIntentClassification (af)":41.02,"MassiveIntentClassification (am)":2.34,"MassiveIntentClassification (ar)":4.87,"MassiveIntentClassification (az)":34.92,"MassiveIntentClassification (bn)":2.52,"MassiveIntentClassification (cy)":35.87,"MassiveIntentClassification (de)":51.48,"MassiveIntentClassification (el)":10.0,"MassiveIntentClassification (es)":53.3,"MassiveIntentClassification (fa)":3.59,"MassiveIntentClassification (fi)":37.35,"MassiveIntentClassification (fr)":54.83,"MassiveIntentClassification (he)":2.52,"MassiveIntentClassification (hi)":2.88,"MassiveIntentClassification (hu)":33.52,"MassiveIntentClassification (hy)":3.13,"MassiveIntentClassification (id)":40.11,"MassiveIntentClassification (is)":34.77,"MassiveIntentClassification (it)":51.21,"MassiveIntentClassification (ja)":4.75,"MassiveIntentClassification (jv)":35.6,"MassiveIntentClassification (ka)":2.71,"MassiveIntentClassification (km)":5.48,"MassiveIntentClassification (kn)":2.44,"MassiveIntentClassification (ko)":2.59,"MassiveIntentClassification (lv)":38.15,"MassiveIntentClassification (ml)":2.67,"MassiveIntentClassification (mn)":18.47,"MassiveIntentClassification 
(ms)":35.58,"MassiveIntentClassification (my)":4.35,"MassiveIntentClassification (nl)":45.96,"MassiveIntentClassification (pt)":52.27,"MassiveIntentClassification (ro)":46.39,"MassiveIntentClassification (ru)":16.82,"MassiveIntentClassification (sl)":37.3,"MassiveIntentClassification (sq)":41.73,"MassiveIntentClassification (sw)":35.97,"MassiveIntentClassification (ta)":1.52,"MassiveIntentClassification (te)":2.57,"MassiveIntentClassification (th)":3.94,"MassiveIntentClassification (tl)":41.03,"MassiveIntentClassification (tr)":33.75,"MassiveIntentClassification (ur)":2.57,"MassiveIntentClassification (vi)":25.23,"MassiveIntentClassification (zh-TW)":4.64,"MassiveScenarioClassification (af)":51.48,"MassiveScenarioClassification (am)":7.74,"MassiveScenarioClassification (ar)":12.03,"MassiveScenarioClassification (az)":41.77,"MassiveScenarioClassification (bn)":8.07,"MassiveScenarioClassification (cy)":43.67,"MassiveScenarioClassification (de)":63.63,"MassiveScenarioClassification (el)":16.83,"MassiveScenarioClassification (es)":61.48,"MassiveScenarioClassification (fa)":6.48,"MassiveScenarioClassification (fi)":43.54,"MassiveScenarioClassification (fr)":64.06,"MassiveScenarioClassification (he)":8.03,"MassiveScenarioClassification (hi)":7.5,"MassiveScenarioClassification (hu)":42.59,"MassiveScenarioClassification (hy)":9.22,"MassiveScenarioClassification (id)":48.67,"MassiveScenarioClassification (is)":43.87,"MassiveScenarioClassification (it)":59.83,"MassiveScenarioClassification (ja)":5.62,"MassiveScenarioClassification (jv)":42.18,"MassiveScenarioClassification (ka)":7.52,"MassiveScenarioClassification (km)":9.55,"MassiveScenarioClassification (kn)":8.34,"MassiveScenarioClassification (ko)":6.11,"MassiveScenarioClassification (lv)":43.35,"MassiveScenarioClassification (ml)":7.28,"MassiveScenarioClassification (mn)":23.94,"MassiveScenarioClassification (ms)":45.18,"MassiveScenarioClassification (my)":9.33,"MassiveScenarioClassification 
(nl)":57.02,"MassiveScenarioClassification (pt)":59.45,"MassiveScenarioClassification (ro)":56.8,"MassiveScenarioClassification (ru)":25.85,"MassiveScenarioClassification (sl)":42.51,"MassiveScenarioClassification (sq)":50.41,"MassiveScenarioClassification (sw)":43.02,"MassiveScenarioClassification (ta)":7.21,"MassiveScenarioClassification (te)":6.9,"MassiveScenarioClassification (th)":8.7,"MassiveScenarioClassification (tl)":51.76,"MassiveScenarioClassification (tr)":42.54,"MassiveScenarioClassification (ur)":9.32,"MassiveScenarioClassification (vi)":31.51,"MassiveScenarioClassification (zh-TW)":8.16}
-{"index":117,"Rank":26,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":28.26,"AmazonCounterfactualClassification (de)":67.97,"AmazonCounterfactualClassification (ja)":45.72,"AmazonReviewsClassification (de)":43.16,"AmazonReviewsClassification (es)":42.89,"AmazonReviewsClassification (fr)":41.48,"AmazonReviewsClassification (ja)":22.49,"AmazonReviewsClassification (zh)":22.12,"MTOPDomainClassification (de)":80.56,"MTOPDomainClassification (es)":80.78,"MTOPDomainClassification (fr)":79.6,"MTOPDomainClassification (hi)":21.22,"MTOPDomainClassification (th)":15.82,"MTOPIntentClassification (de)":52.5,"MTOPIntentClassification (es)":52.07,"MTOPIntentClassification (fr)":47.73,"MTOPIntentClassification (hi)":3.74,"MTOPIntentClassification (th)":4.96,"MassiveIntentClassification (af)":38.41,"MassiveIntentClassification (am)":2.49,"MassiveIntentClassification (ar)":4.7,"MassiveIntentClassification (az)":31.77,"MassiveIntentClassification (bn)":2.77,"MassiveIntentClassification (cy)":31.69,"MassiveIntentClassification (de)":52.01,"MassiveIntentClassification (el)":9.74,"MassiveIntentClassification (es)":54.1,"MassiveIntentClassification (fa)":3.86,"MassiveIntentClassification (fi)":34.07,"MassiveIntentClassification (fr)":57.01,"MassiveIntentClassification (he)":2.14,"MassiveIntentClassification (hi)":2.97,"MassiveIntentClassification (hu)":32.01,"MassiveIntentClassification (hy)":3.17,"MassiveIntentClassification (id)":34.55,"MassiveIntentClassification (is)":32.0,"MassiveIntentClassification (it)":52.94,"MassiveIntentClassification (ja)":2.9,"MassiveIntentClassification (jv)":32.42,"MassiveIntentClassification (ka)":2.71,"MassiveIntentClassification (km)":5.5,"MassiveIntentClassification (kn)":2.41,"MassiveIntentClassification (ko)":2.57,"MassiveIntentClassification (lv)":35.09,"MassiveIntentClassification (ml)":2.95,"MassiveIntentClassification (mn)":18.33,"MassiveIntentClassification 
(ms)":29.69,"MassiveIntentClassification (my)":3.99,"MassiveIntentClassification (nl)":44.95,"MassiveIntentClassification (pt)":51.96,"MassiveIntentClassification (ro)":43.83,"MassiveIntentClassification (ru)":17.32,"MassiveIntentClassification (sl)":33.71,"MassiveIntentClassification (sq)":37.62,"MassiveIntentClassification (sw)":31.9,"MassiveIntentClassification (ta)":1.91,"MassiveIntentClassification (te)":2.54,"MassiveIntentClassification (th)":3.85,"MassiveIntentClassification (tl)":36.83,"MassiveIntentClassification (tr)":33.0,"MassiveIntentClassification (ur)":2.62,"MassiveIntentClassification (vi)":22.81,"MassiveIntentClassification (zh-TW)":3.49,"MassiveScenarioClassification (af)":50.28,"MassiveScenarioClassification (am)":7.15,"MassiveScenarioClassification (ar)":12.12,"MassiveScenarioClassification (az)":39.68,"MassiveScenarioClassification (bn)":8.06,"MassiveScenarioClassification (cy)":38.01,"MassiveScenarioClassification (de)":62.71,"MassiveScenarioClassification (el)":17.19,"MassiveScenarioClassification (es)":59.56,"MassiveScenarioClassification (fa)":6.5,"MassiveScenarioClassification (fi)":41.72,"MassiveScenarioClassification (fr)":63.6,"MassiveScenarioClassification (he)":7.93,"MassiveScenarioClassification (hi)":7.85,"MassiveScenarioClassification (hu)":41.37,"MassiveScenarioClassification (hy)":9.42,"MassiveScenarioClassification (id)":44.88,"MassiveScenarioClassification (is)":40.86,"MassiveScenarioClassification (it)":60.09,"MassiveScenarioClassification (ja)":6.56,"MassiveScenarioClassification (jv)":40.18,"MassiveScenarioClassification (ka)":7.37,"MassiveScenarioClassification (km)":9.56,"MassiveScenarioClassification (kn)":8.4,"MassiveScenarioClassification (ko)":5.96,"MassiveScenarioClassification (lv)":41.44,"MassiveScenarioClassification (ml)":7.47,"MassiveScenarioClassification (mn)":25.36,"MassiveScenarioClassification (ms)":39.69,"MassiveScenarioClassification (my)":9.68,"MassiveScenarioClassification 
(nl)":56.09,"MassiveScenarioClassification (pt)":57.99,"MassiveScenarioClassification (ro)":56.0,"MassiveScenarioClassification (ru)":27.47,"MassiveScenarioClassification (sl)":41.04,"MassiveScenarioClassification (sq)":49.38,"MassiveScenarioClassification (sw)":40.62,"MassiveScenarioClassification (ta)":7.59,"MassiveScenarioClassification (te)":7.07,"MassiveScenarioClassification (th)":8.52,"MassiveScenarioClassification (tl)":49.89,"MassiveScenarioClassification (tr)":43.08,"MassiveScenarioClassification (ur)":9.31,"MassiveScenarioClassification (vi)":27.46,"MassiveScenarioClassification (zh-TW)":7.24}
-{"index":116,"Rank":27,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":26.73,"AmazonCounterfactualClassification (de)":69.98,"AmazonCounterfactualClassification (ja)":46.05,"AmazonReviewsClassification (de)":37.9,"AmazonReviewsClassification (es)":37.33,"AmazonReviewsClassification (fr)":37.35,"AmazonReviewsClassification (ja)":22.29,"AmazonReviewsClassification (zh)":21.53,"MTOPDomainClassification (de)":76.98,"MTOPDomainClassification (es)":73.61,"MTOPDomainClassification (fr)":75.03,"MTOPDomainClassification (hi)":21.4,"MTOPDomainClassification (th)":16.21,"MTOPIntentClassification (de)":44.43,"MTOPIntentClassification (es)":42.03,"MTOPIntentClassification (fr)":43.85,"MTOPIntentClassification (hi)":3.8,"MTOPIntentClassification (th)":5.21,"MassiveIntentClassification (af)":34.32,"MassiveIntentClassification (am)":2.38,"MassiveIntentClassification (ar)":4.53,"MassiveIntentClassification (az)":31.76,"MassiveIntentClassification (bn)":2.58,"MassiveIntentClassification (cy)":28.94,"MassiveIntentClassification (de)":45.23,"MassiveIntentClassification (el)":10.05,"MassiveIntentClassification (es)":45.32,"MassiveIntentClassification (fa)":3.58,"MassiveIntentClassification (fi)":33.52,"MassiveIntentClassification (fr)":51.13,"MassiveIntentClassification (he)":2.63,"MassiveIntentClassification (hi)":2.68,"MassiveIntentClassification (hu)":32.31,"MassiveIntentClassification (hy)":3.33,"MassiveIntentClassification (id)":35.5,"MassiveIntentClassification (is)":29.82,"MassiveIntentClassification (it)":45.59,"MassiveIntentClassification (ja)":3.67,"MassiveIntentClassification (jv)":31.15,"MassiveIntentClassification (ka)":2.77,"MassiveIntentClassification (km)":5.66,"MassiveIntentClassification (kn)":2.59,"MassiveIntentClassification (ko)":2.34,"MassiveIntentClassification (lv)":33.97,"MassiveIntentClassification (ml)":2.55,"MassiveIntentClassification (mn)":14.7,"MassiveIntentClassification 
(ms)":33.12,"MassiveIntentClassification (my)":4.42,"MassiveIntentClassification (nl)":37.96,"MassiveIntentClassification (pt)":43.35,"MassiveIntentClassification (ro)":42.69,"MassiveIntentClassification (ru)":14.82,"MassiveIntentClassification (sl)":34.54,"MassiveIntentClassification (sq)":38.54,"MassiveIntentClassification (sw)":32.14,"MassiveIntentClassification (ta)":1.41,"MassiveIntentClassification (te)":2.5,"MassiveIntentClassification (th)":3.71,"MassiveIntentClassification (tl)":36.04,"MassiveIntentClassification (tr)":33.77,"MassiveIntentClassification (ur)":2.99,"MassiveIntentClassification (vi)":22.62,"MassiveIntentClassification (zh-TW)":4.63,"MassiveScenarioClassification (af)":44.45,"MassiveScenarioClassification (am)":7.51,"MassiveScenarioClassification (ar)":12.32,"MassiveScenarioClassification (az)":38.41,"MassiveScenarioClassification (bn)":8.45,"MassiveScenarioClassification (cy)":35.04,"MassiveScenarioClassification (de)":59.12,"MassiveScenarioClassification (el)":17.68,"MassiveScenarioClassification (es)":55.61,"MassiveScenarioClassification (fa)":6.86,"MassiveScenarioClassification (fi)":41.34,"MassiveScenarioClassification (fr)":59.92,"MassiveScenarioClassification (he)":7.86,"MassiveScenarioClassification (hi)":7.63,"MassiveScenarioClassification (hu)":41.31,"MassiveScenarioClassification (hy)":9.23,"MassiveScenarioClassification (id)":44.64,"MassiveScenarioClassification (is)":39.63,"MassiveScenarioClassification (it)":54.58,"MassiveScenarioClassification (ja)":4.96,"MassiveScenarioClassification (jv)":40.73,"MassiveScenarioClassification (ka)":7.51,"MassiveScenarioClassification (km)":8.73,"MassiveScenarioClassification (kn)":7.99,"MassiveScenarioClassification (ko)":6.03,"MassiveScenarioClassification (lv)":36.42,"MassiveScenarioClassification (ml)":6.96,"MassiveScenarioClassification (mn)":19.85,"MassiveScenarioClassification (ms)":43.18,"MassiveScenarioClassification (my)":9.46,"MassiveScenarioClassification 
(nl)":50.0,"MassiveScenarioClassification (pt)":52.24,"MassiveScenarioClassification (ro)":53.7,"MassiveScenarioClassification (ru)":20.69,"MassiveScenarioClassification (sl)":39.79,"MassiveScenarioClassification (sq)":50.16,"MassiveScenarioClassification (sw)":40.48,"MassiveScenarioClassification (ta)":7.47,"MassiveScenarioClassification (te)":6.87,"MassiveScenarioClassification (th)":8.26,"MassiveScenarioClassification (tl)":48.94,"MassiveScenarioClassification (tr)":41.83,"MassiveScenarioClassification (ur)":9.77,"MassiveScenarioClassification (vi)":30.01,"MassiveScenarioClassification (zh-TW)":7.91}
-{"index":109,"Rank":28,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":24.49,"AmazonCounterfactualClassification (de)":54.46,"AmazonCounterfactualClassification (ja)":43.87,"AmazonReviewsClassification (de)":24.08,"AmazonReviewsClassification (es)":23.88,"AmazonReviewsClassification (fr)":23.31,"AmazonReviewsClassification (ja)":20.25,"AmazonReviewsClassification (zh)":20.49,"MTOPDomainClassification (de)":48.55,"MTOPDomainClassification (es)":58.39,"MTOPDomainClassification (fr)":54.61,"MTOPDomainClassification (hi)":21.22,"MTOPDomainClassification (th)":14.98,"MTOPIntentClassification (de)":35.55,"MTOPIntentClassification (es)":36.72,"MTOPIntentClassification (fr)":34.71,"MTOPIntentClassification (hi)":4.44,"MTOPIntentClassification (th)":4.67,"MassiveIntentClassification (af)":33.68,"MassiveIntentClassification (am)":2.94,"MassiveIntentClassification (ar)":10.04,"MassiveIntentClassification (az)":30.74,"MassiveIntentClassification (bn)":3.02,"MassiveIntentClassification (cy)":33.94,"MassiveIntentClassification (de)":36.06,"MassiveIntentClassification (el)":27.7,"MassiveIntentClassification (es)":35.6,"MassiveIntentClassification (fa)":17.97,"MassiveIntentClassification (fi)":35.53,"MassiveIntentClassification (fr)":38.41,"MassiveIntentClassification (he)":2.69,"MassiveIntentClassification (hi)":3.43,"MassiveIntentClassification (hu)":34.05,"MassiveIntentClassification (hy)":3.11,"MassiveIntentClassification (id)":40.02,"MassiveIntentClassification (is)":32.63,"MassiveIntentClassification (it)":39.28,"MassiveIntentClassification (ja)":4.95,"MassiveIntentClassification (jv)":34.95,"MassiveIntentClassification (ka)":2.57,"MassiveIntentClassification (km)":4.73,"MassiveIntentClassification (kn)":3.54,"MassiveIntentClassification (ko)":2.68,"MassiveIntentClassification (lv)":37.91,"MassiveIntentClassification (ml)":2.88,"MassiveIntentClassification (mn)":16.94,"MassiveIntentClassification 
(ms)":36.6,"MassiveIntentClassification (my)":3.96,"MassiveIntentClassification (nl)":33.95,"MassiveIntentClassification (pt)":43.05,"MassiveIntentClassification (ro)":36.2,"MassiveIntentClassification (ru)":25.3,"MassiveIntentClassification (sl)":35.9,"MassiveIntentClassification (sq)":36.6,"MassiveIntentClassification (sw)":34.81,"MassiveIntentClassification (ta)":3.11,"MassiveIntentClassification (te)":2.53,"MassiveIntentClassification (th)":4.38,"MassiveIntentClassification (tl)":35.51,"MassiveIntentClassification (tr)":32.02,"MassiveIntentClassification (ur)":9.61,"MassiveIntentClassification (vi)":37.07,"MassiveIntentClassification (zh-TW)":4.79,"MassiveScenarioClassification (af)":36.17,"MassiveScenarioClassification (am)":7.64,"MassiveScenarioClassification (ar)":15.26,"MassiveScenarioClassification (az)":30.73,"MassiveScenarioClassification (bn)":7.15,"MassiveScenarioClassification (cy)":34.73,"MassiveScenarioClassification (de)":38.62,"MassiveScenarioClassification (el)":27.18,"MassiveScenarioClassification (es)":39.44,"MassiveScenarioClassification (fa)":21.43,"MassiveScenarioClassification (fi)":33.21,"MassiveScenarioClassification (fr)":40.26,"MassiveScenarioClassification (he)":7.42,"MassiveScenarioClassification (hi)":8.06,"MassiveScenarioClassification (hu)":34.54,"MassiveScenarioClassification (hy)":8.61,"MassiveScenarioClassification (id)":40.04,"MassiveScenarioClassification (is)":33.57,"MassiveScenarioClassification (it)":40.1,"MassiveScenarioClassification (ja)":9.96,"MassiveScenarioClassification (jv)":36.11,"MassiveScenarioClassification (ka)":7.13,"MassiveScenarioClassification (km)":9.66,"MassiveScenarioClassification (kn)":7.55,"MassiveScenarioClassification (ko)":7.27,"MassiveScenarioClassification (lv)":37.03,"MassiveScenarioClassification (ml)":7.22,"MassiveScenarioClassification (mn)":21.53,"MassiveScenarioClassification (ms)":37.57,"MassiveScenarioClassification (my)":9.54,"MassiveScenarioClassification 
(nl)":34.62,"MassiveScenarioClassification (pt)":44.68,"MassiveScenarioClassification (ro)":37.29,"MassiveScenarioClassification (ru)":28.16,"MassiveScenarioClassification (sl)":37.95,"MassiveScenarioClassification (sq)":37.82,"MassiveScenarioClassification (sw)":35.37,"MassiveScenarioClassification (ta)":7.19,"MassiveScenarioClassification (te)":7.29,"MassiveScenarioClassification (th)":9.47,"MassiveScenarioClassification (tl)":37.31,"MassiveScenarioClassification (tr)":34.57,"MassiveScenarioClassification (ur)":16.17,"MassiveScenarioClassification (vi)":35.91,"MassiveScenarioClassification (zh-TW)":10.19}
-{"index":0,"Rank":29,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":41.59,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":90.05,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":66.09,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":62.83,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":69.71,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":1,"Rank":30,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":37.26,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":79.79,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":45.62,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":53.7,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":62.46,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":2,"Rank":31,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":42.15,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":87.68,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":59.44,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":63.08,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":70.15,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":3,"Rank":32,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":41.98,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":90.12,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":62.44,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":66.94,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":72.78,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":4,"Rank":33,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":43.36,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":90.33,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":60.52,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":68.06,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":74.29,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":6,"Rank":34,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.18,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":7,"Rank":35,"Model":"llm2vec-occiglot-mntp<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":35.07,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":75.7,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":63.76,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":56.03,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":59.3,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":8,"Rank":36,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":52.95,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":9,"Rank":37,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":1776,"Memory Usage (GB, fp32)":6.62,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":53.47,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":47.21,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":93.48,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":80.23,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":76.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":79.1,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":10,"Rank":38,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":55.53,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":53.98,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":96.69,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":87.47,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":81.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":86.64,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":11,"Rank":39,"Model":"gte-multilingual-base<\/a>","Model Size (Million Parameters)":305,"Memory Usage (GB, fp32)":1.14,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":40.11,"AmazonReviewsClassification (es)":40.17,"AmazonReviewsClassification (fr)":39.57,"AmazonReviewsClassification (ja)":35.75,"AmazonReviewsClassification (zh)":33.34,"MTOPDomainClassification (de)":88.27,"MTOPDomainClassification (es)":90.88,"MTOPDomainClassification (fr)":87.49,"MTOPDomainClassification (hi)":89.45,"MTOPDomainClassification (th)":83.46,"MTOPIntentClassification (de)":63.51,"MTOPIntentClassification (es)":71.66,"MTOPIntentClassification (fr)":64.06,"MTOPIntentClassification (hi)":65.27,"MTOPIntentClassification (th)":63.05,"MassiveIntentClassification (af)":57.95,"MassiveIntentClassification (am)":49.27,"MassiveIntentClassification (ar)":55.49,"MassiveIntentClassification (az)":60.98,"MassiveIntentClassification (bn)":57.57,"MassiveIntentClassification (cy)":48.35,"MassiveIntentClassification (de)":62.55,"MassiveIntentClassification (el)":63.09,"MassiveIntentClassification (es)":67.41,"MassiveIntentClassification (fa)":65.6,"MassiveIntentClassification (fi)":62.91,"MassiveIntentClassification (fr)":67.92,"MassiveIntentClassification (he)":55.2,"MassiveIntentClassification (hi)":65.04,"MassiveIntentClassification (hu)":63.31,"MassiveIntentClassification (hy)":55.52,"MassiveIntentClassification (id)":66.06,"MassiveIntentClassification (is)":56.02,"MassiveIntentClassification (it)":67.28,"MassiveIntentClassification (ja)":66.8,"MassiveIntentClassification (jv)":50.18,"MassiveIntentClassification (ka)":47.89,"MassiveIntentClassification (km)":50.8,"MassiveIntentClassification (kn)":57.23,"MassiveIntentClassification (ko)":64.59,"MassiveIntentClassification (lv)":59.08,"MassiveIntentClassification (ml)":59.55,"MassiveIntentClassification (mn)":53.44,"MassiveIntentClassification 
(ms)":61.65,"MassiveIntentClassification (my)":53.46,"MassiveIntentClassification (nl)":66.36,"MassiveIntentClassification (pt)":68.07,"MassiveIntentClassification (ro)":63.12,"MassiveIntentClassification (ru)":67.46,"MassiveIntentClassification (sl)":60.54,"MassiveIntentClassification (sq)":58.2,"MassiveIntentClassification (sw)":51.68,"MassiveIntentClassification (ta)":58.52,"MassiveIntentClassification (te)":58.53,"MassiveIntentClassification (th)":61.88,"MassiveIntentClassification (tl)":56.65,"MassiveIntentClassification (tr)":64.85,"MassiveIntentClassification (ur)":58.62,"MassiveIntentClassification (vi)":64.16,"MassiveIntentClassification (zh-TW)":64.17,"MassiveScenarioClassification (af)":64.9,"MassiveScenarioClassification (am)":57.28,"MassiveScenarioClassification (ar)":62.27,"MassiveScenarioClassification (az)":65.1,"MassiveScenarioClassification (bn)":62.41,"MassiveScenarioClassification (cy)":55.37,"MassiveScenarioClassification (de)":70.3,"MassiveScenarioClassification (el)":69.53,"MassiveScenarioClassification (es)":72.45,"MassiveScenarioClassification (fa)":70.27,"MassiveScenarioClassification (fi)":67.21,"MassiveScenarioClassification (fr)":72.65,"MassiveScenarioClassification (he)":61.7,"MassiveScenarioClassification (hi)":70.14,"MassiveScenarioClassification (hu)":70.51,"MassiveScenarioClassification (hy)":60.14,"MassiveScenarioClassification (id)":70.62,"MassiveScenarioClassification (is)":61.53,"MassiveScenarioClassification (it)":72.0,"MassiveScenarioClassification (ja)":71.59,"MassiveScenarioClassification (jv)":57.01,"MassiveScenarioClassification (ka)":53.26,"MassiveScenarioClassification (km)":57.8,"MassiveScenarioClassification (kn)":62.39,"MassiveScenarioClassification (ko)":69.54,"MassiveScenarioClassification (lv)":63.37,"MassiveScenarioClassification (ml)":64.82,"MassiveScenarioClassification (mn)":59.35,"MassiveScenarioClassification (ms)":66.68,"MassiveScenarioClassification (my)":59.43,"MassiveScenarioClassification 
(nl)":71.96,"MassiveScenarioClassification (pt)":71.87,"MassiveScenarioClassification (ro)":68.51,"MassiveScenarioClassification (ru)":71.65,"MassiveScenarioClassification (sl)":66.82,"MassiveScenarioClassification (sq)":65.26,"MassiveScenarioClassification (sw)":58.3,"MassiveScenarioClassification (ta)":62.73,"MassiveScenarioClassification (te)":66.58,"MassiveScenarioClassification (th)":68.23,"MassiveScenarioClassification (tl)":61.97,"MassiveScenarioClassification (tr)":70.73,"MassiveScenarioClassification (ur)":63.16,"MassiveScenarioClassification (vi)":67.92,"MassiveScenarioClassification (zh-TW)":71.61}
-{"index":12,"Rank":40,"Model":"tao<\/a>","Model Size (Million Parameters)":163,"Memory Usage (GB, fp32)":0.61,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.81,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":13,"Rank":41,"Model":"tao-8k<\/a>","Model Size (Million Parameters)":167,"Memory Usage (GB, fp32)":0.62,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.8,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":14,"Rank":42,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.15,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":15,"Rank":43,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":41.94,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":16,"Rank":44,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":41.38,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":17,"Rank":45,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":9242,"Memory Usage (GB, fp32)":34.43,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":55.19,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":54.34,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":97.2,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":93.07,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":79.6,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":82.18,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":18,"Rank":46,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":35.91,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":19,"Rank":47,"Model":"Yinka<\/a>","Model Size (Million Parameters)":164,"Memory Usage (GB, fp32)":0.61,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":49.5,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":20,"Rank":48,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":54.11,"AmazonCounterfactualClassification (ja)":53.95,"AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":39.68,"MTOPDomainClassification (de)":57.22,"MTOPDomainClassification (es)":58.4,"MTOPDomainClassification (fr)":54.97,"MTOPDomainClassification (hi)":64.17,"MTOPDomainClassification (th)":70.47,"MTOPIntentClassification (de)":32.18,"MTOPIntentClassification (es)":33.9,"MTOPIntentClassification (fr)":26.69,"MTOPIntentClassification (hi)":38.27,"MTOPIntentClassification (th)":42.73,"MassiveIntentClassification (af)":33.5,"MassiveIntentClassification (am)":19.56,"MassiveIntentClassification (ar)":26.46,"MassiveIntentClassification (az)":31.58,"MassiveIntentClassification (bn)":27.99,"MassiveIntentClassification (cy)":28.26,"MassiveIntentClassification (de)":34.2,"MassiveIntentClassification (el)":26.02,"MassiveIntentClassification (es)":36.37,"MassiveIntentClassification (fa)":48.91,"MassiveIntentClassification (fi)":30.11,"MassiveIntentClassification (fr)":37.53,"MassiveIntentClassification (he)":24.86,"MassiveIntentClassification (hi)":39.14,"MassiveIntentClassification (hu)":31.97,"MassiveIntentClassification (hy)":31.36,"MassiveIntentClassification (id)":37.04,"MassiveIntentClassification (is)":28.61,"MassiveIntentClassification (it)":37.86,"MassiveIntentClassification (ja)":47.9,"MassiveIntentClassification (jv)":29.08,"MassiveIntentClassification (ka)":25.77,"MassiveIntentClassification (km)":23.66,"MassiveIntentClassification (kn)":21.27,"MassiveIntentClassification (ko)":40.42,"MassiveIntentClassification (lv)":30.13,"MassiveIntentClassification (ml)":25.89,"MassiveIntentClassification (mn)":27.71,"MassiveIntentClassification 
(ms)":33.04,"MassiveIntentClassification (my)":24.19,"MassiveIntentClassification (nl)":39.31,"MassiveIntentClassification (pt)":40.26,"MassiveIntentClassification (ro)":35.42,"MassiveIntentClassification (ru)":39.69,"MassiveIntentClassification (sl)":31.09,"MassiveIntentClassification (sq)":35.15,"MassiveIntentClassification (sw)":27.91,"MassiveIntentClassification (ta)":28.12,"MassiveIntentClassification (te)":26.34,"MassiveIntentClassification (th)":48.24,"MassiveIntentClassification (tl)":32.73,"MassiveIntentClassification (tr)":30.21,"MassiveIntentClassification (ur)":30.28,"MassiveIntentClassification (vi)":40.45,"MassiveIntentClassification (zh-TW)":64.03,"MassiveScenarioClassification (af)":43.53,"MassiveScenarioClassification (am)":25.3,"MassiveScenarioClassification (ar)":34.91,"MassiveScenarioClassification (az)":36.37,"MassiveScenarioClassification (bn)":39.2,"MassiveScenarioClassification (cy)":32.18,"MassiveScenarioClassification (de)":43.92,"MassiveScenarioClassification (el)":35.03,"MassiveScenarioClassification (es)":41.96,"MassiveScenarioClassification (fa)":58.36,"MassiveScenarioClassification (fi)":33.95,"MassiveScenarioClassification (fr)":45.32,"MassiveScenarioClassification (he)":34.06,"MassiveScenarioClassification (hi)":48.77,"MassiveScenarioClassification (hu)":39.92,"MassiveScenarioClassification (hy)":38.09,"MassiveScenarioClassification (id)":45.08,"MassiveScenarioClassification (is)":36.55,"MassiveScenarioClassification (it)":44.38,"MassiveScenarioClassification (ja)":57.02,"MassiveScenarioClassification (jv)":35.51,"MassiveScenarioClassification (ka)":33.41,"MassiveScenarioClassification (km)":30.9,"MassiveScenarioClassification (kn)":26.83,"MassiveScenarioClassification (ko)":49.52,"MassiveScenarioClassification (lv)":34.02,"MassiveScenarioClassification (ml)":34.55,"MassiveScenarioClassification (mn)":34.14,"MassiveScenarioClassification (ms)":42.71,"MassiveScenarioClassification (my)":31.0,"MassiveScenarioClassification 
(nl)":51.44,"MassiveScenarioClassification (pt)":45.9,"MassiveScenarioClassification (ro)":45.01,"MassiveScenarioClassification (ru)":48.66,"MassiveScenarioClassification (sl)":38.34,"MassiveScenarioClassification (sq)":44.78,"MassiveScenarioClassification (sw)":36.02,"MassiveScenarioClassification (ta)":37.81,"MassiveScenarioClassification (te)":34.6,"MassiveScenarioClassification (th)":57.38,"MassiveScenarioClassification (tl)":39.36,"MassiveScenarioClassification (tr)":36.16,"MassiveScenarioClassification (ur)":36.43,"MassiveScenarioClassification (vi)":47.04,"MassiveScenarioClassification (zh-TW)":71.96}
-{"index":21,"Rank":49,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":42.04,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":25.8,"MassiveIntentClassification (am)":3.34,"MassiveIntentClassification (ar)":6.49,"MassiveIntentClassification (az)":24.77,"MassiveIntentClassification (bn)":4.3,"MassiveIntentClassification (cy)":26.39,"MassiveIntentClassification (de)":28.09,"MassiveIntentClassification (el)":19.24,"MassiveIntentClassification (es)":30.62,"MassiveIntentClassification (fa)":7.21,"MassiveIntentClassification (fi)":27.21,"MassiveIntentClassification (fr)":32.64,"MassiveIntentClassification (he)":2.66,"MassiveIntentClassification (hi)":4.59,"MassiveIntentClassification (hu)":25.65,"MassiveIntentClassification (hy)":4.86,"MassiveIntentClassification (id)":29.81,"MassiveIntentClassification (is)":23.53,"MassiveIntentClassification (it)":34.47,"MassiveIntentClassification (ja)":39.4,"MassiveIntentClassification (jv)":28.75,"MassiveIntentClassification (ka)":4.34,"MassiveIntentClassification (km)":6.1,"MassiveIntentClassification (kn)":4.46,"MassiveIntentClassification (ko)":14.16,"MassiveIntentClassification (lv)":29.86,"MassiveIntentClassification (ml)":3.69,"MassiveIntentClassification (mn)":7.86,"MassiveIntentClassification (ms)":28.05,"MassiveIntentClassification 
(my)":6.98,"MassiveIntentClassification (nl)":32.92,"MassiveIntentClassification (pt)":33.53,"MassiveIntentClassification (ro)":31.32,"MassiveIntentClassification (ru)":11.27,"MassiveIntentClassification (sl)":27.94,"MassiveIntentClassification (sq)":32.9,"MassiveIntentClassification (sw)":29.4,"MassiveIntentClassification (ta)":3.33,"MassiveIntentClassification (te)":3.46,"MassiveIntentClassification (th)":12.98,"MassiveIntentClassification (tl)":30.73,"MassiveIntentClassification (tr)":23.57,"MassiveIntentClassification (ur)":4.98,"MassiveIntentClassification (vi)":21.89,"MassiveIntentClassification (zh-TW)":65.53,"MassiveScenarioClassification (af)":31.55,"MassiveScenarioClassification (am)":7.49,"MassiveScenarioClassification (ar)":15.0,"MassiveScenarioClassification (az)":29.13,"MassiveScenarioClassification (bn)":9.24,"MassiveScenarioClassification (cy)":29.72,"MassiveScenarioClassification (de)":34.68,"MassiveScenarioClassification (el)":28.83,"MassiveScenarioClassification (es)":35.97,"MassiveScenarioClassification (fa)":11.12,"MassiveScenarioClassification (fi)":28.61,"MassiveScenarioClassification (fr)":40.66,"MassiveScenarioClassification (he)":9.01,"MassiveScenarioClassification (hi)":9.92,"MassiveScenarioClassification (hu)":32.07,"MassiveScenarioClassification (hy)":8.44,"MassiveScenarioClassification (id)":34.9,"MassiveScenarioClassification (is)":30.95,"MassiveScenarioClassification (it)":41.06,"MassiveScenarioClassification (ja)":48.73,"MassiveScenarioClassification (jv)":35.09,"MassiveScenarioClassification (ka)":9.29,"MassiveScenarioClassification (km)":11.19,"MassiveScenarioClassification (kn)":10.1,"MassiveScenarioClassification (ko)":19.2,"MassiveScenarioClassification (lv)":32.49,"MassiveScenarioClassification (ml)":6.37,"MassiveScenarioClassification (mn)":13.08,"MassiveScenarioClassification (ms)":39.18,"MassiveScenarioClassification (my)":12.25,"MassiveScenarioClassification (nl)":38.17,"MassiveScenarioClassification 
(pt)":40.01,"MassiveScenarioClassification (ro)":39.25,"MassiveScenarioClassification (ru)":16.71,"MassiveScenarioClassification (sl)":33.94,"MassiveScenarioClassification (sq)":40.4,"MassiveScenarioClassification (sw)":37.14,"MassiveScenarioClassification (ta)":8.21,"MassiveScenarioClassification (te)":7.97,"MassiveScenarioClassification (th)":21.56,"MassiveScenarioClassification (tl)":36.7,"MassiveScenarioClassification (tr)":28.8,"MassiveScenarioClassification (ur)":10.46,"MassiveScenarioClassification (vi)":27.72,"MassiveScenarioClassification (zh-TW)":71.52}
-{"index":22,"Rank":50,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":43.32,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":28.81,"MassiveIntentClassification (am)":3.04,"MassiveIntentClassification (ar)":6.75,"MassiveIntentClassification (az)":27.0,"MassiveIntentClassification (bn)":3.24,"MassiveIntentClassification (cy)":31.18,"MassiveIntentClassification (de)":30.65,"MassiveIntentClassification (el)":18.17,"MassiveIntentClassification (es)":32.53,"MassiveIntentClassification (fa)":8.72,"MassiveIntentClassification (fi)":31.79,"MassiveIntentClassification (fr)":33.16,"MassiveIntentClassification (he)":3.03,"MassiveIntentClassification (hi)":3.61,"MassiveIntentClassification (hu)":30.47,"MassiveIntentClassification (hy)":5.35,"MassiveIntentClassification (id)":32.45,"MassiveIntentClassification (is)":30.12,"MassiveIntentClassification (it)":36.32,"MassiveIntentClassification (ja)":41.09,"MassiveIntentClassification (jv)":30.42,"MassiveIntentClassification (ka)":3.79,"MassiveIntentClassification (km)":6.79,"MassiveIntentClassification (kn)":3.86,"MassiveIntentClassification (ko)":8.82,"MassiveIntentClassification (lv)":30.23,"MassiveIntentClassification (ml)":2.93,"MassiveIntentClassification (mn)":12.61,"MassiveIntentClassification (ms)":30.66,"MassiveIntentClassification 
(my)":5.85,"MassiveIntentClassification (nl)":34.1,"MassiveIntentClassification (pt)":36.92,"MassiveIntentClassification (ro)":33.01,"MassiveIntentClassification (ru)":10.4,"MassiveIntentClassification (sl)":30.73,"MassiveIntentClassification (sq)":36.98,"MassiveIntentClassification (sw)":31.62,"MassiveIntentClassification (ta)":3.19,"MassiveIntentClassification (te)":2.59,"MassiveIntentClassification (th)":4.61,"MassiveIntentClassification (tl)":32.55,"MassiveIntentClassification (tr)":26.87,"MassiveIntentClassification (ur)":4.23,"MassiveIntentClassification (vi)":29.24,"MassiveIntentClassification (zh-TW)":65.49,"MassiveScenarioClassification (af)":35.41,"MassiveScenarioClassification (am)":9.05,"MassiveScenarioClassification (ar)":14.92,"MassiveScenarioClassification (az)":31.97,"MassiveScenarioClassification (bn)":9.15,"MassiveScenarioClassification (cy)":37.45,"MassiveScenarioClassification (de)":38.33,"MassiveScenarioClassification (el)":24.45,"MassiveScenarioClassification (es)":37.73,"MassiveScenarioClassification (fa)":11.84,"MassiveScenarioClassification (fi)":34.49,"MassiveScenarioClassification (fr)":40.92,"MassiveScenarioClassification (he)":7.64,"MassiveScenarioClassification (hi)":8.64,"MassiveScenarioClassification (hu)":37.25,"MassiveScenarioClassification (hy)":10.91,"MassiveScenarioClassification (id)":36.11,"MassiveScenarioClassification (is)":37.8,"MassiveScenarioClassification (it)":41.68,"MassiveScenarioClassification (ja)":48.38,"MassiveScenarioClassification (jv)":35.2,"MassiveScenarioClassification (ka)":9.9,"MassiveScenarioClassification (km)":12.75,"MassiveScenarioClassification (kn)":10.31,"MassiveScenarioClassification (ko)":14.52,"MassiveScenarioClassification (lv)":33.08,"MassiveScenarioClassification (ml)":7.44,"MassiveScenarioClassification (mn)":17.98,"MassiveScenarioClassification (ms)":37.93,"MassiveScenarioClassification (my)":11.73,"MassiveScenarioClassification (nl)":40.37,"MassiveScenarioClassification 
(pt)":41.83,"MassiveScenarioClassification (ro)":40.63,"MassiveScenarioClassification (ru)":18.96,"MassiveScenarioClassification (sl)":35.3,"MassiveScenarioClassification (sq)":41.96,"MassiveScenarioClassification (sw)":38.88,"MassiveScenarioClassification (ta)":8.51,"MassiveScenarioClassification (te)":7.35,"MassiveScenarioClassification (th)":10.1,"MassiveScenarioClassification (tl)":35.91,"MassiveScenarioClassification (tr)":32.08,"MassiveScenarioClassification (ur)":10.37,"MassiveScenarioClassification (vi)":33.91,"MassiveScenarioClassification (zh-TW)":71.0}
-{"index":23,"Rank":51,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":38.6,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":80.79,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":50.01,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":56.31,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification 
(nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":59.5,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification 
(sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":24,"Rank":52,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":41.89,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":86.23,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":61.07,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":62.94,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":67.29,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":25,"Rank":53,"Model":"Dmeta-embedding-zh<\/a>","Model Size (Million Parameters)":103,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":44.93,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":26,"Rank":54,"Model":"Dmeta-embedding-zh-small<\/a>","Model Size (Million Parameters)":74,"Memory Usage (GB, fp32)":0.28,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":44.88,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":27,"Rank":55,"Model":"sbert-chinese-general-v1<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":37.51,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":28,"Rank":56,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":38.25,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":29,"Rank":57,"Model":"IYun-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":49.67,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":30,"Rank":58,"Model":"mist-zh<\/a>","Model Size (Million Parameters)":51,"Memory Usage (GB, fp32)":0.19,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":39.32,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":31,"Rank":59,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":33.77,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":32,"Rank":60,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.38,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":63.65,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.87,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":37.28,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":44.5,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":33,"Rank":61,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.35,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":63.7,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.85,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":37.28,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":44.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":34,"Rank":62,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.39,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":63.63,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.86,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":37.3,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":44.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":35,"Rank":63,"Model":"gte-Qwen2-7B-instruct-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":55.53,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":53.98,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":96.69,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":87.47,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":81.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":86.64,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":36,"Rank":64,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":55.53,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":53.98,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":96.69,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":87.47,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":81.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification 
(nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":86.64,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification 
(sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":40,"Rank":65,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":40.35,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":86.83,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":59.13,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":64.99,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.72,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":41,"Rank":66,"Model":"bilingual-embedding-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":43.42,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":88.33,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":62.85,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":66.81,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":72.99,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":42,"Rank":67,"Model":"bilingual-embedding-large-8k<\/a>","Model Size (Million Parameters)":568,"Memory Usage (GB, fp32)":2.12,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":44.11,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":87.82,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":58.63,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":66.14,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":72.74,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":43,"Rank":68,"Model":"bilingual-embedding-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":36.48,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":84.19,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":58.35,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":64.57,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":69.04,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":44,"Rank":69,"Model":"jina-embeddings-v2-base-de-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":68.92,"AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":37.72,"AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":88.37,"MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":63.83,"MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":63.89,"MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification 
(nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":71.25,"MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification 
(sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":47,"Rank":70,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":5874,"Memory Usage (GB, fp32)":21.88,"Average":"","AmazonCounterfactualClassification (de)":66.64,"AmazonCounterfactualClassification (ja)":58.06,"AmazonReviewsClassification (de)":35.29,"AmazonReviewsClassification (es)":38.34,"AmazonReviewsClassification (fr)":37.84,"AmazonReviewsClassification (ja)":30.94,"AmazonReviewsClassification (zh)":33.75,"MTOPDomainClassification (de)":84.54,"MTOPDomainClassification (es)":86.46,"MTOPDomainClassification (fr)":81.32,"MTOPDomainClassification (hi)":58.23,"MTOPDomainClassification (th)":72.29,"MTOPIntentClassification (de)":60.52,"MTOPIntentClassification (es)":64.32,"MTOPIntentClassification (fr)":58.67,"MTOPIntentClassification (hi)":41.96,"MTOPIntentClassification (th)":55.28,"MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification 
(my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification 
(sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":48,"Rank":71,"Model":"Solon-embeddings-large-0.1<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":42.08,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":89.26,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":68.55,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":67.4,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.29,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":49,"Rank":72,"Model":"alime-embedding-large-zh<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.95,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":50,"Rank":73,"Model":"Conan-embedding-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":50.31,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":51,"Rank":74,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":34.79,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":85.52,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":63.12,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":59.41,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":65.29,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":52,"Rank":75,"Model":"EminEmbed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.54,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":53.18,"MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":62.93}
-{"index":53,"Rank":76,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":53.47,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":47.21,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":93.48,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":80.23,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":76.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification 
(nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":79.1,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification 
(sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":54,"Rank":77,"Model":"ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":67.16,"MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":71.93,"MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":56,"Rank":78,"Model":"acge_text_embedding<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":48.54,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":57,"Rank":79,"Model":"sgpt-bloom-1b7-nli<\/a>","Model Size (Million Parameters)":1722,"Memory Usage (GB, fp32)":6.42,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":39.29,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":37.63,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":83.8,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":63.36,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":64.58,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":69.6,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":60,"Rank":80,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":55.53,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":53.98,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":96.69,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":87.47,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":81.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification 
(nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":86.64,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification 
(sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":61,"Rank":81,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":36.03,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":77.1,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":43.44,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":51.59,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":61.28,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":62,"Rank":82,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":37.97,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":85.74,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":58.62,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":62.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":69.29,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":63,"Rank":83,"Model":"gte-Qwen2-1.5B-instruct-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":53.47,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":47.21,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":93.48,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":80.23,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":76.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification 
(nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":79.1,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification 
(sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":64,"Rank":84,"Model":"sft-bge-small<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":34.55,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":65,"Rank":85,"Model":"stella-large-zh-v3-1792d<\/a>","Model Size (Million Parameters)":325,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.32,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":66,"Rank":86,"Model":"stella-mrl-large-zh-v3.5-1792d<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.59,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":68,"Rank":87,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":24.9,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":25.55,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":9.49,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":6.98,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":11.41,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":69,"Rank":88,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":23.52,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":27.74,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":8.61,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":6.24,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":10.98,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":70,"Rank":89,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":22.45,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":24.27,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":9.79,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":16.41,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":22.72,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":71,"Rank":90,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.39,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":63.61,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.84,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":37.3,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":44.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":72,"Rank":91,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.02,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":64.49,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":39.4,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":38.01,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":43.63,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":73,"Rank":92,"Model":"zpoint_large_embedding_zh<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":49.72,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":74,"Rank":93,"Model":"stella-base-zh<\/a>","Model Size (Million Parameters)":51,"Memory Usage (GB, fp32)":0.19,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.25,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":75,"Rank":94,"Model":"stella-base-zh-v2<\/a>","Model Size (Million Parameters)":51,"Memory Usage (GB, fp32)":0.19,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":39.64,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":76,"Rank":95,"Model":"stella-base-zh-v3-1792d<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.34,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":77,"Rank":96,"Model":"stella-large-zh<\/a>","Model Size (Million Parameters)":163,"Memory Usage (GB, fp32)":0.61,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.57,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":78,"Rank":97,"Model":"stella-large-zh-v2<\/a>","Model Size (Million Parameters)":163,"Memory Usage (GB, fp32)":0.61,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.82,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":86,"Rank":98,"Model":"jina-embeddings-v2-base-de<\/a>","Model Size (Million Parameters)":161,"Memory Usage (GB, fp32)":0.6,"Average":"","AmazonCounterfactualClassification (de)":68.92,"AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":37.72,"AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":88.37,"MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":63.83,"MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":63.89,"MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":71.25,"MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":87,"Rank":99,"Model":"jina-embeddings-v2-base-es<\/a>","Model Size (Million Parameters)":161,"Memory Usage (GB, fp32)":0.6,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":38.68,"AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":89.89,"MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":68.76,"MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":66.93,"MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":71.23,"MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":88,"Rank":100,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":161,"Memory Usage (GB, fp32)":0.6,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":34.94,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":89,"Rank":101,"Model":"xiaobu-embedding<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.72,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":90,"Rank":102,"Model":"xiaobu-embedding-v2<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":50.07,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":91,"Rank":103,"Model":"lim_base_zh<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.67,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":92,"Rank":104,"Model":"bge-m3-custom-fr<\/a>","Model Size (Million Parameters)":568,"Memory Usage (GB, fp32)":2.12,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":42.33,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":90.39,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":60.88,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":66.7,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":74.58,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":93,"Rank":105,"Model":"sentence_croissant_alpha_v0.1<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":36.62,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":88.7,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":67.69,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.86,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.6,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":94,"Rank":106,"Model":"sentence_croissant_alpha_v0.2<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":36.48,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":88.96,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":67.76,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":66.3,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.7,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":95,"Rank":107,"Model":"sentence_croissant_alpha_v0.3<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":35.11,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":88.52,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":65.93,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":66.48,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification 
(nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification 
(sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":96,"Rank":108,"Model":"sentence_croissant_alpha_v0.4<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":34.62,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":86.97,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":62.59,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.67,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification 
(nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.61,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification 
(sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":97,"Rank":109,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":43.02,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":98,"Rank":110,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":44.44,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":99,"Rank":111,"Model":"m3e-ernie-xbase-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":47.79,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":100,"Rank":112,"Model":"mmarco-bert-base-italian-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":55.06,"MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":63.04,"MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":101,"Rank":113,"Model":"mmarco-sentence-flare-it<\/a>","Model Size (Million Parameters)":17,"Memory Usage (GB, fp32)":0.06,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":22.3,"MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":27.41,"MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":102,"Rank":114,"Model":"stsbm-sentence-flare-it<\/a>","Model Size (Million Parameters)":17,"Memory Usage (GB, fp32)":0.06,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":38.88,"MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":43.3,"MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":103,"Rank":115,"Model":"jina-embeddings-v2-base-es-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":38.68,"AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":89.89,"MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":68.76,"MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":66.93,"MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification 
(nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":71.23,"MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification 
(sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":104,"Rank":116,"Model":"piccolo-base-zh<\/a>","Model Size (Million Parameters)":51,"Memory Usage (GB, fp32)":0.19,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.24,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":105,"Rank":117,"Model":"piccolo-large-zh<\/a>","Model Size (Million Parameters)":163,"Memory Usage (GB, fp32)":0.61,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.33,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":106,"Rank":118,"Model":"piccolo-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":49.44,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":113,"Rank":119,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":27.05,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":72.97,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.18,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":42.64,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":49.92,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":114,"Rank":120,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","AmazonCounterfactualClassification (de)":68.35,"AmazonCounterfactualClassification (ja)":63.45,"AmazonReviewsClassification (de)":35.91,"AmazonReviewsClassification (es)":37.49,"AmazonReviewsClassification (fr)":35.3,"AmazonReviewsClassification (ja)":33.24,"AmazonReviewsClassification (zh)":35.26,"MTOPDomainClassification (de)":79.2,"MTOPDomainClassification (es)":83.04,"MTOPDomainClassification (fr)":78.63,"MTOPDomainClassification (hi)":81.36,"MTOPDomainClassification (th)":79.99,"MTOPIntentClassification (de)":54.23,"MTOPIntentClassification (es)":60.28,"MTOPIntentClassification (fr)":54.05,"MTOPIntentClassification (hi)":59.9,"MTOPIntentClassification (th)":61.96,"MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":57.52,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification 
(my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":64.52,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification 
(sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":115,"Rank":121,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","AmazonCounterfactualClassification (de)":69.95,"AmazonCounterfactualClassification (ja)":69.79,"AmazonReviewsClassification (de)":39.52,"AmazonReviewsClassification (es)":39.99,"AmazonReviewsClassification (fr)":39.0,"AmazonReviewsClassification (ja)":36.64,"AmazonReviewsClassification (zh)":37.74,"MTOPDomainClassification (de)":85.73,"MTOPDomainClassification (es)":86.96,"MTOPDomainClassification (fr)":81.21,"MTOPDomainClassification (hi)":84.76,"MTOPDomainClassification (th)":82.51,"MTOPIntentClassification (de)":61.27,"MTOPIntentClassification (es)":66.59,"MTOPIntentClassification (fr)":59.76,"MTOPIntentClassification (hi)":62.37,"MTOPIntentClassification (th)":64.8,"MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":61.88,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification 
(my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":67.9,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification 
(sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":119,"Rank":122,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":46.09,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":86.2,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":58.33,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.91,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":68.53,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":120,"Rank":123,"Model":"ZNV-Embedding<\/a>","Model Size (Million Parameters)":6061,"Memory Usage (GB, fp32)":22.58,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":29.75,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":121,"Rank":124,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":29.85,"MassiveIntentClassification (am)":2.13,"MassiveIntentClassification (ar)":3.41,"MassiveIntentClassification (az)":22.73,"MassiveIntentClassification (bn)":2.87,"MassiveIntentClassification (cy)":29.43,"MassiveIntentClassification (de)":30.85,"MassiveIntentClassification (el)":8.74,"MassiveIntentClassification (es)":30.63,"MassiveIntentClassification (fa)":3.16,"MassiveIntentClassification (fi)":30.4,"MassiveIntentClassification (fr)":30.84,"MassiveIntentClassification (he)":2.06,"MassiveIntentClassification (hi)":2.43,"MassiveIntentClassification (hu)":24.87,"MassiveIntentClassification (hy)":2.67,"MassiveIntentClassification (id)":32.7,"MassiveIntentClassification (is)":24.13,"MassiveIntentClassification (it)":34.58,"MassiveIntentClassification (ja)":5.99,"MassiveIntentClassification (jv)":27.6,"MassiveIntentClassification (ka)":2.14,"MassiveIntentClassification (km)":4.38,"MassiveIntentClassification (kn)":2.1,"MassiveIntentClassification (ko)":2.36,"MassiveIntentClassification (lv)":22.06,"MassiveIntentClassification (ml)":2.29,"MassiveIntentClassification (mn)":28.51,"MassiveIntentClassification (ms)":28.16,"MassiveIntentClassification 
(my)":3.97,"MassiveIntentClassification (nl)":30.51,"MassiveIntentClassification (pt)":33.85,"MassiveIntentClassification (ro)":30.47,"MassiveIntentClassification (ru)":58.06,"MassiveIntentClassification (sl)":29.64,"MassiveIntentClassification (sq)":31.7,"MassiveIntentClassification (sw)":27.52,"MassiveIntentClassification (ta)":1.38,"MassiveIntentClassification (te)":2.04,"MassiveIntentClassification (th)":3.79,"MassiveIntentClassification (tl)":31.44,"MassiveIntentClassification (tr)":26.22,"MassiveIntentClassification (ur)":2.55,"MassiveIntentClassification (vi)":23.1,"MassiveIntentClassification (zh-TW)":6.3,"MassiveScenarioClassification (af)":39.37,"MassiveScenarioClassification (am)":7.51,"MassiveScenarioClassification (ar)":11.36,"MassiveScenarioClassification (az)":29.62,"MassiveScenarioClassification (bn)":8.79,"MassiveScenarioClassification (cy)":38.93,"MassiveScenarioClassification (de)":40.66,"MassiveScenarioClassification (el)":16.44,"MassiveScenarioClassification (es)":36.28,"MassiveScenarioClassification (fa)":6.8,"MassiveScenarioClassification (fi)":34.5,"MassiveScenarioClassification (fr)":42.42,"MassiveScenarioClassification (he)":7.95,"MassiveScenarioClassification (hi)":7.51,"MassiveScenarioClassification (hu)":35.04,"MassiveScenarioClassification (hy)":8.53,"MassiveScenarioClassification (id)":39.6,"MassiveScenarioClassification (is)":32.61,"MassiveScenarioClassification (it)":41.2,"MassiveScenarioClassification (ja)":11.21,"MassiveScenarioClassification (jv)":36.25,"MassiveScenarioClassification (ka)":6.59,"MassiveScenarioClassification (km)":8.15,"MassiveScenarioClassification (kn)":8.05,"MassiveScenarioClassification (ko)":5.62,"MassiveScenarioClassification (lv)":28.47,"MassiveScenarioClassification (ml)":7.35,"MassiveScenarioClassification (mn)":33.48,"MassiveScenarioClassification (ms)":38.85,"MassiveScenarioClassification (my)":11.23,"MassiveScenarioClassification (nl)":38.92,"MassiveScenarioClassification 
(pt)":40.23,"MassiveScenarioClassification (ro)":39.78,"MassiveScenarioClassification (ru)":64.15,"MassiveScenarioClassification (sl)":35.34,"MassiveScenarioClassification (sq)":42.07,"MassiveScenarioClassification (sw)":35.33,"MassiveScenarioClassification (ta)":7.21,"MassiveScenarioClassification (te)":6.86,"MassiveScenarioClassification (th)":8.25,"MassiveScenarioClassification (tl)":38.17,"MassiveScenarioClassification (tr)":33.85,"MassiveScenarioClassification (ur)":8.74,"MassiveScenarioClassification (vi)":31.94,"MassiveScenarioClassification (zh-TW)":11.68}
-{"index":122,"Rank":125,"Model":"AGE_Hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":49.68,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":123,"Rank":126,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":34.12,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":125,"Rank":127,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":34.46,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":126,"Rank":128,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":161,"Memory Usage (GB, fp32)":0.6,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":34.94,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":128,"Rank":129,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":34.25,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":71.83,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":44.53,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":51.93,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":58.31,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":129,"Rank":130,"Model":"gte-base-zh<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":45.82,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":130,"Rank":131,"Model":"gte-large-zh<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":47.23,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":131,"Rank":132,"Model":"gte-small-zh<\/a>","Model Size (Million Parameters)":30,"Memory Usage (GB, fp32)":0.11,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":38.69,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":132,"Rank":133,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":21.96,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":133,"Rank":134,"Model":"gte-small-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":38.69,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":134,"Rank":135,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":33.51,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":85.5,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":53.98,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":61.19,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification 
(nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":70.22,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification 
(sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":135,"Rank":136,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":35.09,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":88.19,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":63.64,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.8,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification 
(nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":73.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification 
(sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":136,"Rank":137,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":26.75,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":43.83,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":19.38,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":13.58,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":23.21,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":137,"Rank":138,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":26.62,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":36.77,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":15.37,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":15.82,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":23.92,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":138,"Rank":139,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":43.51,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":85.33,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":59.01,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.15,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":69.94,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":139,"Rank":140,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":43.76,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":38.3,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":89.38,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":64.45,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.42,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.11,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
-{"index":140,"Rank":141,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":48.3,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":15,"Rank":1,"Model":"nb-bert-large<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":66.73,"MassiveIntentClassification (nb)":62.68,"MassiveScenarioClassification (nb)":67.44,"NoRecClassification":55.46,"NordicLangClassification":85.27,"NorwegianParliament":62.58,"ScalaNbClassification":66.97}
+{"index":14,"Rank":2,"Model":"nb-bert-base<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":63.94,"MassiveIntentClassification (nb)":60.67,"MassiveScenarioClassification (nb)":67.31,"NoRecClassification":51.32,"NordicLangClassification":84.69,"NorwegianParliament":57.41,"ScalaNbClassification":62.25}
+{"index":26,"Rank":3,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":63.64,"MassiveIntentClassification (nb)":59.83,"MassiveScenarioClassification (nb)":66.18,"NoRecClassification":62.76,"NordicLangClassification":82.29,"NorwegianParliament":60.36,"ScalaNbClassification":50.44}
+{"index":32,"Rank":4,"Model":"norbert3-base<\/a>","Model Size (Million Parameters)":131,"Memory Usage (GB, fp32)":0.49,"Average":61.75,"MassiveIntentClassification (nb)":54.2,"MassiveScenarioClassification (nb)":60.69,"NoRecClassification":53.4,"NordicLangClassification":82.67,"NorwegianParliament":59.33,"ScalaNbClassification":60.19}
+{"index":25,"Rank":5,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":61.63,"MassiveIntentClassification (nb)":59.83,"MassiveScenarioClassification (nb)":66.18,"NoRecClassification":57.58,"NordicLangClassification":75.94,"NorwegianParliament":59.94,"ScalaNbClassification":50.32}
+{"index":33,"Rank":6,"Model":"norbert3-large<\/a>","Model Size (Million Parameters)":368,"Memory Usage (GB, fp32)":1.37,"Average":60.34,"MassiveIntentClassification (nb)":47.42,"MassiveScenarioClassification (nb)":54.25,"NoRecClassification":50.46,"NordicLangClassification":84.25,"NorwegianParliament":58.85,"ScalaNbClassification":66.79}
+{"index":28,"Rank":7,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":58.86,"MassiveIntentClassification (nb)":53.96,"MassiveScenarioClassification (nb)":59.9,"NoRecClassification":53.96,"NordicLangClassification":75.15,"NorwegianParliament":60.15,"ScalaNbClassification":50.06}
+{"index":19,"Rank":8,"Model":"dfm-encoder-large-v1<\/a>","Model Size (Million Parameters)":355,"Memory Usage (GB, fp32)":1.32,"Average":58.46,"MassiveIntentClassification (nb)":52.49,"MassiveScenarioClassification (nb)":54.59,"NoRecClassification":48.3,"NordicLangClassification":77.68,"NorwegianParliament":58.78,"ScalaNbClassification":58.95}
+{"index":50,"Rank":9,"Model":"DanskBERT<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":55.0,"MassiveIntentClassification (nb)":45.38,"MassiveScenarioClassification (nb)":47.55,"NoRecClassification":46.06,"NordicLangClassification":74.25,"NorwegianParliament":56.79,"ScalaNbClassification":59.99}
+{"index":20,"Rank":10,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.92,"MassiveIntentClassification (nb)":59.9,"MassiveScenarioClassification (nb)":65.81,"NoRecClassification":48.25,"NordicLangClassification":48.4,"NorwegianParliament":55.99,"ScalaNbClassification":51.18}
+{"index":51,"Rank":11,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":54.34,"MassiveIntentClassification (nb)":40.46,"MassiveScenarioClassification (nb)":44.83,"NoRecClassification":46.28,"NordicLangClassification":79.39,"NorwegianParliament":56.75,"ScalaNbClassification":58.33}
+{"index":21,"Rank":12,"Model":"e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":50.14,"MassiveIntentClassification (nb)":41.57,"MassiveScenarioClassification (nb)":50.33,"NoRecClassification":42.0,"NordicLangClassification":59.34,"NorwegianParliament":57.42,"ScalaNbClassification":50.18}
+{"index":22,"Rank":13,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":50.01,"MassiveIntentClassification (nb)":40.63,"MassiveScenarioClassification (nb)":51.91,"NoRecClassification":41.83,"NordicLangClassification":58.3,"NorwegianParliament":57.26,"ScalaNbClassification":50.13}
+{"index":8,"Rank":14,"Model":"sentence-bert-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":48.88,"MassiveIntentClassification (nb)":42.74,"MassiveScenarioClassification (nb)":49.49,"NoRecClassification":43.53,"NordicLangClassification":51.45,"NorwegianParliament":55.74,"ScalaNbClassification":50.34}
+{"index":24,"Rank":15,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":48.46,"MassiveIntentClassification (nb)":40.25,"MassiveScenarioClassification (nb)":48.58,"NoRecClassification":41.84,"NordicLangClassification":53.47,"NorwegianParliament":56.57,"ScalaNbClassification":50.03}
+{"index":6,"Rank":16,"Model":"bert-base-swedish-cased<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":48.18,"MassiveIntentClassification (nb)":35.75,"MassiveScenarioClassification (nb)":35.76,"NoRecClassification":43.91,"NordicLangClassification":62.45,"NorwegianParliament":57.56,"ScalaNbClassification":53.63}
+{"index":36,"Rank":17,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":47.28,"MassiveIntentClassification (nb)":39.34,"MassiveScenarioClassification (nb)":44.67,"NoRecClassification":40.02,"NordicLangClassification":54.71,"NorwegianParliament":54.8,"ScalaNbClassification":50.17}
+{"index":31,"Rank":18,"Model":"electra-small-nordic<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":47.28,"MassiveIntentClassification (nb)":24.6,"MassiveScenarioClassification (nb)":27.3,"NoRecClassification":45.44,"NordicLangClassification":57.82,"NorwegianParliament":53.25,"ScalaNbClassification":75.28}
+{"index":7,"Rank":19,"Model":"electra-small-swedish-cased-discriminator<\/a>","Model Size (Million Parameters)":16,"Memory Usage (GB, fp32)":0.06,"Average":34.34,"MassiveIntentClassification (nb)":5.66,"MassiveScenarioClassification (nb)":11.26,"NoRecClassification":39.72,"NordicLangClassification":44.53,"NorwegianParliament":52.44,"ScalaNbClassification":52.41}
+{"index":0,"Rank":20,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":"","MassiveIntentClassification (nb)":37.74,"MassiveScenarioClassification (nb)":43.9,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":1,"Rank":21,"Model":"gte-multilingual-base<\/a>","Model Size (Million Parameters)":305,"Memory Usage (GB, fp32)":1.14,"Average":"","MassiveIntentClassification (nb)":63.74,"MassiveScenarioClassification (nb)":71.5,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":2,"Rank":22,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":36.89,"MassiveScenarioClassification (nb)":44.27,"NoRecClassification":43.53,"NordicLangClassification":"","NorwegianParliament":54.9,"ScalaNbClassification":""}
+{"index":3,"Rank":23,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":28.65,"MassiveScenarioClassification (nb)":35.24,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":4,"Rank":24,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":31.07,"MassiveScenarioClassification (nb)":38.02,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":5,"Rank":25,"Model":"e5-dansk-test-0.1<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NordicLangClassification":61.0,"NorwegianParliament":"","ScalaNbClassification":""}
+{"index":9,"Rank":26,"Model":"multilingual-e5-large-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":69.88,"MassiveScenarioClassification (nb)":74.84,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":10,"Rank":27,"Model":"multilingual-e5-large-instruct-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":71.66,"MassiveScenarioClassification (nb)":77.21,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":11,"Rank":28,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","MassiveIntentClassification (nb)":39.67,"MassiveScenarioClassification (nb)":50.89,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":12,"Rank":29,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","MassiveIntentClassification (nb)":39.82,"MassiveScenarioClassification (nb)":39.02,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":13,"Rank":30,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","MassiveIntentClassification (nb)":39.48,"MassiveScenarioClassification (nb)":40.47,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":16,"Rank":31,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":54.41,"MassiveScenarioClassification (nb)":64.64,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":17,"Rank":32,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":7068,"Memory Usage (GB, fp32)":26.33,"Average":"","MassiveIntentClassification (nb)":49.41,"MassiveScenarioClassification (nb)":51.8,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":18,"Rank":33,"Model":"slx-v0.1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.08,"Average":"","MassiveIntentClassification (nb)":38.18,"MassiveScenarioClassification (nb)":43.39,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":23,"Rank":34,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","MassiveIntentClassification (nb)":70.93,"MassiveScenarioClassification (nb)":75.7,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":27,"Rank":35,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","MassiveIntentClassification (nb)":71.66,"MassiveScenarioClassification (nb)":77.21,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":29,"Rank":36,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":1065,"Memory Usage (GB, fp32)":3.97,"Average":"","MassiveIntentClassification (nb)":46.18,"MassiveScenarioClassification (nb)":50.32,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":30,"Rank":37,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":559,"Memory Usage (GB, fp32)":2.08,"Average":"","MassiveIntentClassification (nb)":44.12,"MassiveScenarioClassification (nb)":46.79,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":34,"Rank":38,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","MassiveIntentClassification (nb)":57.91,"MassiveScenarioClassification (nb)":64.29,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":35,"Rank":39,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","MassiveIntentClassification (nb)":41.91,"MassiveScenarioClassification (nb)":47.36,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":37,"Rank":40,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":38,"Rank":41,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":34.75,"MassiveScenarioClassification (nb)":35.71,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":39,"Rank":42,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","MassiveIntentClassification (nb)":46.01,"MassiveScenarioClassification (nb)":54.98,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":40,"Rank":43,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","MassiveIntentClassification (nb)":43.78,"MassiveScenarioClassification (nb)":52.71,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":41,"Rank":44,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","MassiveIntentClassification (nb)":45.87,"MassiveScenarioClassification (nb)":54.44,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":42,"Rank":45,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":43,"Rank":46,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","MassiveIntentClassification (nb)":"","MassiveScenarioClassification (nb)":"","NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":44,"Rank":47,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","MassiveIntentClassification (nb)":38.53,"MassiveScenarioClassification (nb)":46.6,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":45,"Rank":48,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","MassiveIntentClassification (nb)":41.29,"MassiveScenarioClassification (nb)":49.92,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":46,"Rank":49,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":"","MassiveIntentClassification (nb)":45.91,"MassiveScenarioClassification (nb)":53.43,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":47,"Rank":50,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","MassiveIntentClassification (nb)":31.49,"MassiveScenarioClassification (nb)":38.05,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":48,"Rank":51,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","MassiveIntentClassification (nb)":54.64,"MassiveScenarioClassification (nb)":60.26,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
+{"index":49,"Rank":52,"Model":"multilingual-e5-large-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","MassiveIntentClassification (nb)":69.88,"MassiveScenarioClassification (nb)":74.84,"NoRecClassification":"","NordicLangClassification":"","NorwegianParliament":"","ScalaNbClassification":""}
diff --git a/all_data_tasks/41/default.jsonl b/all_data_tasks/41/default.jsonl
index 94e411b825afb3049383ce287ba8614b1c7c4604..58eba42543543dd20857eb5b82bf7bcafac3b11c 100644
--- a/all_data_tasks/41/default.jsonl
+++ b/all_data_tasks/41/default.jsonl
@@ -1,288 +1,141 @@
-{"index":21,"Rank":1,"Model":"gte-multilingual-base<\/a>","Model Size (Million Parameters)":305,"Memory Usage (GB, fp32)":1.14,"Average":73.41,"STS17 (ar-ar)":78.82,"STS17 (en-ar)":76.44,"STS17 (en-de)":84.71,"STS17 (en-tr)":76.36,"STS17 (es-en)":83.85,"STS17 (es-es)":87.04,"STS17 (fr-en)":84.43,"STS17 (it-en)":83.85,"STS17 (ko-ko)":81.59,"STS17 (nl-en)":82.46,"STS22 (ar)":58.55,"STS22 (de)":60.89,"STS22 (de-en)":62.28,"STS22 (de-fr)":56.01,"STS22 (de-pl)":53.64,"STS22 (es)":72.02,"STS22 (es-en)":79.64,"STS22 (es-it)":74.83,"STS22 (fr)":81.03,"STS22 (fr-pl)":73.25,"STS22 (it)":78.28,"STS22 (pl)":42.39,"STS22 (pl-en)":78.29,"STS22 (ru)":66.49,"STS22 (tr)":65.54,"STS22 (zh-en)":72.93,"STSBenchmark":86.46}
-{"index":164,"Rank":2,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":73.17,"STS17 (ar-ar)":81.87,"STS17 (en-ar)":77.93,"STS17 (en-de)":87.3,"STS17 (en-tr)":72.56,"STS17 (es-en)":88.24,"STS17 (es-es)":87.46,"STS17 (fr-en)":88.06,"STS17 (it-en)":89.68,"STS17 (ko-ko)":83.69,"STS17 (nl-en)":88.25,"STS22 (ar)":54.12,"STS22 (de)":49.12,"STS22 (de-en)":60.92,"STS22 (de-fr)":61.39,"STS22 (de-pl)":54.47,"STS22 (es)":67.0,"STS22 (es-en)":75.84,"STS22 (es-it)":75.04,"STS22 (fr)":69.82,"STS22 (fr-pl)":84.52,"STS22 (it)":75.87,"STS22 (pl)":39.21,"STS22 (pl-en)":73.18,"STS22 (ru)":60.83,"STS22 (tr)":68.72,"STS22 (zh-en)":71.88,"STSBenchmark":88.6}
-{"index":56,"Rank":3,"Model":"multilingual-e5-large-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.02,"STS17 (ar-ar)":77.88,"STS17 (en-ar)":75.06,"STS17 (en-de)":86.16,"STS17 (en-tr)":71.23,"STS17 (es-en)":80.75,"STS17 (es-es)":86.74,"STS17 (fr-en)":85.62,"STS17 (it-en)":84.54,"STS17 (ko-ko)":82.27,"STS17 (nl-en)":85.28,"STS22 (ar)":56.99,"STS22 (de)":56.59,"STS22 (de-en)":56.6,"STS22 (de-fr)":67.79,"STS22 (de-pl)":49.58,"STS22 (es)":64.58,"STS22 (es-en)":72.52,"STS22 (es-it)":68.93,"STS22 (fr)":76.79,"STS22 (fr-pl)":50.71,"STS22 (it)":76.98,"STS22 (pl)":34.68,"STS22 (pl-en)":65.54,"STS22 (ru)":59.9,"STS22 (tr)":63.55,"STS22 (zh-en)":66.0,"STSBenchmark":87.29}
-{"index":254,"Rank":4,"Model":"multilingual-e5-large-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.02,"STS17 (ar-ar)":77.88,"STS17 (en-ar)":75.06,"STS17 (en-de)":86.16,"STS17 (en-tr)":71.23,"STS17 (es-en)":80.75,"STS17 (es-es)":86.74,"STS17 (fr-en)":85.62,"STS17 (it-en)":84.54,"STS17 (ko-ko)":82.27,"STS17 (nl-en)":85.28,"STS22 (ar)":56.99,"STS22 (de)":56.59,"STS22 (de-en)":56.6,"STS22 (de-fr)":67.79,"STS22 (de-pl)":49.58,"STS22 (es)":64.58,"STS22 (es-en)":72.52,"STS22 (es-it)":68.93,"STS22 (fr)":76.79,"STS22 (fr-pl)":50.71,"STS22 (it)":76.98,"STS22 (pl)":34.68,"STS22 (pl-en)":65.54,"STS22 (ru)":59.9,"STS22 (tr)":63.55,"STS22 (zh-en)":66.0,"STSBenchmark":87.29}
-{"index":168,"Rank":5,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":70.02,"STS17 (ar-ar)":77.88,"STS17 (en-ar)":75.06,"STS17 (en-de)":86.16,"STS17 (en-tr)":71.23,"STS17 (es-en)":80.75,"STS17 (es-es)":86.74,"STS17 (fr-en)":85.62,"STS17 (it-en)":84.54,"STS17 (ko-ko)":82.27,"STS17 (nl-en)":85.28,"STS22 (ar)":56.99,"STS22 (de)":56.59,"STS22 (de-en)":56.6,"STS22 (de-fr)":67.79,"STS22 (de-pl)":49.58,"STS22 (es)":64.58,"STS22 (es-en)":72.52,"STS22 (es-it)":68.93,"STS22 (fr)":76.79,"STS22 (fr-pl)":50.71,"STS22 (it)":76.98,"STS22 (pl)":34.66,"STS22 (pl-en)":65.54,"STS22 (ru)":59.9,"STS22 (tr)":63.55,"STS22 (zh-en)":66.0,"STSBenchmark":87.29}
-{"index":120,"Rank":6,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.85,"STS17 (ar-ar)":79.38,"STS17 (en-ar)":58.76,"STS17 (en-de)":76.13,"STS17 (en-tr)":55.53,"STS17 (es-en)":72.26,"STS17 (es-es)":85.06,"STS17 (fr-en)":75.63,"STS17 (it-en)":71.36,"STS17 (ko-ko)":80.79,"STS17 (nl-en)":71.99,"STS22 (ar)":57.44,"STS22 (de)":60.12,"STS22 (de-en)":53.36,"STS22 (de-fr)":58.25,"STS22 (de-pl)":48.47,"STS22 (es)":68.57,"STS22 (es-en)":77.41,"STS22 (es-it)":74.69,"STS22 (fr)":81.47,"STS22 (fr-pl)":73.25,"STS22 (it)":79.28,"STS22 (pl)":42.08,"STS22 (pl-en)":77.5,"STS22 (ru)":61.71,"STS22 (tr)":66.62,"STS22 (zh-en)":69.87,"STSBenchmark":81.95}
-{"index":167,"Rank":7,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":68.84,"STS17 (ar-ar)":74.52,"STS17 (en-ar)":71.27,"STS17 (en-de)":82.09,"STS17 (en-tr)":63.33,"STS17 (es-en)":76.5,"STS17 (es-es)":86.74,"STS17 (fr-en)":80.18,"STS17 (it-en)":80.15,"STS17 (ko-ko)":79.95,"STS17 (nl-en)":79.25,"STS22 (ar)":57.87,"STS22 (de)":55.95,"STS22 (de-en)":54.93,"STS22 (de-fr)":59.47,"STS22 (de-pl)":39.35,"STS22 (es)":66.58,"STS22 (es-en)":73.99,"STS22 (es-it)":66.46,"STS22 (fr)":74.8,"STS22 (fr-pl)":73.25,"STS22 (it)":77.76,"STS22 (pl)":34.07,"STS22 (pl-en)":70.37,"STS22 (ru)":60.66,"STS22 (tr)":63.7,"STS22 (zh-en)":69.92,"STSBenchmark":85.64}
-{"index":170,"Rank":8,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":67.08,"STS17 (ar-ar)":73.03,"STS17 (en-ar)":57.41,"STS17 (en-de)":77.24,"STS17 (en-tr)":55.97,"STS17 (es-en)":72.44,"STS17 (es-es)":84.84,"STS17 (fr-en)":72.29,"STS17 (it-en)":77.33,"STS17 (ko-ko)":78.87,"STS17 (nl-en)":75.38,"STS22 (ar)":56.65,"STS22 (de)":53.45,"STS22 (de-en)":56.49,"STS22 (de-fr)":60.57,"STS22 (de-pl)":28.24,"STS22 (es)":66.88,"STS22 (es-en)":74.57,"STS22 (es-it)":71.81,"STS22 (fr)":76.58,"STS22 (fr-pl)":84.52,"STS22 (it)":76.53,"STS22 (pl)":35.8,"STS22 (pl-en)":72.69,"STS22 (ru)":59.9,"STS22 (tr)":63.71,"STS22 (zh-en)":63.74,"STSBenchmark":84.11}
-{"index":231,"Rank":9,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":66.01,"STS17 (ar-ar)":69.07,"STS17 (en-ar)":74.51,"STS17 (en-de)":73.85,"STS17 (en-tr)":72.07,"STS17 (es-en)":65.71,"STS17 (es-es)":80.83,"STS17 (fr-en)":76.98,"STS17 (it-en)":76.99,"STS17 (ko-ko)":71.32,"STS17 (nl-en)":75.22,"STS22 (ar)":57.67,"STS22 (de)":48.58,"STS22 (de-en)":50.14,"STS22 (de-fr)":53.28,"STS22 (de-pl)":58.69,"STS22 (es)":63.18,"STS22 (es-en)":71.86,"STS22 (es-it)":69.69,"STS22 (fr)":77.95,"STS22 (fr-pl)":61.98,"STS22 (it)":72.22,"STS22 (pl)":39.28,"STS22 (pl-en)":69.41,"STS22 (ru)":57.49,"STS22 (tr)":58.15,"STS22 (zh-en)":64.02,"STSBenchmark":72.25}
-{"index":253,"Rank":10,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":65.66,"STS17 (ar-ar)":78.03,"STS17 (en-ar)":78.6,"STS17 (en-de)":81.48,"STS17 (en-tr)":76.34,"STS17 (es-en)":81.81,"STS17 (es-es)":87.91,"STS17 (fr-en)":78.06,"STS17 (it-en)":80.98,"STS17 (ko-ko)":68.24,"STS17 (nl-en)":81.0,"STS22 (ar)":54.51,"STS22 (de)":46.89,"STS22 (de-en)":45.0,"STS22 (de-fr)":49.43,"STS22 (de-pl)":39.32,"STS22 (es)":58.94,"STS22 (es-en)":67.71,"STS22 (es-it)":50.79,"STS22 (fr)":74.1,"STS22 (fr-pl)":73.25,"STS22 (it)":65.86,"STS22 (pl)":34.81,"STS22 (pl-en)":60.17,"STS22 (ru)":54.51,"STS22 (tr)":57.29,"STS22 (zh-en)":61.29,"STSBenchmark":86.45}
-{"index":65,"Rank":11,"Model":"paraphrase-multilingual-mpnet-base-v2-KE_Sieve<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.52,"STS17 (ar-ar)":76.04,"STS17 (en-ar)":77.6,"STS17 (en-de)":70.4,"STS17 (en-tr)":71.04,"STS17 (es-en)":81.59,"STS17 (es-es)":82.77,"STS17 (fr-en)":77.16,"STS17 (it-en)":81.52,"STS17 (ko-ko)":77.0,"STS17 (nl-en)":80.7,"STS22 (ar)":52.61,"STS22 (de)":41.84,"STS22 (de-en)":49.09,"STS22 (de-fr)":50.6,"STS22 (de-pl)":50.44,"STS22 (es)":57.23,"STS22 (es-en)":67.29,"STS22 (es-it)":57.93,"STS22 (fr)":72.79,"STS22 (fr-pl)":73.25,"STS22 (it)":64.17,"STS22 (pl)":36.37,"STS22 (pl-en)":67.72,"STS22 (ru)":53.35,"STS22 (tr)":52.71,"STS22 (zh-en)":64.45,"STSBenchmark":81.34}
-{"index":238,"Rank":12,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":65.43,"STS17 (ar-ar)":77.34,"STS17 (en-ar)":77.46,"STS17 (en-de)":80.24,"STS17 (en-tr)":74.34,"STS17 (es-en)":77.4,"STS17 (es-es)":83.71,"STS17 (fr-en)":79.28,"STS17 (it-en)":80.82,"STS17 (ko-ko)":76.4,"STS17 (nl-en)":80.51,"STS22 (ar)":49.04,"STS22 (de)":35.73,"STS22 (de-en)":47.51,"STS22 (de-fr)":60.76,"STS22 (de-pl)":36.09,"STS22 (es)":59.34,"STS22 (es-en)":68.96,"STS22 (es-it)":63.28,"STS22 (fr)":76.41,"STS22 (fr-pl)":61.98,"STS22 (it)":65.1,"STS22 (pl)":34.58,"STS22 (pl-en)":71.33,"STS22 (ru)":52.4,"STS22 (tr)":54.07,"STS22 (zh-en)":61.75,"STSBenchmark":80.75}
-{"index":173,"Rank":13,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.1,"STS17 (ar-ar)":81.13,"STS17 (en-ar)":79.64,"STS17 (en-de)":52.52,"STS17 (en-tr)":4.75,"STS17 (es-en)":85.41,"STS17 (es-es)":87.33,"STS17 (fr-en)":83.96,"STS17 (it-en)":45.62,"STS17 (ko-ko)":61.89,"STS17 (nl-en)":46.69,"STS22 (ar)":55.0,"STS22 (de)":37.51,"STS22 (de-en)":51.66,"STS22 (de-fr)":39.66,"STS22 (de-pl)":26.11,"STS22 (es)":59.79,"STS22 (es-en)":73.59,"STS22 (es-it)":67.83,"STS22 (fr)":77.1,"STS22 (fr-pl)":84.52,"STS22 (it)":68.87,"STS22 (pl)":27.98,"STS22 (pl-en)":60.77,"STS22 (ru)":43.14,"STS22 (tr)":42.33,"STS22 (zh-en)":65.01,"STSBenchmark":85.79}
-{"index":130,"Rank":14,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":7068,"Memory Usage (GB, fp32)":26.33,"Average":58.34,"STS17 (ar-ar)":76.42,"STS17 (en-ar)":78.07,"STS17 (en-de)":59.1,"STS17 (en-tr)":11.8,"STS17 (es-en)":78.22,"STS17 (es-es)":86.0,"STS17 (fr-en)":80.46,"STS17 (it-en)":51.58,"STS17 (ko-ko)":66.89,"STS17 (nl-en)":45.85,"STS22 (ar)":58.67,"STS22 (de)":30.05,"STS22 (de-en)":51.16,"STS22 (de-fr)":53.28,"STS22 (de-pl)":43.05,"STS22 (es)":65.41,"STS22 (es-en)":75.06,"STS22 (es-it)":65.5,"STS22 (fr)":80.38,"STS22 (fr-pl)":28.17,"STS22 (it)":65.65,"STS22 (pl)":31.13,"STS22 (pl-en)":53.31,"STS22 (ru)":43.36,"STS22 (tr)":47.14,"STS22 (zh-en)":68.45,"STSBenchmark":80.9}
-{"index":150,"Rank":15,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.22,"STS17 (ar-ar)":74.97,"STS17 (en-ar)":74.05,"STS17 (en-de)":74.95,"STS17 (en-tr)":77.18,"STS17 (es-en)":72.25,"STS17 (es-es)":80.65,"STS17 (fr-en)":77.2,"STS17 (it-en)":78.67,"STS17 (ko-ko)":66.14,"STS17 (nl-en)":78.14,"STS22 (ar)":40.25,"STS22 (de)":24.09,"STS22 (de-en)":34.28,"STS22 (de-fr)":41.29,"STS22 (de-pl)":25.81,"STS22 (es)":55.4,"STS22 (es-en)":57.82,"STS22 (es-it)":49.13,"STS22 (fr)":61.72,"STS22 (fr-pl)":61.98,"STS22 (it)":62.2,"STS22 (pl)":25.31,"STS22 (pl-en)":44.72,"STS22 (ru)":43.57,"STS22 (tr)":46.46,"STS22 (zh-en)":49.19,"STSBenchmark":67.39}
-{"index":11,"Rank":16,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":52.31,"STS17 (ar-ar)":67.47,"STS17 (en-ar)":65.05,"STS17 (en-de)":66.66,"STS17 (en-tr)":70.05,"STS17 (es-en)":55.3,"STS17 (es-es)":79.67,"STS17 (fr-en)":70.82,"STS17 (it-en)":70.98,"STS17 (ko-ko)":70.52,"STS17 (nl-en)":68.12,"STS22 (ar)":42.57,"STS22 (de)":25.69,"STS22 (de-en)":32.35,"STS22 (de-fr)":37.41,"STS22 (de-pl)":15.67,"STS22 (es)":54.92,"STS22 (es-en)":54.34,"STS22 (es-it)":42.21,"STS22 (fr)":58.61,"STS22 (fr-pl)":39.44,"STS22 (it)":60.31,"STS22 (pl)":18.34,"STS22 (pl-en)":53.63,"STS22 (ru)":39.24,"STS22 (tr)":36.97,"STS22 (zh-en)":46.19,"STSBenchmark":69.77}
-{"index":175,"Rank":17,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.04,"STS17 (ar-ar)":80.6,"STS17 (en-ar)":72.6,"STS17 (en-de)":40.34,"STS17 (en-tr)":6.79,"STS17 (es-en)":81.8,"STS17 (es-es)":85.65,"STS17 (fr-en)":79.94,"STS17 (it-en)":34.8,"STS17 (ko-ko)":57.28,"STS17 (nl-en)":33.58,"STS22 (ar)":54.82,"STS22 (de)":26.63,"STS22 (de-en)":49.55,"STS22 (de-fr)":22.36,"STS22 (de-pl)":35.32,"STS22 (es)":56.31,"STS22 (es-en)":71.03,"STS22 (es-it)":61.3,"STS22 (fr)":61.35,"STS22 (fr-pl)":73.25,"STS22 (it)":62.61,"STS22 (pl)":15.06,"STS22 (pl-en)":43.72,"STS22 (ru)":28.77,"STS22 (tr)":22.11,"STS22 (zh-en)":63.9,"STSBenchmark":83.63}
-{"index":249,"Rank":18,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":50.06,"STS17 (ar-ar)":11.13,"STS17 (en-ar)":-3.93,"STS17 (en-de)":79.04,"STS17 (en-tr)":13.61,"STS17 (es-en)":71.72,"STS17 (es-es)":83.42,"STS17 (fr-en)":71.38,"STS17 (it-en)":69.5,"STS17 (ko-ko)":9.61,"STS17 (nl-en)":66.12,"STS22 (ar)":29.6,"STS22 (de)":47.72,"STS22 (de-en)":49.64,"STS22 (de-fr)":62.21,"STS22 (de-pl)":34.34,"STS22 (es)":58.16,"STS22 (es-en)":69.15,"STS22 (es-it)":65.26,"STS22 (fr)":77.49,"STS22 (fr-pl)":50.71,"STS22 (it)":66.91,"STS22 (pl)":27.04,"STS22 (pl-en)":58.85,"STS22 (ru)":26.63,"STS22 (tr)":43.36,"STS22 (zh-en)":29.0,"STSBenchmark":83.93}
-{"index":241,"Rank":19,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":49.47,"STS17 (ar-ar)":9.06,"STS17 (en-ar)":-3.22,"STS17 (en-de)":70.38,"STS17 (en-tr)":17.17,"STS17 (es-en)":60.24,"STS17 (es-es)":81.93,"STS17 (fr-en)":62.17,"STS17 (it-en)":59.11,"STS17 (ko-ko)":8.9,"STS17 (nl-en)":56.91,"STS22 (ar)":37.66,"STS22 (de)":50.58,"STS22 (de-en)":53.63,"STS22 (de-fr)":55.72,"STS22 (de-pl)":27.99,"STS22 (es)":59.14,"STS22 (es-en)":69.99,"STS22 (es-it)":60.94,"STS22 (fr)":79.43,"STS22 (fr-pl)":61.98,"STS22 (it)":67.14,"STS22 (pl)":33.74,"STS22 (pl-en)":60.18,"STS22 (ru)":32.69,"STS22 (tr)":55.79,"STS22 (zh-en)":28.85,"STSBenchmark":77.65}
-{"index":58,"Rank":20,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":47.36,"STS17 (ar-ar)":46.8,"STS17 (en-ar)":-0.78,"STS17 (en-de)":47.5,"STS17 (en-tr)":4.18,"STS17 (es-en)":44.27,"STS17 (es-es)":79.22,"STS17 (fr-en)":47.15,"STS17 (it-en)":42.65,"STS17 (ko-ko)":39.79,"STS17 (nl-en)":36.6,"STS22 (ar)":25.06,"STS22 (de)":39.49,"STS22 (de-en)":54.22,"STS22 (de-fr)":48.91,"STS22 (de-pl)":33.04,"STS22 (es)":59.47,"STS22 (es-en)":66.65,"STS22 (es-it)":64.37,"STS22 (fr)":79.88,"STS22 (fr-pl)":39.44,"STS22 (it)":68.15,"STS22 (pl)":35.38,"STS22 (pl-en)":62.7,"STS22 (ru)":30.62,"STS22 (tr)":45.65,"STS22 (zh-en)":49.25,"STSBenchmark":88.96}
-{"index":240,"Rank":21,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":46.79,"STS17 (ar-ar)":10.19,"STS17 (en-ar)":-5.77,"STS17 (en-de)":67.43,"STS17 (en-tr)":8.75,"STS17 (es-en)":54.96,"STS17 (es-es)":82.74,"STS17 (fr-en)":60.5,"STS17 (it-en)":46.26,"STS17 (ko-ko)":8.96,"STS17 (nl-en)":47.48,"STS22 (ar)":34.97,"STS22 (de)":51.7,"STS22 (de-en)":48.76,"STS22 (de-fr)":57.5,"STS22 (de-pl)":32.76,"STS22 (es)":57.49,"STS22 (es-en)":67.76,"STS22 (es-it)":57.18,"STS22 (fr)":78.7,"STS22 (fr-pl)":61.98,"STS22 (it)":67.67,"STS22 (pl)":30.68,"STS22 (pl-en)":54.17,"STS22 (ru)":15.36,"STS22 (tr)":58.12,"STS22 (zh-en)":29.42,"STSBenchmark":77.6}
-{"index":248,"Rank":22,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":44.35,"STS17 (ar-ar)":10.75,"STS17 (en-ar)":-4.71,"STS17 (en-de)":73.62,"STS17 (en-tr)":-0.42,"STS17 (es-en)":62.62,"STS17 (es-es)":82.74,"STS17 (fr-en)":67.86,"STS17 (it-en)":51.86,"STS17 (ko-ko)":9.44,"STS17 (nl-en)":45.95,"STS22 (ar)":27.01,"STS22 (de)":43.73,"STS22 (de-en)":49.93,"STS22 (de-fr)":61.58,"STS22 (de-pl)":38.83,"STS22 (es)":57.68,"STS22 (es-en)":68.09,"STS22 (es-it)":61.58,"STS22 (fr)":75.01,"STS22 (fr-pl)":5.63,"STS22 (it)":62.01,"STS22 (pl)":25.0,"STS22 (pl-en)":51.72,"STS22 (ru)":14.21,"STS22 (tr)":47.3,"STS22 (zh-en)":23.1,"STSBenchmark":85.36}
-{"index":247,"Rank":23,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":41.17,"STS17 (ar-ar)":13.36,"STS17 (en-ar)":-5.65,"STS17 (en-de)":67.11,"STS17 (en-tr)":-0.02,"STS17 (es-en)":47.72,"STS17 (es-es)":79.94,"STS17 (fr-en)":56.61,"STS17 (it-en)":30.46,"STS17 (ko-ko)":10.06,"STS17 (nl-en)":36.46,"STS22 (ar)":31.2,"STS22 (de)":42.08,"STS22 (de-en)":46.9,"STS22 (de-fr)":55.04,"STS22 (de-pl)":33.94,"STS22 (es)":53.81,"STS22 (es-en)":65.19,"STS22 (es-it)":55.29,"STS22 (fr)":77.69,"STS22 (fr-pl)":28.17,"STS22 (it)":60.65,"STS22 (pl)":24.42,"STS22 (pl-en)":42.97,"STS22 (ru)":12.13,"STS22 (tr)":40.45,"STS22 (zh-en)":20.15,"STSBenchmark":85.52}
-{"index":233,"Rank":24,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":37.71,"STS17 (ar-ar)":50.89,"STS17 (en-ar)":-4.28,"STS17 (en-de)":35.82,"STS17 (en-tr)":4.5,"STS17 (es-en)":16.31,"STS17 (es-es)":76.12,"STS17 (fr-en)":37.09,"STS17 (it-en)":24.45,"STS17 (ko-ko)":43.39,"STS17 (nl-en)":29.0,"STS22 (ar)":22.64,"STS22 (de)":31.04,"STS22 (de-en)":44.04,"STS22 (de-fr)":30.07,"STS22 (de-pl)":4.93,"STS22 (es)":54.78,"STS22 (es-en)":53.42,"STS22 (es-it)":44.27,"STS22 (fr)":77.0,"STS22 (fr-pl)":50.71,"STS22 (it)":60.4,"STS22 (pl)":26.77,"STS22 (pl-en)":32.8,"STS22 (ru)":14.72,"STS22 (tr)":33.69,"STS22 (zh-en)":41.64,"STSBenchmark":82.03}
-{"index":82,"Rank":25,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":37.32,"STS17 (ar-ar)":55.62,"STS17 (en-ar)":8.21,"STS17 (en-de)":30.18,"STS17 (en-tr)":1.04,"STS17 (es-en)":28.78,"STS17 (es-es)":71.88,"STS17 (fr-en)":26.34,"STS17 (it-en)":20.73,"STS17 (ko-ko)":52.39,"STS17 (nl-en)":25.05,"STS22 (ar)":28.19,"STS22 (de)":21.99,"STS22 (de-en)":53.07,"STS22 (de-fr)":32.97,"STS22 (de-pl)":20.45,"STS22 (es)":49.81,"STS22 (es-en)":49.51,"STS22 (es-it)":45.78,"STS22 (fr)":67.66,"STS22 (fr-pl)":61.98,"STS22 (it)":48.25,"STS22 (pl)":23.31,"STS22 (pl-en)":36.8,"STS22 (ru)":9.07,"STS22 (tr)":34.66,"STS22 (zh-en)":28.68,"STSBenchmark":75.34}
-{"index":232,"Rank":26,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":34.57,"STS17 (ar-ar)":58.71,"STS17 (en-ar)":0.54,"STS17 (en-de)":27.54,"STS17 (en-tr)":0.43,"STS17 (es-en)":22.01,"STS17 (es-es)":78.37,"STS17 (fr-en)":30.7,"STS17 (it-en)":24.28,"STS17 (ko-ko)":43.37,"STS17 (nl-en)":24.51,"STS22 (ar)":17.54,"STS22 (de)":22.53,"STS22 (de-en)":42.86,"STS22 (de-fr)":43.52,"STS22 (de-pl)":1.63,"STS22 (es)":43.98,"STS22 (es-en)":53.99,"STS22 (es-it)":40.71,"STS22 (fr)":69.51,"STS22 (fr-pl)":16.9,"STS22 (it)":47.48,"STS22 (pl)":19.22,"STS22 (pl-en)":42.67,"STS22 (ru)":11.19,"STS22 (tr)":21.6,"STS22 (zh-en)":44.39,"STSBenchmark":83.09}
-{"index":83,"Rank":27,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":27.72,"STS17 (ar-ar)":54.16,"STS17 (en-ar)":1.72,"STS17 (en-de)":25.48,"STS17 (en-tr)":2.09,"STS17 (es-en)":21.93,"STS17 (es-es)":67.8,"STS17 (fr-en)":18.91,"STS17 (it-en)":16.39,"STS17 (ko-ko)":45.66,"STS17 (nl-en)":23.49,"STS22 (ar)":5.17,"STS22 (de)":11.0,"STS22 (de-en)":53.93,"STS22 (de-fr)":25.11,"STS22 (de-pl)":20.94,"STS22 (es)":43.05,"STS22 (es-en)":32.74,"STS22 (es-it)":35.99,"STS22 (fr)":54.56,"STS22 (fr-pl)":5.63,"STS22 (it)":33.68,"STS22 (pl)":14.91,"STS22 (pl-en)":20.54,"STS22 (ru)":3.36,"STS22 (tr)":3.82,"STS22 (zh-en)":26.71,"STSBenchmark":79.54}
-{"index":235,"Rank":28,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":25.1,"STS17 (ar-ar)":27.14,"STS17 (en-ar)":6.9,"STS17 (en-de)":11.59,"STS17 (en-tr)":6.46,"STS17 (es-en)":10.86,"STS17 (es-es)":55.45,"STS17 (fr-en)":16.02,"STS17 (it-en)":19.87,"STS17 (ko-ko)":8.08,"STS17 (nl-en)":24.92,"STS22 (ar)":19.57,"STS22 (de)":17.31,"STS22 (de-en)":26.03,"STS22 (de-fr)":10.26,"STS22 (de-pl)":16.94,"STS22 (es)":48.89,"STS22 (es-en)":51.79,"STS22 (es-it)":25.24,"STS22 (fr)":53.92,"STS22 (fr-pl)":39.44,"STS22 (it)":39.43,"STS22 (pl)":13.56,"STS22 (pl-en)":25.36,"STS22 (ru)":1.11,"STS22 (tr)":31.73,"STS22 (zh-en)":8.44,"STSBenchmark":61.26}
-{"index":237,"Rank":29,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":24.28,"STS17 (ar-ar)":13.78,"STS17 (en-ar)":9.08,"STS17 (en-de)":-3.11,"STS17 (en-tr)":-0.45,"STS17 (es-en)":-8.18,"STS17 (es-es)":48.23,"STS17 (fr-en)":5.81,"STS17 (it-en)":3.64,"STS17 (ko-ko)":2.54,"STS17 (nl-en)":0.44,"STS22 (ar)":32.42,"STS22 (de)":33.04,"STS22 (de-en)":28.65,"STS22 (de-fr)":14.77,"STS22 (de-pl)":11.21,"STS22 (es)":48.53,"STS22 (es-en)":26.97,"STS22 (es-it)":41.1,"STS22 (fr)":49.43,"STS22 (fr-pl)":39.44,"STS22 (it)":57.77,"STS22 (pl)":12.47,"STS22 (pl-en)":45.55,"STS22 (ru)":19.44,"STS22 (tr)":47.38,"STS22 (zh-en)":14.05,"STSBenchmark":61.55}
-{"index":0,"Rank":30,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.33}
-{"index":1,"Rank":31,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.99}
-{"index":2,"Rank":32,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":74.85}
-{"index":3,"Rank":33,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.74,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":4,"Rank":34,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":70.51,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":5,"Rank":35,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":79.99,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":6,"Rank":36,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.22}
-{"index":7,"Rank":37,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":83.75,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":8,"Rank":38,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.86}
-{"index":9,"Rank":39,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.56}
-{"index":10,"Rank":40,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.76,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":12,"Rank":41,"Model":"Arabic_text_embedding_for_sts<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","STS17 (ar-ar)":85.05,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":13,"Rank":42,"Model":"arabic_text_embedding_sts_arabertv02_arabicnlitriplet<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","STS17 (ar-ar)":84.96,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":14,"Rank":43,"Model":"llm2vec-croissant-mntp<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":15,"Rank":44,"Model":"llm2vec-occiglot-mntp<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":67.83,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":16,"Rank":45,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.35}
-{"index":17,"Rank":46,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":1776,"Memory Usage (GB, fp32)":6.62,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.84,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.83,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.38}
-{"index":18,"Rank":47,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.58,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":41.02,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.85}
-{"index":19,"Rank":48,"Model":"gte-base-en-v1.5<\/a>","Model Size (Million Parameters)":137,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.04}
-{"index":20,"Rank":49,"Model":"gte-large-en-v1.5<\/a>","Model Size (Million Parameters)":434,"Memory Usage (GB, fp32)":1.62,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.07}
-{"index":22,"Rank":50,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.42}
-{"index":23,"Rank":51,"Model":"bge-en-icl<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.42}
-{"index":24,"Rank":52,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.52}
-{"index":25,"Rank":53,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":26,"Rank":54,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":9242,"Memory Usage (GB, fp32)":34.43,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":83.28,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":42.79,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.25}
-{"index":27,"Rank":55,"Model":"bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.86}
-{"index":28,"Rank":56,"Model":"UniVaR-lambda-1<\/a>","Model Size (Million Parameters)":137,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.54}
-{"index":29,"Rank":57,"Model":"UniVaR-lambda-20<\/a>","Model Size (Million Parameters)":137,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.54}
-{"index":30,"Rank":58,"Model":"UniVaR-lambda-5<\/a>","Model Size (Million Parameters)":137,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.54}
-{"index":31,"Rank":59,"Model":"UniVaR-lambda-80<\/a>","Model Size (Million Parameters)":137,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.54}
-{"index":32,"Rank":60,"Model":"Cohere-embed-english-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.58}
-{"index":33,"Rank":61,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.55}
-{"index":34,"Rank":62,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.8,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.46}
-{"index":35,"Rank":63,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.76,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.79}
-{"index":36,"Rank":64,"Model":"sbert-chinese-general-v1<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":33.88,"STSBenchmark":""}
-{"index":37,"Rank":65,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":38,"Rank":66,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":39,"Rank":67,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":40,"Rank":68,"Model":"STS-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":0.98}
-{"index":41,"Rank":69,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":40.31,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":42,"Rank":70,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":40.4,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":43,"Rank":71,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":38.77,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":44,"Rank":72,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7242,"Memory Usage (GB, fp32)":26.98,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.64}
-{"index":45,"Rank":73,"Model":"GritLM-8x7B<\/a>","Model Size (Million Parameters)":46703,"Memory Usage (GB, fp32)":173.98,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.43}
-{"index":46,"Rank":74,"Model":"lodestone-base-4096-v1<\/a>","Model Size (Million Parameters)":69,"Memory Usage (GB, fp32)":0.26,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":75.57}
-{"index":47,"Rank":75,"Model":"2024-06-15_10-09-42<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":48,"Rank":76,"Model":"2024-06-17_21-37-12<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":49,"Rank":77,"Model":"2024-06-19_08-22-22<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":50,"Rank":78,"Model":"2024-06-19_10-03-38<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":51,"Rank":79,"Model":"2024-06-19_21-12-17<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":52,"Rank":80,"Model":"2024-06-19_22-23-38<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":53,"Rank":81,"Model":"e5-large-v2-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.74}
-{"index":54,"Rank":82,"Model":"gte-Qwen2-7B-instruct-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.58,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":41.02,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.85}
-{"index":55,"Rank":83,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.58,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":41.02,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.85}
-{"index":57,"Rank":84,"Model":"multilingual-e5-large-instruct-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.38}
-{"index":59,"Rank":85,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.14,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.44}
-{"index":60,"Rank":86,"Model":"bilingual-embedding-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":84.64,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.28}
-{"index":61,"Rank":87,"Model":"bilingual-embedding-large-8k<\/a>","Model Size (Million Parameters)":568,"Memory Usage (GB, fp32)":2.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.84,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":62,"Rank":88,"Model":"bilingual-embedding-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.57,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.08}
-{"index":63,"Rank":89,"Model":"Linq-Embed-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.81}
-{"index":64,"Rank":90,"Model":"jina-embeddings-v2-base-de-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":86.72,"STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":59.07,"STS22 (de-en)":55.97,"STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.14}
-{"index":66,"Rank":91,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.72}
-{"index":67,"Rank":92,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":78.32}
-{"index":68,"Rank":93,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.05}
-{"index":69,"Rank":94,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":77.44}
-{"index":70,"Rank":95,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.65}
-{"index":71,"Rank":96,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":80.42}
-{"index":72,"Rank":97,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.16}
-{"index":73,"Rank":98,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":73.36}
-{"index":74,"Rank":99,"Model":"Bulbasaur<\/a>","Model Size (Million Parameters)":17,"Memory Usage (GB, fp32)":0.06,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.34}
-{"index":75,"Rank":100,"Model":"Ivysaur<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.08,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.25}
-{"index":76,"Rank":101,"Model":"Squirtle<\/a>","Model Size (Million Parameters)":16,"Memory Usage (GB, fp32)":0.06,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.21}
-{"index":77,"Rank":102,"Model":"Venusaur<\/a>","Model Size (Million Parameters)":16,"Memory Usage (GB, fp32)":0.06,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":76.72}
-{"index":78,"Rank":103,"Model":"Wartortle<\/a>","Model Size (Million Parameters)":17,"Memory Usage (GB, fp32)":0.06,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":81.79}
-{"index":79,"Rank":104,"Model":"test24<\/a>","Model Size (Million Parameters)":17,"Memory Usage (GB, fp32)":0.06,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":78.93}
-{"index":80,"Rank":105,"Model":"test25<\/a>","Model Size (Million Parameters)":17,"Memory Usage (GB, fp32)":0.06,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":78.61}
-{"index":81,"Rank":106,"Model":"SGPT-1.3B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":1341,"Memory Usage (GB, fp32)":4.99,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":77.59}
-{"index":84,"Rank":107,"Model":"SGPT-2.7B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":2685,"Memory Usage (GB, fp32)":10.0,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.21}
-{"index":85,"Rank":108,"Model":"SGPT-5.8B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":5874,"Memory Usage (GB, fp32)":21.88,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":81.39}
-{"index":86,"Rank":109,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":5874,"Memory Usage (GB, fp32)":21.88,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.67}
-{"index":87,"Rank":110,"Model":"Arabert-all-nli-triplet-Matryoshka<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","STS17 (ar-ar)":83.16,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":58.29,"STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":88,"Rank":111,"Model":"Arabic-MiniLM-L12-v2-all-nli-triplet<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","STS17 (ar-ar)":81.11,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":52.41,"STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":89,"Rank":112,"Model":"Arabic-Triplet-Matryoshka-V2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","STS17 (ar-ar)":85.31,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":90,"Rank":113,"Model":"Arabic-all-nli-triplet-Matryoshka<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","STS17 (ar-ar)":82.4,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":51.38,"STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":91,"Rank":114,"Model":"Arabic-labse-Matryoshka<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","STS17 (ar-ar)":82.47,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":57.26,"STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":92,"Rank":115,"Model":"Arabic-mpnet-base-all-nli-triplet<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":79.93,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":52.44,"STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":93,"Rank":116,"Model":"GATE-AraBert-v1<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","STS17 (ar-ar)":82.78,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":59.75,"STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":94,"Rank":117,"Model":"Marbert-all-nli-triplet-Matryoshka<\/a>","Model Size (Million Parameters)":163,"Memory Usage (GB, fp32)":0.61,"Average":"","STS17 (ar-ar)":82.18,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":58.08,"STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":95,"Rank":118,"Model":"Solon-embeddings-large-0.1<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":83.31,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":96,"Rank":119,"Model":"st-polish-kartonberta-base-alpha-v1<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":36.78,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":97,"Rank":120,"Model":"nomic-embed-text-v1.5-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.47}
-{"index":98,"Rank":121,"Model":"bge_m3e_stella<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":61.83}
-{"index":99,"Rank":122,"Model":"SFR-Embedding-2_R<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.6}
-{"index":100,"Rank":123,"Model":"SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.0}
-{"index":101,"Rank":124,"Model":"LLaMA-embeeding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":52.67}
-{"index":102,"Rank":125,"Model":"snowflake-arctic-embed-l<\/a>","Model Size (Million Parameters)":334,"Memory Usage (GB, fp32)":1.24,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":75.68}
-{"index":103,"Rank":126,"Model":"snowflake-arctic-embed-m<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":74.1}
-{"index":104,"Rank":127,"Model":"snowflake-arctic-embed-m-long<\/a>","Model Size (Million Parameters)":137,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":77.9}
-{"index":105,"Rank":128,"Model":"snowflake-arctic-embed-m-v1.5<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":106,"Rank":129,"Model":"snowflake-arctic-embed-s<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":81.19}
-{"index":107,"Rank":130,"Model":"snowflake-arctic-embed-xs<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.08,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.18}
-{"index":108,"Rank":131,"Model":"bge-micro<\/a>","Model Size (Million Parameters)":17,"Memory Usage (GB, fp32)":0.06,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":81.32}
-{"index":109,"Rank":132,"Model":"bge-micro-v2<\/a>","Model Size (Million Parameters)":17,"Memory Usage (GB, fp32)":0.06,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":82.5}
-{"index":110,"Rank":133,"Model":"gte-tiny<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.08,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.38}
-{"index":111,"Rank":134,"Model":"UAE-Large-V1<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.06}
-{"index":112,"Rank":135,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":78.77,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":113,"Rank":136,"Model":"EminEmbed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":55.58,"STS17 (en-de)":71.04,"STS17 (en-tr)":48.6,"STS17 (es-en)":67.38,"STS17 (es-es)":"","STS17 (fr-en)":70.42,"STS17 (it-en)":70.64,"STS17 (ko-ko)":"","STS17 (nl-en)":66.38,"STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":57.01,"STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":74.65,"STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":72.32,"STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":75.16,"STSBenchmark":""}
-{"index":114,"Rank":137,"Model":"nomic-embed-text-v1.5-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.47}
-{"index":115,"Rank":138,"Model":"German_Semantic_STS_V2<\/a>","Model Size (Million Parameters)":336,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":116,"Rank":139,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.84,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.83,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.38}
-{"index":117,"Rank":140,"Model":"ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":67.06,"STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":118,"Rank":141,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":119,"Rank":142,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":121,"Rank":143,"Model":"bge-small-4096<\/a>","Model Size (Million Parameters)":35,"Memory Usage (GB, fp32)":0.13,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":81.94}
-{"index":122,"Rank":144,"Model":"GIST-Embedding-v0<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.32}
-{"index":123,"Rank":145,"Model":"GIST-all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.08,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.4}
-{"index":124,"Rank":146,"Model":"GIST-large-Embedding-v0<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.3}
-{"index":125,"Rank":147,"Model":"GIST-small-Embedding-v0<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.08}
-{"index":126,"Rank":148,"Model":"NoInstruct-small-Embedding-v0<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.98}
-{"index":127,"Rank":149,"Model":"bge-base-en-v1-5-seqlen-384-bs-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.42}
-{"index":128,"Rank":150,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":47.29}
-{"index":129,"Rank":151,"Model":"sgpt-bloom-1b7-nli<\/a>","Model Size (Million Parameters)":1722,"Memory Usage (GB, fp32)":6.42,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":73.13,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":131,"Rank":152,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.85}
-{"index":132,"Rank":153,"Model":"nomic-embed-text-v1.5-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.47}
-{"index":133,"Rank":154,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.58,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":41.02,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.85}
-{"index":134,"Rank":155,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":135,"Rank":156,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":136,"Rank":157,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":137,"Rank":158,"Model":"cai-lunaris-text-embeddings<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":71.4}
-{"index":138,"Rank":159,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":77.54,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":139,"Rank":160,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.73,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":140,"Rank":161,"Model":"embedder-100p<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.2}
-{"index":141,"Rank":162,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":142,"Rank":163,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":143,"Rank":164,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":144,"Rank":165,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":75.98}
-{"index":145,"Rank":166,"Model":"gte-Qwen2-1.5B-instruct-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.84,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.83,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.38}
-{"index":146,"Rank":167,"Model":"sft-bge-small<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":147,"Rank":168,"Model":"stella_en_1.5B_v5<\/a>","Model Size (Million Parameters)":1543,"Memory Usage (GB, fp32)":5.75,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.23}
-{"index":148,"Rank":169,"Model":"stella_en_400M_v5<\/a>","Model Size (Million Parameters)":435,"Memory Usage (GB, fp32)":1.62,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.74}
-{"index":149,"Rank":170,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":112,"Memory Usage (GB, fp32)":0.42,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.52}
-{"index":151,"Rank":171,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":65.37,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":152,"Rank":172,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":55.15,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":153,"Rank":173,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":48.52,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":154,"Rank":174,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":39.05,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":155,"Rank":175,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":56.47,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":156,"Rank":176,"Model":"instructor-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.43}
-{"index":157,"Rank":177,"Model":"instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.88}
-{"index":158,"Rank":178,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.56}
-{"index":159,"Rank":179,"Model":"stella-base-en-v2<\/a>","Model Size (Million Parameters)":55,"Memory Usage (GB, fp32)":0.2,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.45}
-{"index":160,"Rank":180,"Model":"e5-base<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.18}
-{"index":161,"Rank":181,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.52}
-{"index":162,"Rank":182,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.21}
-{"index":163,"Rank":183,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.74}
-{"index":165,"Rank":184,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.36}
-{"index":166,"Rank":185,"Model":"e5-small-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.95}
-{"index":169,"Rank":186,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.38}
-{"index":171,"Rank":187,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":39.73,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":172,"Rank":188,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":38.69,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":174,"Rank":189,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":3003,"Memory Usage (GB, fp32)":11.19,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.71}
-{"index":176,"Rank":190,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":7069,"Memory Usage (GB, fp32)":26.33,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.02}
-{"index":177,"Rank":191,"Model":"sf_model_e5<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.33}
-{"index":178,"Rank":192,"Model":"jina-embedding-b-en-v1<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":82.57}
-{"index":179,"Rank":193,"Model":"jina-embedding-l-en-v1<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.6}
-{"index":180,"Rank":194,"Model":"jina-embedding-s-en-v1<\/a>","Model Size (Million Parameters)":35,"Memory Usage (GB, fp32)":0.13,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.2}
-{"index":181,"Rank":195,"Model":"jina-embeddings-v2-base-de<\/a>","Model Size (Million Parameters)":161,"Memory Usage (GB, fp32)":0.6,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":86.72,"STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":59.07,"STS22 (de-en)":55.97,"STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.14}
-{"index":182,"Rank":196,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":137,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.84}
-{"index":183,"Rank":197,"Model":"jina-embeddings-v2-base-es<\/a>","Model Size (Million Parameters)":161,"Memory Usage (GB, fp32)":0.6,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":86.49,"STS17 (es-es)":88.25,"STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":67.97,"STS22 (es-en)":78.8,"STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.84}
-{"index":184,"Rank":198,"Model":"jina-embeddings-v2-small-en<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.04}
-{"index":185,"Rank":199,"Model":"echo-mistral-7b-instruct-lasttoken<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.67}
-{"index":186,"Rank":200,"Model":"privacy_embedding_rag_10k_base_12_final<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.42}
-{"index":187,"Rank":201,"Model":"privacy_embedding_rag_10k_base_15_final<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.42}
-{"index":188,"Rank":202,"Model":"privacy_embedding_rag_10k_base_checkpoint_2<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.42}
-{"index":189,"Rank":203,"Model":"privacy_embedding_rag_10k_base_final<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.42}
-{"index":190,"Rank":204,"Model":"MegatronBert-1B3-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.69}
-{"index":191,"Rank":205,"Model":"XLM-0B6-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.5}
-{"index":192,"Rank":206,"Model":"XLM-3B5-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.46}
-{"index":193,"Rank":207,"Model":"ember-v1<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.77}
-{"index":194,"Rank":208,"Model":"bge-m3-custom-fr<\/a>","Model Size (Million Parameters)":568,"Memory Usage (GB, fp32)":2.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.7,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":195,"Rank":209,"Model":"sentence_croissant_alpha_v0.1<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":75.66,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":196,"Rank":210,"Model":"sentence_croissant_alpha_v0.2<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":78.68,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":197,"Rank":211,"Model":"sentence_croissant_alpha_v0.3<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":80.8,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":198,"Rank":212,"Model":"sentence_croissant_alpha_v0.4<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.35,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":199,"Rank":213,"Model":"mxbai-embed-2d-large-v1<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.2}
-{"index":200,"Rank":214,"Model":"mxbai-embed-large-v1<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.1}
-{"index":201,"Rank":215,"Model":"bge-base-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.79}
-{"index":202,"Rank":216,"Model":"bge-base-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.93}
-{"index":203,"Rank":217,"Model":"bge-large-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.89}
-{"index":204,"Rank":218,"Model":"bge-large-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.88}
-{"index":205,"Rank":219,"Model":"bge-small-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.31}
-{"index":206,"Rank":220,"Model":"bge-small-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":81.77}
-{"index":207,"Rank":221,"Model":"mmarco-bert-base-italian-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":69.44,"STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":208,"Rank":222,"Model":"mmarco-sentence-flare-it<\/a>","Model Size (Million Parameters)":17,"Memory Usage (GB, fp32)":0.06,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":37.93,"STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":209,"Rank":223,"Model":"stsbm-sentence-flare-it<\/a>","Model Size (Million Parameters)":17,"Memory Usage (GB, fp32)":0.06,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":65.71,"STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":210,"Rank":224,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":137,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.54}
-{"index":211,"Rank":225,"Model":"nomic-embed-text-v1-ablated<\/a>","Model Size (Million Parameters)":137,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.42}
-{"index":212,"Rank":226,"Model":"nomic-embed-text-v1-unsupervised<\/a>","Model Size (Million Parameters)":137,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.96}
-{"index":213,"Rank":227,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.28}
-{"index":214,"Rank":228,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.8}
-{"index":215,"Rank":229,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.14}
-{"index":216,"Rank":230,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.46}
-{"index":217,"Rank":231,"Model":"nomic-embed-text-v1.5<\/a>","Model Size (Million Parameters)":137,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.47}
-{"index":218,"Rank":232,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":78.81}
-{"index":219,"Rank":233,"Model":"NV-Embed-v1<\/a>","Model Size (Million Parameters)":7851,"Memory Usage (GB, fp32)":29.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.14}
-{"index":220,"Rank":234,"Model":"jina-embeddings-v2-base-es-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":86.49,"STS17 (es-es)":88.25,"STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":67.97,"STS22 (es-en)":78.8,"STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.84}
-{"index":221,"Rank":235,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.25}
-{"index":222,"Rank":236,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":76.52}
-{"index":223,"Rank":237,"Model":"TDTE<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.15}
-{"index":224,"Rank":238,"Model":"mmlw-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.36,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":225,"Rank":239,"Model":"mmlw-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.4,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":226,"Rank":240,"Model":"mmlw-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.63,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":227,"Rank":241,"Model":"mmlw-roberta-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.36,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":228,"Rank":242,"Model":"mmlw-roberta-large<\/a>","Model Size (Million Parameters)":435,"Memory Usage (GB, fp32)":1.62,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":39.32,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":229,"Rank":243,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.36,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":230,"Rank":244,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":37.34,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":234,"Rank":245,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.42}
-{"index":236,"Rank":246,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":61.54}
-{"index":239,"Rank":247,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.58}
-{"index":242,"Rank":248,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":77.73}
-{"index":243,"Rank":249,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":76.97}
-{"index":244,"Rank":250,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":74.62,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":245,"Rank":251,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","STS17 (ar-ar)":79.16,"STS17 (en-ar)":81.22,"STS17 (en-de)":84.22,"STS17 (en-tr)":76.74,"STS17 (es-en)":84.44,"STS17 (es-es)":85.56,"STS17 (fr-en)":76.59,"STS17 (it-en)":82.35,"STS17 (ko-ko)":77.03,"STS17 (nl-en)":81.71,"STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":70.55,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":33.73,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.42}
-{"index":246,"Rank":252,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","STS17 (ar-ar)":79.1,"STS17 (en-ar)":80.85,"STS17 (en-de)":83.28,"STS17 (en-tr)":74.9,"STS17 (es-en)":86.11,"STS17 (es-es)":85.14,"STS17 (fr-en)":81.17,"STS17 (it-en)":84.24,"STS17 (ko-ko)":83.41,"STS17 (nl-en)":82.51,"STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":74.3,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":33.64,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.82}
-{"index":250,"Rank":253,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":76.8,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.01}
-{"index":251,"Rank":254,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":252,"Rank":255,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":27.95,"STS22 (de)":8.16,"STS22 (de-en)":21.55,"STS22 (de-fr)":17.5,"STS22 (de-pl)":25.53,"STS22 (es)":45.31,"STS22 (es-en)":42.77,"STS22 (es-it)":32.83,"STS22 (fr)":42.0,"STS22 (fr-pl)":39.44,"STS22 (it)":39.69,"STS22 (pl)":9.71,"STS22 (pl-en)":42.08,"STS22 (ru)":60.06,"STS22 (tr)":15.46,"STS22 (zh-en)":31.25,"STSBenchmark":""}
-{"index":255,"Rank":256,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":74.1,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":256,"Rank":257,"Model":"gte-base<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.73}
-{"index":257,"Rank":258,"Model":"gte-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.07}
-{"index":258,"Rank":259,"Model":"gte-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.57}
-{"index":259,"Rank":260,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":44.39}
-{"index":260,"Rank":261,"Model":"tst<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":68.04}
-{"index":261,"Rank":262,"Model":"tst2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":75.32}
-{"index":262,"Rank":263,"Model":"universal-sentence-encoder-4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":77.08}
-{"index":263,"Rank":264,"Model":"universal-sentence-encoder-large-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.93}
-{"index":264,"Rank":265,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":77.91,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":80.28}
-{"index":265,"Rank":266,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":71.11,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":81.24}
-{"index":266,"Rank":267,"Model":"b1ade-embed<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.77}
-{"index":267,"Rank":268,"Model":"b1ade-embed-kd<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":82.69}
-{"index":268,"Rank":269,"Model":"b1ade-embed-kd_3<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":0.8}
-{"index":269,"Rank":270,"Model":"jina-embeddings-v2-base-en-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.84}
-{"index":270,"Rank":271,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":56.72,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":271,"Rank":272,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":55.49,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":272,"Rank":273,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":80.31,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":273,"Rank":274,"Model":"new_model<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.92}
-{"index":274,"Rank":275,"Model":"snowflake-arctic-embed-m-v1.5-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
-{"index":275,"Rank":276,"Model":"gte-large-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.4}
-{"index":276,"Rank":277,"Model":"gte-large-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.45}
-{"index":277,"Rank":278,"Model":"gte-small-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.13}
-{"index":278,"Rank":279,"Model":"bi-cse<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":70.01,"STSBenchmark":85.99}
-{"index":279,"Rank":280,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.78}
-{"index":280,"Rank":281,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.32}
-{"index":281,"Rank":282,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.02}
-{"index":282,"Rank":283,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.08}
-{"index":283,"Rank":284,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.09,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":64.5,"STS22 (zh-en)":"","STSBenchmark":83.17}
-{"index":284,"Rank":285,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":82.34}
-{"index":285,"Rank":286,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.56}
-{"index":286,"Rank":287,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.24}
-{"index":287,"Rank":288,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.54}
+{"index":38,"Rank":1,"Model":"multilingual-e5-large-instruct-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":71.36,"AmazonCounterfactualClassification (de)":66.71,"AmazonCounterfactualClassification (ja)":76.05,"AmazonReviewsClassification (de)":53.0,"AmazonReviewsClassification (es)":48.81,"AmazonReviewsClassification (fr)":48.51,"AmazonReviewsClassification (ja)":47.71,"AmazonReviewsClassification (zh)":44.66,"MTOPDomainClassification (de)":92.68,"MTOPDomainClassification (es)":92.27,"MTOPDomainClassification (fr)":89.97,"MTOPDomainClassification (hi)":90.03,"MTOPDomainClassification (th)":87.56,"MTOPIntentClassification (de)":79.37,"MTOPIntentClassification (es)":80.34,"MTOPIntentClassification (fr)":76.72,"MTOPIntentClassification (hi)":77.87,"MTOPIntentClassification (th)":77.74,"MassiveIntentClassification (af)":67.66,"MassiveIntentClassification (am)":60.71,"MassiveIntentClassification (ar)":63.61,"MassiveIntentClassification (az)":69.0,"MassiveIntentClassification (bn)":68.54,"MassiveIntentClassification (cy)":63.03,"MassiveIntentClassification (de)":72.47,"MassiveIntentClassification (el)":71.24,"MassiveIntentClassification (es)":72.82,"MassiveIntentClassification (fa)":74.25,"MassiveIntentClassification (fi)":72.29,"MassiveIntentClassification (fr)":73.32,"MassiveIntentClassification (he)":70.22,"MassiveIntentClassification (hi)":71.58,"MassiveIntentClassification (hu)":71.92,"MassiveIntentClassification (hy)":68.07,"MassiveIntentClassification (id)":72.62,"MassiveIntentClassification (is)":65.77,"MassiveIntentClassification (it)":73.45,"MassiveIntentClassification (ja)":74.69,"MassiveIntentClassification (jv)":63.04,"MassiveIntentClassification (ka)":58.91,"MassiveIntentClassification (km)":54.43,"MassiveIntentClassification (kn)":66.33,"MassiveIntentClassification (ko)":70.59,"MassiveIntentClassification (lv)":69.11,"MassiveIntentClassification (ml)":69.7,"MassiveIntentClassification 
(mn)":66.44,"MassiveIntentClassification (ms)":70.8,"MassiveIntentClassification (my)":64.79,"MassiveIntentClassification (nl)":74.43,"MassiveIntentClassification (pt)":73.63,"MassiveIntentClassification (ro)":71.89,"MassiveIntentClassification (ru)":74.16,"MassiveIntentClassification (sl)":69.96,"MassiveIntentClassification (sq)":69.5,"MassiveIntentClassification (sw)":63.01,"MassiveIntentClassification (ta)":66.91,"MassiveIntentClassification (te)":67.62,"MassiveIntentClassification (th)":69.51,"MassiveIntentClassification (tl)":69.31,"MassiveIntentClassification (tr)":72.24,"MassiveIntentClassification (ur)":67.5,"MassiveIntentClassification (vi)":71.29,"MassiveIntentClassification (zh-TW)":69.38,"MassiveScenarioClassification (af)":73.34,"MassiveScenarioClassification (am)":65.84,"MassiveScenarioClassification (ar)":69.76,"MassiveScenarioClassification (az)":72.02,"MassiveScenarioClassification (bn)":72.76,"MassiveScenarioClassification (cy)":68.02,"MassiveScenarioClassification (de)":77.68,"MassiveScenarioClassification (el)":76.13,"MassiveScenarioClassification (es)":76.97,"MassiveScenarioClassification (fa)":78.1,"MassiveScenarioClassification (fi)":75.21,"MassiveScenarioClassification (fr)":77.07,"MassiveScenarioClassification (he)":73.53,"MassiveScenarioClassification (hi)":75.75,"MassiveScenarioClassification (hu)":77.09,"MassiveScenarioClassification (hy)":71.08,"MassiveScenarioClassification (id)":77.1,"MassiveScenarioClassification (is)":71.26,"MassiveScenarioClassification (it)":77.08,"MassiveScenarioClassification (ja)":79.35,"MassiveScenarioClassification (jv)":68.42,"MassiveScenarioClassification (ka)":66.16,"MassiveScenarioClassification (km)":60.11,"MassiveScenarioClassification (kn)":71.25,"MassiveScenarioClassification (ko)":76.46,"MassiveScenarioClassification (lv)":73.25,"MassiveScenarioClassification (ml)":74.12,"MassiveScenarioClassification (mn)":70.02,"MassiveScenarioClassification (ms)":74.41,"MassiveScenarioClassification 
(my)":68.4,"MassiveScenarioClassification (nl)":78.52,"MassiveScenarioClassification (pt)":77.12,"MassiveScenarioClassification (ro)":75.26,"MassiveScenarioClassification (ru)":77.71,"MassiveScenarioClassification (sl)":74.84,"MassiveScenarioClassification (sq)":74.72,"MassiveScenarioClassification (sw)":67.92,"MassiveScenarioClassification (ta)":70.93,"MassiveScenarioClassification (te)":72.41,"MassiveScenarioClassification (th)":75.18,"MassiveScenarioClassification (tl)":72.86,"MassiveScenarioClassification (tr)":76.47,"MassiveScenarioClassification (ur)":71.89,"MassiveScenarioClassification (vi)":74.75,"MassiveScenarioClassification (zh-TW)":75.46}
+{"index":82,"Rank":2,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":71.36,"AmazonCounterfactualClassification (de)":66.71,"AmazonCounterfactualClassification (ja)":76.05,"AmazonReviewsClassification (de)":53.0,"AmazonReviewsClassification (es)":48.81,"AmazonReviewsClassification (fr)":48.51,"AmazonReviewsClassification (ja)":47.71,"AmazonReviewsClassification (zh)":44.66,"MTOPDomainClassification (de)":92.68,"MTOPDomainClassification (es)":92.27,"MTOPDomainClassification (fr)":89.97,"MTOPDomainClassification (hi)":90.03,"MTOPDomainClassification (th)":87.56,"MTOPIntentClassification (de)":79.37,"MTOPIntentClassification (es)":80.34,"MTOPIntentClassification (fr)":76.72,"MTOPIntentClassification (hi)":77.87,"MTOPIntentClassification (th)":77.74,"MassiveIntentClassification (af)":67.66,"MassiveIntentClassification (am)":60.71,"MassiveIntentClassification (ar)":63.61,"MassiveIntentClassification (az)":69.0,"MassiveIntentClassification (bn)":68.54,"MassiveIntentClassification (cy)":63.03,"MassiveIntentClassification (de)":72.47,"MassiveIntentClassification (el)":71.24,"MassiveIntentClassification (es)":72.82,"MassiveIntentClassification (fa)":74.25,"MassiveIntentClassification (fi)":72.29,"MassiveIntentClassification (fr)":73.32,"MassiveIntentClassification (he)":70.22,"MassiveIntentClassification (hi)":71.58,"MassiveIntentClassification (hu)":71.92,"MassiveIntentClassification (hy)":68.07,"MassiveIntentClassification (id)":72.62,"MassiveIntentClassification (is)":65.77,"MassiveIntentClassification (it)":73.45,"MassiveIntentClassification (ja)":74.69,"MassiveIntentClassification (jv)":63.04,"MassiveIntentClassification (ka)":58.91,"MassiveIntentClassification (km)":54.43,"MassiveIntentClassification (kn)":66.33,"MassiveIntentClassification (ko)":70.59,"MassiveIntentClassification (lv)":69.11,"MassiveIntentClassification (ml)":69.7,"MassiveIntentClassification 
(mn)":66.44,"MassiveIntentClassification (ms)":70.8,"MassiveIntentClassification (my)":64.79,"MassiveIntentClassification (nl)":74.43,"MassiveIntentClassification (pt)":73.63,"MassiveIntentClassification (ro)":71.89,"MassiveIntentClassification (ru)":74.16,"MassiveIntentClassification (sl)":69.96,"MassiveIntentClassification (sq)":69.5,"MassiveIntentClassification (sw)":63.01,"MassiveIntentClassification (ta)":66.91,"MassiveIntentClassification (te)":67.62,"MassiveIntentClassification (th)":69.51,"MassiveIntentClassification (tl)":69.31,"MassiveIntentClassification (tr)":72.24,"MassiveIntentClassification (ur)":67.5,"MassiveIntentClassification (vi)":71.29,"MassiveIntentClassification (zh-TW)":69.38,"MassiveScenarioClassification (af)":73.34,"MassiveScenarioClassification (am)":65.84,"MassiveScenarioClassification (ar)":69.76,"MassiveScenarioClassification (az)":72.02,"MassiveScenarioClassification (bn)":72.76,"MassiveScenarioClassification (cy)":68.02,"MassiveScenarioClassification (de)":77.68,"MassiveScenarioClassification (el)":76.13,"MassiveScenarioClassification (es)":76.97,"MassiveScenarioClassification (fa)":78.1,"MassiveScenarioClassification (fi)":75.21,"MassiveScenarioClassification (fr)":77.07,"MassiveScenarioClassification (he)":73.53,"MassiveScenarioClassification (hi)":75.75,"MassiveScenarioClassification (hu)":77.09,"MassiveScenarioClassification (hy)":71.08,"MassiveScenarioClassification (id)":77.1,"MassiveScenarioClassification (is)":71.26,"MassiveScenarioClassification (it)":77.08,"MassiveScenarioClassification (ja)":79.35,"MassiveScenarioClassification (jv)":68.42,"MassiveScenarioClassification (ka)":66.16,"MassiveScenarioClassification (km)":60.11,"MassiveScenarioClassification (kn)":71.25,"MassiveScenarioClassification (ko)":76.46,"MassiveScenarioClassification (lv)":73.25,"MassiveScenarioClassification (ml)":74.12,"MassiveScenarioClassification (mn)":70.02,"MassiveScenarioClassification (ms)":74.41,"MassiveScenarioClassification 
(my)":68.4,"MassiveScenarioClassification (nl)":78.52,"MassiveScenarioClassification (pt)":77.12,"MassiveScenarioClassification (ro)":75.26,"MassiveScenarioClassification (ru)":77.71,"MassiveScenarioClassification (sl)":74.84,"MassiveScenarioClassification (sq)":74.72,"MassiveScenarioClassification (sw)":67.92,"MassiveScenarioClassification (ta)":70.93,"MassiveScenarioClassification (te)":72.41,"MassiveScenarioClassification (th)":75.18,"MassiveScenarioClassification (tl)":72.86,"MassiveScenarioClassification (tr)":76.47,"MassiveScenarioClassification (ur)":71.89,"MassiveScenarioClassification (vi)":74.75,"MassiveScenarioClassification (zh-TW)":75.46}
+{"index":127,"Rank":3,"Model":"multilingual-e5-large-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.6,"AmazonCounterfactualClassification (de)":71.22,"AmazonCounterfactualClassification (ja)":77.84,"AmazonReviewsClassification (de)":45.4,"AmazonReviewsClassification (es)":43.07,"AmazonReviewsClassification (fr)":41.89,"AmazonReviewsClassification (ja)":40.12,"AmazonReviewsClassification (zh)":38.83,"MTOPDomainClassification (de)":91.95,"MTOPDomainClassification (es)":92.2,"MTOPDomainClassification (fr)":89.0,"MTOPDomainClassification (hi)":89.78,"MTOPDomainClassification (th)":88.75,"MTOPIntentClassification (de)":74.53,"MTOPIntentClassification (es)":75.44,"MTOPIntentClassification (fr)":70.56,"MTOPIntentClassification (hi)":73.12,"MTOPIntentClassification (th)":74.29,"MassiveIntentClassification (af)":62.38,"MassiveIntentClassification (am)":56.07,"MassiveIntentClassification (ar)":60.75,"MassiveIntentClassification (az)":64.87,"MassiveIntentClassification (bn)":63.97,"MassiveIntentClassification (cy)":54.98,"MassiveIntentClassification (de)":69.33,"MassiveIntentClassification (el)":69.09,"MassiveIntentClassification (es)":70.49,"MassiveIntentClassification (fa)":69.91,"MassiveIntentClassification (fi)":70.12,"MassiveIntentClassification (fr)":69.34,"MassiveIntentClassification (he)":67.61,"MassiveIntentClassification (hi)":67.8,"MassiveIntentClassification (hu)":69.69,"MassiveIntentClassification (hy)":62.07,"MassiveIntentClassification (id)":70.05,"MassiveIntentClassification (is)":62.42,"MassiveIntentClassification (it)":70.53,"MassiveIntentClassification (ja)":72.51,"MassiveIntentClassification (jv)":57.91,"MassiveIntentClassification (ka)":51.78,"MassiveIntentClassification (km)":47.02,"MassiveIntentClassification (kn)":62.16,"MassiveIntentClassification (ko)":69.43,"MassiveIntentClassification (lv)":67.08,"MassiveIntentClassification (ml)":65.57,"MassiveIntentClassification 
(mn)":61.71,"MassiveIntentClassification (ms)":66.04,"MassiveIntentClassification (my)":60.88,"MassiveIntentClassification (nl)":70.45,"MassiveIntentClassification (pt)":70.73,"MassiveIntentClassification (ro)":68.36,"MassiveIntentClassification (ru)":71.7,"MassiveIntentClassification (sl)":67.09,"MassiveIntentClassification (sq)":65.18,"MassiveIntentClassification (sw)":58.5,"MassiveIntentClassification (ta)":62.69,"MassiveIntentClassification (te)":63.02,"MassiveIntentClassification (th)":68.29,"MassiveIntentClassification (tl)":64.77,"MassiveIntentClassification (tr)":69.87,"MassiveIntentClassification (ur)":64.05,"MassiveIntentClassification (vi)":69.38,"MassiveIntentClassification (zh-TW)":66.2,"MassiveScenarioClassification (af)":68.74,"MassiveScenarioClassification (am)":60.59,"MassiveScenarioClassification (ar)":66.23,"MassiveScenarioClassification (az)":66.48,"MassiveScenarioClassification (bn)":67.75,"MassiveScenarioClassification (cy)":59.09,"MassiveScenarioClassification (de)":74.7,"MassiveScenarioClassification (el)":73.9,"MassiveScenarioClassification (es)":74.31,"MassiveScenarioClassification (fa)":72.95,"MassiveScenarioClassification (fi)":73.12,"MassiveScenarioClassification (fr)":73.87,"MassiveScenarioClassification (he)":71.4,"MassiveScenarioClassification (hi)":72.13,"MassiveScenarioClassification (hu)":74.52,"MassiveScenarioClassification (hy)":64.94,"MassiveScenarioClassification (id)":74.12,"MassiveScenarioClassification (is)":67.15,"MassiveScenarioClassification (it)":74.32,"MassiveScenarioClassification (ja)":77.47,"MassiveScenarioClassification (jv)":63.32,"MassiveScenarioClassification (ka)":58.35,"MassiveScenarioClassification (km)":50.88,"MassiveScenarioClassification (kn)":66.44,"MassiveScenarioClassification (ko)":75.05,"MassiveScenarioClassification (lv)":70.5,"MassiveScenarioClassification (ml)":69.94,"MassiveScenarioClassification (mn)":64.79,"MassiveScenarioClassification (ms)":69.88,"MassiveScenarioClassification 
(my)":63.25,"MassiveScenarioClassification (nl)":74.83,"MassiveScenarioClassification (pt)":73.49,"MassiveScenarioClassification (ro)":71.72,"MassiveScenarioClassification (ru)":75.14,"MassiveScenarioClassification (sl)":71.87,"MassiveScenarioClassification (sq)":70.28,"MassiveScenarioClassification (sw)":63.14,"MassiveScenarioClassification (ta)":66.28,"MassiveScenarioClassification (te)":66.69,"MassiveScenarioClassification (th)":73.45,"MassiveScenarioClassification (tl)":67.71,"MassiveScenarioClassification (tr)":73.99,"MassiveScenarioClassification (ur)":68.15,"MassiveScenarioClassification (vi)":73.11,"MassiveScenarioClassification (zh-TW)":71.81}
+{"index":37,"Rank":4,"Model":"multilingual-e5-large-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":67.6,"AmazonCounterfactualClassification (de)":71.22,"AmazonCounterfactualClassification (ja)":77.84,"AmazonReviewsClassification (de)":45.4,"AmazonReviewsClassification (es)":43.07,"AmazonReviewsClassification (fr)":41.89,"AmazonReviewsClassification (ja)":40.12,"AmazonReviewsClassification (zh)":38.83,"MTOPDomainClassification (de)":91.95,"MTOPDomainClassification (es)":92.2,"MTOPDomainClassification (fr)":89.0,"MTOPDomainClassification (hi)":89.78,"MTOPDomainClassification (th)":88.75,"MTOPIntentClassification (de)":74.53,"MTOPIntentClassification (es)":75.44,"MTOPIntentClassification (fr)":70.56,"MTOPIntentClassification (hi)":73.12,"MTOPIntentClassification (th)":74.29,"MassiveIntentClassification (af)":62.38,"MassiveIntentClassification (am)":56.07,"MassiveIntentClassification (ar)":60.75,"MassiveIntentClassification (az)":64.87,"MassiveIntentClassification (bn)":63.97,"MassiveIntentClassification (cy)":54.98,"MassiveIntentClassification (de)":69.33,"MassiveIntentClassification (el)":69.09,"MassiveIntentClassification (es)":70.49,"MassiveIntentClassification (fa)":69.91,"MassiveIntentClassification (fi)":70.12,"MassiveIntentClassification (fr)":69.34,"MassiveIntentClassification (he)":67.61,"MassiveIntentClassification (hi)":67.8,"MassiveIntentClassification (hu)":69.69,"MassiveIntentClassification (hy)":62.07,"MassiveIntentClassification (id)":70.05,"MassiveIntentClassification (is)":62.42,"MassiveIntentClassification (it)":70.53,"MassiveIntentClassification (ja)":72.51,"MassiveIntentClassification (jv)":57.91,"MassiveIntentClassification (ka)":51.78,"MassiveIntentClassification (km)":47.02,"MassiveIntentClassification (kn)":62.16,"MassiveIntentClassification (ko)":69.43,"MassiveIntentClassification (lv)":67.08,"MassiveIntentClassification (ml)":65.57,"MassiveIntentClassification (mn)":61.71,"MassiveIntentClassification 
(ms)":66.04,"MassiveIntentClassification (my)":60.88,"MassiveIntentClassification (nl)":70.45,"MassiveIntentClassification (pt)":70.73,"MassiveIntentClassification (ro)":68.36,"MassiveIntentClassification (ru)":71.7,"MassiveIntentClassification (sl)":67.09,"MassiveIntentClassification (sq)":65.18,"MassiveIntentClassification (sw)":58.5,"MassiveIntentClassification (ta)":62.69,"MassiveIntentClassification (te)":63.02,"MassiveIntentClassification (th)":68.29,"MassiveIntentClassification (tl)":64.77,"MassiveIntentClassification (tr)":69.87,"MassiveIntentClassification (ur)":64.05,"MassiveIntentClassification (vi)":69.38,"MassiveIntentClassification (zh-TW)":66.2,"MassiveScenarioClassification (af)":68.74,"MassiveScenarioClassification (am)":60.59,"MassiveScenarioClassification (ar)":66.23,"MassiveScenarioClassification (az)":66.48,"MassiveScenarioClassification (bn)":67.75,"MassiveScenarioClassification (cy)":59.09,"MassiveScenarioClassification (de)":74.7,"MassiveScenarioClassification (el)":73.9,"MassiveScenarioClassification (es)":74.31,"MassiveScenarioClassification (fa)":72.95,"MassiveScenarioClassification (fi)":73.12,"MassiveScenarioClassification (fr)":73.87,"MassiveScenarioClassification (he)":71.4,"MassiveScenarioClassification (hi)":72.13,"MassiveScenarioClassification (hu)":74.52,"MassiveScenarioClassification (hy)":64.94,"MassiveScenarioClassification (id)":74.12,"MassiveScenarioClassification (is)":67.15,"MassiveScenarioClassification (it)":74.32,"MassiveScenarioClassification (ja)":77.47,"MassiveScenarioClassification (jv)":63.32,"MassiveScenarioClassification (ka)":58.35,"MassiveScenarioClassification (km)":50.88,"MassiveScenarioClassification (kn)":66.44,"MassiveScenarioClassification (ko)":75.05,"MassiveScenarioClassification (lv)":70.5,"MassiveScenarioClassification (ml)":69.94,"MassiveScenarioClassification (mn)":64.79,"MassiveScenarioClassification (ms)":69.88,"MassiveScenarioClassification (my)":63.25,"MassiveScenarioClassification 
(nl)":74.83,"MassiveScenarioClassification (pt)":73.49,"MassiveScenarioClassification (ro)":71.72,"MassiveScenarioClassification (ru)":75.14,"MassiveScenarioClassification (sl)":71.87,"MassiveScenarioClassification (sq)":70.28,"MassiveScenarioClassification (sw)":63.14,"MassiveScenarioClassification (ta)":66.28,"MassiveScenarioClassification (te)":66.69,"MassiveScenarioClassification (th)":73.45,"MassiveScenarioClassification (tl)":67.71,"MassiveScenarioClassification (tr)":73.99,"MassiveScenarioClassification (ur)":68.15,"MassiveScenarioClassification (vi)":73.11,"MassiveScenarioClassification (zh-TW)":71.81}
+{"index":81,"Rank":5,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":67.48,"AmazonCounterfactualClassification (de)":71.22,"AmazonCounterfactualClassification (ja)":77.84,"AmazonReviewsClassification (de)":45.4,"AmazonReviewsClassification (es)":43.07,"AmazonReviewsClassification (fr)":41.91,"AmazonReviewsClassification (ja)":40.12,"AmazonReviewsClassification (zh)":38.83,"MTOPDomainClassification (de)":91.95,"MTOPDomainClassification (es)":92.2,"MTOPDomainClassification (fr)":86.41,"MTOPDomainClassification (hi)":89.78,"MTOPDomainClassification (th)":88.75,"MTOPIntentClassification (de)":74.53,"MTOPIntentClassification (es)":75.44,"MTOPIntentClassification (fr)":59.43,"MTOPIntentClassification (hi)":73.12,"MTOPIntentClassification (th)":74.29,"MassiveIntentClassification (af)":62.38,"MassiveIntentClassification (am)":56.07,"MassiveIntentClassification (ar)":60.75,"MassiveIntentClassification (az)":64.87,"MassiveIntentClassification (bn)":63.97,"MassiveIntentClassification (cy)":54.98,"MassiveIntentClassification (de)":69.33,"MassiveIntentClassification (el)":69.09,"MassiveIntentClassification (es)":70.49,"MassiveIntentClassification (fa)":69.91,"MassiveIntentClassification (fi)":70.12,"MassiveIntentClassification (fr)":69.34,"MassiveIntentClassification (he)":67.61,"MassiveIntentClassification (hi)":67.8,"MassiveIntentClassification (hu)":69.69,"MassiveIntentClassification (hy)":62.07,"MassiveIntentClassification (id)":70.05,"MassiveIntentClassification (is)":62.42,"MassiveIntentClassification (it)":70.53,"MassiveIntentClassification (ja)":72.51,"MassiveIntentClassification (jv)":57.91,"MassiveIntentClassification (ka)":51.78,"MassiveIntentClassification (km)":47.02,"MassiveIntentClassification (kn)":62.16,"MassiveIntentClassification (ko)":69.43,"MassiveIntentClassification (lv)":67.08,"MassiveIntentClassification (ml)":65.57,"MassiveIntentClassification (mn)":61.71,"MassiveIntentClassification 
(ms)":66.04,"MassiveIntentClassification (my)":60.88,"MassiveIntentClassification (nl)":70.45,"MassiveIntentClassification (pt)":70.73,"MassiveIntentClassification (ro)":68.36,"MassiveIntentClassification (ru)":71.7,"MassiveIntentClassification (sl)":67.09,"MassiveIntentClassification (sq)":65.18,"MassiveIntentClassification (sw)":58.5,"MassiveIntentClassification (ta)":62.69,"MassiveIntentClassification (te)":63.02,"MassiveIntentClassification (th)":68.29,"MassiveIntentClassification (tl)":64.77,"MassiveIntentClassification (tr)":69.87,"MassiveIntentClassification (ur)":64.05,"MassiveIntentClassification (vi)":69.38,"MassiveIntentClassification (zh-TW)":66.2,"MassiveScenarioClassification (af)":68.74,"MassiveScenarioClassification (am)":60.59,"MassiveScenarioClassification (ar)":66.23,"MassiveScenarioClassification (az)":66.48,"MassiveScenarioClassification (bn)":67.75,"MassiveScenarioClassification (cy)":59.09,"MassiveScenarioClassification (de)":74.7,"MassiveScenarioClassification (el)":73.9,"MassiveScenarioClassification (es)":74.31,"MassiveScenarioClassification (fa)":72.95,"MassiveScenarioClassification (fi)":73.12,"MassiveScenarioClassification (fr)":73.87,"MassiveScenarioClassification (he)":71.4,"MassiveScenarioClassification (hi)":72.13,"MassiveScenarioClassification (hu)":74.52,"MassiveScenarioClassification (hy)":64.94,"MassiveScenarioClassification (id)":74.12,"MassiveScenarioClassification (is)":67.15,"MassiveScenarioClassification (it)":74.32,"MassiveScenarioClassification (ja)":77.47,"MassiveScenarioClassification (jv)":63.32,"MassiveScenarioClassification (ka)":58.35,"MassiveScenarioClassification (km)":50.88,"MassiveScenarioClassification (kn)":66.44,"MassiveScenarioClassification (ko)":75.05,"MassiveScenarioClassification (lv)":70.5,"MassiveScenarioClassification (ml)":69.94,"MassiveScenarioClassification (mn)":64.79,"MassiveScenarioClassification (ms)":69.88,"MassiveScenarioClassification (my)":63.25,"MassiveScenarioClassification 
(nl)":74.83,"MassiveScenarioClassification (pt)":73.49,"MassiveScenarioClassification (ro)":71.72,"MassiveScenarioClassification (ru)":75.14,"MassiveScenarioClassification (sl)":71.87,"MassiveScenarioClassification (sq)":70.28,"MassiveScenarioClassification (sw)":63.14,"MassiveScenarioClassification (ta)":66.28,"MassiveScenarioClassification (te)":66.69,"MassiveScenarioClassification (th)":73.45,"MassiveScenarioClassification (tl)":67.71,"MassiveScenarioClassification (tr)":73.99,"MassiveScenarioClassification (ur)":68.15,"MassiveScenarioClassification (vi)":73.11,"MassiveScenarioClassification (zh-TW)":71.81}
+{"index":79,"Rank":6,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":66.64,"AmazonCounterfactualClassification (de)":74.05,"AmazonCounterfactualClassification (ja)":77.22,"AmazonReviewsClassification (de)":53.26,"AmazonReviewsClassification (es)":50.33,"AmazonReviewsClassification (fr)":36.71,"AmazonReviewsClassification (ja)":48.69,"AmazonReviewsClassification (zh)":46.24,"MTOPDomainClassification (de)":92.98,"MTOPDomainClassification (es)":93.37,"MTOPDomainClassification (fr)":74.8,"MTOPDomainClassification (hi)":88.81,"MTOPDomainClassification (th)":85.52,"MTOPIntentClassification (de)":77.77,"MTOPIntentClassification (es)":79.94,"MTOPIntentClassification (fr)":53.97,"MTOPIntentClassification (hi)":72.91,"MTOPIntentClassification (th)":73.24,"MassiveIntentClassification (af)":66.48,"MassiveIntentClassification (am)":44.29,"MassiveIntentClassification (ar)":63.17,"MassiveIntentClassification (az)":64.23,"MassiveIntentClassification (bn)":64.94,"MassiveIntentClassification (cy)":55.48,"MassiveIntentClassification (de)":74.09,"MassiveIntentClassification (el)":68.31,"MassiveIntentClassification (es)":75.09,"MassiveIntentClassification (fa)":72.21,"MassiveIntentClassification (fi)":68.74,"MassiveIntentClassification (fr)":46.39,"MassiveIntentClassification (he)":66.22,"MassiveIntentClassification (hi)":69.45,"MassiveIntentClassification (hu)":69.41,"MassiveIntentClassification (hy)":56.92,"MassiveIntentClassification (id)":72.71,"MassiveIntentClassification (is)":59.91,"MassiveIntentClassification (it)":75.25,"MassiveIntentClassification (ja)":76.36,"MassiveIntentClassification (jv)":57.92,"MassiveIntentClassification (ka)":52.55,"MassiveIntentClassification (km)":46.45,"MassiveIntentClassification (kn)":53.96,"MassiveIntentClassification (ko)":74.21,"MassiveIntentClassification (lv)":59.23,"MassiveIntentClassification (ml)":51.45,"MassiveIntentClassification 
(mn)":51.38,"MassiveIntentClassification (ms)":69.85,"MassiveIntentClassification (my)":49.15,"MassiveIntentClassification (nl)":74.83,"MassiveIntentClassification (pt)":75.27,"MassiveIntentClassification (ro)":69.63,"MassiveIntentClassification (ru)":76.63,"MassiveIntentClassification (sl)":67.15,"MassiveIntentClassification (sq)":58.84,"MassiveIntentClassification (sw)":57.37,"MassiveIntentClassification (ta)":53.15,"MassiveIntentClassification (te)":51.51,"MassiveIntentClassification (th)":66.91,"MassiveIntentClassification (tl)":68.73,"MassiveIntentClassification (tr)":72.07,"MassiveIntentClassification (ur)":62.09,"MassiveIntentClassification (vi)":71.17,"MassiveIntentClassification (zh-TW)":71.14,"MassiveScenarioClassification (af)":73.37,"MassiveScenarioClassification (am)":47.21,"MassiveScenarioClassification (ar)":69.84,"MassiveScenarioClassification (az)":67.0,"MassiveScenarioClassification (bn)":68.05,"MassiveScenarioClassification (cy)":61.88,"MassiveScenarioClassification (de)":79.03,"MassiveScenarioClassification (el)":72.97,"MassiveScenarioClassification (es)":78.84,"MassiveScenarioClassification (fa)":76.74,"MassiveScenarioClassification (fi)":71.22,"MassiveScenarioClassification (fr)":53.86,"MassiveScenarioClassification (he)":69.64,"MassiveScenarioClassification (hi)":73.51,"MassiveScenarioClassification (hu)":74.06,"MassiveScenarioClassification (hy)":59.55,"MassiveScenarioClassification (id)":77.41,"MassiveScenarioClassification (is)":66.58,"MassiveScenarioClassification (it)":78.39,"MassiveScenarioClassification (ja)":79.62,"MassiveScenarioClassification (jv)":64.29,"MassiveScenarioClassification (ka)":57.52,"MassiveScenarioClassification (km)":52.42,"MassiveScenarioClassification (kn)":58.55,"MassiveScenarioClassification (ko)":78.89,"MassiveScenarioClassification (lv)":63.5,"MassiveScenarioClassification (ml)":54.03,"MassiveScenarioClassification (mn)":54.24,"MassiveScenarioClassification (ms)":75.53,"MassiveScenarioClassification 
(my)":52.19,"MassiveScenarioClassification (nl)":78.48,"MassiveScenarioClassification (pt)":77.96,"MassiveScenarioClassification (ro)":73.19,"MassiveScenarioClassification (ru)":80.52,"MassiveScenarioClassification (sl)":73.66,"MassiveScenarioClassification (sq)":64.03,"MassiveScenarioClassification (sw)":64.66,"MassiveScenarioClassification (ta)":57.76,"MassiveScenarioClassification (te)":57.27,"MassiveScenarioClassification (th)":72.46,"MassiveScenarioClassification (tl)":73.71,"MassiveScenarioClassification (tr)":75.04,"MassiveScenarioClassification (ur)":67.05,"MassiveScenarioClassification (vi)":75.52,"MassiveScenarioClassification (zh-TW)":76.87}
+{"index":80,"Rank":7,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":64.32,"AmazonCounterfactualClassification (de)":71.72,"AmazonCounterfactualClassification (ja)":73.33,"AmazonReviewsClassification (de)":41.83,"AmazonReviewsClassification (es)":40.53,"AmazonReviewsClassification (fr)":40.94,"AmazonReviewsClassification (ja)":37.44,"AmazonReviewsClassification (zh)":37.23,"MTOPDomainClassification (de)":89.63,"MTOPDomainClassification (es)":90.59,"MTOPDomainClassification (fr)":84.79,"MTOPDomainClassification (hi)":87.32,"MTOPDomainClassification (th)":86.24,"MTOPIntentClassification (de)":71.23,"MTOPIntentClassification (es)":71.27,"MTOPIntentClassification (fr)":55.51,"MTOPIntentClassification (hi)":69.24,"MTOPIntentClassification (th)":71.71,"MassiveIntentClassification (af)":59.98,"MassiveIntentClassification (am)":53.66,"MassiveIntentClassification (ar)":57.46,"MassiveIntentClassification (az)":62.59,"MassiveIntentClassification (bn)":61.13,"MassiveIntentClassification (cy)":50.06,"MassiveIntentClassification (de)":66.09,"MassiveIntentClassification (el)":64.68,"MassiveIntentClassification (es)":68.4,"MassiveIntentClassification (fa)":67.25,"MassiveIntentClassification (fi)":65.78,"MassiveIntentClassification (fr)":67.95,"MassiveIntentClassification (he)":62.05,"MassiveIntentClassification (hi)":64.95,"MassiveIntentClassification (hu)":64.97,"MassiveIntentClassification (hy)":60.08,"MassiveIntentClassification (id)":66.64,"MassiveIntentClassification (is)":56.39,"MassiveIntentClassification (it)":68.93,"MassiveIntentClassification (ja)":68.94,"MassiveIntentClassification (jv)":54.26,"MassiveIntentClassification (ka)":48.99,"MassiveIntentClassification (km)":44.69,"MassiveIntentClassification (kn)":59.19,"MassiveIntentClassification (ko)":66.34,"MassiveIntentClassification (lv)":60.34,"MassiveIntentClassification (ml)":63.09,"MassiveIntentClassification (mn)":58.76,"MassiveIntentClassification 
(ms)":62.48,"MassiveIntentClassification (my)":58.56,"MassiveIntentClassification (nl)":67.3,"MassiveIntentClassification (pt)":68.98,"MassiveIntentClassification (ro)":65.54,"MassiveIntentClassification (ru)":69.02,"MassiveIntentClassification (sl)":62.35,"MassiveIntentClassification (sq)":61.23,"MassiveIntentClassification (sw)":56.0,"MassiveIntentClassification (ta)":58.71,"MassiveIntentClassification (te)":59.72,"MassiveIntentClassification (th)":65.6,"MassiveIntentClassification (tl)":60.86,"MassiveIntentClassification (tr)":67.41,"MassiveIntentClassification (ur)":61.52,"MassiveIntentClassification (vi)":66.17,"MassiveIntentClassification (zh-TW)":64.65,"MassiveScenarioClassification (af)":65.09,"MassiveScenarioClassification (am)":58.52,"MassiveScenarioClassification (ar)":62.24,"MassiveScenarioClassification (az)":63.75,"MassiveScenarioClassification (bn)":65.0,"MassiveScenarioClassification (cy)":52.84,"MassiveScenarioClassification (de)":71.95,"MassiveScenarioClassification (el)":70.18,"MassiveScenarioClassification (es)":71.5,"MassiveScenarioClassification (fa)":70.25,"MassiveScenarioClassification (fi)":69.13,"MassiveScenarioClassification (fr)":71.89,"MassiveScenarioClassification (he)":67.44,"MassiveScenarioClassification (hi)":69.16,"MassiveScenarioClassification (hu)":70.75,"MassiveScenarioClassification (hy)":63.14,"MassiveScenarioClassification (id)":70.7,"MassiveScenarioClassification (is)":60.94,"MassiveScenarioClassification (it)":72.32,"MassiveScenarioClassification (ja)":74.65,"MassiveScenarioClassification (jv)":59.69,"MassiveScenarioClassification (ka)":54.37,"MassiveScenarioClassification (km)":48.31,"MassiveScenarioClassification (kn)":62.15,"MassiveScenarioClassification (ko)":72.45,"MassiveScenarioClassification (lv)":62.81,"MassiveScenarioClassification (ml)":68.04,"MassiveScenarioClassification (mn)":61.44,"MassiveScenarioClassification (ms)":66.9,"MassiveScenarioClassification (my)":61.64,"MassiveScenarioClassification 
(nl)":72.11,"MassiveScenarioClassification (pt)":70.83,"MassiveScenarioClassification (ro)":69.19,"MassiveScenarioClassification (ru)":72.99,"MassiveScenarioClassification (sl)":65.26,"MassiveScenarioClassification (sq)":66.49,"MassiveScenarioClassification (sw)":59.89,"MassiveScenarioClassification (ta)":62.38,"MassiveScenarioClassification (te)":62.59,"MassiveScenarioClassification (th)":71.61,"MassiveScenarioClassification (tl)":62.74,"MassiveScenarioClassification (tr)":71.67,"MassiveScenarioClassification (ur)":64.64,"MassiveScenarioClassification (vi)":70.01,"MassiveScenarioClassification (zh-TW)":70.69}
+{"index":67,"Rank":8,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":63.11,"AmazonCounterfactualClassification (de)":72.02,"AmazonCounterfactualClassification (ja)":71.79,"AmazonReviewsClassification (de)":34.61,"AmazonReviewsClassification (es)":35.17,"AmazonReviewsClassification (fr)":34.91,"AmazonReviewsClassification (ja)":31.84,"AmazonReviewsClassification (zh)":31.91,"MTOPDomainClassification (de)":89.54,"MTOPDomainClassification (es)":90.62,"MTOPDomainClassification (fr)":86.19,"MTOPDomainClassification (hi)":89.42,"MTOPDomainClassification (th)":85.9,"MTOPIntentClassification (de)":73.43,"MTOPIntentClassification (es)":73.84,"MTOPIntentClassification (fr)":66.75,"MTOPIntentClassification (hi)":69.14,"MTOPIntentClassification (th)":68.47,"MassiveIntentClassification (af)":58.15,"MassiveIntentClassification (am)":57.91,"MassiveIntentClassification (ar)":57.23,"MassiveIntentClassification (az)":57.17,"MassiveIntentClassification (bn)":62.24,"MassiveIntentClassification (cy)":51.19,"MassiveIntentClassification (de)":61.31,"MassiveIntentClassification (el)":64.21,"MassiveIntentClassification (es)":63.97,"MassiveIntentClassification (fa)":66.67,"MassiveIntentClassification (fi)":62.86,"MassiveIntentClassification (fr)":58.55,"MassiveIntentClassification (he)":63.45,"MassiveIntentClassification (hi)":61.32,"MassiveIntentClassification (hu)":62.91,"MassiveIntentClassification (hy)":60.03,"MassiveIntentClassification (id)":63.29,"MassiveIntentClassification (is)":56.02,"MassiveIntentClassification (it)":64.44,"MassiveIntentClassification (ja)":63.7,"MassiveIntentClassification (jv)":52.0,"MassiveIntentClassification (ka)":54.09,"MassiveIntentClassification (km)":43.34,"MassiveIntentClassification (kn)":57.87,"MassiveIntentClassification (ko)":62.69,"MassiveIntentClassification (lv)":56.24,"MassiveIntentClassification (ml)":62.81,"MassiveIntentClassification (mn)":58.49,"MassiveIntentClassification 
(ms)":61.56,"MassiveIntentClassification (my)":59.4,"MassiveIntentClassification (nl)":64.56,"MassiveIntentClassification (pt)":63.49,"MassiveIntentClassification (ro)":62.53,"MassiveIntentClassification (ru)":62.63,"MassiveIntentClassification (sl)":63.43,"MassiveIntentClassification (sq)":61.45,"MassiveIntentClassification (sw)":56.25,"MassiveIntentClassification (ta)":59.75,"MassiveIntentClassification (te)":59.61,"MassiveIntentClassification (th)":59.42,"MassiveIntentClassification (tl)":58.12,"MassiveIntentClassification (tr)":60.91,"MassiveIntentClassification (ur)":59.49,"MassiveIntentClassification (vi)":60.48,"MassiveIntentClassification (zh-TW)":56.73,"MassiveScenarioClassification (af)":64.06,"MassiveScenarioClassification (am)":63.24,"MassiveScenarioClassification (ar)":63.69,"MassiveScenarioClassification (az)":60.86,"MassiveScenarioClassification (bn)":67.17,"MassiveScenarioClassification (cy)":56.52,"MassiveScenarioClassification (de)":67.48,"MassiveScenarioClassification (el)":70.23,"MassiveScenarioClassification (es)":69.08,"MassiveScenarioClassification (fa)":72.1,"MassiveScenarioClassification (fi)":67.16,"MassiveScenarioClassification (fr)":63.02,"MassiveScenarioClassification (he)":68.83,"MassiveScenarioClassification (hi)":66.9,"MassiveScenarioClassification (hu)":69.33,"MassiveScenarioClassification (hy)":65.82,"MassiveScenarioClassification (id)":68.98,"MassiveScenarioClassification (is)":63.14,"MassiveScenarioClassification (it)":70.04,"MassiveScenarioClassification (ja)":70.68,"MassiveScenarioClassification (jv)":59.79,"MassiveScenarioClassification (ka)":61.03,"MassiveScenarioClassification (km)":49.05,"MassiveScenarioClassification (kn)":63.78,"MassiveScenarioClassification (ko)":69.6,"MassiveScenarioClassification (lv)":59.97,"MassiveScenarioClassification (ml)":69.2,"MassiveScenarioClassification (mn)":62.72,"MassiveScenarioClassification (ms)":67.87,"MassiveScenarioClassification (my)":64.98,"MassiveScenarioClassification 
(nl)":69.8,"MassiveScenarioClassification (pt)":67.5,"MassiveScenarioClassification (ro)":67.53,"MassiveScenarioClassification (ru)":67.96,"MassiveScenarioClassification (sl)":69.57,"MassiveScenarioClassification (sq)":68.48,"MassiveScenarioClassification (sw)":63.18,"MassiveScenarioClassification (ta)":64.85,"MassiveScenarioClassification (te)":65.39,"MassiveScenarioClassification (th)":67.99,"MassiveScenarioClassification (tl)":63.4,"MassiveScenarioClassification (tr)":65.77,"MassiveScenarioClassification (ur)":65.81,"MassiveScenarioClassification (vi)":66.52,"MassiveScenarioClassification (zh-TW)":63.3}
+{"index":83,"Rank":9,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":61.51,"AmazonCounterfactualClassification (de)":71.65,"AmazonCounterfactualClassification (ja)":64.19,"AmazonReviewsClassification (de)":40.25,"AmazonReviewsClassification (es)":40.39,"AmazonReviewsClassification (fr)":39.68,"AmazonReviewsClassification (ja)":37.68,"AmazonReviewsClassification (zh)":37.5,"MTOPDomainClassification (de)":87.47,"MTOPDomainClassification (es)":89.27,"MTOPDomainClassification (fr)":81.2,"MTOPDomainClassification (hi)":86.02,"MTOPDomainClassification (th)":85.35,"MTOPIntentClassification (de)":65.86,"MTOPIntentClassification (es)":67.97,"MTOPIntentClassification (fr)":46.01,"MTOPIntentClassification (hi)":66.3,"MTOPIntentClassification (th)":67.52,"MassiveIntentClassification (af)":57.07,"MassiveIntentClassification (am)":51.96,"MassiveIntentClassification (ar)":54.43,"MassiveIntentClassification (az)":59.8,"MassiveIntentClassification (bn)":59.38,"MassiveIntentClassification (cy)":46.56,"MassiveIntentClassification (de)":62.73,"MassiveIntentClassification (el)":61.6,"MassiveIntentClassification (es)":66.31,"MassiveIntentClassification (fa)":65.54,"MassiveIntentClassification (fi)":61.46,"MassiveIntentClassification (fr)":65.47,"MassiveIntentClassification (he)":58.05,"MassiveIntentClassification (hi)":64.07,"MassiveIntentClassification (hu)":60.95,"MassiveIntentClassification (hy)":57.4,"MassiveIntentClassification (id)":64.17,"MassiveIntentClassification (is)":52.26,"MassiveIntentClassification (it)":65.54,"MassiveIntentClassification (ja)":68.23,"MassiveIntentClassification (jv)":50.85,"MassiveIntentClassification (ka)":48.45,"MassiveIntentClassification (km)":42.83,"MassiveIntentClassification (kn)":57.51,"MassiveIntentClassification (ko)":63.79,"MassiveIntentClassification (lv)":54.99,"MassiveIntentClassification (ml)":61.9,"MassiveIntentClassification (mn)":57.1,"MassiveIntentClassification 
(ms)":58.99,"MassiveIntentClassification (my)":55.9,"MassiveIntentClassification (nl)":65.64,"MassiveIntentClassification (pt)":66.85,"MassiveIntentClassification (ro)":60.81,"MassiveIntentClassification (ru)":58.65,"MassiveIntentClassification (sl)":56.52,"MassiveIntentClassification (sq)":57.99,"MassiveIntentClassification (sw)":53.57,"MassiveIntentClassification (ta)":57.26,"MassiveIntentClassification (te)":57.83,"MassiveIntentClassification (th)":64.07,"MassiveIntentClassification (tl)":58.91,"MassiveIntentClassification (tr)":63.54,"MassiveIntentClassification (ur)":59.28,"MassiveIntentClassification (vi)":64.07,"MassiveIntentClassification (zh-TW)":62.54,"MassiveScenarioClassification (af)":63.04,"MassiveScenarioClassification (am)":56.84,"MassiveScenarioClassification (ar)":59.62,"MassiveScenarioClassification (az)":60.85,"MassiveScenarioClassification (bn)":62.77,"MassiveScenarioClassification (cy)":50.18,"MassiveScenarioClassification (de)":69.19,"MassiveScenarioClassification (el)":67.07,"MassiveScenarioClassification (es)":69.83,"MassiveScenarioClassification (fa)":68.71,"MassiveScenarioClassification (fi)":65.95,"MassiveScenarioClassification (fr)":68.76,"MassiveScenarioClassification (he)":63.81,"MassiveScenarioClassification (hi)":67.69,"MassiveScenarioClassification (hu)":66.47,"MassiveScenarioClassification (hy)":59.5,"MassiveScenarioClassification (id)":67.92,"MassiveScenarioClassification (is)":56.49,"MassiveScenarioClassification (it)":69.04,"MassiveScenarioClassification (ja)":73.89,"MassiveScenarioClassification (jv)":56.63,"MassiveScenarioClassification (ka)":52.24,"MassiveScenarioClassification (km)":46.62,"MassiveScenarioClassification (kn)":59.16,"MassiveScenarioClassification (ko)":69.85,"MassiveScenarioClassification (lv)":56.66,"MassiveScenarioClassification (ml)":66.54,"MassiveScenarioClassification (mn)":59.31,"MassiveScenarioClassification (ms)":64.88,"MassiveScenarioClassification (my)":58.86,"MassiveScenarioClassification 
(nl)":70.87,"MassiveScenarioClassification (pt)":68.18,"MassiveScenarioClassification (ro)":64.65,"MassiveScenarioClassification (ru)":63.77,"MassiveScenarioClassification (sl)":60.18,"MassiveScenarioClassification (sq)":62.86,"MassiveScenarioClassification (sw)":58.15,"MassiveScenarioClassification (ta)":59.44,"MassiveScenarioClassification (te)":60.85,"MassiveScenarioClassification (th)":70.66,"MassiveScenarioClassification (tl)":60.88,"MassiveScenarioClassification (tr)":68.05,"MassiveScenarioClassification (ur)":62.11,"MassiveScenarioClassification (vi)":67.44,"MassiveScenarioClassification (zh-TW)":68.32}
+{"index":107,"Rank":10,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":60.56,"AmazonCounterfactualClassification (de)":73.17,"AmazonCounterfactualClassification (ja)":76.42,"AmazonReviewsClassification (de)":39.92,"AmazonReviewsClassification (es)":39.39,"AmazonReviewsClassification (fr)":38.52,"AmazonReviewsClassification (ja)":36.44,"AmazonReviewsClassification (zh)":36.45,"MTOPDomainClassification (de)":86.95,"MTOPDomainClassification (es)":84.07,"MTOPDomainClassification (fr)":84.14,"MTOPDomainClassification (hi)":85.11,"MTOPDomainClassification (th)":81.24,"MTOPIntentClassification (de)":63.42,"MTOPIntentClassification (es)":64.44,"MTOPIntentClassification (fr)":62.01,"MTOPIntentClassification (hi)":62.58,"MTOPIntentClassification (th)":64.61,"MassiveIntentClassification (af)":56.12,"MassiveIntentClassification (am)":55.71,"MassiveIntentClassification (ar)":50.86,"MassiveIntentClassification (az)":58.97,"MassiveIntentClassification (bn)":58.22,"MassiveIntentClassification (cy)":50.16,"MassiveIntentClassification (de)":56.21,"MassiveIntentClassification (el)":57.03,"MassiveIntentClassification (es)":58.32,"MassiveIntentClassification (fa)":62.33,"MassiveIntentClassification (fi)":60.12,"MassiveIntentClassification (fr)":60.47,"MassiveIntentClassification (he)":56.55,"MassiveIntentClassification (hi)":59.4,"MassiveIntentClassification (hu)":59.52,"MassiveIntentClassification (hy)":56.2,"MassiveIntentClassification (id)":61.12,"MassiveIntentClassification (is)":54.9,"MassiveIntentClassification (it)":59.83,"MassiveIntentClassification (ja)":63.11,"MassiveIntentClassification (jv)":50.98,"MassiveIntentClassification (ka)":48.35,"MassiveIntentClassification (km)":48.55,"MassiveIntentClassification (kn)":56.24,"MassiveIntentClassification (ko)":60.99,"MassiveIntentClassification (lv)":57.1,"MassiveIntentClassification (ml)":57.91,"MassiveIntentClassification (mn)":58.5,"MassiveIntentClassification 
(ms)":58.6,"MassiveIntentClassification (my)":57.35,"MassiveIntentClassification (nl)":59.37,"MassiveIntentClassification (pt)":60.16,"MassiveIntentClassification (ro)":57.92,"MassiveIntentClassification (ru)":60.67,"MassiveIntentClassification (sl)":59.37,"MassiveIntentClassification (sq)":58.03,"MassiveIntentClassification (sw)":51.62,"MassiveIntentClassification (ta)":55.04,"MassiveIntentClassification (te)":58.32,"MassiveIntentClassification (th)":56.58,"MassiveIntentClassification (tl)":55.28,"MassiveIntentClassification (tr)":60.91,"MassiveIntentClassification (ur)":56.7,"MassiveIntentClassification (vi)":56.67,"MassiveIntentClassification (zh-TW)":59.51,"MassiveScenarioClassification (af)":63.39,"MassiveScenarioClassification (am)":62.02,"MassiveScenarioClassification (ar)":57.72,"MassiveScenarioClassification (az)":63.48,"MassiveScenarioClassification (bn)":61.84,"MassiveScenarioClassification (cy)":56.13,"MassiveScenarioClassification (de)":62.39,"MassiveScenarioClassification (el)":64.58,"MassiveScenarioClassification (es)":63.61,"MassiveScenarioClassification (fa)":67.46,"MassiveScenarioClassification (fi)":64.58,"MassiveScenarioClassification (fr)":65.1,"MassiveScenarioClassification (he)":63.53,"MassiveScenarioClassification (hi)":64.4,"MassiveScenarioClassification (hu)":65.82,"MassiveScenarioClassification (hy)":61.25,"MassiveScenarioClassification (id)":65.84,"MassiveScenarioClassification (is)":61.94,"MassiveScenarioClassification (it)":64.09,"MassiveScenarioClassification (ja)":67.72,"MassiveScenarioClassification (jv)":58.29,"MassiveScenarioClassification (ka)":53.38,"MassiveScenarioClassification (km)":56.18,"MassiveScenarioClassification (kn)":61.74,"MassiveScenarioClassification (ko)":67.26,"MassiveScenarioClassification (lv)":61.87,"MassiveScenarioClassification (ml)":62.26,"MassiveScenarioClassification (mn)":62.6,"MassiveScenarioClassification (ms)":65.63,"MassiveScenarioClassification (my)":62.94,"MassiveScenarioClassification 
(nl)":65.16,"MassiveScenarioClassification (pt)":63.28,"MassiveScenarioClassification (ro)":62.41,"MassiveScenarioClassification (ru)":65.25,"MassiveScenarioClassification (sl)":64.25,"MassiveScenarioClassification (sq)":64.54,"MassiveScenarioClassification (sw)":58.36,"MassiveScenarioClassification (ta)":59.08,"MassiveScenarioClassification (te)":64.13,"MassiveScenarioClassification (th)":64.34,"MassiveScenarioClassification (tl)":60.23,"MassiveScenarioClassification (tr)":65.43,"MassiveScenarioClassification (ur)":61.52,"MassiveScenarioClassification (vi)":61.05,"MassiveScenarioClassification (zh-TW)":67.08}
+{"index":55,"Rank":11,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":54.73,"AmazonCounterfactualClassification (de)":70.94,"AmazonCounterfactualClassification (ja)":80.06,"AmazonReviewsClassification (de)":38.83,"AmazonReviewsClassification (es)":39.39,"AmazonReviewsClassification (fr)":39.47,"AmazonReviewsClassification (ja)":35.9,"AmazonReviewsClassification (zh)":36.07,"MTOPDomainClassification (de)":86.91,"MTOPDomainClassification (es)":87.74,"MTOPDomainClassification (fr)":86.22,"MTOPDomainClassification (hi)":82.92,"MTOPDomainClassification (th)":69.9,"MTOPIntentClassification (de)":63.26,"MTOPIntentClassification (es)":65.06,"MTOPIntentClassification (fr)":59.43,"MTOPIntentClassification (hi)":59.08,"MTOPIntentClassification (th)":43.75,"MassiveIntentClassification (af)":47.6,"MassiveIntentClassification (am)":31.57,"MassiveIntentClassification (ar)":52.63,"MassiveIntentClassification (az)":50.09,"MassiveIntentClassification (bn)":46.54,"MassiveIntentClassification (cy)":44.26,"MassiveIntentClassification (de)":61.87,"MassiveIntentClassification (el)":47.02,"MassiveIntentClassification (es)":62.54,"MassiveIntentClassification (fa)":55.19,"MassiveIntentClassification (fi)":48.43,"MassiveIntentClassification (fr)":64.27,"MassiveIntentClassification (he)":57.62,"MassiveIntentClassification (hi)":57.54,"MassiveIntentClassification (hu)":45.67,"MassiveIntentClassification (hy)":39.2,"MassiveIntentClassification (id)":55.0,"MassiveIntentClassification (is)":43.14,"MassiveIntentClassification (it)":61.0,"MassiveIntentClassification (ja)":64.29,"MassiveIntentClassification (jv)":43.69,"MassiveIntentClassification (ka)":38.35,"MassiveIntentClassification (km)":34.22,"MassiveIntentClassification (kn)":51.79,"MassiveIntentClassification (ko)":59.59,"MassiveIntentClassification (lv)":46.54,"MassiveIntentClassification (ml)":54.47,"MassiveIntentClassification (mn)":40.68,"MassiveIntentClassification 
(ms)":51.24,"MassiveIntentClassification (my)":31.76,"MassiveIntentClassification (nl)":60.82,"MassiveIntentClassification (pt)":62.74,"MassiveIntentClassification (ro)":49.68,"MassiveIntentClassification (ru)":60.85,"MassiveIntentClassification (sl)":48.59,"MassiveIntentClassification (sq)":47.17,"MassiveIntentClassification (sw)":45.97,"MassiveIntentClassification (ta)":53.6,"MassiveIntentClassification (te)":53.45,"MassiveIntentClassification (th)":46.17,"MassiveIntentClassification (tl)":49.48,"MassiveIntentClassification (tr)":58.03,"MassiveIntentClassification (ur)":39.26,"MassiveIntentClassification (vi)":52.16,"MassiveIntentClassification (zh-TW)":58.21,"MassiveScenarioClassification (af)":58.07,"MassiveScenarioClassification (am)":38.21,"MassiveScenarioClassification (ar)":57.47,"MassiveScenarioClassification (az)":54.37,"MassiveScenarioClassification (bn)":52.72,"MassiveScenarioClassification (cy)":49.5,"MassiveScenarioClassification (de)":71.28,"MassiveScenarioClassification (el)":52.42,"MassiveScenarioClassification (es)":67.04,"MassiveScenarioClassification (fa)":60.17,"MassiveScenarioClassification (fi)":54.05,"MassiveScenarioClassification (fr)":69.76,"MassiveScenarioClassification (he)":62.85,"MassiveScenarioClassification (hi)":62.18,"MassiveScenarioClassification (hu)":53.52,"MassiveScenarioClassification (hy)":45.95,"MassiveScenarioClassification (id)":60.33,"MassiveScenarioClassification (is)":50.1,"MassiveScenarioClassification (it)":66.49,"MassiveScenarioClassification (ja)":68.36,"MassiveScenarioClassification (jv)":50.59,"MassiveScenarioClassification (ka)":42.76,"MassiveScenarioClassification (km)":40.65,"MassiveScenarioClassification (kn)":57.25,"MassiveScenarioClassification (ko)":63.84,"MassiveScenarioClassification (lv)":53.14,"MassiveScenarioClassification (ml)":58.84,"MassiveScenarioClassification (mn)":44.82,"MassiveScenarioClassification (ms)":58.9,"MassiveScenarioClassification (my)":38.52,"MassiveScenarioClassification 
(nl)":67.54,"MassiveScenarioClassification (pt)":65.7,"MassiveScenarioClassification (ro)":57.2,"MassiveScenarioClassification (ru)":65.42,"MassiveScenarioClassification (sl)":55.15,"MassiveScenarioClassification (sq)":55.68,"MassiveScenarioClassification (sw)":52.3,"MassiveScenarioClassification (ta)":56.19,"MassiveScenarioClassification (te)":58.02,"MassiveScenarioClassification (th)":52.56,"MassiveScenarioClassification (tl)":57.43,"MassiveScenarioClassification (tr)":61.55,"MassiveScenarioClassification (ur)":47.11,"MassiveScenarioClassification (vi)":56.83,"MassiveScenarioClassification (zh-TW)":64.02}
+{"index":58,"Rank":12,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":7068,"Memory Usage (GB, fp32)":26.33,"Average":54.35,"AmazonCounterfactualClassification (de)":61.35,"AmazonCounterfactualClassification (ja)":58.23,"AmazonReviewsClassification (de)":29.7,"AmazonReviewsClassification (es)":35.97,"AmazonReviewsClassification (fr)":35.92,"AmazonReviewsClassification (ja)":27.64,"AmazonReviewsClassification (zh)":32.63,"MTOPDomainClassification (de)":82.05,"MTOPDomainClassification (es)":93.55,"MTOPDomainClassification (fr)":90.98,"MTOPDomainClassification (hi)":89.33,"MTOPDomainClassification (th)":60.49,"MTOPIntentClassification (de)":61.92,"MTOPIntentClassification (es)":74.49,"MTOPIntentClassification (fr)":69.12,"MTOPIntentClassification (hi)":64.85,"MTOPIntentClassification (th)":49.36,"MassiveIntentClassification (af)":47.85,"MassiveIntentClassification (am)":33.3,"MassiveIntentClassification (ar)":59.25,"MassiveIntentClassification (az)":45.24,"MassiveIntentClassification (bn)":61.59,"MassiveIntentClassification (cy)":44.92,"MassiveIntentClassification (de)":56.1,"MassiveIntentClassification (el)":46.13,"MassiveIntentClassification (es)":66.35,"MassiveIntentClassification (fa)":51.2,"MassiveIntentClassification (fi)":45.33,"MassiveIntentClassification (fr)":66.95,"MassiveIntentClassification (he)":43.18,"MassiveIntentClassification (hi)":63.54,"MassiveIntentClassification (hu)":44.73,"MassiveIntentClassification (hy)":38.13,"MassiveIntentClassification (id)":64.06,"MassiveIntentClassification (is)":44.35,"MassiveIntentClassification (it)":60.77,"MassiveIntentClassification (ja)":61.22,"MassiveIntentClassification (jv)":50.94,"MassiveIntentClassification (ka)":33.84,"MassiveIntentClassification (km)":37.34,"MassiveIntentClassification (kn)":53.54,"MassiveIntentClassification (ko)":53.36,"MassiveIntentClassification (lv)":46.5,"MassiveIntentClassification (ml)":58.27,"MassiveIntentClassification 
(mn)":40.28,"MassiveIntentClassification (ms)":59.65,"MassiveIntentClassification (my)":37.42,"MassiveIntentClassification (nl)":52.09,"MassiveIntentClassification (pt)":66.69,"MassiveIntentClassification (ro)":50.53,"MassiveIntentClassification (ru)":58.32,"MassiveIntentClassification (sl)":47.74,"MassiveIntentClassification (sq)":48.94,"MassiveIntentClassification (sw)":49.81,"MassiveIntentClassification (ta)":56.4,"MassiveIntentClassification (te)":54.71,"MassiveIntentClassification (th)":44.43,"MassiveIntentClassification (tl)":50.21,"MassiveIntentClassification (tr)":46.56,"MassiveIntentClassification (ur)":56.75,"MassiveIntentClassification (vi)":64.53,"MassiveIntentClassification (zh-TW)":62.89,"MassiveScenarioClassification (af)":51.47,"MassiveScenarioClassification (am)":34.87,"MassiveScenarioClassification (ar)":65.21,"MassiveScenarioClassification (az)":45.58,"MassiveScenarioClassification (bn)":67.3,"MassiveScenarioClassification (cy)":46.29,"MassiveScenarioClassification (de)":61.74,"MassiveScenarioClassification (el)":48.96,"MassiveScenarioClassification (es)":73.34,"MassiveScenarioClassification (fa)":53.17,"MassiveScenarioClassification (fi)":44.69,"MassiveScenarioClassification (fr)":72.91,"MassiveScenarioClassification (he)":43.1,"MassiveScenarioClassification (hi)":69.27,"MassiveScenarioClassification (hu)":45.16,"MassiveScenarioClassification (hy)":38.73,"MassiveScenarioClassification (id)":70.13,"MassiveScenarioClassification (is)":44.21,"MassiveScenarioClassification (it)":65.57,"MassiveScenarioClassification (ja)":65.76,"MassiveScenarioClassification (jv)":54.79,"MassiveScenarioClassification (ka)":32.99,"MassiveScenarioClassification (km)":39.34,"MassiveScenarioClassification (kn)":60.5,"MassiveScenarioClassification (ko)":55.69,"MassiveScenarioClassification (lv)":44.35,"MassiveScenarioClassification (ml)":65.53,"MassiveScenarioClassification (mn)":38.72,"MassiveScenarioClassification (ms)":64.99,"MassiveScenarioClassification 
(my)":36.84,"MassiveScenarioClassification (nl)":56.32,"MassiveScenarioClassification (pt)":71.46,"MassiveScenarioClassification (ro)":53.69,"MassiveScenarioClassification (ru)":61.6,"MassiveScenarioClassification (sl)":48.04,"MassiveScenarioClassification (sq)":50.06,"MassiveScenarioClassification (sw)":54.22,"MassiveScenarioClassification (ta)":62.77,"MassiveScenarioClassification (te)":62.59,"MassiveScenarioClassification (th)":45.18,"MassiveScenarioClassification (tl)":52.06,"MassiveScenarioClassification (tr)":47.21,"MassiveScenarioClassification (ur)":64.26,"MassiveScenarioClassification (vi)":70.61,"MassiveScenarioClassification (zh-TW)":70.3}
+{"index":124,"Rank":13,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":53.59,"AmazonCounterfactualClassification (de)":68.69,"AmazonCounterfactualClassification (ja)":61.61,"AmazonReviewsClassification (de)":33.39,"AmazonReviewsClassification (es)":34.82,"AmazonReviewsClassification (fr)":33.45,"AmazonReviewsClassification (ja)":30.05,"AmazonReviewsClassification (zh)":32.52,"MTOPDomainClassification (de)":78.59,"MTOPDomainClassification (es)":79.24,"MTOPDomainClassification (fr)":76.17,"MTOPDomainClassification (hi)":78.75,"MTOPDomainClassification (th)":77.67,"MTOPIntentClassification (de)":55.29,"MTOPIntentClassification (es)":58.68,"MTOPIntentClassification (fr)":53.26,"MTOPIntentClassification (hi)":59.62,"MTOPIntentClassification (th)":58.8,"MassiveIntentClassification (af)":45.42,"MassiveIntentClassification (am)":37.68,"MassiveIntentClassification (ar)":45.02,"MassiveIntentClassification (az)":48.71,"MassiveIntentClassification (bn)":43.79,"MassiveIntentClassification (cy)":28.76,"MassiveIntentClassification (de)":51.56,"MassiveIntentClassification (el)":56.47,"MassiveIntentClassification (es)":58.28,"MassiveIntentClassification (fa)":59.05,"MassiveIntentClassification (fi)":57.36,"MassiveIntentClassification (fr)":58.8,"MassiveIntentClassification (he)":51.18,"MassiveIntentClassification (hi)":57.06,"MassiveIntentClassification (hu)":58.36,"MassiveIntentClassification (hy)":52.11,"MassiveIntentClassification (id)":58.27,"MassiveIntentClassification (is)":35.81,"MassiveIntentClassification (it)":58.28,"MassiveIntentClassification (ja)":60.78,"MassiveIntentClassification (jv)":31.15,"MassiveIntentClassification (ka)":44.5,"MassiveIntentClassification (km)":40.99,"MassiveIntentClassification (kn)":46.96,"MassiveIntentClassification (ko)":54.73,"MassiveIntentClassification (lv)":54.87,"MassiveIntentClassification (ml)":47.89,"MassiveIntentClassification 
(mn)":52.23,"MassiveIntentClassification (ms)":54.28,"MassiveIntentClassification (my)":51.96,"MassiveIntentClassification (nl)":59.45,"MassiveIntentClassification (pt)":59.84,"MassiveIntentClassification (ro)":57.04,"MassiveIntentClassification (ru)":58.02,"MassiveIntentClassification (sl)":56.36,"MassiveIntentClassification (sq)":56.48,"MassiveIntentClassification (sw)":33.96,"MassiveIntentClassification (ta)":44.29,"MassiveIntentClassification (te)":47.14,"MassiveIntentClassification (th)":56.86,"MassiveIntentClassification (tl)":35.36,"MassiveIntentClassification (tr)":59.63,"MassiveIntentClassification (ur)":52.79,"MassiveIntentClassification (vi)":54.65,"MassiveIntentClassification (zh-TW)":57.47,"MassiveScenarioClassification (af)":50.86,"MassiveScenarioClassification (am)":41.18,"MassiveScenarioClassification (ar)":50.08,"MassiveScenarioClassification (az)":51.29,"MassiveScenarioClassification (bn)":46.53,"MassiveScenarioClassification (cy)":34.35,"MassiveScenarioClassification (de)":56.4,"MassiveScenarioClassification (el)":61.8,"MassiveScenarioClassification (es)":62.21,"MassiveScenarioClassification (fa)":62.44,"MassiveScenarioClassification (fi)":61.1,"MassiveScenarioClassification (fr)":63.39,"MassiveScenarioClassification (he)":56.29,"MassiveScenarioClassification (hi)":60.63,"MassiveScenarioClassification (hu)":63.29,"MassiveScenarioClassification (hy)":54.88,"MassiveScenarioClassification (id)":61.99,"MassiveScenarioClassification (is)":38.58,"MassiveScenarioClassification (it)":62.35,"MassiveScenarioClassification (ja)":65.17,"MassiveScenarioClassification (jv)":36.13,"MassiveScenarioClassification (ka)":50.27,"MassiveScenarioClassification (km)":44.24,"MassiveScenarioClassification (kn)":47.37,"MassiveScenarioClassification (ko)":58.89,"MassiveScenarioClassification (lv)":56.51,"MassiveScenarioClassification (ml)":50.06,"MassiveScenarioClassification (mn)":55.05,"MassiveScenarioClassification (ms)":59.77,"MassiveScenarioClassification 
(my)":55.72,"MassiveScenarioClassification (nl)":63.38,"MassiveScenarioClassification (pt)":62.41,"MassiveScenarioClassification (ro)":60.68,"MassiveScenarioClassification (ru)":62.31,"MassiveScenarioClassification (sl)":61.43,"MassiveScenarioClassification (sq)":62.23,"MassiveScenarioClassification (sw)":38.52,"MassiveScenarioClassification (ta)":47.0,"MassiveScenarioClassification (te)":51.02,"MassiveScenarioClassification (th)":63.23,"MassiveScenarioClassification (tl)":38.72,"MassiveScenarioClassification (tr)":64.49,"MassiveScenarioClassification (ur)":56.8,"MassiveScenarioClassification (vi)":57.06,"MassiveScenarioClassification (zh-TW)":63.37}
+{"index":84,"Rank":14,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":50.6,"AmazonCounterfactualClassification (de)":66.03,"AmazonCounterfactualClassification (ja)":58.77,"AmazonReviewsClassification (de)":30.45,"AmazonReviewsClassification (es)":40.8,"AmazonReviewsClassification (fr)":35.12,"AmazonReviewsClassification (ja)":32.07,"AmazonReviewsClassification (zh)":38.12,"MTOPDomainClassification (de)":74.64,"MTOPDomainClassification (es)":92.07,"MTOPDomainClassification (fr)":69.24,"MTOPDomainClassification (hi)":88.54,"MTOPDomainClassification (th)":55.63,"MTOPIntentClassification (de)":53.4,"MTOPIntentClassification (es)":71.33,"MTOPIntentClassification (fr)":51.25,"MTOPIntentClassification (hi)":66.73,"MTOPIntentClassification (th)":42.13,"MassiveIntentClassification (af)":44.98,"MassiveIntentClassification (am)":25.35,"MassiveIntentClassification (ar)":57.08,"MassiveIntentClassification (az)":39.11,"MassiveIntentClassification (bn)":61.37,"MassiveIntentClassification (cy)":42.97,"MassiveIntentClassification (de)":50.93,"MassiveIntentClassification (el)":40.09,"MassiveIntentClassification (es)":63.81,"MassiveIntentClassification (fa)":49.06,"MassiveIntentClassification (fi)":42.58,"MassiveIntentClassification (fr)":43.21,"MassiveIntentClassification (he)":37.15,"MassiveIntentClassification (hi)":62.89,"MassiveIntentClassification (hu)":41.62,"MassiveIntentClassification (hy)":32.98,"MassiveIntentClassification (id)":62.11,"MassiveIntentClassification (is)":41.04,"MassiveIntentClassification (it)":55.83,"MassiveIntentClassification (ja)":58.83,"MassiveIntentClassification (jv)":49.31,"MassiveIntentClassification (ka)":26.43,"MassiveIntentClassification (km)":28.77,"MassiveIntentClassification (kn)":52.33,"MassiveIntentClassification (ko)":46.93,"MassiveIntentClassification (lv)":44.26,"MassiveIntentClassification (ml)":57.75,"MassiveIntentClassification (mn)":33.31,"MassiveIntentClassification 
(ms)":55.7,"MassiveIntentClassification (my)":27.39,"MassiveIntentClassification (nl)":48.34,"MassiveIntentClassification (pt)":64.74,"MassiveIntentClassification (ro)":48.41,"MassiveIntentClassification (ru)":52.99,"MassiveIntentClassification (sl)":44.77,"MassiveIntentClassification (sq)":45.45,"MassiveIntentClassification (sw)":46.46,"MassiveIntentClassification (ta)":55.46,"MassiveIntentClassification (te)":51.41,"MassiveIntentClassification (th)":39.2,"MassiveIntentClassification (tl)":48.53,"MassiveIntentClassification (tr)":39.51,"MassiveIntentClassification (ur)":54.72,"MassiveIntentClassification (vi)":62.01,"MassiveIntentClassification (zh-TW)":62.56,"MassiveScenarioClassification (af)":50.47,"MassiveScenarioClassification (am)":27.22,"MassiveScenarioClassification (ar)":65.43,"MassiveScenarioClassification (az)":40.74,"MassiveScenarioClassification (bn)":67.65,"MassiveScenarioClassification (cy)":43.94,"MassiveScenarioClassification (de)":56.67,"MassiveScenarioClassification (el)":41.81,"MassiveScenarioClassification (es)":71.78,"MassiveScenarioClassification (fa)":49.96,"MassiveScenarioClassification (fi)":41.01,"MassiveScenarioClassification (fr)":49.78,"MassiveScenarioClassification (he)":36.69,"MassiveScenarioClassification (hi)":69.28,"MassiveScenarioClassification (hu)":44.31,"MassiveScenarioClassification (hy)":33.64,"MassiveScenarioClassification (id)":68.98,"MassiveScenarioClassification (is)":42.1,"MassiveScenarioClassification (it)":60.27,"MassiveScenarioClassification (ja)":62.48,"MassiveScenarioClassification (jv)":54.68,"MassiveScenarioClassification (ka)":27.22,"MassiveScenarioClassification (km)":32.14,"MassiveScenarioClassification (kn)":57.95,"MassiveScenarioClassification (ko)":47.95,"MassiveScenarioClassification (lv)":42.76,"MassiveScenarioClassification (ml)":62.84,"MassiveScenarioClassification (mn)":33.21,"MassiveScenarioClassification (ms)":62.57,"MassiveScenarioClassification (my)":28.84,"MassiveScenarioClassification 
(nl)":52.85,"MassiveScenarioClassification (pt)":70.24,"MassiveScenarioClassification (ro)":52.73,"MassiveScenarioClassification (ru)":54.26,"MassiveScenarioClassification (sl)":46.89,"MassiveScenarioClassification (sq)":47.16,"MassiveScenarioClassification (sw)":51.2,"MassiveScenarioClassification (ta)":61.84,"MassiveScenarioClassification (te)":59.79,"MassiveScenarioClassification (th)":41.62,"MassiveScenarioClassification (tl)":50.47,"MassiveScenarioClassification (tr)":43.41,"MassiveScenarioClassification (ur)":60.15,"MassiveScenarioClassification (vi)":68.99,"MassiveScenarioClassification (zh-TW)":71.7}
+{"index":85,"Rank":15,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":46.37,"AmazonCounterfactualClassification (de)":66.42,"AmazonCounterfactualClassification (ja)":56.86,"AmazonReviewsClassification (de)":26.85,"AmazonReviewsClassification (es)":38.97,"AmazonReviewsClassification (fr)":26.85,"AmazonReviewsClassification (ja)":28.31,"AmazonReviewsClassification (zh)":35.7,"MTOPDomainClassification (de)":68.42,"MTOPDomainClassification (es)":88.21,"MTOPDomainClassification (fr)":34.99,"MTOPDomainClassification (hi)":84.23,"MTOPDomainClassification (th)":53.17,"MTOPIntentClassification (de)":49.17,"MTOPIntentClassification (es)":65.72,"MTOPIntentClassification (fr)":15.76,"MTOPIntentClassification (hi)":61.88,"MTOPIntentClassification (th)":41.67,"MassiveIntentClassification (af)":43.29,"MassiveIntentClassification (am)":23.21,"MassiveIntentClassification (ar)":53.38,"MassiveIntentClassification (az)":39.56,"MassiveIntentClassification (bn)":56.74,"MassiveIntentClassification (cy)":40.0,"MassiveIntentClassification (de)":45.82,"MassiveIntentClassification (el)":37.87,"MassiveIntentClassification (es)":61.17,"MassiveIntentClassification (fa)":45.65,"MassiveIntentClassification (fi)":40.28,"MassiveIntentClassification (fr)":15.09,"MassiveIntentClassification (he)":32.23,"MassiveIntentClassification (hi)":59.46,"MassiveIntentClassification (hu)":40.91,"MassiveIntentClassification (hy)":29.94,"MassiveIntentClassification (id)":59.14,"MassiveIntentClassification (is)":39.62,"MassiveIntentClassification (it)":51.77,"MassiveIntentClassification (ja)":53.75,"MassiveIntentClassification (jv)":46.29,"MassiveIntentClassification (ka)":25.11,"MassiveIntentClassification (km)":27.22,"MassiveIntentClassification (kn)":47.97,"MassiveIntentClassification (ko)":40.54,"MassiveIntentClassification (lv)":43.14,"MassiveIntentClassification (ml)":53.69,"MassiveIntentClassification (mn)":33.37,"MassiveIntentClassification 
(ms)":51.94,"MassiveIntentClassification (my)":25.32,"MassiveIntentClassification (nl)":44.03,"MassiveIntentClassification (pt)":61.74,"MassiveIntentClassification (ro)":45.73,"MassiveIntentClassification (ru)":47.61,"MassiveIntentClassification (sl)":42.83,"MassiveIntentClassification (sq)":43.61,"MassiveIntentClassification (sw)":45.55,"MassiveIntentClassification (ta)":51.24,"MassiveIntentClassification (te)":47.43,"MassiveIntentClassification (th)":36.88,"MassiveIntentClassification (tl)":45.93,"MassiveIntentClassification (tr)":38.59,"MassiveIntentClassification (ur)":51.85,"MassiveIntentClassification (vi)":58.72,"MassiveIntentClassification (zh-TW)":59.95,"MassiveScenarioClassification (af)":47.42,"MassiveScenarioClassification (am)":24.71,"MassiveScenarioClassification (ar)":62.09,"MassiveScenarioClassification (az)":39.25,"MassiveScenarioClassification (bn)":63.37,"MassiveScenarioClassification (cy)":39.17,"MassiveScenarioClassification (de)":50.71,"MassiveScenarioClassification (el)":39.47,"MassiveScenarioClassification (es)":68.31,"MassiveScenarioClassification (fa)":45.65,"MassiveScenarioClassification (fi)":38.95,"MassiveScenarioClassification (fr)":21.67,"MassiveScenarioClassification (he)":32.13,"MassiveScenarioClassification (hi)":65.57,"MassiveScenarioClassification (hu)":42.97,"MassiveScenarioClassification (hy)":32.13,"MassiveScenarioClassification (id)":65.11,"MassiveScenarioClassification (is)":40.84,"MassiveScenarioClassification (it)":54.55,"MassiveScenarioClassification (ja)":57.15,"MassiveScenarioClassification (jv)":49.3,"MassiveScenarioClassification (ka)":25.86,"MassiveScenarioClassification (km)":31.18,"MassiveScenarioClassification (kn)":53.01,"MassiveScenarioClassification (ko)":40.25,"MassiveScenarioClassification (lv)":41.88,"MassiveScenarioClassification (ml)":59.08,"MassiveScenarioClassification (mn)":33.34,"MassiveScenarioClassification (ms)":57.45,"MassiveScenarioClassification (my)":27.2,"MassiveScenarioClassification 
(nl)":48.42,"MassiveScenarioClassification (pt)":66.41,"MassiveScenarioClassification (ro)":50.08,"MassiveScenarioClassification (ru)":49.94,"MassiveScenarioClassification (sl)":43.43,"MassiveScenarioClassification (sq)":44.08,"MassiveScenarioClassification (sw)":49.53,"MassiveScenarioClassification (ta)":56.79,"MassiveScenarioClassification (te)":54.01,"MassiveScenarioClassification (th)":38.58,"MassiveScenarioClassification (tl)":48.07,"MassiveScenarioClassification (tr)":40.65,"MassiveScenarioClassification (ur)":57.75,"MassiveScenarioClassification (vi)":65.83,"MassiveScenarioClassification (zh-TW)":69.64}
+{"index":110,"Rank":16,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":44.78,"AmazonCounterfactualClassification (de)":68.14,"AmazonCounterfactualClassification (ja)":65.39,"AmazonReviewsClassification (de)":35.03,"AmazonReviewsClassification (es)":36.24,"AmazonReviewsClassification (fr)":35.7,"AmazonReviewsClassification (ja)":31.08,"AmazonReviewsClassification (zh)":33.89,"MTOPDomainClassification (de)":86.19,"MTOPDomainClassification (es)":87.75,"MTOPDomainClassification (fr)":84.61,"MTOPDomainClassification (hi)":76.41,"MTOPDomainClassification (th)":73.62,"MTOPIntentClassification (de)":59.21,"MTOPIntentClassification (es)":57.21,"MTOPIntentClassification (fr)":53.41,"MTOPIntentClassification (hi)":45.54,"MTOPIntentClassification (th)":47.73,"MassiveIntentClassification (af)":40.02,"MassiveIntentClassification (am)":2.35,"MassiveIntentClassification (ar)":43.14,"MassiveIntentClassification (az)":25.6,"MassiveIntentClassification (bn)":4.84,"MassiveIntentClassification (cy)":15.43,"MassiveIntentClassification (de)":51.57,"MassiveIntentClassification (el)":49.65,"MassiveIntentClassification (es)":56.57,"MassiveIntentClassification (fa)":55.36,"MassiveIntentClassification (fi)":45.72,"MassiveIntentClassification (fr)":57.02,"MassiveIntentClassification (he)":46.74,"MassiveIntentClassification (hi)":48.55,"MassiveIntentClassification (hu)":50.65,"MassiveIntentClassification (hy)":40.79,"MassiveIntentClassification (id)":56.0,"MassiveIntentClassification (is)":16.08,"MassiveIntentClassification (it)":57.65,"MassiveIntentClassification (ja)":55.33,"MassiveIntentClassification (jv)":28.16,"MassiveIntentClassification (ka)":29.41,"MassiveIntentClassification (km)":4.79,"MassiveIntentClassification (kn)":3.37,"MassiveIntentClassification (ko)":49.97,"MassiveIntentClassification (lv)":44.31,"MassiveIntentClassification (ml)":3.24,"MassiveIntentClassification 
(mn)":40.37,"MassiveIntentClassification (ms)":47.97,"MassiveIntentClassification (my)":38.48,"MassiveIntentClassification (nl)":58.29,"MassiveIntentClassification (pt)":58.63,"MassiveIntentClassification (ro)":50.63,"MassiveIntentClassification (ru)":57.96,"MassiveIntentClassification (sl)":50.66,"MassiveIntentClassification (sq)":50.25,"MassiveIntentClassification (sw)":19.29,"MassiveIntentClassification (ta)":3.79,"MassiveIntentClassification (te)":3.36,"MassiveIntentClassification (th)":45.28,"MassiveIntentClassification (tl)":28.44,"MassiveIntentClassification (tr)":50.47,"MassiveIntentClassification (ur)":46.03,"MassiveIntentClassification (vi)":45.25,"MassiveIntentClassification (zh-TW)":54.96,"MassiveScenarioClassification (af)":53.67,"MassiveScenarioClassification (am)":7.72,"MassiveScenarioClassification (ar)":52.19,"MassiveScenarioClassification (az)":34.75,"MassiveScenarioClassification (bn)":10.65,"MassiveScenarioClassification (cy)":21.24,"MassiveScenarioClassification (de)":61.4,"MassiveScenarioClassification (el)":60.68,"MassiveScenarioClassification (es)":64.61,"MassiveScenarioClassification (fa)":59.24,"MassiveScenarioClassification (fi)":54.66,"MassiveScenarioClassification (fr)":65.2,"MassiveScenarioClassification (he)":54.74,"MassiveScenarioClassification (hi)":55.99,"MassiveScenarioClassification (hu)":61.2,"MassiveScenarioClassification (hy)":49.63,"MassiveScenarioClassification (id)":65.25,"MassiveScenarioClassification (is)":22.6,"MassiveScenarioClassification (it)":64.63,"MassiveScenarioClassification (ja)":62.32,"MassiveScenarioClassification (jv)":35.77,"MassiveScenarioClassification (ka)":39.08,"MassiveScenarioClassification (km)":9.24,"MassiveScenarioClassification (kn)":8.28,"MassiveScenarioClassification (ko)":57.6,"MassiveScenarioClassification (lv)":51.72,"MassiveScenarioClassification (ml)":8.25,"MassiveScenarioClassification (mn)":47.21,"MassiveScenarioClassification (ms)":55.65,"MassiveScenarioClassification 
(my)":43.31,"MassiveScenarioClassification (nl)":67.49,"MassiveScenarioClassification (pt)":64.26,"MassiveScenarioClassification (ro)":58.03,"MassiveScenarioClassification (ru)":65.41,"MassiveScenarioClassification (sl)":59.36,"MassiveScenarioClassification (sq)":62.69,"MassiveScenarioClassification (sw)":25.12,"MassiveScenarioClassification (ta)":8.67,"MassiveScenarioClassification (te)":7.82,"MassiveScenarioClassification (th)":54.65,"MassiveScenarioClassification (tl)":36.09,"MassiveScenarioClassification (tr)":60.89,"MassiveScenarioClassification (ur)":54.71,"MassiveScenarioClassification (vi)":55.15,"MassiveScenarioClassification (zh-TW)":62.89}
+{"index":5,"Rank":17,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":42.45,"AmazonCounterfactualClassification (de)":67.82,"AmazonCounterfactualClassification (ja)":68.76,"AmazonReviewsClassification (de)":31.07,"AmazonReviewsClassification (es)":32.72,"AmazonReviewsClassification (fr)":31.12,"AmazonReviewsClassification (ja)":28.94,"AmazonReviewsClassification (zh)":30.89,"MTOPDomainClassification (de)":74.08,"MTOPDomainClassification (es)":73.47,"MTOPDomainClassification (fr)":72.26,"MTOPDomainClassification (hi)":72.95,"MTOPDomainClassification (th)":72.68,"MTOPIntentClassification (de)":51.62,"MTOPIntentClassification (es)":52.75,"MTOPIntentClassification (fr)":50.12,"MTOPIntentClassification (hi)":45.55,"MTOPIntentClassification (th)":50.07,"MassiveIntentClassification (af)":38.01,"MassiveIntentClassification (am)":12.7,"MassiveIntentClassification (ar)":37.16,"MassiveIntentClassification (az)":19.98,"MassiveIntentClassification (bn)":42.51,"MassiveIntentClassification (cy)":17.33,"MassiveIntentClassification (de)":44.79,"MassiveIntentClassification (el)":46.71,"MassiveIntentClassification (es)":45.44,"MassiveIntentClassification (fa)":45.01,"MassiveIntentClassification (fi)":45.94,"MassiveIntentClassification (fr)":46.13,"MassiveIntentClassification (he)":42.55,"MassiveIntentClassification (hi)":40.2,"MassiveIntentClassification (hu)":42.77,"MassiveIntentClassification (hy)":28.07,"MassiveIntentClassification (id)":45.81,"MassiveIntentClassification (is)":39.86,"MassiveIntentClassification (it)":48.25,"MassiveIntentClassification (ja)":45.3,"MassiveIntentClassification (jv)":24.3,"MassiveIntentClassification (ka)":22.7,"MassiveIntentClassification (km)":22.48,"MassiveIntentClassification (kn)":4.32,"MassiveIntentClassification (ko)":44.26,"MassiveIntentClassification (lv)":39.75,"MassiveIntentClassification (ml)":41.33,"MassiveIntentClassification (mn)":16.2,"MassiveIntentClassification 
(ms)":43.23,"MassiveIntentClassification (my)":25.37,"MassiveIntentClassification (nl)":45.0,"MassiveIntentClassification (pt)":48.55,"MassiveIntentClassification (ro)":44.3,"MassiveIntentClassification (ru)":44.29,"MassiveIntentClassification (sl)":44.72,"MassiveIntentClassification (sq)":46.12,"MassiveIntentClassification (sw)":31.89,"MassiveIntentClassification (ta)":29.63,"MassiveIntentClassification (te)":36.03,"MassiveIntentClassification (th)":43.39,"MassiveIntentClassification (tl)":29.73,"MassiveIntentClassification (tr)":43.93,"MassiveIntentClassification (ur)":26.11,"MassiveIntentClassification (vi)":44.33,"MassiveIntentClassification (zh-TW)":32.93,"MassiveScenarioClassification (af)":47.1,"MassiveScenarioClassification (am)":17.7,"MassiveScenarioClassification (ar)":45.21,"MassiveScenarioClassification (az)":28.21,"MassiveScenarioClassification (bn)":50.52,"MassiveScenarioClassification (cy)":22.58,"MassiveScenarioClassification (de)":54.34,"MassiveScenarioClassification (el)":55.47,"MassiveScenarioClassification (es)":52.77,"MassiveScenarioClassification (fa)":52.5,"MassiveScenarioClassification (fi)":52.63,"MassiveScenarioClassification (fr)":54.32,"MassiveScenarioClassification (he)":52.41,"MassiveScenarioClassification (hi)":47.37,"MassiveScenarioClassification (hu)":53.43,"MassiveScenarioClassification (hy)":33.57,"MassiveScenarioClassification (id)":54.38,"MassiveScenarioClassification (is)":49.78,"MassiveScenarioClassification (it)":54.84,"MassiveScenarioClassification (ja)":54.12,"MassiveScenarioClassification (jv)":32.71,"MassiveScenarioClassification (ka)":26.92,"MassiveScenarioClassification (km)":27.23,"MassiveScenarioClassification (kn)":10.06,"MassiveScenarioClassification (ko)":52.01,"MassiveScenarioClassification (lv)":44.82,"MassiveScenarioClassification (ml)":49.1,"MassiveScenarioClassification (mn)":21.51,"MassiveScenarioClassification (ms)":53.6,"MassiveScenarioClassification (my)":29.72,"MassiveScenarioClassification 
(nl)":53.33,"MassiveScenarioClassification (pt)":53.41,"MassiveScenarioClassification (ro)":50.48,"MassiveScenarioClassification (ru)":51.84,"MassiveScenarioClassification (sl)":51.29,"MassiveScenarioClassification (sq)":55.65,"MassiveScenarioClassification (sw)":42.04,"MassiveScenarioClassification (ta)":36.72,"MassiveScenarioClassification (te)":42.08,"MassiveScenarioClassification (th)":52.15,"MassiveScenarioClassification (tl)":37.34,"MassiveScenarioClassification (tr)":52.56,"MassiveScenarioClassification (ur)":32.6,"MassiveScenarioClassification (vi)":50.97,"MassiveScenarioClassification (zh-TW)":42.32}
+{"index":45,"Rank":18,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":37.64,"AmazonCounterfactualClassification (de)":56.88,"AmazonCounterfactualClassification (ja)":54.65,"AmazonReviewsClassification (de)":24.79,"AmazonReviewsClassification (es)":26.64,"AmazonReviewsClassification (fr)":26.39,"AmazonReviewsClassification (ja)":22.08,"AmazonReviewsClassification (zh)":24.27,"MTOPDomainClassification (de)":62.73,"MTOPDomainClassification (es)":67.55,"MTOPDomainClassification (fr)":65.35,"MTOPDomainClassification (hi)":45.37,"MTOPDomainClassification (th)":55.28,"MTOPIntentClassification (de)":49.56,"MTOPIntentClassification (es)":49.94,"MTOPIntentClassification (fr)":46.33,"MTOPIntentClassification (hi)":32.21,"MTOPIntentClassification (th)":43.63,"MassiveIntentClassification (af)":40.55,"MassiveIntentClassification (am)":24.18,"MassiveIntentClassification (ar)":30.13,"MassiveIntentClassification (az)":35.88,"MassiveIntentClassification (bn)":29.17,"MassiveIntentClassification (cy)":41.79,"MassiveIntentClassification (de)":42.07,"MassiveIntentClassification (el)":36.25,"MassiveIntentClassification (es)":42.68,"MassiveIntentClassification (fa)":35.59,"MassiveIntentClassification (fi)":40.04,"MassiveIntentClassification (fr)":43.44,"MassiveIntentClassification (he)":31.59,"MassiveIntentClassification (hi)":27.04,"MassiveIntentClassification (hu)":38.45,"MassiveIntentClassification (hy)":27.98,"MassiveIntentClassification (id)":43.97,"MassiveIntentClassification (is)":40.3,"MassiveIntentClassification (it)":45.47,"MassiveIntentClassification (ja)":45.61,"MassiveIntentClassification (jv)":38.67,"MassiveIntentClassification (ka)":25.65,"MassiveIntentClassification (km)":28.3,"MassiveIntentClassification (kn)":23.48,"MassiveIntentClassification (ko)":36.56,"MassiveIntentClassification (lv)":41.85,"MassiveIntentClassification (ml)":24.91,"MassiveIntentClassification 
(mn)":29.86,"MassiveIntentClassification (ms)":42.42,"MassiveIntentClassification (my)":25.13,"MassiveIntentClassification (nl)":43.62,"MassiveIntentClassification (pt)":45.21,"MassiveIntentClassification (ro)":41.81,"MassiveIntentClassification (ru)":35.97,"MassiveIntentClassification (sl)":40.61,"MassiveIntentClassification (sq)":42.76,"MassiveIntentClassification (sw)":41.12,"MassiveIntentClassification (ta)":24.6,"MassiveIntentClassification (te)":25.04,"MassiveIntentClassification (th)":35.4,"MassiveIntentClassification (tl)":41.19,"MassiveIntentClassification (tr)":36.41,"MassiveIntentClassification (ur)":25.93,"MassiveIntentClassification (vi)":38.8,"MassiveIntentClassification (zh-TW)":42.31,"MassiveScenarioClassification (af)":43.25,"MassiveScenarioClassification (am)":25.3,"MassiveScenarioClassification (ar)":32.07,"MassiveScenarioClassification (az)":36.68,"MassiveScenarioClassification (bn)":29.57,"MassiveScenarioClassification (cy)":42.1,"MassiveScenarioClassification (de)":43.21,"MassiveScenarioClassification (el)":36.5,"MassiveScenarioClassification (es)":44.08,"MassiveScenarioClassification (fa)":32.61,"MassiveScenarioClassification (fi)":40.36,"MassiveScenarioClassification (fr)":45.07,"MassiveScenarioClassification (he)":32.18,"MassiveScenarioClassification (hi)":26.9,"MassiveScenarioClassification (hu)":40.38,"MassiveScenarioClassification (hy)":28.38,"MassiveScenarioClassification (id)":44.36,"MassiveScenarioClassification (is)":39.29,"MassiveScenarioClassification (it)":46.47,"MassiveScenarioClassification (ja)":46.26,"MassiveScenarioClassification (jv)":41.13,"MassiveScenarioClassification (ka)":24.73,"MassiveScenarioClassification (km)":29.74,"MassiveScenarioClassification (kn)":23.85,"MassiveScenarioClassification (ko)":36.57,"MassiveScenarioClassification (lv)":40.93,"MassiveScenarioClassification (ml)":25.53,"MassiveScenarioClassification (mn)":29.11,"MassiveScenarioClassification (ms)":43.79,"MassiveScenarioClassification 
(my)":27.27,"MassiveScenarioClassification (nl)":45.36,"MassiveScenarioClassification (pt)":45.9,"MassiveScenarioClassification (ro)":44.12,"MassiveScenarioClassification (ru)":32.76,"MassiveScenarioClassification (sl)":40.5,"MassiveScenarioClassification (sq)":42.52,"MassiveScenarioClassification (sw)":43.0,"MassiveScenarioClassification (ta)":28.33,"MassiveScenarioClassification (te)":26.59,"MassiveScenarioClassification (th)":36.79,"MassiveScenarioClassification (tl)":42.57,"MassiveScenarioClassification (tr)":37.09,"MassiveScenarioClassification (ur)":28.84,"MassiveScenarioClassification (vi)":37.36,"MassiveScenarioClassification (zh-TW)":44.42}
+{"index":46,"Rank":19,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":36.66,"AmazonCounterfactualClassification (de)":59.08,"AmazonCounterfactualClassification (ja)":56.42,"AmazonReviewsClassification (de)":24.52,"AmazonReviewsClassification (es)":29.1,"AmazonReviewsClassification (fr)":27.4,"AmazonReviewsClassification (ja)":21.72,"AmazonReviewsClassification (zh)":23.98,"MTOPDomainClassification (de)":60.37,"MTOPDomainClassification (es)":67.37,"MTOPDomainClassification (fr)":63.13,"MTOPDomainClassification (hi)":47.05,"MTOPDomainClassification (th)":52.28,"MTOPIntentClassification (de)":45.07,"MTOPIntentClassification (es)":48.81,"MTOPIntentClassification (fr)":44.34,"MTOPIntentClassification (hi)":34.2,"MTOPIntentClassification (th)":43.11,"MassiveIntentClassification (af)":37.79,"MassiveIntentClassification (am)":23.72,"MassiveIntentClassification (ar)":29.64,"MassiveIntentClassification (az)":39.48,"MassiveIntentClassification (bn)":26.55,"MassiveIntentClassification (cy)":38.78,"MassiveIntentClassification (de)":40.39,"MassiveIntentClassification (el)":37.29,"MassiveIntentClassification (es)":41.18,"MassiveIntentClassification (fa)":36.42,"MassiveIntentClassification (fi)":38.76,"MassiveIntentClassification (fr)":43.67,"MassiveIntentClassification (he)":31.98,"MassiveIntentClassification (hi)":28.04,"MassiveIntentClassification (hu)":38.14,"MassiveIntentClassification (hy)":26.05,"MassiveIntentClassification (id)":41.16,"MassiveIntentClassification (is)":38.63,"MassiveIntentClassification (it)":44.04,"MassiveIntentClassification (ja)":46.21,"MassiveIntentClassification (jv)":37.61,"MassiveIntentClassification (ka)":24.47,"MassiveIntentClassification (km)":26.24,"MassiveIntentClassification (kn)":17.83,"MassiveIntentClassification (ko)":37.27,"MassiveIntentClassification (lv)":40.93,"MassiveIntentClassification (ml)":17.89,"MassiveIntentClassification 
(mn)":32.98,"MassiveIntentClassification (ms)":40.91,"MassiveIntentClassification (my)":17.83,"MassiveIntentClassification (nl)":41.76,"MassiveIntentClassification (pt)":44.54,"MassiveIntentClassification (ro)":39.97,"MassiveIntentClassification (ru)":37.46,"MassiveIntentClassification (sl)":38.29,"MassiveIntentClassification (sq)":40.95,"MassiveIntentClassification (sw)":38.33,"MassiveIntentClassification (ta)":19.03,"MassiveIntentClassification (te)":19.38,"MassiveIntentClassification (th)":34.09,"MassiveIntentClassification (tl)":40.29,"MassiveIntentClassification (tr)":38.86,"MassiveIntentClassification (ur)":27.83,"MassiveIntentClassification (vi)":38.71,"MassiveIntentClassification (zh-TW)":42.32,"MassiveScenarioClassification (af)":40.25,"MassiveScenarioClassification (am)":25.69,"MassiveScenarioClassification (ar)":32.4,"MassiveScenarioClassification (az)":40.53,"MassiveScenarioClassification (bn)":27.23,"MassiveScenarioClassification (cy)":38.7,"MassiveScenarioClassification (de)":41.36,"MassiveScenarioClassification (el)":38.44,"MassiveScenarioClassification (es)":44.18,"MassiveScenarioClassification (fa)":34.83,"MassiveScenarioClassification (fi)":40.56,"MassiveScenarioClassification (fr)":45.92,"MassiveScenarioClassification (he)":32.08,"MassiveScenarioClassification (hi)":28.37,"MassiveScenarioClassification (hu)":39.49,"MassiveScenarioClassification (hy)":25.9,"MassiveScenarioClassification (id)":40.96,"MassiveScenarioClassification (is)":38.56,"MassiveScenarioClassification (it)":46.59,"MassiveScenarioClassification (ja)":46.25,"MassiveScenarioClassification (jv)":39.66,"MassiveScenarioClassification (ka)":25.28,"MassiveScenarioClassification (km)":28.97,"MassiveScenarioClassification (kn)":19.27,"MassiveScenarioClassification (ko)":35.73,"MassiveScenarioClassification (lv)":39.57,"MassiveScenarioClassification (ml)":19.9,"MassiveScenarioClassification (mn)":32.43,"MassiveScenarioClassification (ms)":42.32,"MassiveScenarioClassification 
(my)":20.86,"MassiveScenarioClassification (nl)":43.59,"MassiveScenarioClassification (pt)":46.31,"MassiveScenarioClassification (ro)":42.53,"MassiveScenarioClassification (ru)":35.95,"MassiveScenarioClassification (sl)":38.69,"MassiveScenarioClassification (sq)":40.47,"MassiveScenarioClassification (sw)":39.55,"MassiveScenarioClassification (ta)":22.88,"MassiveScenarioClassification (te)":20.51,"MassiveScenarioClassification (th)":34.93,"MassiveScenarioClassification (tl)":40.75,"MassiveScenarioClassification (tr)":39.07,"MassiveScenarioClassification (ur)":29.75,"MassiveScenarioClassification (vi)":38.02,"MassiveScenarioClassification (zh-TW)":45.18}
+{"index":108,"Rank":20,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":31.93,"AmazonCounterfactualClassification (de)":57.1,"AmazonCounterfactualClassification (ja)":59.91,"AmazonReviewsClassification (de)":25.91,"AmazonReviewsClassification (es)":27.63,"AmazonReviewsClassification (fr)":27.54,"AmazonReviewsClassification (ja)":23.57,"AmazonReviewsClassification (zh)":22.99,"MTOPDomainClassification (de)":72.04,"MTOPDomainClassification (es)":72.99,"MTOPDomainClassification (fr)":75.59,"MTOPDomainClassification (hi)":40.36,"MTOPDomainClassification (th)":17.1,"MTOPIntentClassification (de)":43.41,"MTOPIntentClassification (es)":41.88,"MTOPIntentClassification (fr)":38.94,"MTOPIntentClassification (hi)":17.75,"MTOPIntentClassification (th)":5.63,"MassiveIntentClassification (af)":38.94,"MassiveIntentClassification (am)":2.45,"MassiveIntentClassification (ar)":20.94,"MassiveIntentClassification (az)":34.25,"MassiveIntentClassification (bn)":13.67,"MassiveIntentClassification (cy)":35.71,"MassiveIntentClassification (de)":44.17,"MassiveIntentClassification (el)":28.7,"MassiveIntentClassification (es)":40.91,"MassiveIntentClassification (fa)":23.52,"MassiveIntentClassification (fi)":39.27,"MassiveIntentClassification (fr)":44.82,"MassiveIntentClassification (he)":23.65,"MassiveIntentClassification (hi)":17.98,"MassiveIntentClassification (hu)":38.0,"MassiveIntentClassification (hy)":8.69,"MassiveIntentClassification (id)":39.66,"MassiveIntentClassification (is)":35.14,"MassiveIntentClassification (it)":43.17,"MassiveIntentClassification (ja)":30.94,"MassiveIntentClassification (jv)":36.69,"MassiveIntentClassification (ka)":9.17,"MassiveIntentClassification (km)":4.99,"MassiveIntentClassification (kn)":3.08,"MassiveIntentClassification (ko)":19.97,"MassiveIntentClassification (lv)":38.61,"MassiveIntentClassification (ml)":2.85,"MassiveIntentClassification (mn)":23.25,"MassiveIntentClassification 
(ms)":36.21,"MassiveIntentClassification (my)":4.38,"MassiveIntentClassification (nl)":41.85,"MassiveIntentClassification (pt)":45.12,"MassiveIntentClassification (ro)":41.71,"MassiveIntentClassification (ru)":26.33,"MassiveIntentClassification (sl)":38.52,"MassiveIntentClassification (sq)":41.62,"MassiveIntentClassification (sw)":35.28,"MassiveIntentClassification (ta)":13.1,"MassiveIntentClassification (te)":2.56,"MassiveIntentClassification (th)":10.54,"MassiveIntentClassification (tl)":38.56,"MassiveIntentClassification (tr)":35.9,"MassiveIntentClassification (ur)":16.18,"MassiveIntentClassification (vi)":37.38,"MassiveIntentClassification (zh-TW)":22.39,"MassiveScenarioClassification (af)":45.71,"MassiveScenarioClassification (am)":7.41,"MassiveScenarioClassification (ar)":27.62,"MassiveScenarioClassification (az)":39.58,"MassiveScenarioClassification (bn)":18.98,"MassiveScenarioClassification (cy)":41.4,"MassiveScenarioClassification (de)":52.07,"MassiveScenarioClassification (el)":35.51,"MassiveScenarioClassification (es)":50.74,"MassiveScenarioClassification (fa)":29.0,"MassiveScenarioClassification (fi)":45.8,"MassiveScenarioClassification (fr)":53.76,"MassiveScenarioClassification (he)":25.68,"MassiveScenarioClassification (hi)":23.02,"MassiveScenarioClassification (hu)":44.09,"MassiveScenarioClassification (hy)":14.83,"MassiveScenarioClassification (id)":44.35,"MassiveScenarioClassification (is)":43.08,"MassiveScenarioClassification (it)":51.71,"MassiveScenarioClassification (ja)":36.75,"MassiveScenarioClassification (jv)":44.57,"MassiveScenarioClassification (ka)":14.84,"MassiveScenarioClassification (km)":9.75,"MassiveScenarioClassification (kn)":8.32,"MassiveScenarioClassification (ko)":25.72,"MassiveScenarioClassification (lv)":42.75,"MassiveScenarioClassification (ml)":7.25,"MassiveScenarioClassification (mn)":29.03,"MassiveScenarioClassification (ms)":44.65,"MassiveScenarioClassification (my)":10.07,"MassiveScenarioClassification 
(nl)":49.15,"MassiveScenarioClassification (pt)":53.0,"MassiveScenarioClassification (ro)":49.97,"MassiveScenarioClassification (ru)":28.75,"MassiveScenarioClassification (sl)":42.26,"MassiveScenarioClassification (sq)":49.14,"MassiveScenarioClassification (sw)":43.18,"MassiveScenarioClassification (ta)":19.38,"MassiveScenarioClassification (te)":7.74,"MassiveScenarioClassification (th)":18.32,"MassiveScenarioClassification (tl)":48.31,"MassiveScenarioClassification (tr)":41.79,"MassiveScenarioClassification (ur)":24.46,"MassiveScenarioClassification (vi)":40.94,"MassiveScenarioClassification (zh-TW)":31.16}
+{"index":39,"Rank":21,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":31.78,"AmazonCounterfactualClassification (de)":54.33,"AmazonCounterfactualClassification (ja)":56.34,"AmazonReviewsClassification (de)":27.2,"AmazonReviewsClassification (es)":34.88,"AmazonReviewsClassification (fr)":31.56,"AmazonReviewsClassification (ja)":22.71,"AmazonReviewsClassification (zh)":22.35,"MTOPDomainClassification (de)":74.86,"MTOPDomainClassification (es)":77.09,"MTOPDomainClassification (fr)":79.8,"MTOPDomainClassification (hi)":32.79,"MTOPDomainClassification (th)":16.65,"MTOPIntentClassification (de)":42.36,"MTOPIntentClassification (es)":44.73,"MTOPIntentClassification (fr)":38.96,"MTOPIntentClassification (hi)":13.58,"MTOPIntentClassification (th)":5.4,"MassiveIntentClassification (af)":37.22,"MassiveIntentClassification (am)":3.19,"MassiveIntentClassification (ar)":14.26,"MassiveIntentClassification (az)":37.22,"MassiveIntentClassification (bn)":10.76,"MassiveIntentClassification (cy)":32.5,"MassiveIntentClassification (de)":42.78,"MassiveIntentClassification (el)":33.49,"MassiveIntentClassification (es)":44.45,"MassiveIntentClassification (fa)":26.74,"MassiveIntentClassification (fi)":38.1,"MassiveIntentClassification (fr)":46.89,"MassiveIntentClassification (he)":25.2,"MassiveIntentClassification (hi)":13.94,"MassiveIntentClassification (hu)":34.71,"MassiveIntentClassification (hy)":6.71,"MassiveIntentClassification (id)":38.57,"MassiveIntentClassification (is)":32.23,"MassiveIntentClassification (it)":45.8,"MassiveIntentClassification (ja)":29.19,"MassiveIntentClassification (jv)":34.22,"MassiveIntentClassification (ka)":8.89,"MassiveIntentClassification (km)":4.62,"MassiveIntentClassification (kn)":3.17,"MassiveIntentClassification (ko)":15.03,"MassiveIntentClassification (lv)":36.1,"MassiveIntentClassification (ml)":3.0,"MassiveIntentClassification (mn)":23.3,"MassiveIntentClassification 
(ms)":36.13,"MassiveIntentClassification (my)":3.81,"MassiveIntentClassification (nl)":41.08,"MassiveIntentClassification (pt)":45.2,"MassiveIntentClassification (ro)":39.49,"MassiveIntentClassification (ru)":31.82,"MassiveIntentClassification (sl)":35.45,"MassiveIntentClassification (sq)":36.89,"MassiveIntentClassification (sw)":37.54,"MassiveIntentClassification (ta)":7.91,"MassiveIntentClassification (te)":2.85,"MassiveIntentClassification (th)":10.5,"MassiveIntentClassification (tl)":39.47,"MassiveIntentClassification (tr)":37.5,"MassiveIntentClassification (ur)":16.11,"MassiveIntentClassification (vi)":36.11,"MassiveIntentClassification (zh-TW)":17.22,"MassiveScenarioClassification (af)":47.8,"MassiveScenarioClassification (am)":7.08,"MassiveScenarioClassification (ar)":22.83,"MassiveScenarioClassification (az)":44.95,"MassiveScenarioClassification (bn)":16.59,"MassiveScenarioClassification (cy)":37.92,"MassiveScenarioClassification (de)":58.74,"MassiveScenarioClassification (el)":43.0,"MassiveScenarioClassification (es)":54.47,"MassiveScenarioClassification (fa)":30.58,"MassiveScenarioClassification (fi)":43.57,"MassiveScenarioClassification (fr)":56.99,"MassiveScenarioClassification (he)":28.08,"MassiveScenarioClassification (hi)":18.1,"MassiveScenarioClassification (hu)":41.74,"MassiveScenarioClassification (hy)":11.54,"MassiveScenarioClassification (id)":46.95,"MassiveScenarioClassification (is)":42.78,"MassiveScenarioClassification (it)":54.65,"MassiveScenarioClassification (ja)":35.9,"MassiveScenarioClassification (jv)":42.51,"MassiveScenarioClassification (ka)":13.8,"MassiveScenarioClassification (km)":9.45,"MassiveScenarioClassification (kn)":8.16,"MassiveScenarioClassification (ko)":19.91,"MassiveScenarioClassification (lv)":40.48,"MassiveScenarioClassification (ml)":6.7,"MassiveScenarioClassification (mn)":28.55,"MassiveScenarioClassification (ms)":46.62,"MassiveScenarioClassification (my)":9.98,"MassiveScenarioClassification 
(nl)":51.76,"MassiveScenarioClassification (pt)":55.6,"MassiveScenarioClassification (ro)":50.54,"MassiveScenarioClassification (ru)":37.73,"MassiveScenarioClassification (sl)":41.67,"MassiveScenarioClassification (sq)":47.38,"MassiveScenarioClassification (sw)":44.18,"MassiveScenarioClassification (ta)":12.6,"MassiveScenarioClassification (te)":7.02,"MassiveScenarioClassification (th)":19.79,"MassiveScenarioClassification (tl)":50.36,"MassiveScenarioClassification (tr)":45.48,"MassiveScenarioClassification (ur)":23.68,"MassiveScenarioClassification (vi)":41.63,"MassiveScenarioClassification (zh-TW)":27.52}
+{"index":112,"Rank":22,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":30.51,"AmazonCounterfactualClassification (de)":59.79,"AmazonCounterfactualClassification (ja)":50.59,"AmazonReviewsClassification (de)":35.06,"AmazonReviewsClassification (es)":37.18,"AmazonReviewsClassification (fr)":35.48,"AmazonReviewsClassification (ja)":22.24,"AmazonReviewsClassification (zh)":21.89,"MTOPDomainClassification (de)":85.42,"MTOPDomainClassification (es)":88.2,"MTOPDomainClassification (fr)":85.05,"MTOPDomainClassification (hi)":21.74,"MTOPDomainClassification (th)":15.87,"MTOPIntentClassification (de)":55.75,"MTOPIntentClassification (es)":57.73,"MTOPIntentClassification (fr)":51.07,"MTOPIntentClassification (hi)":3.19,"MTOPIntentClassification (th)":5.55,"MassiveIntentClassification (af)":42.6,"MassiveIntentClassification (am)":2.12,"MassiveIntentClassification (ar)":4.64,"MassiveIntentClassification (az)":35.05,"MassiveIntentClassification (bn)":2.84,"MassiveIntentClassification (cy)":36.19,"MassiveIntentClassification (de)":55.49,"MassiveIntentClassification (el)":10.14,"MassiveIntentClassification (es)":56.72,"MassiveIntentClassification (fa)":3.54,"MassiveIntentClassification (fi)":37.13,"MassiveIntentClassification (fr)":57.67,"MassiveIntentClassification (he)":2.56,"MassiveIntentClassification (hi)":3.24,"MassiveIntentClassification (hu)":34.22,"MassiveIntentClassification (hy)":3.01,"MassiveIntentClassification (id)":46.54,"MassiveIntentClassification (is)":34.77,"MassiveIntentClassification (it)":54.13,"MassiveIntentClassification (ja)":4.27,"MassiveIntentClassification (jv)":36.97,"MassiveIntentClassification (ka)":2.72,"MassiveIntentClassification (km)":5.35,"MassiveIntentClassification (kn)":3.17,"MassiveIntentClassification (ko)":2.64,"MassiveIntentClassification (lv)":36.32,"MassiveIntentClassification (ml)":3.18,"MassiveIntentClassification (mn)":22.85,"MassiveIntentClassification 
(ms)":42.87,"MassiveIntentClassification (my)":4.04,"MassiveIntentClassification (nl)":49.53,"MassiveIntentClassification (pt)":57.03,"MassiveIntentClassification (ro)":49.95,"MassiveIntentClassification (ru)":36.58,"MassiveIntentClassification (sl)":39.44,"MassiveIntentClassification (sq)":41.78,"MassiveIntentClassification (sw)":35.85,"MassiveIntentClassification (ta)":2.32,"MassiveIntentClassification (te)":2.2,"MassiveIntentClassification (th)":3.74,"MassiveIntentClassification (tl)":43.12,"MassiveIntentClassification (tr)":35.24,"MassiveIntentClassification (ur)":3.0,"MassiveIntentClassification (vi)":30.01,"MassiveIntentClassification (zh-TW)":3.35,"MassiveScenarioClassification (af)":52.54,"MassiveScenarioClassification (am)":6.3,"MassiveScenarioClassification (ar)":11.96,"MassiveScenarioClassification (az)":40.17,"MassiveScenarioClassification (bn)":8.29,"MassiveScenarioClassification (cy)":42.24,"MassiveScenarioClassification (de)":68.09,"MassiveScenarioClassification (el)":16.66,"MassiveScenarioClassification (es)":64.32,"MassiveScenarioClassification (fa)":6.9,"MassiveScenarioClassification (fi)":43.96,"MassiveScenarioClassification (fr)":66.72,"MassiveScenarioClassification (he)":7.51,"MassiveScenarioClassification (hi)":7.82,"MassiveScenarioClassification (hu)":42.16,"MassiveScenarioClassification (hy)":9.33,"MassiveScenarioClassification (id)":53.54,"MassiveScenarioClassification (is)":42.84,"MassiveScenarioClassification (it)":62.44,"MassiveScenarioClassification (ja)":7.29,"MassiveScenarioClassification (jv)":43.13,"MassiveScenarioClassification (ka)":7.63,"MassiveScenarioClassification (km)":9.08,"MassiveScenarioClassification (kn)":8.1,"MassiveScenarioClassification (ko)":6.35,"MassiveScenarioClassification (lv)":40.24,"MassiveScenarioClassification (ml)":7.65,"MassiveScenarioClassification (mn)":27.98,"MassiveScenarioClassification (ms)":52.41,"MassiveScenarioClassification (my)":9.21,"MassiveScenarioClassification 
(nl)":60.35,"MassiveScenarioClassification (pt)":62.78,"MassiveScenarioClassification (ro)":59.62,"MassiveScenarioClassification (ru)":43.44,"MassiveScenarioClassification (sl)":44.79,"MassiveScenarioClassification (sq)":50.84,"MassiveScenarioClassification (sw)":44.63,"MassiveScenarioClassification (ta)":7.95,"MassiveScenarioClassification (te)":7.5,"MassiveScenarioClassification (th)":8.79,"MassiveScenarioClassification (tl)":53.54,"MassiveScenarioClassification (tr)":42.47,"MassiveScenarioClassification (ur)":9.58,"MassiveScenarioClassification (vi)":34.68,"MassiveScenarioClassification (zh-TW)":8.77}
+{"index":59,"Rank":23,"Model":"slx-v0.1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.08,"Average":30.35,"AmazonCounterfactualClassification (de)":57.98,"AmazonCounterfactualClassification (ja)":59.38,"AmazonReviewsClassification (de)":26.73,"AmazonReviewsClassification (es)":27.23,"AmazonReviewsClassification (fr)":27.0,"AmazonReviewsClassification (ja)":23.75,"AmazonReviewsClassification (zh)":23.13,"MTOPDomainClassification (de)":70.39,"MTOPDomainClassification (es)":73.35,"MTOPDomainClassification (fr)":75.57,"MTOPDomainClassification (hi)":39.06,"MTOPDomainClassification (th)":17.09,"MTOPIntentClassification (de)":45.26,"MTOPIntentClassification (es)":44.9,"MTOPIntentClassification (fr)":41.12,"MTOPIntentClassification (hi)":18.3,"MTOPIntentClassification (th)":4.68,"MassiveIntentClassification (af)":36.53,"MassiveIntentClassification (am)":2.44,"MassiveIntentClassification (ar)":18.02,"MassiveIntentClassification (az)":28.74,"MassiveIntentClassification (bn)":12.1,"MassiveIntentClassification (cy)":33.68,"MassiveIntentClassification (de)":42.21,"MassiveIntentClassification (el)":22.5,"MassiveIntentClassification (es)":40.63,"MassiveIntentClassification (fa)":19.65,"MassiveIntentClassification (fi)":36.97,"MassiveIntentClassification (fr)":41.95,"MassiveIntentClassification (he)":20.56,"MassiveIntentClassification (hi)":17.6,"MassiveIntentClassification (hu)":33.79,"MassiveIntentClassification (hy)":7.13,"MassiveIntentClassification (id)":38.6,"MassiveIntentClassification (is)":29.51,"MassiveIntentClassification (it)":40.84,"MassiveIntentClassification (ja)":30.52,"MassiveIntentClassification (jv)":35.6,"MassiveIntentClassification (ka)":8.55,"MassiveIntentClassification (km)":4.51,"MassiveIntentClassification (kn)":3.39,"MassiveIntentClassification (ko)":16.09,"MassiveIntentClassification (lv)":36.23,"MassiveIntentClassification (ml)":2.54,"MassiveIntentClassification (mn)":18.88,"MassiveIntentClassification 
(ms)":34.14,"MassiveIntentClassification (my)":4.49,"MassiveIntentClassification (nl)":38.17,"MassiveIntentClassification (pt)":42.6,"MassiveIntentClassification (ro)":39.3,"MassiveIntentClassification (ru)":27.14,"MassiveIntentClassification (sl)":36.45,"MassiveIntentClassification (sq)":40.24,"MassiveIntentClassification (sw)":33.87,"MassiveIntentClassification (ta)":11.43,"MassiveIntentClassification (te)":2.25,"MassiveIntentClassification (th)":10.3,"MassiveIntentClassification (tl)":38.09,"MassiveIntentClassification (tr)":33.09,"MassiveIntentClassification (ur)":15.12,"MassiveIntentClassification (vi)":37.36,"MassiveIntentClassification (zh-TW)":24.11,"MassiveScenarioClassification (af)":43.56,"MassiveScenarioClassification (am)":7.37,"MassiveScenarioClassification (ar)":24.37,"MassiveScenarioClassification (az)":35.12,"MassiveScenarioClassification (bn)":19.76,"MassiveScenarioClassification (cy)":38.78,"MassiveScenarioClassification (de)":50.05,"MassiveScenarioClassification (el)":29.29,"MassiveScenarioClassification (es)":49.3,"MassiveScenarioClassification (fa)":22.91,"MassiveScenarioClassification (fi)":40.72,"MassiveScenarioClassification (fr)":50.59,"MassiveScenarioClassification (he)":23.92,"MassiveScenarioClassification (hi)":23.15,"MassiveScenarioClassification (hu)":39.28,"MassiveScenarioClassification (hy)":12.36,"MassiveScenarioClassification (id)":43.03,"MassiveScenarioClassification (is)":35.19,"MassiveScenarioClassification (it)":49.37,"MassiveScenarioClassification (ja)":37.64,"MassiveScenarioClassification (jv)":42.82,"MassiveScenarioClassification (ka)":14.13,"MassiveScenarioClassification (km)":8.84,"MassiveScenarioClassification (kn)":8.35,"MassiveScenarioClassification (ko)":20.59,"MassiveScenarioClassification (lv)":40.16,"MassiveScenarioClassification (ml)":7.48,"MassiveScenarioClassification (mn)":24.15,"MassiveScenarioClassification (ms)":42.39,"MassiveScenarioClassification (my)":11.19,"MassiveScenarioClassification 
(nl)":46.52,"MassiveScenarioClassification (pt)":49.51,"MassiveScenarioClassification (ro)":47.62,"MassiveScenarioClassification (ru)":29.95,"MassiveScenarioClassification (sl)":41.42,"MassiveScenarioClassification (sq)":45.49,"MassiveScenarioClassification (sw)":41.83,"MassiveScenarioClassification (ta)":17.58,"MassiveScenarioClassification (te)":7.58,"MassiveScenarioClassification (th)":18.84,"MassiveScenarioClassification (tl)":46.35,"MassiveScenarioClassification (tr)":38.19,"MassiveScenarioClassification (ur)":23.33,"MassiveScenarioClassification (vi)":40.89,"MassiveScenarioClassification (zh-TW)":33.27}
+{"index":118,"Rank":24,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":29.6,"AmazonCounterfactualClassification (de)":67.01,"AmazonCounterfactualClassification (ja)":45.61,"AmazonReviewsClassification (de)":44.05,"AmazonReviewsClassification (es)":45.01,"AmazonReviewsClassification (fr)":43.52,"AmazonReviewsClassification (ja)":22.23,"AmazonReviewsClassification (zh)":21.88,"MTOPDomainClassification (de)":83.28,"MTOPDomainClassification (es)":85.32,"MTOPDomainClassification (fr)":85.14,"MTOPDomainClassification (hi)":20.85,"MTOPDomainClassification (th)":15.62,"MTOPIntentClassification (de)":54.65,"MTOPIntentClassification (es)":57.38,"MTOPIntentClassification (fr)":54.39,"MTOPIntentClassification (hi)":3.28,"MTOPIntentClassification (th)":5.08,"MassiveIntentClassification (af)":40.17,"MassiveIntentClassification (am)":2.18,"MassiveIntentClassification (ar)":4.18,"MassiveIntentClassification (az)":30.02,"MassiveIntentClassification (bn)":2.6,"MassiveIntentClassification (cy)":29.15,"MassiveIntentClassification (de)":57.43,"MassiveIntentClassification (el)":9.96,"MassiveIntentClassification (es)":57.97,"MassiveIntentClassification (fa)":3.6,"MassiveIntentClassification (fi)":34.02,"MassiveIntentClassification (fr)":60.99,"MassiveIntentClassification (he)":2.51,"MassiveIntentClassification (hi)":3.02,"MassiveIntentClassification (hu)":31.66,"MassiveIntentClassification (hy)":3.32,"MassiveIntentClassification (id)":41.53,"MassiveIntentClassification (is)":30.25,"MassiveIntentClassification (it)":56.57,"MassiveIntentClassification (ja)":3.5,"MassiveIntentClassification (jv)":31.67,"MassiveIntentClassification (ka)":2.79,"MassiveIntentClassification (km)":5.43,"MassiveIntentClassification (kn)":2.79,"MassiveIntentClassification (ko)":2.67,"MassiveIntentClassification (lv)":34.25,"MassiveIntentClassification (ml)":2.98,"MassiveIntentClassification (mn)":20.99,"MassiveIntentClassification 
(ms)":37.43,"MassiveIntentClassification (my)":4.02,"MassiveIntentClassification (nl)":50.51,"MassiveIntentClassification (pt)":57.95,"MassiveIntentClassification (ro)":49.37,"MassiveIntentClassification (ru)":33.46,"MassiveIntentClassification (sl)":36.33,"MassiveIntentClassification (sq)":37.65,"MassiveIntentClassification (sw)":30.6,"MassiveIntentClassification (ta)":1.79,"MassiveIntentClassification (te)":2.26,"MassiveIntentClassification (th)":4.02,"MassiveIntentClassification (tl)":38.92,"MassiveIntentClassification (tr)":32.05,"MassiveIntentClassification (ur)":2.7,"MassiveIntentClassification (vi)":21.47,"MassiveIntentClassification (zh-TW)":3.24,"MassiveScenarioClassification (af)":50.81,"MassiveScenarioClassification (am)":6.95,"MassiveScenarioClassification (ar)":12.32,"MassiveScenarioClassification (az)":38.79,"MassiveScenarioClassification (bn)":8.0,"MassiveScenarioClassification (cy)":33.91,"MassiveScenarioClassification (de)":65.33,"MassiveScenarioClassification (el)":16.89,"MassiveScenarioClassification (es)":62.52,"MassiveScenarioClassification (fa)":6.08,"MassiveScenarioClassification (fi)":43.34,"MassiveScenarioClassification (fr)":66.42,"MassiveScenarioClassification (he)":7.55,"MassiveScenarioClassification (hi)":7.44,"MassiveScenarioClassification (hu)":40.85,"MassiveScenarioClassification (hy)":9.25,"MassiveScenarioClassification (id)":51.92,"MassiveScenarioClassification (is)":40.09,"MassiveScenarioClassification (it)":62.94,"MassiveScenarioClassification (ja)":7.9,"MassiveScenarioClassification (jv)":41.33,"MassiveScenarioClassification (ka)":7.76,"MassiveScenarioClassification (km)":9.19,"MassiveScenarioClassification (kn)":8.36,"MassiveScenarioClassification (ko)":6.13,"MassiveScenarioClassification (lv)":40.7,"MassiveScenarioClassification (ml)":6.98,"MassiveScenarioClassification (mn)":27.0,"MassiveScenarioClassification (ms)":46.9,"MassiveScenarioClassification (my)":9.55,"MassiveScenarioClassification 
(nl)":59.65,"MassiveScenarioClassification (pt)":62.18,"MassiveScenarioClassification (ro)":58.22,"MassiveScenarioClassification (ru)":40.73,"MassiveScenarioClassification (sl)":43.66,"MassiveScenarioClassification (sq)":49.25,"MassiveScenarioClassification (sw)":40.55,"MassiveScenarioClassification (ta)":7.46,"MassiveScenarioClassification (te)":7.03,"MassiveScenarioClassification (th)":8.52,"MassiveScenarioClassification (tl)":51.74,"MassiveScenarioClassification (tr)":43.01,"MassiveScenarioClassification (ur)":9.61,"MassiveScenarioClassification (vi)":28.91,"MassiveScenarioClassification (zh-TW)":7.14}
+{"index":111,"Rank":25,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":28.9,"AmazonCounterfactualClassification (de)":59.38,"AmazonCounterfactualClassification (ja)":45.87,"AmazonReviewsClassification (de)":33.06,"AmazonReviewsClassification (es)":34.0,"AmazonReviewsClassification (fr)":33.48,"AmazonReviewsClassification (ja)":21.78,"AmazonReviewsClassification (zh)":21.83,"MTOPDomainClassification (de)":81.91,"MTOPDomainClassification (es)":84.7,"MTOPDomainClassification (fr)":82.48,"MTOPDomainClassification (hi)":22.11,"MTOPDomainClassification (th)":16.36,"MTOPIntentClassification (de)":52.13,"MTOPIntentClassification (es)":52.62,"MTOPIntentClassification (fr)":46.39,"MTOPIntentClassification (hi)":3.9,"MTOPIntentClassification (th)":5.38,"MassiveIntentClassification (af)":41.02,"MassiveIntentClassification (am)":2.34,"MassiveIntentClassification (ar)":4.87,"MassiveIntentClassification (az)":34.92,"MassiveIntentClassification (bn)":2.52,"MassiveIntentClassification (cy)":35.87,"MassiveIntentClassification (de)":51.48,"MassiveIntentClassification (el)":10.0,"MassiveIntentClassification (es)":53.3,"MassiveIntentClassification (fa)":3.59,"MassiveIntentClassification (fi)":37.35,"MassiveIntentClassification (fr)":54.83,"MassiveIntentClassification (he)":2.52,"MassiveIntentClassification (hi)":2.88,"MassiveIntentClassification (hu)":33.52,"MassiveIntentClassification (hy)":3.13,"MassiveIntentClassification (id)":40.11,"MassiveIntentClassification (is)":34.77,"MassiveIntentClassification (it)":51.21,"MassiveIntentClassification (ja)":4.75,"MassiveIntentClassification (jv)":35.6,"MassiveIntentClassification (ka)":2.71,"MassiveIntentClassification (km)":5.48,"MassiveIntentClassification (kn)":2.44,"MassiveIntentClassification (ko)":2.59,"MassiveIntentClassification (lv)":38.15,"MassiveIntentClassification (ml)":2.67,"MassiveIntentClassification (mn)":18.47,"MassiveIntentClassification 
(ms)":35.58,"MassiveIntentClassification (my)":4.35,"MassiveIntentClassification (nl)":45.96,"MassiveIntentClassification (pt)":52.27,"MassiveIntentClassification (ro)":46.39,"MassiveIntentClassification (ru)":16.82,"MassiveIntentClassification (sl)":37.3,"MassiveIntentClassification (sq)":41.73,"MassiveIntentClassification (sw)":35.97,"MassiveIntentClassification (ta)":1.52,"MassiveIntentClassification (te)":2.57,"MassiveIntentClassification (th)":3.94,"MassiveIntentClassification (tl)":41.03,"MassiveIntentClassification (tr)":33.75,"MassiveIntentClassification (ur)":2.57,"MassiveIntentClassification (vi)":25.23,"MassiveIntentClassification (zh-TW)":4.64,"MassiveScenarioClassification (af)":51.48,"MassiveScenarioClassification (am)":7.74,"MassiveScenarioClassification (ar)":12.03,"MassiveScenarioClassification (az)":41.77,"MassiveScenarioClassification (bn)":8.07,"MassiveScenarioClassification (cy)":43.67,"MassiveScenarioClassification (de)":63.63,"MassiveScenarioClassification (el)":16.83,"MassiveScenarioClassification (es)":61.48,"MassiveScenarioClassification (fa)":6.48,"MassiveScenarioClassification (fi)":43.54,"MassiveScenarioClassification (fr)":64.06,"MassiveScenarioClassification (he)":8.03,"MassiveScenarioClassification (hi)":7.5,"MassiveScenarioClassification (hu)":42.59,"MassiveScenarioClassification (hy)":9.22,"MassiveScenarioClassification (id)":48.67,"MassiveScenarioClassification (is)":43.87,"MassiveScenarioClassification (it)":59.83,"MassiveScenarioClassification (ja)":5.62,"MassiveScenarioClassification (jv)":42.18,"MassiveScenarioClassification (ka)":7.52,"MassiveScenarioClassification (km)":9.55,"MassiveScenarioClassification (kn)":8.34,"MassiveScenarioClassification (ko)":6.11,"MassiveScenarioClassification (lv)":43.35,"MassiveScenarioClassification (ml)":7.28,"MassiveScenarioClassification (mn)":23.94,"MassiveScenarioClassification (ms)":45.18,"MassiveScenarioClassification (my)":9.33,"MassiveScenarioClassification 
(nl)":57.02,"MassiveScenarioClassification (pt)":59.45,"MassiveScenarioClassification (ro)":56.8,"MassiveScenarioClassification (ru)":25.85,"MassiveScenarioClassification (sl)":42.51,"MassiveScenarioClassification (sq)":50.41,"MassiveScenarioClassification (sw)":43.02,"MassiveScenarioClassification (ta)":7.21,"MassiveScenarioClassification (te)":6.9,"MassiveScenarioClassification (th)":8.7,"MassiveScenarioClassification (tl)":51.76,"MassiveScenarioClassification (tr)":42.54,"MassiveScenarioClassification (ur)":9.32,"MassiveScenarioClassification (vi)":31.51,"MassiveScenarioClassification (zh-TW)":8.16}
+{"index":117,"Rank":26,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":28.26,"AmazonCounterfactualClassification (de)":67.97,"AmazonCounterfactualClassification (ja)":45.72,"AmazonReviewsClassification (de)":43.16,"AmazonReviewsClassification (es)":42.89,"AmazonReviewsClassification (fr)":41.48,"AmazonReviewsClassification (ja)":22.49,"AmazonReviewsClassification (zh)":22.12,"MTOPDomainClassification (de)":80.56,"MTOPDomainClassification (es)":80.78,"MTOPDomainClassification (fr)":79.6,"MTOPDomainClassification (hi)":21.22,"MTOPDomainClassification (th)":15.82,"MTOPIntentClassification (de)":52.5,"MTOPIntentClassification (es)":52.07,"MTOPIntentClassification (fr)":47.73,"MTOPIntentClassification (hi)":3.74,"MTOPIntentClassification (th)":4.96,"MassiveIntentClassification (af)":38.41,"MassiveIntentClassification (am)":2.49,"MassiveIntentClassification (ar)":4.7,"MassiveIntentClassification (az)":31.77,"MassiveIntentClassification (bn)":2.77,"MassiveIntentClassification (cy)":31.69,"MassiveIntentClassification (de)":52.01,"MassiveIntentClassification (el)":9.74,"MassiveIntentClassification (es)":54.1,"MassiveIntentClassification (fa)":3.86,"MassiveIntentClassification (fi)":34.07,"MassiveIntentClassification (fr)":57.01,"MassiveIntentClassification (he)":2.14,"MassiveIntentClassification (hi)":2.97,"MassiveIntentClassification (hu)":32.01,"MassiveIntentClassification (hy)":3.17,"MassiveIntentClassification (id)":34.55,"MassiveIntentClassification (is)":32.0,"MassiveIntentClassification (it)":52.94,"MassiveIntentClassification (ja)":2.9,"MassiveIntentClassification (jv)":32.42,"MassiveIntentClassification (ka)":2.71,"MassiveIntentClassification (km)":5.5,"MassiveIntentClassification (kn)":2.41,"MassiveIntentClassification (ko)":2.57,"MassiveIntentClassification (lv)":35.09,"MassiveIntentClassification (ml)":2.95,"MassiveIntentClassification (mn)":18.33,"MassiveIntentClassification 
(ms)":29.69,"MassiveIntentClassification (my)":3.99,"MassiveIntentClassification (nl)":44.95,"MassiveIntentClassification (pt)":51.96,"MassiveIntentClassification (ro)":43.83,"MassiveIntentClassification (ru)":17.32,"MassiveIntentClassification (sl)":33.71,"MassiveIntentClassification (sq)":37.62,"MassiveIntentClassification (sw)":31.9,"MassiveIntentClassification (ta)":1.91,"MassiveIntentClassification (te)":2.54,"MassiveIntentClassification (th)":3.85,"MassiveIntentClassification (tl)":36.83,"MassiveIntentClassification (tr)":33.0,"MassiveIntentClassification (ur)":2.62,"MassiveIntentClassification (vi)":22.81,"MassiveIntentClassification (zh-TW)":3.49,"MassiveScenarioClassification (af)":50.28,"MassiveScenarioClassification (am)":7.15,"MassiveScenarioClassification (ar)":12.12,"MassiveScenarioClassification (az)":39.68,"MassiveScenarioClassification (bn)":8.06,"MassiveScenarioClassification (cy)":38.01,"MassiveScenarioClassification (de)":62.71,"MassiveScenarioClassification (el)":17.19,"MassiveScenarioClassification (es)":59.56,"MassiveScenarioClassification (fa)":6.5,"MassiveScenarioClassification (fi)":41.72,"MassiveScenarioClassification (fr)":63.6,"MassiveScenarioClassification (he)":7.93,"MassiveScenarioClassification (hi)":7.85,"MassiveScenarioClassification (hu)":41.37,"MassiveScenarioClassification (hy)":9.42,"MassiveScenarioClassification (id)":44.88,"MassiveScenarioClassification (is)":40.86,"MassiveScenarioClassification (it)":60.09,"MassiveScenarioClassification (ja)":6.56,"MassiveScenarioClassification (jv)":40.18,"MassiveScenarioClassification (ka)":7.37,"MassiveScenarioClassification (km)":9.56,"MassiveScenarioClassification (kn)":8.4,"MassiveScenarioClassification (ko)":5.96,"MassiveScenarioClassification (lv)":41.44,"MassiveScenarioClassification (ml)":7.47,"MassiveScenarioClassification (mn)":25.36,"MassiveScenarioClassification (ms)":39.69,"MassiveScenarioClassification (my)":9.68,"MassiveScenarioClassification 
(nl)":56.09,"MassiveScenarioClassification (pt)":57.99,"MassiveScenarioClassification (ro)":56.0,"MassiveScenarioClassification (ru)":27.47,"MassiveScenarioClassification (sl)":41.04,"MassiveScenarioClassification (sq)":49.38,"MassiveScenarioClassification (sw)":40.62,"MassiveScenarioClassification (ta)":7.59,"MassiveScenarioClassification (te)":7.07,"MassiveScenarioClassification (th)":8.52,"MassiveScenarioClassification (tl)":49.89,"MassiveScenarioClassification (tr)":43.08,"MassiveScenarioClassification (ur)":9.31,"MassiveScenarioClassification (vi)":27.46,"MassiveScenarioClassification (zh-TW)":7.24}
+{"index":116,"Rank":27,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":26.73,"AmazonCounterfactualClassification (de)":69.98,"AmazonCounterfactualClassification (ja)":46.05,"AmazonReviewsClassification (de)":37.9,"AmazonReviewsClassification (es)":37.33,"AmazonReviewsClassification (fr)":37.35,"AmazonReviewsClassification (ja)":22.29,"AmazonReviewsClassification (zh)":21.53,"MTOPDomainClassification (de)":76.98,"MTOPDomainClassification (es)":73.61,"MTOPDomainClassification (fr)":75.03,"MTOPDomainClassification (hi)":21.4,"MTOPDomainClassification (th)":16.21,"MTOPIntentClassification (de)":44.43,"MTOPIntentClassification (es)":42.03,"MTOPIntentClassification (fr)":43.85,"MTOPIntentClassification (hi)":3.8,"MTOPIntentClassification (th)":5.21,"MassiveIntentClassification (af)":34.32,"MassiveIntentClassification (am)":2.38,"MassiveIntentClassification (ar)":4.53,"MassiveIntentClassification (az)":31.76,"MassiveIntentClassification (bn)":2.58,"MassiveIntentClassification (cy)":28.94,"MassiveIntentClassification (de)":45.23,"MassiveIntentClassification (el)":10.05,"MassiveIntentClassification (es)":45.32,"MassiveIntentClassification (fa)":3.58,"MassiveIntentClassification (fi)":33.52,"MassiveIntentClassification (fr)":51.13,"MassiveIntentClassification (he)":2.63,"MassiveIntentClassification (hi)":2.68,"MassiveIntentClassification (hu)":32.31,"MassiveIntentClassification (hy)":3.33,"MassiveIntentClassification (id)":35.5,"MassiveIntentClassification (is)":29.82,"MassiveIntentClassification (it)":45.59,"MassiveIntentClassification (ja)":3.67,"MassiveIntentClassification (jv)":31.15,"MassiveIntentClassification (ka)":2.77,"MassiveIntentClassification (km)":5.66,"MassiveIntentClassification (kn)":2.59,"MassiveIntentClassification (ko)":2.34,"MassiveIntentClassification (lv)":33.97,"MassiveIntentClassification (ml)":2.55,"MassiveIntentClassification (mn)":14.7,"MassiveIntentClassification 
(ms)":33.12,"MassiveIntentClassification (my)":4.42,"MassiveIntentClassification (nl)":37.96,"MassiveIntentClassification (pt)":43.35,"MassiveIntentClassification (ro)":42.69,"MassiveIntentClassification (ru)":14.82,"MassiveIntentClassification (sl)":34.54,"MassiveIntentClassification (sq)":38.54,"MassiveIntentClassification (sw)":32.14,"MassiveIntentClassification (ta)":1.41,"MassiveIntentClassification (te)":2.5,"MassiveIntentClassification (th)":3.71,"MassiveIntentClassification (tl)":36.04,"MassiveIntentClassification (tr)":33.77,"MassiveIntentClassification (ur)":2.99,"MassiveIntentClassification (vi)":22.62,"MassiveIntentClassification (zh-TW)":4.63,"MassiveScenarioClassification (af)":44.45,"MassiveScenarioClassification (am)":7.51,"MassiveScenarioClassification (ar)":12.32,"MassiveScenarioClassification (az)":38.41,"MassiveScenarioClassification (bn)":8.45,"MassiveScenarioClassification (cy)":35.04,"MassiveScenarioClassification (de)":59.12,"MassiveScenarioClassification (el)":17.68,"MassiveScenarioClassification (es)":55.61,"MassiveScenarioClassification (fa)":6.86,"MassiveScenarioClassification (fi)":41.34,"MassiveScenarioClassification (fr)":59.92,"MassiveScenarioClassification (he)":7.86,"MassiveScenarioClassification (hi)":7.63,"MassiveScenarioClassification (hu)":41.31,"MassiveScenarioClassification (hy)":9.23,"MassiveScenarioClassification (id)":44.64,"MassiveScenarioClassification (is)":39.63,"MassiveScenarioClassification (it)":54.58,"MassiveScenarioClassification (ja)":4.96,"MassiveScenarioClassification (jv)":40.73,"MassiveScenarioClassification (ka)":7.51,"MassiveScenarioClassification (km)":8.73,"MassiveScenarioClassification (kn)":7.99,"MassiveScenarioClassification (ko)":6.03,"MassiveScenarioClassification (lv)":36.42,"MassiveScenarioClassification (ml)":6.96,"MassiveScenarioClassification (mn)":19.85,"MassiveScenarioClassification (ms)":43.18,"MassiveScenarioClassification (my)":9.46,"MassiveScenarioClassification 
(nl)":50.0,"MassiveScenarioClassification (pt)":52.24,"MassiveScenarioClassification (ro)":53.7,"MassiveScenarioClassification (ru)":20.69,"MassiveScenarioClassification (sl)":39.79,"MassiveScenarioClassification (sq)":50.16,"MassiveScenarioClassification (sw)":40.48,"MassiveScenarioClassification (ta)":7.47,"MassiveScenarioClassification (te)":6.87,"MassiveScenarioClassification (th)":8.26,"MassiveScenarioClassification (tl)":48.94,"MassiveScenarioClassification (tr)":41.83,"MassiveScenarioClassification (ur)":9.77,"MassiveScenarioClassification (vi)":30.01,"MassiveScenarioClassification (zh-TW)":7.91}
+{"index":109,"Rank":28,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":24.49,"AmazonCounterfactualClassification (de)":54.46,"AmazonCounterfactualClassification (ja)":43.87,"AmazonReviewsClassification (de)":24.08,"AmazonReviewsClassification (es)":23.88,"AmazonReviewsClassification (fr)":23.31,"AmazonReviewsClassification (ja)":20.25,"AmazonReviewsClassification (zh)":20.49,"MTOPDomainClassification (de)":48.55,"MTOPDomainClassification (es)":58.39,"MTOPDomainClassification (fr)":54.61,"MTOPDomainClassification (hi)":21.22,"MTOPDomainClassification (th)":14.98,"MTOPIntentClassification (de)":35.55,"MTOPIntentClassification (es)":36.72,"MTOPIntentClassification (fr)":34.71,"MTOPIntentClassification (hi)":4.44,"MTOPIntentClassification (th)":4.67,"MassiveIntentClassification (af)":33.68,"MassiveIntentClassification (am)":2.94,"MassiveIntentClassification (ar)":10.04,"MassiveIntentClassification (az)":30.74,"MassiveIntentClassification (bn)":3.02,"MassiveIntentClassification (cy)":33.94,"MassiveIntentClassification (de)":36.06,"MassiveIntentClassification (el)":27.7,"MassiveIntentClassification (es)":35.6,"MassiveIntentClassification (fa)":17.97,"MassiveIntentClassification (fi)":35.53,"MassiveIntentClassification (fr)":38.41,"MassiveIntentClassification (he)":2.69,"MassiveIntentClassification (hi)":3.43,"MassiveIntentClassification (hu)":34.05,"MassiveIntentClassification (hy)":3.11,"MassiveIntentClassification (id)":40.02,"MassiveIntentClassification (is)":32.63,"MassiveIntentClassification (it)":39.28,"MassiveIntentClassification (ja)":4.95,"MassiveIntentClassification (jv)":34.95,"MassiveIntentClassification (ka)":2.57,"MassiveIntentClassification (km)":4.73,"MassiveIntentClassification (kn)":3.54,"MassiveIntentClassification (ko)":2.68,"MassiveIntentClassification (lv)":37.91,"MassiveIntentClassification (ml)":2.88,"MassiveIntentClassification (mn)":16.94,"MassiveIntentClassification 
(ms)":36.6,"MassiveIntentClassification (my)":3.96,"MassiveIntentClassification (nl)":33.95,"MassiveIntentClassification (pt)":43.05,"MassiveIntentClassification (ro)":36.2,"MassiveIntentClassification (ru)":25.3,"MassiveIntentClassification (sl)":35.9,"MassiveIntentClassification (sq)":36.6,"MassiveIntentClassification (sw)":34.81,"MassiveIntentClassification (ta)":3.11,"MassiveIntentClassification (te)":2.53,"MassiveIntentClassification (th)":4.38,"MassiveIntentClassification (tl)":35.51,"MassiveIntentClassification (tr)":32.02,"MassiveIntentClassification (ur)":9.61,"MassiveIntentClassification (vi)":37.07,"MassiveIntentClassification (zh-TW)":4.79,"MassiveScenarioClassification (af)":36.17,"MassiveScenarioClassification (am)":7.64,"MassiveScenarioClassification (ar)":15.26,"MassiveScenarioClassification (az)":30.73,"MassiveScenarioClassification (bn)":7.15,"MassiveScenarioClassification (cy)":34.73,"MassiveScenarioClassification (de)":38.62,"MassiveScenarioClassification (el)":27.18,"MassiveScenarioClassification (es)":39.44,"MassiveScenarioClassification (fa)":21.43,"MassiveScenarioClassification (fi)":33.21,"MassiveScenarioClassification (fr)":40.26,"MassiveScenarioClassification (he)":7.42,"MassiveScenarioClassification (hi)":8.06,"MassiveScenarioClassification (hu)":34.54,"MassiveScenarioClassification (hy)":8.61,"MassiveScenarioClassification (id)":40.04,"MassiveScenarioClassification (is)":33.57,"MassiveScenarioClassification (it)":40.1,"MassiveScenarioClassification (ja)":9.96,"MassiveScenarioClassification (jv)":36.11,"MassiveScenarioClassification (ka)":7.13,"MassiveScenarioClassification (km)":9.66,"MassiveScenarioClassification (kn)":7.55,"MassiveScenarioClassification (ko)":7.27,"MassiveScenarioClassification (lv)":37.03,"MassiveScenarioClassification (ml)":7.22,"MassiveScenarioClassification (mn)":21.53,"MassiveScenarioClassification (ms)":37.57,"MassiveScenarioClassification (my)":9.54,"MassiveScenarioClassification 
(nl)":34.62,"MassiveScenarioClassification (pt)":44.68,"MassiveScenarioClassification (ro)":37.29,"MassiveScenarioClassification (ru)":28.16,"MassiveScenarioClassification (sl)":37.95,"MassiveScenarioClassification (sq)":37.82,"MassiveScenarioClassification (sw)":35.37,"MassiveScenarioClassification (ta)":7.19,"MassiveScenarioClassification (te)":7.29,"MassiveScenarioClassification (th)":9.47,"MassiveScenarioClassification (tl)":37.31,"MassiveScenarioClassification (tr)":34.57,"MassiveScenarioClassification (ur)":16.17,"MassiveScenarioClassification (vi)":35.91,"MassiveScenarioClassification (zh-TW)":10.19}
+{"index":0,"Rank":29,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":41.59,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":90.05,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":66.09,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":62.83,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":69.71,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":1,"Rank":30,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":37.26,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":79.79,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":45.62,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":53.7,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":62.46,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":2,"Rank":31,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":42.15,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":87.68,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":59.44,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":63.08,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":70.15,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":3,"Rank":32,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":41.98,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":90.12,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":62.44,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":66.94,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":72.78,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":4,"Rank":33,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":43.36,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":90.33,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":60.52,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":68.06,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":74.29,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":6,"Rank":34,"Model":"OpenSearch-text-hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.18,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":7,"Rank":35,"Model":"llm2vec-occiglot-mntp<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":35.07,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":75.7,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":63.76,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":56.03,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":59.3,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":8,"Rank":36,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":52.95,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":9,"Rank":37,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":1776,"Memory Usage (GB, fp32)":6.62,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":53.47,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":47.21,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":93.48,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":80.23,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":76.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":79.1,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":10,"Rank":38,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":55.53,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":53.98,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":96.69,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":87.47,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":81.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":86.64,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":11,"Rank":39,"Model":"gte-multilingual-base<\/a>","Model Size (Million Parameters)":305,"Memory Usage (GB, fp32)":1.14,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":40.11,"AmazonReviewsClassification (es)":40.17,"AmazonReviewsClassification (fr)":39.57,"AmazonReviewsClassification (ja)":35.75,"AmazonReviewsClassification (zh)":33.34,"MTOPDomainClassification (de)":88.27,"MTOPDomainClassification (es)":90.88,"MTOPDomainClassification (fr)":87.49,"MTOPDomainClassification (hi)":89.45,"MTOPDomainClassification (th)":83.46,"MTOPIntentClassification (de)":63.51,"MTOPIntentClassification (es)":71.66,"MTOPIntentClassification (fr)":64.06,"MTOPIntentClassification (hi)":65.27,"MTOPIntentClassification (th)":63.05,"MassiveIntentClassification (af)":57.95,"MassiveIntentClassification (am)":49.27,"MassiveIntentClassification (ar)":55.49,"MassiveIntentClassification (az)":60.98,"MassiveIntentClassification (bn)":57.57,"MassiveIntentClassification (cy)":48.35,"MassiveIntentClassification (de)":62.55,"MassiveIntentClassification (el)":63.09,"MassiveIntentClassification (es)":67.41,"MassiveIntentClassification (fa)":65.6,"MassiveIntentClassification (fi)":62.91,"MassiveIntentClassification (fr)":67.92,"MassiveIntentClassification (he)":55.2,"MassiveIntentClassification (hi)":65.04,"MassiveIntentClassification (hu)":63.31,"MassiveIntentClassification (hy)":55.52,"MassiveIntentClassification (id)":66.06,"MassiveIntentClassification (is)":56.02,"MassiveIntentClassification (it)":67.28,"MassiveIntentClassification (ja)":66.8,"MassiveIntentClassification (jv)":50.18,"MassiveIntentClassification (ka)":47.89,"MassiveIntentClassification (km)":50.8,"MassiveIntentClassification (kn)":57.23,"MassiveIntentClassification (ko)":64.59,"MassiveIntentClassification (lv)":59.08,"MassiveIntentClassification (ml)":59.55,"MassiveIntentClassification (mn)":53.44,"MassiveIntentClassification 
(ms)":61.65,"MassiveIntentClassification (my)":53.46,"MassiveIntentClassification (nl)":66.36,"MassiveIntentClassification (pt)":68.07,"MassiveIntentClassification (ro)":63.12,"MassiveIntentClassification (ru)":67.46,"MassiveIntentClassification (sl)":60.54,"MassiveIntentClassification (sq)":58.2,"MassiveIntentClassification (sw)":51.68,"MassiveIntentClassification (ta)":58.52,"MassiveIntentClassification (te)":58.53,"MassiveIntentClassification (th)":61.88,"MassiveIntentClassification (tl)":56.65,"MassiveIntentClassification (tr)":64.85,"MassiveIntentClassification (ur)":58.62,"MassiveIntentClassification (vi)":64.16,"MassiveIntentClassification (zh-TW)":64.17,"MassiveScenarioClassification (af)":64.9,"MassiveScenarioClassification (am)":57.28,"MassiveScenarioClassification (ar)":62.27,"MassiveScenarioClassification (az)":65.1,"MassiveScenarioClassification (bn)":62.41,"MassiveScenarioClassification (cy)":55.37,"MassiveScenarioClassification (de)":70.3,"MassiveScenarioClassification (el)":69.53,"MassiveScenarioClassification (es)":72.45,"MassiveScenarioClassification (fa)":70.27,"MassiveScenarioClassification (fi)":67.21,"MassiveScenarioClassification (fr)":72.65,"MassiveScenarioClassification (he)":61.7,"MassiveScenarioClassification (hi)":70.14,"MassiveScenarioClassification (hu)":70.51,"MassiveScenarioClassification (hy)":60.14,"MassiveScenarioClassification (id)":70.62,"MassiveScenarioClassification (is)":61.53,"MassiveScenarioClassification (it)":72.0,"MassiveScenarioClassification (ja)":71.59,"MassiveScenarioClassification (jv)":57.01,"MassiveScenarioClassification (ka)":53.26,"MassiveScenarioClassification (km)":57.8,"MassiveScenarioClassification (kn)":62.39,"MassiveScenarioClassification (ko)":69.54,"MassiveScenarioClassification (lv)":63.37,"MassiveScenarioClassification (ml)":64.82,"MassiveScenarioClassification (mn)":59.35,"MassiveScenarioClassification (ms)":66.68,"MassiveScenarioClassification (my)":59.43,"MassiveScenarioClassification 
(nl)":71.96,"MassiveScenarioClassification (pt)":71.87,"MassiveScenarioClassification (ro)":68.51,"MassiveScenarioClassification (ru)":71.65,"MassiveScenarioClassification (sl)":66.82,"MassiveScenarioClassification (sq)":65.26,"MassiveScenarioClassification (sw)":58.3,"MassiveScenarioClassification (ta)":62.73,"MassiveScenarioClassification (te)":66.58,"MassiveScenarioClassification (th)":68.23,"MassiveScenarioClassification (tl)":61.97,"MassiveScenarioClassification (tr)":70.73,"MassiveScenarioClassification (ur)":63.16,"MassiveScenarioClassification (vi)":67.92,"MassiveScenarioClassification (zh-TW)":71.61}
+{"index":12,"Rank":40,"Model":"tao<\/a>","Model Size (Million Parameters)":163,"Memory Usage (GB, fp32)":0.61,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.81,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":13,"Rank":41,"Model":"tao-8k<\/a>","Model Size (Million Parameters)":167,"Memory Usage (GB, fp32)":0.62,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.8,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":14,"Rank":42,"Model":"bge-base-zh-v1.5<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.15,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":15,"Rank":43,"Model":"bge-large-zh-noinstruct<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":41.94,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":16,"Rank":44,"Model":"bge-large-zh-v1.5<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":41.38,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":17,"Rank":45,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":9242,"Memory Usage (GB, fp32)":34.43,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":55.19,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":54.34,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":97.2,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":93.07,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":79.6,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":82.18,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":18,"Rank":46,"Model":"bge-small-zh-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":35.91,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":19,"Rank":47,"Model":"Yinka<\/a>","Model Size (Million Parameters)":164,"Memory Usage (GB, fp32)":0.61,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":49.5,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":20,"Rank":48,"Model":"winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":54.11,"AmazonCounterfactualClassification (ja)":53.95,"AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":39.68,"MTOPDomainClassification (de)":57.22,"MTOPDomainClassification (es)":58.4,"MTOPDomainClassification (fr)":54.97,"MTOPDomainClassification (hi)":64.17,"MTOPDomainClassification (th)":70.47,"MTOPIntentClassification (de)":32.18,"MTOPIntentClassification (es)":33.9,"MTOPIntentClassification (fr)":26.69,"MTOPIntentClassification (hi)":38.27,"MTOPIntentClassification (th)":42.73,"MassiveIntentClassification (af)":33.5,"MassiveIntentClassification (am)":19.56,"MassiveIntentClassification (ar)":26.46,"MassiveIntentClassification (az)":31.58,"MassiveIntentClassification (bn)":27.99,"MassiveIntentClassification (cy)":28.26,"MassiveIntentClassification (de)":34.2,"MassiveIntentClassification (el)":26.02,"MassiveIntentClassification (es)":36.37,"MassiveIntentClassification (fa)":48.91,"MassiveIntentClassification (fi)":30.11,"MassiveIntentClassification (fr)":37.53,"MassiveIntentClassification (he)":24.86,"MassiveIntentClassification (hi)":39.14,"MassiveIntentClassification (hu)":31.97,"MassiveIntentClassification (hy)":31.36,"MassiveIntentClassification (id)":37.04,"MassiveIntentClassification (is)":28.61,"MassiveIntentClassification (it)":37.86,"MassiveIntentClassification (ja)":47.9,"MassiveIntentClassification (jv)":29.08,"MassiveIntentClassification (ka)":25.77,"MassiveIntentClassification (km)":23.66,"MassiveIntentClassification (kn)":21.27,"MassiveIntentClassification (ko)":40.42,"MassiveIntentClassification (lv)":30.13,"MassiveIntentClassification (ml)":25.89,"MassiveIntentClassification (mn)":27.71,"MassiveIntentClassification 
(ms)":33.04,"MassiveIntentClassification (my)":24.19,"MassiveIntentClassification (nl)":39.31,"MassiveIntentClassification (pt)":40.26,"MassiveIntentClassification (ro)":35.42,"MassiveIntentClassification (ru)":39.69,"MassiveIntentClassification (sl)":31.09,"MassiveIntentClassification (sq)":35.15,"MassiveIntentClassification (sw)":27.91,"MassiveIntentClassification (ta)":28.12,"MassiveIntentClassification (te)":26.34,"MassiveIntentClassification (th)":48.24,"MassiveIntentClassification (tl)":32.73,"MassiveIntentClassification (tr)":30.21,"MassiveIntentClassification (ur)":30.28,"MassiveIntentClassification (vi)":40.45,"MassiveIntentClassification (zh-TW)":64.03,"MassiveScenarioClassification (af)":43.53,"MassiveScenarioClassification (am)":25.3,"MassiveScenarioClassification (ar)":34.91,"MassiveScenarioClassification (az)":36.37,"MassiveScenarioClassification (bn)":39.2,"MassiveScenarioClassification (cy)":32.18,"MassiveScenarioClassification (de)":43.92,"MassiveScenarioClassification (el)":35.03,"MassiveScenarioClassification (es)":41.96,"MassiveScenarioClassification (fa)":58.36,"MassiveScenarioClassification (fi)":33.95,"MassiveScenarioClassification (fr)":45.32,"MassiveScenarioClassification (he)":34.06,"MassiveScenarioClassification (hi)":48.77,"MassiveScenarioClassification (hu)":39.92,"MassiveScenarioClassification (hy)":38.09,"MassiveScenarioClassification (id)":45.08,"MassiveScenarioClassification (is)":36.55,"MassiveScenarioClassification (it)":44.38,"MassiveScenarioClassification (ja)":57.02,"MassiveScenarioClassification (jv)":35.51,"MassiveScenarioClassification (ka)":33.41,"MassiveScenarioClassification (km)":30.9,"MassiveScenarioClassification (kn)":26.83,"MassiveScenarioClassification (ko)":49.52,"MassiveScenarioClassification (lv)":34.02,"MassiveScenarioClassification (ml)":34.55,"MassiveScenarioClassification (mn)":34.14,"MassiveScenarioClassification (ms)":42.71,"MassiveScenarioClassification (my)":31.0,"MassiveScenarioClassification 
(nl)":51.44,"MassiveScenarioClassification (pt)":45.9,"MassiveScenarioClassification (ro)":45.01,"MassiveScenarioClassification (ru)":48.66,"MassiveScenarioClassification (sl)":38.34,"MassiveScenarioClassification (sq)":44.78,"MassiveScenarioClassification (sw)":36.02,"MassiveScenarioClassification (ta)":37.81,"MassiveScenarioClassification (te)":34.6,"MassiveScenarioClassification (th)":57.38,"MassiveScenarioClassification (tl)":39.36,"MassiveScenarioClassification (tr)":36.16,"MassiveScenarioClassification (ur)":36.43,"MassiveScenarioClassification (vi)":47.04,"MassiveScenarioClassification (zh-TW)":71.96}
+{"index":21,"Rank":49,"Model":"winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":42.04,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":25.8,"MassiveIntentClassification (am)":3.34,"MassiveIntentClassification (ar)":6.49,"MassiveIntentClassification (az)":24.77,"MassiveIntentClassification (bn)":4.3,"MassiveIntentClassification (cy)":26.39,"MassiveIntentClassification (de)":28.09,"MassiveIntentClassification (el)":19.24,"MassiveIntentClassification (es)":30.62,"MassiveIntentClassification (fa)":7.21,"MassiveIntentClassification (fi)":27.21,"MassiveIntentClassification (fr)":32.64,"MassiveIntentClassification (he)":2.66,"MassiveIntentClassification (hi)":4.59,"MassiveIntentClassification (hu)":25.65,"MassiveIntentClassification (hy)":4.86,"MassiveIntentClassification (id)":29.81,"MassiveIntentClassification (is)":23.53,"MassiveIntentClassification (it)":34.47,"MassiveIntentClassification (ja)":39.4,"MassiveIntentClassification (jv)":28.75,"MassiveIntentClassification (ka)":4.34,"MassiveIntentClassification (km)":6.1,"MassiveIntentClassification (kn)":4.46,"MassiveIntentClassification (ko)":14.16,"MassiveIntentClassification (lv)":29.86,"MassiveIntentClassification (ml)":3.69,"MassiveIntentClassification (mn)":7.86,"MassiveIntentClassification (ms)":28.05,"MassiveIntentClassification 
(my)":6.98,"MassiveIntentClassification (nl)":32.92,"MassiveIntentClassification (pt)":33.53,"MassiveIntentClassification (ro)":31.32,"MassiveIntentClassification (ru)":11.27,"MassiveIntentClassification (sl)":27.94,"MassiveIntentClassification (sq)":32.9,"MassiveIntentClassification (sw)":29.4,"MassiveIntentClassification (ta)":3.33,"MassiveIntentClassification (te)":3.46,"MassiveIntentClassification (th)":12.98,"MassiveIntentClassification (tl)":30.73,"MassiveIntentClassification (tr)":23.57,"MassiveIntentClassification (ur)":4.98,"MassiveIntentClassification (vi)":21.89,"MassiveIntentClassification (zh-TW)":65.53,"MassiveScenarioClassification (af)":31.55,"MassiveScenarioClassification (am)":7.49,"MassiveScenarioClassification (ar)":15.0,"MassiveScenarioClassification (az)":29.13,"MassiveScenarioClassification (bn)":9.24,"MassiveScenarioClassification (cy)":29.72,"MassiveScenarioClassification (de)":34.68,"MassiveScenarioClassification (el)":28.83,"MassiveScenarioClassification (es)":35.97,"MassiveScenarioClassification (fa)":11.12,"MassiveScenarioClassification (fi)":28.61,"MassiveScenarioClassification (fr)":40.66,"MassiveScenarioClassification (he)":9.01,"MassiveScenarioClassification (hi)":9.92,"MassiveScenarioClassification (hu)":32.07,"MassiveScenarioClassification (hy)":8.44,"MassiveScenarioClassification (id)":34.9,"MassiveScenarioClassification (is)":30.95,"MassiveScenarioClassification (it)":41.06,"MassiveScenarioClassification (ja)":48.73,"MassiveScenarioClassification (jv)":35.09,"MassiveScenarioClassification (ka)":9.29,"MassiveScenarioClassification (km)":11.19,"MassiveScenarioClassification (kn)":10.1,"MassiveScenarioClassification (ko)":19.2,"MassiveScenarioClassification (lv)":32.49,"MassiveScenarioClassification (ml)":6.37,"MassiveScenarioClassification (mn)":13.08,"MassiveScenarioClassification (ms)":39.18,"MassiveScenarioClassification (my)":12.25,"MassiveScenarioClassification (nl)":38.17,"MassiveScenarioClassification 
(pt)":40.01,"MassiveScenarioClassification (ro)":39.25,"MassiveScenarioClassification (ru)":16.71,"MassiveScenarioClassification (sl)":33.94,"MassiveScenarioClassification (sq)":40.4,"MassiveScenarioClassification (sw)":37.14,"MassiveScenarioClassification (ta)":8.21,"MassiveScenarioClassification (te)":7.97,"MassiveScenarioClassification (th)":21.56,"MassiveScenarioClassification (tl)":36.7,"MassiveScenarioClassification (tr)":28.8,"MassiveScenarioClassification (ur)":10.46,"MassiveScenarioClassification (vi)":27.72,"MassiveScenarioClassification (zh-TW)":71.52}
+{"index":22,"Rank":50,"Model":"windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":43.32,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":28.81,"MassiveIntentClassification (am)":3.04,"MassiveIntentClassification (ar)":6.75,"MassiveIntentClassification (az)":27.0,"MassiveIntentClassification (bn)":3.24,"MassiveIntentClassification (cy)":31.18,"MassiveIntentClassification (de)":30.65,"MassiveIntentClassification (el)":18.17,"MassiveIntentClassification (es)":32.53,"MassiveIntentClassification (fa)":8.72,"MassiveIntentClassification (fi)":31.79,"MassiveIntentClassification (fr)":33.16,"MassiveIntentClassification (he)":3.03,"MassiveIntentClassification (hi)":3.61,"MassiveIntentClassification (hu)":30.47,"MassiveIntentClassification (hy)":5.35,"MassiveIntentClassification (id)":32.45,"MassiveIntentClassification (is)":30.12,"MassiveIntentClassification (it)":36.32,"MassiveIntentClassification (ja)":41.09,"MassiveIntentClassification (jv)":30.42,"MassiveIntentClassification (ka)":3.79,"MassiveIntentClassification (km)":6.79,"MassiveIntentClassification (kn)":3.86,"MassiveIntentClassification (ko)":8.82,"MassiveIntentClassification (lv)":30.23,"MassiveIntentClassification (ml)":2.93,"MassiveIntentClassification (mn)":12.61,"MassiveIntentClassification (ms)":30.66,"MassiveIntentClassification 
(my)":5.85,"MassiveIntentClassification (nl)":34.1,"MassiveIntentClassification (pt)":36.92,"MassiveIntentClassification (ro)":33.01,"MassiveIntentClassification (ru)":10.4,"MassiveIntentClassification (sl)":30.73,"MassiveIntentClassification (sq)":36.98,"MassiveIntentClassification (sw)":31.62,"MassiveIntentClassification (ta)":3.19,"MassiveIntentClassification (te)":2.59,"MassiveIntentClassification (th)":4.61,"MassiveIntentClassification (tl)":32.55,"MassiveIntentClassification (tr)":26.87,"MassiveIntentClassification (ur)":4.23,"MassiveIntentClassification (vi)":29.24,"MassiveIntentClassification (zh-TW)":65.49,"MassiveScenarioClassification (af)":35.41,"MassiveScenarioClassification (am)":9.05,"MassiveScenarioClassification (ar)":14.92,"MassiveScenarioClassification (az)":31.97,"MassiveScenarioClassification (bn)":9.15,"MassiveScenarioClassification (cy)":37.45,"MassiveScenarioClassification (de)":38.33,"MassiveScenarioClassification (el)":24.45,"MassiveScenarioClassification (es)":37.73,"MassiveScenarioClassification (fa)":11.84,"MassiveScenarioClassification (fi)":34.49,"MassiveScenarioClassification (fr)":40.92,"MassiveScenarioClassification (he)":7.64,"MassiveScenarioClassification (hi)":8.64,"MassiveScenarioClassification (hu)":37.25,"MassiveScenarioClassification (hy)":10.91,"MassiveScenarioClassification (id)":36.11,"MassiveScenarioClassification (is)":37.8,"MassiveScenarioClassification (it)":41.68,"MassiveScenarioClassification (ja)":48.38,"MassiveScenarioClassification (jv)":35.2,"MassiveScenarioClassification (ka)":9.9,"MassiveScenarioClassification (km)":12.75,"MassiveScenarioClassification (kn)":10.31,"MassiveScenarioClassification (ko)":14.52,"MassiveScenarioClassification (lv)":33.08,"MassiveScenarioClassification (ml)":7.44,"MassiveScenarioClassification (mn)":17.98,"MassiveScenarioClassification (ms)":37.93,"MassiveScenarioClassification (my)":11.73,"MassiveScenarioClassification (nl)":40.37,"MassiveScenarioClassification 
(pt)":41.83,"MassiveScenarioClassification (ro)":40.63,"MassiveScenarioClassification (ru)":18.96,"MassiveScenarioClassification (sl)":35.3,"MassiveScenarioClassification (sq)":41.96,"MassiveScenarioClassification (sw)":38.88,"MassiveScenarioClassification (ta)":8.51,"MassiveScenarioClassification (te)":7.35,"MassiveScenarioClassification (th)":10.1,"MassiveScenarioClassification (tl)":35.91,"MassiveScenarioClassification (tr)":32.08,"MassiveScenarioClassification (ur)":10.37,"MassiveScenarioClassification (vi)":33.91,"MassiveScenarioClassification (zh-TW)":71.0}
+{"index":23,"Rank":51,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":38.6,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":80.79,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":50.01,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":56.31,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification 
(nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":59.5,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification 
(sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":24,"Rank":52,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":41.89,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":86.23,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":61.07,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":62.94,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":67.29,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":25,"Rank":53,"Model":"Dmeta-embedding-zh<\/a>","Model Size (Million Parameters)":103,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":44.93,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":26,"Rank":54,"Model":"Dmeta-embedding-zh-small<\/a>","Model Size (Million Parameters)":74,"Memory Usage (GB, fp32)":0.28,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":44.88,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":27,"Rank":55,"Model":"sbert-chinese-general-v1<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":37.51,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":28,"Rank":56,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":38.25,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":29,"Rank":57,"Model":"IYun-large-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":49.67,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":30,"Rank":58,"Model":"mist-zh<\/a>","Model Size (Million Parameters)":51,"Memory Usage (GB, fp32)":0.19,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":39.32,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":31,"Rank":59,"Model":"text2vec-large-chinese<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":33.77,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":32,"Rank":60,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.38,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":63.65,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.87,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":37.28,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":44.5,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":33,"Rank":61,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.35,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":63.7,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.85,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":37.28,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":44.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":34,"Rank":62,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.39,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":63.63,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.86,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":37.3,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":44.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":35,"Rank":63,"Model":"gte-Qwen2-7B-instruct-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":55.53,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":53.98,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":96.69,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":87.47,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":81.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":86.64,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":36,"Rank":64,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":55.53,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":53.98,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":96.69,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":87.47,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":81.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification 
(nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":86.64,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification 
(sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":40,"Rank":65,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":40.35,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":86.83,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":59.13,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":64.99,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.72,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":41,"Rank":66,"Model":"bilingual-embedding-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":43.42,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":88.33,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":62.85,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":66.81,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":72.99,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":42,"Rank":67,"Model":"bilingual-embedding-large-8k<\/a>","Model Size (Million Parameters)":568,"Memory Usage (GB, fp32)":2.12,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":44.11,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":87.82,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":58.63,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":66.14,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":72.74,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":43,"Rank":68,"Model":"bilingual-embedding-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":36.48,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":84.19,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":58.35,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":64.57,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":69.04,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":44,"Rank":69,"Model":"jina-embeddings-v2-base-de-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":68.92,"AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":37.72,"AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":88.37,"MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":63.83,"MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":63.89,"MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification 
(nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":71.25,"MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification 
(sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":47,"Rank":70,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":5874,"Memory Usage (GB, fp32)":21.88,"Average":"","AmazonCounterfactualClassification (de)":66.64,"AmazonCounterfactualClassification (ja)":58.06,"AmazonReviewsClassification (de)":35.29,"AmazonReviewsClassification (es)":38.34,"AmazonReviewsClassification (fr)":37.84,"AmazonReviewsClassification (ja)":30.94,"AmazonReviewsClassification (zh)":33.75,"MTOPDomainClassification (de)":84.54,"MTOPDomainClassification (es)":86.46,"MTOPDomainClassification (fr)":81.32,"MTOPDomainClassification (hi)":58.23,"MTOPDomainClassification (th)":72.29,"MTOPIntentClassification (de)":60.52,"MTOPIntentClassification (es)":64.32,"MTOPIntentClassification (fr)":58.67,"MTOPIntentClassification (hi)":41.96,"MTOPIntentClassification (th)":55.28,"MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification 
(my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification 
(sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":48,"Rank":71,"Model":"Solon-embeddings-large-0.1<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":42.08,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":89.26,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":68.55,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":67.4,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.29,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":49,"Rank":72,"Model":"alime-embedding-large-zh<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.95,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":50,"Rank":73,"Model":"Conan-embedding-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":50.31,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":51,"Rank":74,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":34.79,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":85.52,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":63.12,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":59.41,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":65.29,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":52,"Rank":75,"Model":"EminEmbed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.54,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":53.18,"MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":62.93}
+{"index":53,"Rank":76,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":53.47,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":47.21,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":93.48,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":80.23,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":76.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification 
(nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":79.1,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification 
(sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":54,"Rank":77,"Model":"ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":67.16,"MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":71.93,"MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":56,"Rank":78,"Model":"acge_text_embedding<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":48.54,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":57,"Rank":79,"Model":"sgpt-bloom-1b7-nli<\/a>","Model Size (Million Parameters)":1722,"Memory Usage (GB, fp32)":6.42,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":39.29,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":37.63,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":83.8,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":63.36,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":64.58,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":69.6,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":60,"Rank":80,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":55.53,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":53.98,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":96.69,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":87.47,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":81.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification 
(nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":86.64,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification 
(sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":61,"Rank":81,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":36.03,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":77.1,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":43.44,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":51.59,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":61.28,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":62,"Rank":82,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":37.97,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":85.74,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":58.62,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":62.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":69.29,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":63,"Rank":83,"Model":"gte-Qwen2-1.5B-instruct-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":53.47,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":47.21,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":93.48,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":80.23,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":76.65,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification 
(nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":79.1,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification 
(sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":64,"Rank":84,"Model":"sft-bge-small<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":34.55,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":65,"Rank":85,"Model":"stella-large-zh-v3-1792d<\/a>","Model Size (Million Parameters)":325,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.32,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":66,"Rank":86,"Model":"stella-mrl-large-zh-v3.5-1792d<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.59,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":68,"Rank":87,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":24.9,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":25.55,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":9.49,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":6.98,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":11.41,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":69,"Rank":88,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":23.52,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":27.74,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":8.61,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":6.24,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":10.98,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":70,"Rank":89,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":22.45,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":24.27,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":9.79,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":16.41,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":22.72,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":71,"Rank":90,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.39,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":63.61,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.84,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":37.3,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":44.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":72,"Rank":91,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":29.02,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":64.49,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":39.4,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":38.01,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":43.63,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":73,"Rank":92,"Model":"zpoint_large_embedding_zh<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":49.72,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":74,"Rank":93,"Model":"stella-base-zh<\/a>","Model Size (Million Parameters)":51,"Memory Usage (GB, fp32)":0.19,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.25,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":75,"Rank":94,"Model":"stella-base-zh-v2<\/a>","Model Size (Million Parameters)":51,"Memory Usage (GB, fp32)":0.19,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":39.64,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":76,"Rank":95,"Model":"stella-base-zh-v3-1792d<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.34,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":77,"Rank":96,"Model":"stella-large-zh<\/a>","Model Size (Million Parameters)":163,"Memory Usage (GB, fp32)":0.61,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.57,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":78,"Rank":97,"Model":"stella-large-zh-v2<\/a>","Model Size (Million Parameters)":163,"Memory Usage (GB, fp32)":0.61,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.82,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":86,"Rank":98,"Model":"jina-embeddings-v2-base-de<\/a>","Model Size (Million Parameters)":161,"Memory Usage (GB, fp32)":0.6,"Average":"","AmazonCounterfactualClassification (de)":68.92,"AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":37.72,"AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":88.37,"MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":63.83,"MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":63.89,"MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":71.25,"MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":87,"Rank":99,"Model":"jina-embeddings-v2-base-es<\/a>","Model Size (Million Parameters)":161,"Memory Usage (GB, fp32)":0.6,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":38.68,"AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":89.89,"MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":68.76,"MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":66.93,"MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":71.23,"MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":88,"Rank":100,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":161,"Memory Usage (GB, fp32)":0.6,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":34.94,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":89,"Rank":101,"Model":"xiaobu-embedding<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.72,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":90,"Rank":102,"Model":"xiaobu-embedding-v2<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":50.07,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":91,"Rank":103,"Model":"lim_base_zh<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":46.67,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":92,"Rank":104,"Model":"bge-m3-custom-fr<\/a>","Model Size (Million Parameters)":568,"Memory Usage (GB, fp32)":2.12,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":42.33,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":90.39,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":60.88,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":66.7,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":74.58,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":93,"Rank":105,"Model":"sentence_croissant_alpha_v0.1<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":36.62,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":88.7,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":67.69,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.86,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.6,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":94,"Rank":106,"Model":"sentence_croissant_alpha_v0.2<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":36.48,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":88.96,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":67.76,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":66.3,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.7,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":95,"Rank":107,"Model":"sentence_croissant_alpha_v0.3<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":35.11,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":88.52,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":65.93,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":66.48,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification 
(nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification 
(sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":96,"Rank":108,"Model":"sentence_croissant_alpha_v0.4<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":34.62,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":86.97,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":62.59,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.67,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification 
(nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.61,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification 
(sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":97,"Rank":109,"Model":"m3e-base<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":43.02,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":98,"Rank":110,"Model":"m3e-large<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":44.44,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":99,"Rank":111,"Model":"m3e-ernie-xbase-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":47.79,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":100,"Rank":112,"Model":"mmarco-bert-base-italian-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":55.06,"MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":63.04,"MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":101,"Rank":113,"Model":"mmarco-sentence-flare-it<\/a>","Model Size (Million Parameters)":17,"Memory Usage (GB, fp32)":0.06,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":22.3,"MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":27.41,"MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":102,"Rank":114,"Model":"stsbm-sentence-flare-it<\/a>","Model Size (Million Parameters)":17,"Memory Usage (GB, fp32)":0.06,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":38.88,"MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":43.3,"MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":103,"Rank":115,"Model":"jina-embeddings-v2-base-es-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":38.68,"AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":89.89,"MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":68.76,"MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":66.93,"MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification 
(nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":71.23,"MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification 
(sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":104,"Rank":116,"Model":"piccolo-base-zh<\/a>","Model Size (Million Parameters)":51,"Memory Usage (GB, fp32)":0.19,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.24,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":105,"Rank":117,"Model":"piccolo-large-zh<\/a>","Model Size (Million Parameters)":163,"Memory Usage (GB, fp32)":0.61,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":40.33,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":106,"Rank":118,"Model":"piccolo-large-zh-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":49.44,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":113,"Rank":119,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":27.05,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":72.97,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":37.18,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":42.64,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":49.92,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":114,"Rank":120,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","AmazonCounterfactualClassification (de)":68.35,"AmazonCounterfactualClassification (ja)":63.45,"AmazonReviewsClassification (de)":35.91,"AmazonReviewsClassification (es)":37.49,"AmazonReviewsClassification (fr)":35.3,"AmazonReviewsClassification (ja)":33.24,"AmazonReviewsClassification (zh)":35.26,"MTOPDomainClassification (de)":79.2,"MTOPDomainClassification (es)":83.04,"MTOPDomainClassification (fr)":78.63,"MTOPDomainClassification (hi)":81.36,"MTOPDomainClassification (th)":79.99,"MTOPIntentClassification (de)":54.23,"MTOPIntentClassification (es)":60.28,"MTOPIntentClassification (fr)":54.05,"MTOPIntentClassification (hi)":59.9,"MTOPIntentClassification (th)":61.96,"MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":57.52,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification 
(my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":64.52,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification 
(sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":115,"Rank":121,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","AmazonCounterfactualClassification (de)":69.95,"AmazonCounterfactualClassification (ja)":69.79,"AmazonReviewsClassification (de)":39.52,"AmazonReviewsClassification (es)":39.99,"AmazonReviewsClassification (fr)":39.0,"AmazonReviewsClassification (ja)":36.64,"AmazonReviewsClassification (zh)":37.74,"MTOPDomainClassification (de)":85.73,"MTOPDomainClassification (es)":86.96,"MTOPDomainClassification (fr)":81.21,"MTOPDomainClassification (hi)":84.76,"MTOPDomainClassification (th)":82.51,"MTOPIntentClassification (de)":61.27,"MTOPIntentClassification (es)":66.59,"MTOPIntentClassification (fr)":59.76,"MTOPIntentClassification (hi)":62.37,"MTOPIntentClassification (th)":64.8,"MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":61.88,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification 
(my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":67.9,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification 
(sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":119,"Rank":122,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":46.09,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":86.2,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":58.33,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.91,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":68.53,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":120,"Rank":123,"Model":"ZNV-Embedding<\/a>","Model Size (Million Parameters)":6061,"Memory Usage (GB, fp32)":22.58,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":29.75,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":121,"Rank":124,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":29.85,"MassiveIntentClassification (am)":2.13,"MassiveIntentClassification (ar)":3.41,"MassiveIntentClassification (az)":22.73,"MassiveIntentClassification (bn)":2.87,"MassiveIntentClassification (cy)":29.43,"MassiveIntentClassification (de)":30.85,"MassiveIntentClassification (el)":8.74,"MassiveIntentClassification (es)":30.63,"MassiveIntentClassification (fa)":3.16,"MassiveIntentClassification (fi)":30.4,"MassiveIntentClassification (fr)":30.84,"MassiveIntentClassification (he)":2.06,"MassiveIntentClassification (hi)":2.43,"MassiveIntentClassification (hu)":24.87,"MassiveIntentClassification (hy)":2.67,"MassiveIntentClassification (id)":32.7,"MassiveIntentClassification (is)":24.13,"MassiveIntentClassification (it)":34.58,"MassiveIntentClassification (ja)":5.99,"MassiveIntentClassification (jv)":27.6,"MassiveIntentClassification (ka)":2.14,"MassiveIntentClassification (km)":4.38,"MassiveIntentClassification (kn)":2.1,"MassiveIntentClassification (ko)":2.36,"MassiveIntentClassification (lv)":22.06,"MassiveIntentClassification (ml)":2.29,"MassiveIntentClassification (mn)":28.51,"MassiveIntentClassification (ms)":28.16,"MassiveIntentClassification 
(my)":3.97,"MassiveIntentClassification (nl)":30.51,"MassiveIntentClassification (pt)":33.85,"MassiveIntentClassification (ro)":30.47,"MassiveIntentClassification (ru)":58.06,"MassiveIntentClassification (sl)":29.64,"MassiveIntentClassification (sq)":31.7,"MassiveIntentClassification (sw)":27.52,"MassiveIntentClassification (ta)":1.38,"MassiveIntentClassification (te)":2.04,"MassiveIntentClassification (th)":3.79,"MassiveIntentClassification (tl)":31.44,"MassiveIntentClassification (tr)":26.22,"MassiveIntentClassification (ur)":2.55,"MassiveIntentClassification (vi)":23.1,"MassiveIntentClassification (zh-TW)":6.3,"MassiveScenarioClassification (af)":39.37,"MassiveScenarioClassification (am)":7.51,"MassiveScenarioClassification (ar)":11.36,"MassiveScenarioClassification (az)":29.62,"MassiveScenarioClassification (bn)":8.79,"MassiveScenarioClassification (cy)":38.93,"MassiveScenarioClassification (de)":40.66,"MassiveScenarioClassification (el)":16.44,"MassiveScenarioClassification (es)":36.28,"MassiveScenarioClassification (fa)":6.8,"MassiveScenarioClassification (fi)":34.5,"MassiveScenarioClassification (fr)":42.42,"MassiveScenarioClassification (he)":7.95,"MassiveScenarioClassification (hi)":7.51,"MassiveScenarioClassification (hu)":35.04,"MassiveScenarioClassification (hy)":8.53,"MassiveScenarioClassification (id)":39.6,"MassiveScenarioClassification (is)":32.61,"MassiveScenarioClassification (it)":41.2,"MassiveScenarioClassification (ja)":11.21,"MassiveScenarioClassification (jv)":36.25,"MassiveScenarioClassification (ka)":6.59,"MassiveScenarioClassification (km)":8.15,"MassiveScenarioClassification (kn)":8.05,"MassiveScenarioClassification (ko)":5.62,"MassiveScenarioClassification (lv)":28.47,"MassiveScenarioClassification (ml)":7.35,"MassiveScenarioClassification (mn)":33.48,"MassiveScenarioClassification (ms)":38.85,"MassiveScenarioClassification (my)":11.23,"MassiveScenarioClassification (nl)":38.92,"MassiveScenarioClassification 
(pt)":40.23,"MassiveScenarioClassification (ro)":39.78,"MassiveScenarioClassification (ru)":64.15,"MassiveScenarioClassification (sl)":35.34,"MassiveScenarioClassification (sq)":42.07,"MassiveScenarioClassification (sw)":35.33,"MassiveScenarioClassification (ta)":7.21,"MassiveScenarioClassification (te)":6.86,"MassiveScenarioClassification (th)":8.25,"MassiveScenarioClassification (tl)":38.17,"MassiveScenarioClassification (tr)":33.85,"MassiveScenarioClassification (ur)":8.74,"MassiveScenarioClassification (vi)":31.94,"MassiveScenarioClassification (zh-TW)":11.68}
+{"index":122,"Rank":125,"Model":"AGE_Hybrid<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":49.68,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":123,"Rank":126,"Model":"text2vec-base-chinese<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":34.12,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":125,"Rank":127,"Model":"luotuo-bert-medium<\/a>","Model Size (Million Parameters)":328,"Memory Usage (GB, fp32)":1.22,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":34.46,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":126,"Rank":128,"Model":"jina-embeddings-v2-base-zh<\/a>","Model Size (Million Parameters)":161,"Memory Usage (GB, fp32)":0.6,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":34.94,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":128,"Rank":129,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":34.25,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":71.83,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":44.53,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":51.93,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":58.31,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":129,"Rank":130,"Model":"gte-base-zh<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":45.82,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":130,"Rank":131,"Model":"gte-large-zh<\/a>","Model Size (Million Parameters)":326,"Memory Usage (GB, fp32)":1.21,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":47.23,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":131,"Rank":132,"Model":"gte-small-zh<\/a>","Model Size (Million Parameters)":30,"Memory Usage (GB, fp32)":0.11,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":38.69,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":132,"Rank":133,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":21.96,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":133,"Rank":134,"Model":"gte-small-zh<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":38.69,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":134,"Rank":135,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":33.51,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":85.5,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":53.98,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":61.19,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification 
(nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":70.22,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification 
(sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":135,"Rank":136,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":35.09,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":88.19,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":63.64,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.8,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification 
(nl)":"","MassiveIntentClassification (pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":73.47,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification 
(sq)":"","MassiveScenarioClassification (sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":136,"Rank":137,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":26.75,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":43.83,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":19.38,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":13.58,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":23.21,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":137,"Rank":138,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":26.62,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":36.77,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":15.37,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":15.82,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":23.92,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":138,"Rank":139,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":43.51,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":"","MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":85.33,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":59.01,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.15,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":69.94,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":139,"Rank":140,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":43.76,"AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":38.3,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":89.38,"MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":64.45,"MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":65.42,"MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":71.11,"MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
+{"index":140,"Rank":141,"Model":"Baichuan-text-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (de)":"","AmazonCounterfactualClassification (ja)":"","AmazonReviewsClassification (de)":"","AmazonReviewsClassification (es)":"","AmazonReviewsClassification (fr)":"","AmazonReviewsClassification (ja)":"","AmazonReviewsClassification (zh)":48.3,"MTOPDomainClassification (de)":"","MTOPDomainClassification (es)":"","MTOPDomainClassification (fr)":"","MTOPDomainClassification (hi)":"","MTOPDomainClassification (th)":"","MTOPIntentClassification (de)":"","MTOPIntentClassification (es)":"","MTOPIntentClassification (fr)":"","MTOPIntentClassification (hi)":"","MTOPIntentClassification (th)":"","MassiveIntentClassification (af)":"","MassiveIntentClassification (am)":"","MassiveIntentClassification (ar)":"","MassiveIntentClassification (az)":"","MassiveIntentClassification (bn)":"","MassiveIntentClassification (cy)":"","MassiveIntentClassification (de)":"","MassiveIntentClassification (el)":"","MassiveIntentClassification (es)":"","MassiveIntentClassification (fa)":"","MassiveIntentClassification (fi)":"","MassiveIntentClassification (fr)":"","MassiveIntentClassification (he)":"","MassiveIntentClassification (hi)":"","MassiveIntentClassification (hu)":"","MassiveIntentClassification (hy)":"","MassiveIntentClassification (id)":"","MassiveIntentClassification (is)":"","MassiveIntentClassification (it)":"","MassiveIntentClassification (ja)":"","MassiveIntentClassification (jv)":"","MassiveIntentClassification (ka)":"","MassiveIntentClassification (km)":"","MassiveIntentClassification (kn)":"","MassiveIntentClassification (ko)":"","MassiveIntentClassification (lv)":"","MassiveIntentClassification (ml)":"","MassiveIntentClassification (mn)":"","MassiveIntentClassification (ms)":"","MassiveIntentClassification (my)":"","MassiveIntentClassification (nl)":"","MassiveIntentClassification 
(pt)":"","MassiveIntentClassification (ro)":"","MassiveIntentClassification (ru)":"","MassiveIntentClassification (sl)":"","MassiveIntentClassification (sq)":"","MassiveIntentClassification (sw)":"","MassiveIntentClassification (ta)":"","MassiveIntentClassification (te)":"","MassiveIntentClassification (th)":"","MassiveIntentClassification (tl)":"","MassiveIntentClassification (tr)":"","MassiveIntentClassification (ur)":"","MassiveIntentClassification (vi)":"","MassiveIntentClassification (zh-TW)":"","MassiveScenarioClassification (af)":"","MassiveScenarioClassification (am)":"","MassiveScenarioClassification (ar)":"","MassiveScenarioClassification (az)":"","MassiveScenarioClassification (bn)":"","MassiveScenarioClassification (cy)":"","MassiveScenarioClassification (de)":"","MassiveScenarioClassification (el)":"","MassiveScenarioClassification (es)":"","MassiveScenarioClassification (fa)":"","MassiveScenarioClassification (fi)":"","MassiveScenarioClassification (fr)":"","MassiveScenarioClassification (he)":"","MassiveScenarioClassification (hi)":"","MassiveScenarioClassification (hu)":"","MassiveScenarioClassification (hy)":"","MassiveScenarioClassification (id)":"","MassiveScenarioClassification (is)":"","MassiveScenarioClassification (it)":"","MassiveScenarioClassification (ja)":"","MassiveScenarioClassification (jv)":"","MassiveScenarioClassification (ka)":"","MassiveScenarioClassification (km)":"","MassiveScenarioClassification (kn)":"","MassiveScenarioClassification (ko)":"","MassiveScenarioClassification (lv)":"","MassiveScenarioClassification (ml)":"","MassiveScenarioClassification (mn)":"","MassiveScenarioClassification (ms)":"","MassiveScenarioClassification (my)":"","MassiveScenarioClassification (nl)":"","MassiveScenarioClassification (pt)":"","MassiveScenarioClassification (ro)":"","MassiveScenarioClassification (ru)":"","MassiveScenarioClassification (sl)":"","MassiveScenarioClassification (sq)":"","MassiveScenarioClassification 
(sw)":"","MassiveScenarioClassification (ta)":"","MassiveScenarioClassification (te)":"","MassiveScenarioClassification (th)":"","MassiveScenarioClassification (tl)":"","MassiveScenarioClassification (tr)":"","MassiveScenarioClassification (ur)":"","MassiveScenarioClassification (vi)":"","MassiveScenarioClassification (zh-TW)":""}
diff --git a/all_data_tasks/42/default.jsonl b/all_data_tasks/42/default.jsonl
index a0ab7cfc551a9aa72877ad841f5d74a5de03d7e3..94e411b825afb3049383ce287ba8614b1c7c4604 100644
--- a/all_data_tasks/42/default.jsonl
+++ b/all_data_tasks/42/default.jsonl
@@ -1,36 +1,288 @@
-{"index":13,"Rank":1,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":35.2,"ARCChallenge":26.68,"AlphaNLI":34.0,"HellaSwag":39.45,"PIQA":44.35,"Quail":11.69,"RARbCode":84.0,"RARbMath":82.35,"SIQA":7.23,"SpartQA":9.29,"TempReasonL1":7.15,"TempReasonL2Fact":58.38,"TempReasonL2Pure":11.22,"TempReasonL3Fact":44.29,"TempReasonL3Pure":14.15,"WinoGrande":53.74}
-{"index":32,"Rank":2,"Model":"text-embedding-3-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":31.13,"ARCChallenge":21.22,"AlphaNLI":34.23,"HellaSwag":31.4,"PIQA":37.52,"Quail":13.6,"RARbCode":89.41,"RARbMath":87.73,"SIQA":4.99,"SpartQA":7.45,"TempReasonL1":2.07,"TempReasonL2Fact":39.77,"TempReasonL2Pure":11.04,"TempReasonL3Fact":37.04,"TempReasonL3Pure":15.51,"WinoGrande":33.92}
-{"index":12,"Rank":3,"Model":"GritLM-7B-noinstruct<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":30.57,"ARCChallenge":16.57,"AlphaNLI":29.56,"HellaSwag":36.03,"PIQA":35.8,"Quail":8.68,"RARbCode":83.14,"RARbMath":83.01,"SIQA":5.73,"SpartQA":1.56,"TempReasonL1":2.57,"TempReasonL2Fact":48.25,"TempReasonL2Pure":8.98,"TempReasonL3Fact":34.11,"TempReasonL3Pure":12.44,"WinoGrande":52.12}
-{"index":33,"Rank":4,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":29.95,"ARCChallenge":23.98,"AlphaNLI":37.27,"HellaSwag":34.12,"PIQA":41.96,"Quail":10.15,"RARbCode":89.64,"RARbMath":90.08,"SIQA":3.44,"SpartQA":7.51,"TempReasonL1":2.13,"TempReasonL2Fact":28.65,"TempReasonL2Pure":10.34,"TempReasonL3Fact":25.52,"TempReasonL3Pure":15.28,"WinoGrande":29.11}
-{"index":16,"Rank":5,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":28.41,"ARCChallenge":17.81,"AlphaNLI":26.12,"HellaSwag":34.85,"PIQA":39.37,"Quail":7.01,"RARbCode":78.46,"RARbMath":72.16,"SIQA":5.42,"SpartQA":9.92,"TempReasonL1":3.31,"TempReasonL2Fact":36.9,"TempReasonL2Pure":9.18,"TempReasonL3Fact":30.18,"TempReasonL3Pure":14.31,"WinoGrande":41.21}
-{"index":17,"Rank":6,"Model":"e5-mistral-7b-instruct-noinstruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":28.04,"ARCChallenge":20.48,"AlphaNLI":18.88,"HellaSwag":32.25,"PIQA":32.8,"Quail":6.25,"RARbCode":79.84,"RARbMath":76.19,"SIQA":5.08,"SpartQA":10.87,"TempReasonL1":3.04,"TempReasonL2Fact":35.63,"TempReasonL2Pure":9.32,"TempReasonL3Fact":30.41,"TempReasonL3Pure":14.39,"WinoGrande":45.18}
-{"index":10,"Rank":7,"Model":"Cohere-embed-english-v3.0-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":25.41,"ARCChallenge":10.1,"AlphaNLI":18.75,"HellaSwag":29.02,"PIQA":27.89,"Quail":7.77,"RARbCode":56.56,"RARbMath":72.05,"SIQA":5.03,"SpartQA":3.33,"TempReasonL1":1.43,"TempReasonL2Fact":40.46,"TempReasonL2Pure":2.39,"TempReasonL3Fact":33.87,"TempReasonL3Pure":7.52,"WinoGrande":65.02}
-{"index":19,"Rank":8,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":24.69,"ARCChallenge":10.83,"AlphaNLI":13.59,"HellaSwag":27.35,"PIQA":28.82,"Quail":4.85,"RARbCode":58.92,"RARbMath":67.32,"SIQA":5.36,"SpartQA":5.64,"TempReasonL1":1.14,"TempReasonL2Fact":42.97,"TempReasonL2Pure":2.05,"TempReasonL3Fact":38.22,"TempReasonL3Pure":8.31,"WinoGrande":54.99}
-{"index":35,"Rank":9,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":24.2,"ARCChallenge":14.63,"AlphaNLI":30.61,"HellaSwag":30.94,"PIQA":33.69,"Quail":6.11,"RARbCode":72.03,"RARbMath":71.07,"SIQA":3.03,"SpartQA":6.63,"TempReasonL1":2.35,"TempReasonL2Fact":25.68,"TempReasonL2Pure":2.76,"TempReasonL3Fact":22.09,"TempReasonL3Pure":9.79,"WinoGrande":31.53}
-{"index":11,"Rank":10,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":23.65,"ARCChallenge":9.89,"AlphaNLI":15.1,"HellaSwag":26.35,"PIQA":28.49,"Quail":4.1,"RARbCode":57.19,"RARbMath":72.26,"SIQA":4.26,"SpartQA":3.75,"TempReasonL1":1.5,"TempReasonL2Fact":35.91,"TempReasonL2Pure":1.89,"TempReasonL3Fact":27.51,"TempReasonL3Pure":8.53,"WinoGrande":58.01}
-{"index":18,"Rank":11,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":23.26,"ARCChallenge":9.61,"AlphaNLI":16.44,"HellaSwag":24.79,"PIQA":25.09,"Quail":3.52,"RARbCode":52.16,"RARbMath":65.35,"SIQA":3.72,"SpartQA":7.91,"TempReasonL1":0.72,"TempReasonL2Fact":38.76,"TempReasonL2Pure":1.63,"TempReasonL3Fact":35.85,"TempReasonL3Pure":7.11,"WinoGrande":56.18}
-{"index":31,"Rank":12,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":22.57,"ARCChallenge":13.3,"AlphaNLI":25.65,"HellaSwag":29.29,"PIQA":31.02,"Quail":5.83,"RARbCode":83.39,"RARbMath":73.21,"SIQA":3.14,"SpartQA":4.23,"TempReasonL1":1.68,"TempReasonL2Fact":19.93,"TempReasonL2Pure":2.6,"TempReasonL3Fact":18.02,"TempReasonL3Pure":7.58,"WinoGrande":19.65}
-{"index":34,"Rank":13,"Model":"text-embedding-3-small-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":22.09,"ARCChallenge":13.76,"AlphaNLI":21.14,"HellaSwag":27.2,"PIQA":29.59,"Quail":6.64,"RARbCode":72.14,"RARbMath":64.31,"SIQA":2.98,"SpartQA":3.58,"TempReasonL1":2.29,"TempReasonL2Fact":26.34,"TempReasonL2Pure":3.17,"TempReasonL3Fact":22.72,"TempReasonL3Pure":9.98,"WinoGrande":25.49}
-{"index":7,"Rank":14,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":21.48,"ARCChallenge":9.02,"AlphaNLI":24.73,"HellaSwag":25.67,"PIQA":22.93,"Quail":7.51,"RARbCode":38.8,"RARbMath":69.19,"SIQA":4.89,"SpartQA":7.49,"TempReasonL1":0.99,"TempReasonL2Fact":33.23,"TempReasonL2Pure":0.68,"TempReasonL3Fact":30.05,"TempReasonL3Pure":5.28,"WinoGrande":41.72}
-{"index":6,"Rank":15,"Model":"bge-m3-instruct<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":20.83,"ARCChallenge":9.03,"AlphaNLI":24.69,"HellaSwag":25.55,"PIQA":19.03,"Quail":7.08,"RARbCode":39.58,"RARbMath":64.51,"SIQA":4.77,"SpartQA":7.0,"TempReasonL1":0.8,"TempReasonL2Fact":34.99,"TempReasonL2Pure":0.62,"TempReasonL3Fact":32.47,"TempReasonL3Pure":7.01,"WinoGrande":35.33}
-{"index":20,"Rank":16,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":20.04,"ARCChallenge":7.14,"AlphaNLI":13.0,"HellaSwag":23.73,"PIQA":21.08,"Quail":2.38,"RARbCode":46.96,"RARbMath":63.91,"SIQA":2.57,"SpartQA":5.43,"TempReasonL1":0.8,"TempReasonL2Fact":36.76,"TempReasonL2Pure":0.62,"TempReasonL3Fact":32.42,"TempReasonL3Pure":6.36,"WinoGrande":37.46}
-{"index":24,"Rank":17,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":19.61,"ARCChallenge":9.48,"AlphaNLI":28.19,"HellaSwag":24.21,"PIQA":25.28,"Quail":3.92,"RARbCode":44.27,"RARbMath":68.19,"SIQA":1.56,"SpartQA":1.65,"TempReasonL1":1.53,"TempReasonL2Fact":17.65,"TempReasonL2Pure":0.46,"TempReasonL3Fact":14.16,"TempReasonL3Pure":6.33,"WinoGrande":47.33}
-{"index":30,"Rank":18,"Model":"text-embedding-ada-002-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":19.56,"ARCChallenge":11.85,"AlphaNLI":10.62,"HellaSwag":24.8,"PIQA":23.87,"Quail":5.79,"RARbCode":82.36,"RARbMath":67.26,"SIQA":2.64,"SpartQA":4.75,"TempReasonL1":1.44,"TempReasonL2Fact":19.38,"TempReasonL2Pure":2.43,"TempReasonL3Fact":17.58,"TempReasonL3Pure":7.31,"WinoGrande":11.36}
-{"index":1,"Rank":19,"Model":"dragon-plus<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":19.1,"ARCChallenge":8.91,"AlphaNLI":32.1,"HellaSwag":27.69,"PIQA":28.01,"Quail":4.09,"RARbCode":17.58,"RARbMath":45.09,"SIQA":2.0,"SpartQA":10.34,"TempReasonL1":1.82,"TempReasonL2Fact":17.45,"TempReasonL2Pure":0.55,"TempReasonL3Fact":15.71,"TempReasonL3Pure":7.97,"WinoGrande":67.18}
-{"index":26,"Rank":20,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":18.03,"ARCChallenge":11.8,"AlphaNLI":22.41,"HellaSwag":26.27,"PIQA":29.03,"Quail":3.41,"RARbCode":53.21,"RARbMath":71.85,"SIQA":2.38,"SpartQA":0.22,"TempReasonL1":1.77,"TempReasonL2Fact":11.2,"TempReasonL2Pure":1.15,"TempReasonL3Fact":9.42,"TempReasonL3Pure":5.59,"WinoGrande":20.8}
-{"index":5,"Rank":21,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":17.7,"ARCChallenge":9.99,"AlphaNLI":13.13,"HellaSwag":28.5,"PIQA":27.99,"Quail":1.83,"RARbCode":48.12,"RARbMath":57.36,"SIQA":1.04,"SpartQA":2.99,"TempReasonL1":1.46,"TempReasonL2Fact":24.25,"TempReasonL2Pure":2.35,"TempReasonL3Fact":20.64,"TempReasonL3Pure":6.67,"WinoGrande":19.18}
-{"index":22,"Rank":22,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":17.35,"ARCChallenge":10.23,"AlphaNLI":25.35,"HellaSwag":24.08,"PIQA":26.44,"Quail":3.08,"RARbCode":42.44,"RARbMath":66.36,"SIQA":2.09,"SpartQA":2.67,"TempReasonL1":1.66,"TempReasonL2Fact":10.31,"TempReasonL2Pure":0.63,"TempReasonL3Fact":11.11,"TempReasonL3Pure":6.63,"WinoGrande":27.2}
-{"index":0,"Rank":23,"Model":"dragon-plus-instruct<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":16.73,"ARCChallenge":8.24,"AlphaNLI":25.18,"HellaSwag":24.06,"PIQA":26.35,"Quail":4.2,"RARbCode":12.84,"RARbMath":36.15,"SIQA":1.75,"SpartQA":10.82,"TempReasonL1":1.54,"TempReasonL2Fact":16.11,"TempReasonL2Pure":0.57,"TempReasonL3Fact":14.81,"TempReasonL3Pure":7.46,"WinoGrande":60.84}
-{"index":15,"Rank":24,"Model":"contriever<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":16.09,"ARCChallenge":8.62,"AlphaNLI":31.77,"HellaSwag":17.73,"PIQA":24.64,"Quail":4.97,"RARbCode":9.28,"RARbMath":30.76,"SIQA":1.27,"SpartQA":10.94,"TempReasonL1":1.93,"TempReasonL2Fact":22.68,"TempReasonL2Pure":1.12,"TempReasonL3Fact":20.62,"TempReasonL3Pure":7.8,"WinoGrande":47.15}
-{"index":23,"Rank":25,"Model":"all-MiniLM-L6-v2-instruct<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":15.95,"ARCChallenge":9.4,"AlphaNLI":15.09,"HellaSwag":20.51,"PIQA":24.68,"Quail":3.46,"RARbCode":42.47,"RARbMath":62.39,"SIQA":1.53,"SpartQA":0.57,"TempReasonL1":1.05,"TempReasonL2Fact":16.57,"TempReasonL2Pure":0.49,"TempReasonL3Fact":14.01,"TempReasonL3Pure":6.27,"WinoGrande":20.73}
-{"index":3,"Rank":26,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":14.93,"ARCChallenge":9.66,"AlphaNLI":10.99,"HellaSwag":26.64,"PIQA":25.69,"Quail":1.42,"RARbCode":46.47,"RARbMath":46.86,"SIQA":0.94,"SpartQA":3.37,"TempReasonL1":1.07,"TempReasonL2Fact":17.23,"TempReasonL2Pure":1.29,"TempReasonL3Fact":13.36,"TempReasonL3Pure":5.2,"WinoGrande":13.76}
-{"index":4,"Rank":27,"Model":"bge-large-en-v1.5-instruct<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":14.55,"ARCChallenge":8.86,"AlphaNLI":0.86,"HellaSwag":26.24,"PIQA":23.26,"Quail":2.72,"RARbCode":45.25,"RARbMath":49.82,"SIQA":0.59,"SpartQA":2.34,"TempReasonL1":1.17,"TempReasonL2Fact":21.19,"TempReasonL2Pure":2.1,"TempReasonL3Fact":17.59,"TempReasonL3Pure":5.99,"WinoGrande":10.31}
-{"index":9,"Rank":28,"Model":"bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":14.15,"ARCChallenge":8.95,"AlphaNLI":11.64,"HellaSwag":25.44,"PIQA":23.92,"Quail":1.75,"RARbCode":42.36,"RARbMath":44.98,"SIQA":0.77,"SpartQA":3.55,"TempReasonL1":1.41,"TempReasonL2Fact":17.56,"TempReasonL2Pure":1.05,"TempReasonL3Fact":13.88,"TempReasonL3Pure":4.76,"WinoGrande":10.28}
-{"index":25,"Rank":29,"Model":"all-mpnet-base-v2-instruct<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":13.84,"ARCChallenge":10.35,"AlphaNLI":1.96,"HellaSwag":13.01,"PIQA":27.18,"Quail":3.02,"RARbCode":48.95,"RARbMath":69.21,"SIQA":1.29,"SpartQA":1.01,"TempReasonL1":1.52,"TempReasonL2Fact":7.28,"TempReasonL2Pure":1.03,"TempReasonL3Fact":7.03,"TempReasonL3Pure":5.16,"WinoGrande":9.66}
-{"index":2,"Rank":30,"Model":"bge-base-en-v1.5-instruct<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":13.52,"ARCChallenge":8.85,"AlphaNLI":4.13,"HellaSwag":24.03,"PIQA":23.03,"Quail":1.25,"RARbCode":46.32,"RARbMath":45.62,"SIQA":0.24,"SpartQA":2.67,"TempReasonL1":0.8,"TempReasonL2Fact":16.56,"TempReasonL2Pure":1.33,"TempReasonL3Fact":12.68,"TempReasonL3Pure":5.08,"WinoGrande":10.27}
-{"index":8,"Rank":31,"Model":"bge-small-en-v1.5-instruct<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":12.6,"ARCChallenge":7.72,"AlphaNLI":1.26,"HellaSwag":23.41,"PIQA":20.79,"Quail":2.01,"RARbCode":41.52,"RARbMath":46.5,"SIQA":0.98,"SpartQA":2.86,"TempReasonL1":1.27,"TempReasonL2Fact":16.72,"TempReasonL2Pure":1.1,"TempReasonL3Fact":12.81,"TempReasonL3Pure":4.63,"WinoGrande":5.35}
-{"index":28,"Rank":32,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":12.24,"ARCChallenge":7.19,"AlphaNLI":21.87,"HellaSwag":17.53,"PIQA":18.65,"Quail":2.98,"RARbCode":11.02,"RARbMath":30.93,"SIQA":1.21,"SpartQA":5.69,"TempReasonL1":1.94,"TempReasonL2Fact":5.34,"TempReasonL2Pure":0.33,"TempReasonL3Fact":6.79,"TempReasonL3Pure":3.19,"WinoGrande":49.01}
-{"index":27,"Rank":33,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":11.55,"ARCChallenge":6.19,"AlphaNLI":20.89,"HellaSwag":16.98,"PIQA":15.79,"Quail":2.96,"RARbCode":8.48,"RARbMath":30.02,"SIQA":0.88,"SpartQA":4.94,"TempReasonL1":1.43,"TempReasonL2Fact":6.21,"TempReasonL2Pure":0.22,"TempReasonL3Fact":6.77,"TempReasonL3Pure":4.9,"WinoGrande":46.52}
-{"index":21,"Rank":34,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":9.31,"ARCChallenge":3.78,"AlphaNLI":13.11,"HellaSwag":5.59,"PIQA":6.53,"Quail":1.91,"RARbCode":2.31,"RARbMath":27.19,"SIQA":1.07,"SpartQA":1.56,"TempReasonL1":1.56,"TempReasonL2Fact":7.06,"TempReasonL2Pure":0.14,"TempReasonL3Fact":8.74,"TempReasonL3Pure":4.73,"WinoGrande":54.3}
-{"index":14,"Rank":35,"Model":"contriever-instruct<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":"","ARCChallenge":7.63,"AlphaNLI":27.09,"HellaSwag":"","PIQA":21.73,"Quail":4.92,"RARbCode":7.12,"RARbMath":21.83,"SIQA":0.88,"SpartQA":10.56,"TempReasonL1":1.8,"TempReasonL2Fact":22.03,"TempReasonL2Pure":0.94,"TempReasonL3Fact":20.82,"TempReasonL3Pure":7.15,"WinoGrande":26.3}
-{"index":29,"Rank":36,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","ARCChallenge":3.85,"AlphaNLI":14.15,"HellaSwag":"","PIQA":"","Quail":"","RARbCode":"","RARbMath":"","SIQA":"","SpartQA":"","TempReasonL1":"","TempReasonL2Fact":"","TempReasonL2Pure":"","TempReasonL3Fact":"","TempReasonL3Pure":"","WinoGrande":""}
+{"index":21,"Rank":1,"Model":"gte-multilingual-base<\/a>","Model Size (Million Parameters)":305,"Memory Usage (GB, fp32)":1.14,"Average":73.41,"STS17 (ar-ar)":78.82,"STS17 (en-ar)":76.44,"STS17 (en-de)":84.71,"STS17 (en-tr)":76.36,"STS17 (es-en)":83.85,"STS17 (es-es)":87.04,"STS17 (fr-en)":84.43,"STS17 (it-en)":83.85,"STS17 (ko-ko)":81.59,"STS17 (nl-en)":82.46,"STS22 (ar)":58.55,"STS22 (de)":60.89,"STS22 (de-en)":62.28,"STS22 (de-fr)":56.01,"STS22 (de-pl)":53.64,"STS22 (es)":72.02,"STS22 (es-en)":79.64,"STS22 (es-it)":74.83,"STS22 (fr)":81.03,"STS22 (fr-pl)":73.25,"STS22 (it)":78.28,"STS22 (pl)":42.39,"STS22 (pl-en)":78.29,"STS22 (ru)":66.49,"STS22 (tr)":65.54,"STS22 (zh-en)":72.93,"STSBenchmark":86.46}
+{"index":164,"Rank":2,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":73.17,"STS17 (ar-ar)":81.87,"STS17 (en-ar)":77.93,"STS17 (en-de)":87.3,"STS17 (en-tr)":72.56,"STS17 (es-en)":88.24,"STS17 (es-es)":87.46,"STS17 (fr-en)":88.06,"STS17 (it-en)":89.68,"STS17 (ko-ko)":83.69,"STS17 (nl-en)":88.25,"STS22 (ar)":54.12,"STS22 (de)":49.12,"STS22 (de-en)":60.92,"STS22 (de-fr)":61.39,"STS22 (de-pl)":54.47,"STS22 (es)":67.0,"STS22 (es-en)":75.84,"STS22 (es-it)":75.04,"STS22 (fr)":69.82,"STS22 (fr-pl)":84.52,"STS22 (it)":75.87,"STS22 (pl)":39.21,"STS22 (pl-en)":73.18,"STS22 (ru)":60.83,"STS22 (tr)":68.72,"STS22 (zh-en)":71.88,"STSBenchmark":88.6}
+{"index":56,"Rank":3,"Model":"multilingual-e5-large-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.02,"STS17 (ar-ar)":77.88,"STS17 (en-ar)":75.06,"STS17 (en-de)":86.16,"STS17 (en-tr)":71.23,"STS17 (es-en)":80.75,"STS17 (es-es)":86.74,"STS17 (fr-en)":85.62,"STS17 (it-en)":84.54,"STS17 (ko-ko)":82.27,"STS17 (nl-en)":85.28,"STS22 (ar)":56.99,"STS22 (de)":56.59,"STS22 (de-en)":56.6,"STS22 (de-fr)":67.79,"STS22 (de-pl)":49.58,"STS22 (es)":64.58,"STS22 (es-en)":72.52,"STS22 (es-it)":68.93,"STS22 (fr)":76.79,"STS22 (fr-pl)":50.71,"STS22 (it)":76.98,"STS22 (pl)":34.68,"STS22 (pl-en)":65.54,"STS22 (ru)":59.9,"STS22 (tr)":63.55,"STS22 (zh-en)":66.0,"STSBenchmark":87.29}
+{"index":254,"Rank":4,"Model":"multilingual-e5-large-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":70.02,"STS17 (ar-ar)":77.88,"STS17 (en-ar)":75.06,"STS17 (en-de)":86.16,"STS17 (en-tr)":71.23,"STS17 (es-en)":80.75,"STS17 (es-es)":86.74,"STS17 (fr-en)":85.62,"STS17 (it-en)":84.54,"STS17 (ko-ko)":82.27,"STS17 (nl-en)":85.28,"STS22 (ar)":56.99,"STS22 (de)":56.59,"STS22 (de-en)":56.6,"STS22 (de-fr)":67.79,"STS22 (de-pl)":49.58,"STS22 (es)":64.58,"STS22 (es-en)":72.52,"STS22 (es-it)":68.93,"STS22 (fr)":76.79,"STS22 (fr-pl)":50.71,"STS22 (it)":76.98,"STS22 (pl)":34.68,"STS22 (pl-en)":65.54,"STS22 (ru)":59.9,"STS22 (tr)":63.55,"STS22 (zh-en)":66.0,"STSBenchmark":87.29}
+{"index":168,"Rank":5,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":70.02,"STS17 (ar-ar)":77.88,"STS17 (en-ar)":75.06,"STS17 (en-de)":86.16,"STS17 (en-tr)":71.23,"STS17 (es-en)":80.75,"STS17 (es-es)":86.74,"STS17 (fr-en)":85.62,"STS17 (it-en)":84.54,"STS17 (ko-ko)":82.27,"STS17 (nl-en)":85.28,"STS22 (ar)":56.99,"STS22 (de)":56.59,"STS22 (de-en)":56.6,"STS22 (de-fr)":67.79,"STS22 (de-pl)":49.58,"STS22 (es)":64.58,"STS22 (es-en)":72.52,"STS22 (es-it)":68.93,"STS22 (fr)":76.79,"STS22 (fr-pl)":50.71,"STS22 (it)":76.98,"STS22 (pl)":34.66,"STS22 (pl-en)":65.54,"STS22 (ru)":59.9,"STS22 (tr)":63.55,"STS22 (zh-en)":66.0,"STSBenchmark":87.29}
+{"index":120,"Rank":6,"Model":"Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":68.85,"STS17 (ar-ar)":79.38,"STS17 (en-ar)":58.76,"STS17 (en-de)":76.13,"STS17 (en-tr)":55.53,"STS17 (es-en)":72.26,"STS17 (es-es)":85.06,"STS17 (fr-en)":75.63,"STS17 (it-en)":71.36,"STS17 (ko-ko)":80.79,"STS17 (nl-en)":71.99,"STS22 (ar)":57.44,"STS22 (de)":60.12,"STS22 (de-en)":53.36,"STS22 (de-fr)":58.25,"STS22 (de-pl)":48.47,"STS22 (es)":68.57,"STS22 (es-en)":77.41,"STS22 (es-it)":74.69,"STS22 (fr)":81.47,"STS22 (fr-pl)":73.25,"STS22 (it)":79.28,"STS22 (pl)":42.08,"STS22 (pl-en)":77.5,"STS22 (ru)":61.71,"STS22 (tr)":66.62,"STS22 (zh-en)":69.87,"STSBenchmark":81.95}
+{"index":167,"Rank":7,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":68.84,"STS17 (ar-ar)":74.52,"STS17 (en-ar)":71.27,"STS17 (en-de)":82.09,"STS17 (en-tr)":63.33,"STS17 (es-en)":76.5,"STS17 (es-es)":86.74,"STS17 (fr-en)":80.18,"STS17 (it-en)":80.15,"STS17 (ko-ko)":79.95,"STS17 (nl-en)":79.25,"STS22 (ar)":57.87,"STS22 (de)":55.95,"STS22 (de-en)":54.93,"STS22 (de-fr)":59.47,"STS22 (de-pl)":39.35,"STS22 (es)":66.58,"STS22 (es-en)":73.99,"STS22 (es-it)":66.46,"STS22 (fr)":74.8,"STS22 (fr-pl)":73.25,"STS22 (it)":77.76,"STS22 (pl)":34.07,"STS22 (pl-en)":70.37,"STS22 (ru)":60.66,"STS22 (tr)":63.7,"STS22 (zh-en)":69.92,"STSBenchmark":85.64}
+{"index":170,"Rank":8,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":67.08,"STS17 (ar-ar)":73.03,"STS17 (en-ar)":57.41,"STS17 (en-de)":77.24,"STS17 (en-tr)":55.97,"STS17 (es-en)":72.44,"STS17 (es-es)":84.84,"STS17 (fr-en)":72.29,"STS17 (it-en)":77.33,"STS17 (ko-ko)":78.87,"STS17 (nl-en)":75.38,"STS22 (ar)":56.65,"STS22 (de)":53.45,"STS22 (de-en)":56.49,"STS22 (de-fr)":60.57,"STS22 (de-pl)":28.24,"STS22 (es)":66.88,"STS22 (es-en)":74.57,"STS22 (es-it)":71.81,"STS22 (fr)":76.58,"STS22 (fr-pl)":84.52,"STS22 (it)":76.53,"STS22 (pl)":35.8,"STS22 (pl-en)":72.69,"STS22 (ru)":59.9,"STS22 (tr)":63.71,"STS22 (zh-en)":63.74,"STSBenchmark":84.11}
+{"index":231,"Rank":9,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":66.01,"STS17 (ar-ar)":69.07,"STS17 (en-ar)":74.51,"STS17 (en-de)":73.85,"STS17 (en-tr)":72.07,"STS17 (es-en)":65.71,"STS17 (es-es)":80.83,"STS17 (fr-en)":76.98,"STS17 (it-en)":76.99,"STS17 (ko-ko)":71.32,"STS17 (nl-en)":75.22,"STS22 (ar)":57.67,"STS22 (de)":48.58,"STS22 (de-en)":50.14,"STS22 (de-fr)":53.28,"STS22 (de-pl)":58.69,"STS22 (es)":63.18,"STS22 (es-en)":71.86,"STS22 (es-it)":69.69,"STS22 (fr)":77.95,"STS22 (fr-pl)":61.98,"STS22 (it)":72.22,"STS22 (pl)":39.28,"STS22 (pl-en)":69.41,"STS22 (ru)":57.49,"STS22 (tr)":58.15,"STS22 (zh-en)":64.02,"STSBenchmark":72.25}
+{"index":253,"Rank":10,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":65.66,"STS17 (ar-ar)":78.03,"STS17 (en-ar)":78.6,"STS17 (en-de)":81.48,"STS17 (en-tr)":76.34,"STS17 (es-en)":81.81,"STS17 (es-es)":87.91,"STS17 (fr-en)":78.06,"STS17 (it-en)":80.98,"STS17 (ko-ko)":68.24,"STS17 (nl-en)":81.0,"STS22 (ar)":54.51,"STS22 (de)":46.89,"STS22 (de-en)":45.0,"STS22 (de-fr)":49.43,"STS22 (de-pl)":39.32,"STS22 (es)":58.94,"STS22 (es-en)":67.71,"STS22 (es-it)":50.79,"STS22 (fr)":74.1,"STS22 (fr-pl)":73.25,"STS22 (it)":65.86,"STS22 (pl)":34.81,"STS22 (pl-en)":60.17,"STS22 (ru)":54.51,"STS22 (tr)":57.29,"STS22 (zh-en)":61.29,"STSBenchmark":86.45}
+{"index":65,"Rank":11,"Model":"paraphrase-multilingual-mpnet-base-v2-KE_Sieve<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":65.52,"STS17 (ar-ar)":76.04,"STS17 (en-ar)":77.6,"STS17 (en-de)":70.4,"STS17 (en-tr)":71.04,"STS17 (es-en)":81.59,"STS17 (es-es)":82.77,"STS17 (fr-en)":77.16,"STS17 (it-en)":81.52,"STS17 (ko-ko)":77.0,"STS17 (nl-en)":80.7,"STS22 (ar)":52.61,"STS22 (de)":41.84,"STS22 (de-en)":49.09,"STS22 (de-fr)":50.6,"STS22 (de-pl)":50.44,"STS22 (es)":57.23,"STS22 (es-en)":67.29,"STS22 (es-it)":57.93,"STS22 (fr)":72.79,"STS22 (fr-pl)":73.25,"STS22 (it)":64.17,"STS22 (pl)":36.37,"STS22 (pl-en)":67.72,"STS22 (ru)":53.35,"STS22 (tr)":52.71,"STS22 (zh-en)":64.45,"STSBenchmark":81.34}
+{"index":238,"Rank":12,"Model":"distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":65.43,"STS17 (ar-ar)":77.34,"STS17 (en-ar)":77.46,"STS17 (en-de)":80.24,"STS17 (en-tr)":74.34,"STS17 (es-en)":77.4,"STS17 (es-es)":83.71,"STS17 (fr-en)":79.28,"STS17 (it-en)":80.82,"STS17 (ko-ko)":76.4,"STS17 (nl-en)":80.51,"STS22 (ar)":49.04,"STS22 (de)":35.73,"STS22 (de-en)":47.51,"STS22 (de-fr)":60.76,"STS22 (de-pl)":36.09,"STS22 (es)":59.34,"STS22 (es-en)":68.96,"STS22 (es-it)":63.28,"STS22 (fr)":76.41,"STS22 (fr-pl)":61.98,"STS22 (it)":65.1,"STS22 (pl)":34.58,"STS22 (pl-en)":71.33,"STS22 (ru)":52.4,"STS22 (tr)":54.07,"STS22 (zh-en)":61.75,"STSBenchmark":80.75}
+{"index":173,"Rank":13,"Model":"udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":59.1,"STS17 (ar-ar)":81.13,"STS17 (en-ar)":79.64,"STS17 (en-de)":52.52,"STS17 (en-tr)":4.75,"STS17 (es-en)":85.41,"STS17 (es-es)":87.33,"STS17 (fr-en)":83.96,"STS17 (it-en)":45.62,"STS17 (ko-ko)":61.89,"STS17 (nl-en)":46.69,"STS22 (ar)":55.0,"STS22 (de)":37.51,"STS22 (de-en)":51.66,"STS22 (de-fr)":39.66,"STS22 (de-pl)":26.11,"STS22 (es)":59.79,"STS22 (es-en)":73.59,"STS22 (es-it)":67.83,"STS22 (fr)":77.1,"STS22 (fr-pl)":84.52,"STS22 (it)":68.87,"STS22 (pl)":27.98,"STS22 (pl-en)":60.77,"STS22 (ru)":43.14,"STS22 (tr)":42.33,"STS22 (zh-en)":65.01,"STSBenchmark":85.79}
+{"index":130,"Rank":14,"Model":"sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":7068,"Memory Usage (GB, fp32)":26.33,"Average":58.34,"STS17 (ar-ar)":76.42,"STS17 (en-ar)":78.07,"STS17 (en-de)":59.1,"STS17 (en-tr)":11.8,"STS17 (es-en)":78.22,"STS17 (es-es)":86.0,"STS17 (fr-en)":80.46,"STS17 (it-en)":51.58,"STS17 (ko-ko)":66.89,"STS17 (nl-en)":45.85,"STS22 (ar)":58.67,"STS22 (de)":30.05,"STS22 (de-en)":51.16,"STS22 (de-fr)":53.28,"STS22 (de-pl)":43.05,"STS22 (es)":65.41,"STS22 (es-en)":75.06,"STS22 (es-it)":65.5,"STS22 (fr)":80.38,"STS22 (fr-pl)":28.17,"STS22 (it)":65.65,"STS22 (pl)":31.13,"STS22 (pl-en)":53.31,"STS22 (ru)":43.36,"STS22 (tr)":47.14,"STS22 (zh-en)":68.45,"STSBenchmark":80.9}
+{"index":150,"Rank":15,"Model":"SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":57.22,"STS17 (ar-ar)":74.97,"STS17 (en-ar)":74.05,"STS17 (en-de)":74.95,"STS17 (en-tr)":77.18,"STS17 (es-en)":72.25,"STS17 (es-es)":80.65,"STS17 (fr-en)":77.2,"STS17 (it-en)":78.67,"STS17 (ko-ko)":66.14,"STS17 (nl-en)":78.14,"STS22 (ar)":40.25,"STS22 (de)":24.09,"STS22 (de-en)":34.28,"STS22 (de-fr)":41.29,"STS22 (de-pl)":25.81,"STS22 (es)":55.4,"STS22 (es-en)":57.82,"STS22 (es-it)":49.13,"STS22 (fr)":61.72,"STS22 (fr-pl)":61.98,"STS22 (it)":62.2,"STS22 (pl)":25.31,"STS22 (pl-en)":44.72,"STS22 (ru)":43.57,"STS22 (tr)":46.46,"STS22 (zh-en)":49.19,"STSBenchmark":67.39}
+{"index":11,"Rank":16,"Model":"LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Average":52.31,"STS17 (ar-ar)":67.47,"STS17 (en-ar)":65.05,"STS17 (en-de)":66.66,"STS17 (en-tr)":70.05,"STS17 (es-en)":55.3,"STS17 (es-es)":79.67,"STS17 (fr-en)":70.82,"STS17 (it-en)":70.98,"STS17 (ko-ko)":70.52,"STS17 (nl-en)":68.12,"STS22 (ar)":42.57,"STS22 (de)":25.69,"STS22 (de-en)":32.35,"STS22 (de-fr)":37.41,"STS22 (de-pl)":15.67,"STS22 (es)":54.92,"STS22 (es-en)":54.34,"STS22 (es-it)":42.21,"STS22 (fr)":58.61,"STS22 (fr-pl)":39.44,"STS22 (it)":60.31,"STS22 (pl)":18.34,"STS22 (pl-en)":53.63,"STS22 (ru)":39.24,"STS22 (tr)":36.97,"STS22 (zh-en)":46.19,"STSBenchmark":69.77}
+{"index":175,"Rank":17,"Model":"udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":52.04,"STS17 (ar-ar)":80.6,"STS17 (en-ar)":72.6,"STS17 (en-de)":40.34,"STS17 (en-tr)":6.79,"STS17 (es-en)":81.8,"STS17 (es-es)":85.65,"STS17 (fr-en)":79.94,"STS17 (it-en)":34.8,"STS17 (ko-ko)":57.28,"STS17 (nl-en)":33.58,"STS22 (ar)":54.82,"STS22 (de)":26.63,"STS22 (de-en)":49.55,"STS22 (de-fr)":22.36,"STS22 (de-pl)":35.32,"STS22 (es)":56.31,"STS22 (es-en)":71.03,"STS22 (es-it)":61.3,"STS22 (fr)":61.35,"STS22 (fr-pl)":73.25,"STS22 (it)":62.61,"STS22 (pl)":15.06,"STS22 (pl-en)":43.72,"STS22 (ru)":28.77,"STS22 (tr)":22.11,"STS22 (zh-en)":63.9,"STSBenchmark":83.63}
+{"index":249,"Rank":18,"Model":"sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":50.06,"STS17 (ar-ar)":11.13,"STS17 (en-ar)":-3.93,"STS17 (en-de)":79.04,"STS17 (en-tr)":13.61,"STS17 (es-en)":71.72,"STS17 (es-es)":83.42,"STS17 (fr-en)":71.38,"STS17 (it-en)":69.5,"STS17 (ko-ko)":9.61,"STS17 (nl-en)":66.12,"STS22 (ar)":29.6,"STS22 (de)":47.72,"STS22 (de-en)":49.64,"STS22 (de-fr)":62.21,"STS22 (de-pl)":34.34,"STS22 (es)":58.16,"STS22 (es-en)":69.15,"STS22 (es-it)":65.26,"STS22 (fr)":77.49,"STS22 (fr-pl)":50.71,"STS22 (it)":66.91,"STS22 (pl)":27.04,"STS22 (pl-en)":58.85,"STS22 (ru)":26.63,"STS22 (tr)":43.36,"STS22 (zh-en)":29.0,"STSBenchmark":83.93}
+{"index":241,"Rank":19,"Model":"gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Average":49.47,"STS17 (ar-ar)":9.06,"STS17 (en-ar)":-3.22,"STS17 (en-de)":70.38,"STS17 (en-tr)":17.17,"STS17 (es-en)":60.24,"STS17 (es-es)":81.93,"STS17 (fr-en)":62.17,"STS17 (it-en)":59.11,"STS17 (ko-ko)":8.9,"STS17 (nl-en)":56.91,"STS22 (ar)":37.66,"STS22 (de)":50.58,"STS22 (de-en)":53.63,"STS22 (de-fr)":55.72,"STS22 (de-pl)":27.99,"STS22 (es)":59.14,"STS22 (es-en)":69.99,"STS22 (es-it)":60.94,"STS22 (fr)":79.43,"STS22 (fr-pl)":61.98,"STS22 (it)":67.14,"STS22 (pl)":33.74,"STS22 (pl-en)":60.18,"STS22 (ru)":32.69,"STS22 (tr)":55.79,"STS22 (zh-en)":28.85,"STSBenchmark":77.65}
+{"index":58,"Rank":20,"Model":"MUG-B-1.6<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":47.36,"STS17 (ar-ar)":46.8,"STS17 (en-ar)":-0.78,"STS17 (en-de)":47.5,"STS17 (en-tr)":4.18,"STS17 (es-en)":44.27,"STS17 (es-es)":79.22,"STS17 (fr-en)":47.15,"STS17 (it-en)":42.65,"STS17 (ko-ko)":39.79,"STS17 (nl-en)":36.6,"STS22 (ar)":25.06,"STS22 (de)":39.49,"STS22 (de-en)":54.22,"STS22 (de-fr)":48.91,"STS22 (de-pl)":33.04,"STS22 (es)":59.47,"STS22 (es-en)":66.65,"STS22 (es-it)":64.37,"STS22 (fr)":79.88,"STS22 (fr-pl)":39.44,"STS22 (it)":68.15,"STS22 (pl)":35.38,"STS22 (pl-en)":62.7,"STS22 (ru)":30.62,"STS22 (tr)":45.65,"STS22 (zh-en)":49.25,"STSBenchmark":88.96}
+{"index":240,"Rank":21,"Model":"gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":46.79,"STS17 (ar-ar)":10.19,"STS17 (en-ar)":-5.77,"STS17 (en-de)":67.43,"STS17 (en-tr)":8.75,"STS17 (es-en)":54.96,"STS17 (es-es)":82.74,"STS17 (fr-en)":60.5,"STS17 (it-en)":46.26,"STS17 (ko-ko)":8.96,"STS17 (nl-en)":47.48,"STS22 (ar)":34.97,"STS22 (de)":51.7,"STS22 (de-en)":48.76,"STS22 (de-fr)":57.5,"STS22 (de-pl)":32.76,"STS22 (es)":57.49,"STS22 (es-en)":67.76,"STS22 (es-it)":57.18,"STS22 (fr)":78.7,"STS22 (fr-pl)":61.98,"STS22 (it)":67.67,"STS22 (pl)":30.68,"STS22 (pl-en)":54.17,"STS22 (ru)":15.36,"STS22 (tr)":58.12,"STS22 (zh-en)":29.42,"STSBenchmark":77.6}
+{"index":248,"Rank":22,"Model":"sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":44.35,"STS17 (ar-ar)":10.75,"STS17 (en-ar)":-4.71,"STS17 (en-de)":73.62,"STS17 (en-tr)":-0.42,"STS17 (es-en)":62.62,"STS17 (es-es)":82.74,"STS17 (fr-en)":67.86,"STS17 (it-en)":51.86,"STS17 (ko-ko)":9.44,"STS17 (nl-en)":45.95,"STS22 (ar)":27.01,"STS22 (de)":43.73,"STS22 (de-en)":49.93,"STS22 (de-fr)":61.58,"STS22 (de-pl)":38.83,"STS22 (es)":57.68,"STS22 (es-en)":68.09,"STS22 (es-it)":61.58,"STS22 (fr)":75.01,"STS22 (fr-pl)":5.63,"STS22 (it)":62.01,"STS22 (pl)":25.0,"STS22 (pl-en)":51.72,"STS22 (ru)":14.21,"STS22 (tr)":47.3,"STS22 (zh-en)":23.1,"STSBenchmark":85.36}
+{"index":247,"Rank":23,"Model":"sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":41.17,"STS17 (ar-ar)":13.36,"STS17 (en-ar)":-5.65,"STS17 (en-de)":67.11,"STS17 (en-tr)":-0.02,"STS17 (es-en)":47.72,"STS17 (es-es)":79.94,"STS17 (fr-en)":56.61,"STS17 (it-en)":30.46,"STS17 (ko-ko)":10.06,"STS17 (nl-en)":36.46,"STS22 (ar)":31.2,"STS22 (de)":42.08,"STS22 (de-en)":46.9,"STS22 (de-fr)":55.04,"STS22 (de-pl)":33.94,"STS22 (es)":53.81,"STS22 (es-en)":65.19,"STS22 (es-it)":55.29,"STS22 (fr)":77.69,"STS22 (fr-pl)":28.17,"STS22 (it)":60.65,"STS22 (pl)":24.42,"STS22 (pl-en)":42.97,"STS22 (ru)":12.13,"STS22 (tr)":40.45,"STS22 (zh-en)":20.15,"STSBenchmark":85.52}
+{"index":233,"Rank":24,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":37.71,"STS17 (ar-ar)":50.89,"STS17 (en-ar)":-4.28,"STS17 (en-de)":35.82,"STS17 (en-tr)":4.5,"STS17 (es-en)":16.31,"STS17 (es-es)":76.12,"STS17 (fr-en)":37.09,"STS17 (it-en)":24.45,"STS17 (ko-ko)":43.39,"STS17 (nl-en)":29.0,"STS22 (ar)":22.64,"STS22 (de)":31.04,"STS22 (de-en)":44.04,"STS22 (de-fr)":30.07,"STS22 (de-pl)":4.93,"STS22 (es)":54.78,"STS22 (es-en)":53.42,"STS22 (es-it)":44.27,"STS22 (fr)":77.0,"STS22 (fr-pl)":50.71,"STS22 (it)":60.4,"STS22 (pl)":26.77,"STS22 (pl-en)":32.8,"STS22 (ru)":14.72,"STS22 (tr)":33.69,"STS22 (zh-en)":41.64,"STSBenchmark":82.03}
+{"index":82,"Rank":25,"Model":"SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":37.32,"STS17 (ar-ar)":55.62,"STS17 (en-ar)":8.21,"STS17 (en-de)":30.18,"STS17 (en-tr)":1.04,"STS17 (es-en)":28.78,"STS17 (es-es)":71.88,"STS17 (fr-en)":26.34,"STS17 (it-en)":20.73,"STS17 (ko-ko)":52.39,"STS17 (nl-en)":25.05,"STS22 (ar)":28.19,"STS22 (de)":21.99,"STS22 (de-en)":53.07,"STS22 (de-fr)":32.97,"STS22 (de-pl)":20.45,"STS22 (es)":49.81,"STS22 (es-en)":49.51,"STS22 (es-it)":45.78,"STS22 (fr)":67.66,"STS22 (fr-pl)":61.98,"STS22 (it)":48.25,"STS22 (pl)":23.31,"STS22 (pl-en)":36.8,"STS22 (ru)":9.07,"STS22 (tr)":34.66,"STS22 (zh-en)":28.68,"STSBenchmark":75.34}
+{"index":232,"Rank":26,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":34.57,"STS17 (ar-ar)":58.71,"STS17 (en-ar)":0.54,"STS17 (en-de)":27.54,"STS17 (en-tr)":0.43,"STS17 (es-en)":22.01,"STS17 (es-es)":78.37,"STS17 (fr-en)":30.7,"STS17 (it-en)":24.28,"STS17 (ko-ko)":43.37,"STS17 (nl-en)":24.51,"STS22 (ar)":17.54,"STS22 (de)":22.53,"STS22 (de-en)":42.86,"STS22 (de-fr)":43.52,"STS22 (de-pl)":1.63,"STS22 (es)":43.98,"STS22 (es-en)":53.99,"STS22 (es-it)":40.71,"STS22 (fr)":69.51,"STS22 (fr-pl)":16.9,"STS22 (it)":47.48,"STS22 (pl)":19.22,"STS22 (pl-en)":42.67,"STS22 (ru)":11.19,"STS22 (tr)":21.6,"STS22 (zh-en)":44.39,"STSBenchmark":83.09}
+{"index":83,"Rank":27,"Model":"SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":27.72,"STS17 (ar-ar)":54.16,"STS17 (en-ar)":1.72,"STS17 (en-de)":25.48,"STS17 (en-tr)":2.09,"STS17 (es-en)":21.93,"STS17 (es-es)":67.8,"STS17 (fr-en)":18.91,"STS17 (it-en)":16.39,"STS17 (ko-ko)":45.66,"STS17 (nl-en)":23.49,"STS22 (ar)":5.17,"STS22 (de)":11.0,"STS22 (de-en)":53.93,"STS22 (de-fr)":25.11,"STS22 (de-pl)":20.94,"STS22 (es)":43.05,"STS22 (es-en)":32.74,"STS22 (es-it)":35.99,"STS22 (fr)":54.56,"STS22 (fr-pl)":5.63,"STS22 (it)":33.68,"STS22 (pl)":14.91,"STS22 (pl-en)":20.54,"STS22 (ru)":3.36,"STS22 (tr)":3.82,"STS22 (zh-en)":26.71,"STSBenchmark":79.54}
+{"index":235,"Rank":28,"Model":"allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":25.1,"STS17 (ar-ar)":27.14,"STS17 (en-ar)":6.9,"STS17 (en-de)":11.59,"STS17 (en-tr)":6.46,"STS17 (es-en)":10.86,"STS17 (es-es)":55.45,"STS17 (fr-en)":16.02,"STS17 (it-en)":19.87,"STS17 (ko-ko)":8.08,"STS17 (nl-en)":24.92,"STS22 (ar)":19.57,"STS22 (de)":17.31,"STS22 (de-en)":26.03,"STS22 (de-fr)":10.26,"STS22 (de-pl)":16.94,"STS22 (es)":48.89,"STS22 (es-en)":51.79,"STS22 (es-it)":25.24,"STS22 (fr)":53.92,"STS22 (fr-pl)":39.44,"STS22 (it)":39.43,"STS22 (pl)":13.56,"STS22 (pl-en)":25.36,"STS22 (ru)":1.11,"STS22 (tr)":31.73,"STS22 (zh-en)":8.44,"STSBenchmark":61.26}
+{"index":237,"Rank":29,"Model":"komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Average":24.28,"STS17 (ar-ar)":13.78,"STS17 (en-ar)":9.08,"STS17 (en-de)":-3.11,"STS17 (en-tr)":-0.45,"STS17 (es-en)":-8.18,"STS17 (es-es)":48.23,"STS17 (fr-en)":5.81,"STS17 (it-en)":3.64,"STS17 (ko-ko)":2.54,"STS17 (nl-en)":0.44,"STS22 (ar)":32.42,"STS22 (de)":33.04,"STS22 (de-en)":28.65,"STS22 (de-fr)":14.77,"STS22 (de-pl)":11.21,"STS22 (es)":48.53,"STS22 (es-en)":26.97,"STS22 (es-it)":41.1,"STS22 (fr)":49.43,"STS22 (fr-pl)":39.44,"STS22 (it)":57.77,"STS22 (pl)":12.47,"STS22 (pl-en)":45.55,"STS22 (ru)":19.44,"STS22 (tr)":47.38,"STS22 (zh-en)":14.05,"STSBenchmark":61.55}
+{"index":0,"Rank":30,"Model":"google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.33}
+{"index":1,"Rank":31,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.99}
+{"index":2,"Rank":32,"Model":"titan-embed-text-v1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":74.85}
+{"index":3,"Rank":33,"Model":"mistral-embed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.74,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":4,"Rank":34,"Model":"voyage-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":70.51,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":5,"Rank":35,"Model":"voyage-code-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":79.99,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":6,"Rank":36,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.22}
+{"index":7,"Rank":37,"Model":"voyage-law-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":83.75,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":8,"Rank":38,"Model":"voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.86}
+{"index":9,"Rank":39,"Model":"voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.56}
+{"index":10,"Rank":40,"Model":"voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.76,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":12,"Rank":41,"Model":"Arabic_text_embedding_for_sts<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","STS17 (ar-ar)":85.05,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":13,"Rank":42,"Model":"arabic_text_embedding_sts_arabertv02_arabicnlitriplet<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","STS17 (ar-ar)":84.96,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":14,"Rank":43,"Model":"llm2vec-croissant-mntp<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":15,"Rank":44,"Model":"llm2vec-occiglot-mntp<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":67.83,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":16,"Rank":45,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.35}
+{"index":17,"Rank":46,"Model":"gte-Qwen2-1.5B-instruct<\/a>","Model Size (Million Parameters)":1776,"Memory Usage (GB, fp32)":6.62,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.84,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.83,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.38}
+{"index":18,"Rank":47,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.58,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":41.02,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.85}
+{"index":19,"Rank":48,"Model":"gte-base-en-v1.5<\/a>","Model Size (Million Parameters)":137,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.04}
+{"index":20,"Rank":49,"Model":"gte-large-en-v1.5<\/a>","Model Size (Million Parameters)":434,"Memory Usage (GB, fp32)":1.62,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.07}
+{"index":22,"Rank":50,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.42}
+{"index":23,"Rank":51,"Model":"bge-en-icl<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.42}
+{"index":24,"Rank":52,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.52}
+{"index":25,"Rank":53,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":26,"Rank":54,"Model":"bge-multilingual-gemma2<\/a>","Model Size (Million Parameters)":9242,"Memory Usage (GB, fp32)":34.43,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":83.28,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":42.79,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.25}
+{"index":27,"Rank":55,"Model":"bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.86}
+{"index":28,"Rank":56,"Model":"UniVaR-lambda-1<\/a>","Model Size (Million Parameters)":137,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.54}
+{"index":29,"Rank":57,"Model":"UniVaR-lambda-20<\/a>","Model Size (Million Parameters)":137,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.54}
+{"index":30,"Rank":58,"Model":"UniVaR-lambda-5<\/a>","Model Size (Million Parameters)":137,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.54}
+{"index":31,"Rank":59,"Model":"UniVaR-lambda-80<\/a>","Model Size (Million Parameters)":137,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.54}
+{"index":32,"Rank":60,"Model":"Cohere-embed-english-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.58}
+{"index":33,"Rank":61,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.55}
+{"index":34,"Rank":62,"Model":"Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.8,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.46}
+{"index":35,"Rank":63,"Model":"Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.76,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.79}
+{"index":36,"Rank":64,"Model":"sbert-chinese-general-v1<\/a>","Model Size (Million Parameters)":102,"Memory Usage (GB, fp32)":0.38,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":33.88,"STSBenchmark":""}
+{"index":37,"Rank":65,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":38,"Rank":66,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":39,"Rank":67,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":40,"Rank":68,"Model":"STS-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":0.98}
+{"index":41,"Rank":69,"Model":"bert-base-10lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":40.31,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":42,"Rank":70,"Model":"bert-base-15lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":40.4,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":43,"Rank":71,"Model":"bert-base-25lang-cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":38.77,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":44,"Rank":72,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7242,"Memory Usage (GB, fp32)":26.98,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.64}
+{"index":45,"Rank":73,"Model":"GritLM-8x7B<\/a>","Model Size (Million Parameters)":46703,"Memory Usage (GB, fp32)":173.98,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.43}
+{"index":46,"Rank":74,"Model":"lodestone-base-4096-v1<\/a>","Model Size (Million Parameters)":69,"Memory Usage (GB, fp32)":0.26,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":75.57}
+{"index":47,"Rank":75,"Model":"2024-06-15_10-09-42<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":48,"Rank":76,"Model":"2024-06-17_21-37-12<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":49,"Rank":77,"Model":"2024-06-19_08-22-22<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":50,"Rank":78,"Model":"2024-06-19_10-03-38<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":51,"Rank":79,"Model":"2024-06-19_21-12-17<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":52,"Rank":80,"Model":"2024-06-19_22-23-38<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":53,"Rank":81,"Model":"e5-large-v2-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.74}
+{"index":54,"Rank":82,"Model":"gte-Qwen2-7B-instruct-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.58,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":41.02,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.85}
+{"index":55,"Rank":83,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.58,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":41.02,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.85}
+{"index":57,"Rank":84,"Model":"multilingual-e5-large-instruct-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.38}
+{"index":59,"Rank":85,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.14,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.44}
+{"index":60,"Rank":86,"Model":"bilingual-embedding-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":84.64,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.28}
+{"index":61,"Rank":87,"Model":"bilingual-embedding-large-8k<\/a>","Model Size (Million Parameters)":568,"Memory Usage (GB, fp32)":2.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.84,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":62,"Rank":88,"Model":"bilingual-embedding-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.57,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.08}
+{"index":63,"Rank":89,"Model":"Linq-Embed-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.81}
+{"index":64,"Rank":90,"Model":"jina-embeddings-v2-base-de-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":86.72,"STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":59.07,"STS22 (de-en)":55.97,"STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.14}
+{"index":66,"Rank":91,"Model":"LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.72}
+{"index":67,"Rank":92,"Model":"LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":78.32}
+{"index":68,"Rank":93,"Model":"LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.05}
+{"index":69,"Rank":94,"Model":"LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":77.44}
+{"index":70,"Rank":95,"Model":"LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.65}
+{"index":71,"Rank":96,"Model":"LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":80.42}
+{"index":72,"Rank":97,"Model":"LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.16}
+{"index":73,"Rank":98,"Model":"LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":73.36}
+{"index":74,"Rank":99,"Model":"Bulbasaur<\/a>","Model Size (Million Parameters)":17,"Memory Usage (GB, fp32)":0.06,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.34}
+{"index":75,"Rank":100,"Model":"Ivysaur<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.08,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.25}
+{"index":76,"Rank":101,"Model":"Squirtle<\/a>","Model Size (Million Parameters)":16,"Memory Usage (GB, fp32)":0.06,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.21}
+{"index":77,"Rank":102,"Model":"Venusaur<\/a>","Model Size (Million Parameters)":16,"Memory Usage (GB, fp32)":0.06,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":76.72}
+{"index":78,"Rank":103,"Model":"Wartortle<\/a>","Model Size (Million Parameters)":17,"Memory Usage (GB, fp32)":0.06,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":81.79}
+{"index":79,"Rank":104,"Model":"test24<\/a>","Model Size (Million Parameters)":17,"Memory Usage (GB, fp32)":0.06,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":78.93}
+{"index":80,"Rank":105,"Model":"test25<\/a>","Model Size (Million Parameters)":17,"Memory Usage (GB, fp32)":0.06,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":78.61}
+{"index":81,"Rank":106,"Model":"SGPT-1.3B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":1341,"Memory Usage (GB, fp32)":4.99,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":77.59}
+{"index":84,"Rank":107,"Model":"SGPT-2.7B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":2685,"Memory Usage (GB, fp32)":10.0,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.21}
+{"index":85,"Rank":108,"Model":"SGPT-5.8B-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":5874,"Memory Usage (GB, fp32)":21.88,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":81.39}
+{"index":86,"Rank":109,"Model":"SGPT-5.8B-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":5874,"Memory Usage (GB, fp32)":21.88,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.67}
+{"index":87,"Rank":110,"Model":"Arabert-all-nli-triplet-Matryoshka<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","STS17 (ar-ar)":83.16,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":58.29,"STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":88,"Rank":111,"Model":"Arabic-MiniLM-L12-v2-all-nli-triplet<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","STS17 (ar-ar)":81.11,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":52.41,"STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":89,"Rank":112,"Model":"Arabic-Triplet-Matryoshka-V2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","STS17 (ar-ar)":85.31,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":90,"Rank":113,"Model":"Arabic-all-nli-triplet-Matryoshka<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","STS17 (ar-ar)":82.4,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":51.38,"STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":91,"Rank":114,"Model":"Arabic-labse-Matryoshka<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","STS17 (ar-ar)":82.47,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":57.26,"STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":92,"Rank":115,"Model":"Arabic-mpnet-base-all-nli-triplet<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":79.93,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":52.44,"STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":93,"Rank":116,"Model":"GATE-AraBert-v1<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Average":"","STS17 (ar-ar)":82.78,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":59.75,"STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":94,"Rank":117,"Model":"Marbert-all-nli-triplet-Matryoshka<\/a>","Model Size (Million Parameters)":163,"Memory Usage (GB, fp32)":0.61,"Average":"","STS17 (ar-ar)":82.18,"STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":58.08,"STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":95,"Rank":118,"Model":"Solon-embeddings-large-0.1<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":83.31,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":96,"Rank":119,"Model":"st-polish-kartonberta-base-alpha-v1<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":36.78,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":97,"Rank":120,"Model":"nomic-embed-text-v1.5-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.47}
+{"index":98,"Rank":121,"Model":"bge_m3e_stella<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":61.83}
+{"index":99,"Rank":122,"Model":"SFR-Embedding-2_R<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.6}
+{"index":100,"Rank":123,"Model":"SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.0}
+{"index":101,"Rank":124,"Model":"LLaMA-embeeding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":52.67}
+{"index":102,"Rank":125,"Model":"snowflake-arctic-embed-l<\/a>","Model Size (Million Parameters)":334,"Memory Usage (GB, fp32)":1.24,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":75.68}
+{"index":103,"Rank":126,"Model":"snowflake-arctic-embed-m<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":74.1}
+{"index":104,"Rank":127,"Model":"snowflake-arctic-embed-m-long<\/a>","Model Size (Million Parameters)":137,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":77.9}
+{"index":105,"Rank":128,"Model":"snowflake-arctic-embed-m-v1.5<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":106,"Rank":129,"Model":"snowflake-arctic-embed-s<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":81.19}
+{"index":107,"Rank":130,"Model":"snowflake-arctic-embed-xs<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.08,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.18}
+{"index":108,"Rank":131,"Model":"bge-micro<\/a>","Model Size (Million Parameters)":17,"Memory Usage (GB, fp32)":0.06,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":81.32}
+{"index":109,"Rank":132,"Model":"bge-micro-v2<\/a>","Model Size (Million Parameters)":17,"Memory Usage (GB, fp32)":0.06,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":82.5}
+{"index":110,"Rank":133,"Model":"gte-tiny<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.08,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.38}
+{"index":111,"Rank":134,"Model":"UAE-Large-V1<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.06}
+{"index":112,"Rank":135,"Model":"sentence-croissant-llm-base<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":78.77,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":113,"Rank":136,"Model":"EminEmbed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":55.58,"STS17 (en-de)":71.04,"STS17 (en-tr)":48.6,"STS17 (es-en)":67.38,"STS17 (es-es)":"","STS17 (fr-en)":70.42,"STS17 (it-en)":70.64,"STS17 (ko-ko)":"","STS17 (nl-en)":66.38,"STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":57.01,"STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":74.65,"STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":72.32,"STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":75.16,"STSBenchmark":""}
+{"index":114,"Rank":137,"Model":"nomic-embed-text-v1.5-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.47}
+{"index":115,"Rank":138,"Model":"German_Semantic_STS_V2<\/a>","Model Size (Million Parameters)":336,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":116,"Rank":139,"Model":"gte-Qwen2-1.5B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.84,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.83,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.38}
+{"index":117,"Rank":140,"Model":"ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":67.06,"STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":118,"Rank":141,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":119,"Rank":142,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":121,"Rank":143,"Model":"bge-small-4096<\/a>","Model Size (Million Parameters)":35,"Memory Usage (GB, fp32)":0.13,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":81.94}
+{"index":122,"Rank":144,"Model":"GIST-Embedding-v0<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.32}
+{"index":123,"Rank":145,"Model":"GIST-all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.08,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.4}
+{"index":124,"Rank":146,"Model":"GIST-large-Embedding-v0<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.3}
+{"index":125,"Rank":147,"Model":"GIST-small-Embedding-v0<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.08}
+{"index":126,"Rank":148,"Model":"NoInstruct-small-Embedding-v0<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.98}
+{"index":127,"Rank":149,"Model":"bge-base-en-v1-5-seqlen-384-bs-1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.42}
+{"index":128,"Rank":150,"Model":"bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":47.29}
+{"index":129,"Rank":151,"Model":"sgpt-bloom-1b7-nli<\/a>","Model Size (Million Parameters)":1722,"Memory Usage (GB, fp32)":6.42,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":73.13,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":131,"Rank":152,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.85}
+{"index":132,"Rank":153,"Model":"nomic-embed-text-v1.5-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.47}
+{"index":133,"Rank":154,"Model":"gte-Qwen2-7B-instruct-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.58,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":41.02,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.85}
+{"index":134,"Rank":155,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":135,"Rank":156,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":136,"Rank":157,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":137,"Rank":158,"Model":"cai-lunaris-text-embeddings<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":71.4}
+{"index":138,"Rank":159,"Model":"sentence-camembert-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":77.54,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":139,"Rank":160,"Model":"sentence-camembert-large<\/a>","Model Size (Million Parameters)":337,"Memory Usage (GB, fp32)":1.26,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.73,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":140,"Rank":161,"Model":"embedder-100p<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.2}
+{"index":141,"Rank":162,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":142,"Rank":163,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":143,"Rank":164,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":144,"Rank":165,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":75.98}
+{"index":145,"Rank":166,"Model":"gte-Qwen2-1.5B-instruct-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.84,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.83,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.38}
+{"index":146,"Rank":167,"Model":"sft-bge-small<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":147,"Rank":168,"Model":"stella_en_1.5B_v5<\/a>","Model Size (Million Parameters)":1543,"Memory Usage (GB, fp32)":5.75,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.23}
+{"index":148,"Rank":169,"Model":"stella_en_400M_v5<\/a>","Model Size (Million Parameters)":435,"Memory Usage (GB, fp32)":1.62,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.74}
+{"index":149,"Rank":170,"Model":"e5-base-4k<\/a>","Model Size (Million Parameters)":112,"Memory Usage (GB, fp32)":0.42,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.52}
+{"index":151,"Rank":171,"Model":"flaubert_base_cased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":65.37,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":152,"Rank":172,"Model":"flaubert_base_uncased<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":55.15,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":153,"Rank":173,"Model":"flaubert_large_cased<\/a>","Model Size (Million Parameters)":372,"Memory Usage (GB, fp32)":1.39,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":48.52,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":154,"Rank":174,"Model":"bert-base-multilingual-cased<\/a>","Model Size (Million Parameters)":179,"Memory Usage (GB, fp32)":0.67,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":39.05,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":155,"Rank":175,"Model":"bert-base-multilingual-uncased<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":56.47,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":156,"Rank":176,"Model":"instructor-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.43}
+{"index":157,"Rank":177,"Model":"instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.88}
+{"index":158,"Rank":178,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.56}
+{"index":159,"Rank":179,"Model":"stella-base-en-v2<\/a>","Model Size (Million Parameters)":55,"Memory Usage (GB, fp32)":0.2,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.45}
+{"index":160,"Rank":180,"Model":"e5-base<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.18}
+{"index":161,"Rank":181,"Model":"e5-base-v2<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.52}
+{"index":162,"Rank":182,"Model":"e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.21}
+{"index":163,"Rank":183,"Model":"e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.74}
+{"index":165,"Rank":184,"Model":"e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.36}
+{"index":166,"Rank":185,"Model":"e5-small-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.95}
+{"index":169,"Rank":186,"Model":"multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.38}
+{"index":171,"Rank":187,"Model":"herbert-base-retrieval-v2<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":39.73,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":172,"Rank":188,"Model":"silver-retriever-base-v1<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":38.69,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":174,"Rank":189,"Model":"udever-bloom-3b<\/a>","Model Size (Million Parameters)":3003,"Memory Usage (GB, fp32)":11.19,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.71}
+{"index":176,"Rank":190,"Model":"udever-bloom-7b1<\/a>","Model Size (Million Parameters)":7069,"Memory Usage (GB, fp32)":26.33,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.02}
+{"index":177,"Rank":191,"Model":"sf_model_e5<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.33}
+{"index":178,"Rank":192,"Model":"jina-embedding-b-en-v1<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":82.57}
+{"index":179,"Rank":193,"Model":"jina-embedding-l-en-v1<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.6}
+{"index":180,"Rank":194,"Model":"jina-embedding-s-en-v1<\/a>","Model Size (Million Parameters)":35,"Memory Usage (GB, fp32)":0.13,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.2}
+{"index":181,"Rank":195,"Model":"jina-embeddings-v2-base-de<\/a>","Model Size (Million Parameters)":161,"Memory Usage (GB, fp32)":0.6,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":86.72,"STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":59.07,"STS22 (de-en)":55.97,"STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.14}
+{"index":182,"Rank":196,"Model":"jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":137,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.84}
+{"index":183,"Rank":197,"Model":"jina-embeddings-v2-base-es<\/a>","Model Size (Million Parameters)":161,"Memory Usage (GB, fp32)":0.6,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":86.49,"STS17 (es-es)":88.25,"STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":67.97,"STS22 (es-en)":78.8,"STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.84}
+{"index":184,"Rank":198,"Model":"jina-embeddings-v2-small-en<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.04}
+{"index":185,"Rank":199,"Model":"echo-mistral-7b-instruct-lasttoken<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.67}
+{"index":186,"Rank":200,"Model":"privacy_embedding_rag_10k_base_12_final<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.42}
+{"index":187,"Rank":201,"Model":"privacy_embedding_rag_10k_base_15_final<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.42}
+{"index":188,"Rank":202,"Model":"privacy_embedding_rag_10k_base_checkpoint_2<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.42}
+{"index":189,"Rank":203,"Model":"privacy_embedding_rag_10k_base_final<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.42}
+{"index":190,"Rank":204,"Model":"MegatronBert-1B3-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.69}
+{"index":191,"Rank":205,"Model":"XLM-0B6-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.5}
+{"index":192,"Rank":206,"Model":"XLM-3B5-embedding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.46}
+{"index":193,"Rank":207,"Model":"ember-v1<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.77}
+{"index":194,"Rank":208,"Model":"bge-m3-custom-fr<\/a>","Model Size (Million Parameters)":568,"Memory Usage (GB, fp32)":2.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.7,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":195,"Rank":209,"Model":"sentence_croissant_alpha_v0.1<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":75.66,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":196,"Rank":210,"Model":"sentence_croissant_alpha_v0.2<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":78.68,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":197,"Rank":211,"Model":"sentence_croissant_alpha_v0.3<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":80.8,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":198,"Rank":212,"Model":"sentence_croissant_alpha_v0.4<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":82.35,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":199,"Rank":213,"Model":"mxbai-embed-2d-large-v1<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.2}
+{"index":200,"Rank":214,"Model":"mxbai-embed-large-v1<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":89.1}
+{"index":201,"Rank":215,"Model":"bge-base-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.79}
+{"index":202,"Rank":216,"Model":"bge-base-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.93}
+{"index":203,"Rank":217,"Model":"bge-large-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.89}
+{"index":204,"Rank":218,"Model":"bge-large-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.88}
+{"index":205,"Rank":219,"Model":"bge-small-en-v1.5-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.31}
+{"index":206,"Rank":220,"Model":"bge-small-en-v1.5-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":81.77}
+{"index":207,"Rank":221,"Model":"mmarco-bert-base-italian-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":69.44,"STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":208,"Rank":222,"Model":"mmarco-sentence-flare-it<\/a>","Model Size (Million Parameters)":17,"Memory Usage (GB, fp32)":0.06,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":37.93,"STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":209,"Rank":223,"Model":"stsbm-sentence-flare-it<\/a>","Model Size (Million Parameters)":17,"Memory Usage (GB, fp32)":0.06,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":65.71,"STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":210,"Rank":224,"Model":"nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":137,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.54}
+{"index":211,"Rank":225,"Model":"nomic-embed-text-v1-ablated<\/a>","Model Size (Million Parameters)":137,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.42}
+{"index":212,"Rank":226,"Model":"nomic-embed-text-v1-unsupervised<\/a>","Model Size (Million Parameters)":137,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.96}
+{"index":213,"Rank":227,"Model":"nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.28}
+{"index":214,"Rank":228,"Model":"nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.8}
+{"index":215,"Rank":229,"Model":"nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.14}
+{"index":216,"Rank":230,"Model":"nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.46}
+{"index":217,"Rank":231,"Model":"nomic-embed-text-v1.5<\/a>","Model Size (Million Parameters)":137,"Memory Usage (GB, fp32)":0.51,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.47}
+{"index":218,"Rank":232,"Model":"contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":78.81}
+{"index":219,"Rank":233,"Model":"NV-Embed-v1<\/a>","Model Size (Million Parameters)":7851,"Memory Usage (GB, fp32)":29.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.14}
+{"index":220,"Rank":234,"Model":"jina-embeddings-v2-base-es-Q5_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":86.49,"STS17 (es-es)":88.25,"STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":67.97,"STS22 (es-en)":78.8,"STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":87.84}
+{"index":221,"Rank":235,"Model":"sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.25}
+{"index":222,"Rank":236,"Model":"unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":76.52}
+{"index":223,"Rank":237,"Model":"TDTE<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.15}
+{"index":224,"Rank":238,"Model":"mmlw-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.36,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":225,"Rank":239,"Model":"mmlw-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.4,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":226,"Rank":240,"Model":"mmlw-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.63,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":227,"Rank":241,"Model":"mmlw-roberta-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.36,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":228,"Rank":242,"Model":"mmlw-roberta-large<\/a>","Model Size (Million Parameters)":435,"Memory Usage (GB, fp32)":1.62,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":39.32,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":229,"Rank":243,"Model":"st-polish-paraphrase-from-distilroberta<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":40.36,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":230,"Rank":244,"Model":"st-polish-paraphrase-from-mpnet<\/a>","Model Size (Million Parameters)":125,"Memory Usage (GB, fp32)":0.47,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":37.34,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":234,"Rank":245,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.42}
+{"index":236,"Rank":246,"Model":"glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":61.54}
+{"index":239,"Rank":247,"Model":"gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.58}
+{"index":242,"Rank":248,"Model":"gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":77.73}
+{"index":243,"Rank":249,"Model":"msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":76.97}
+{"index":244,"Rank":250,"Model":"multi-qa-MiniLM-L6-cos-v1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":74.62,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":245,"Rank":251,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","STS17 (ar-ar)":79.16,"STS17 (en-ar)":81.22,"STS17 (en-de)":84.22,"STS17 (en-tr)":76.74,"STS17 (es-en)":84.44,"STS17 (es-es)":85.56,"STS17 (fr-en)":76.59,"STS17 (it-en)":82.35,"STS17 (ko-ko)":77.03,"STS17 (nl-en)":81.71,"STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":70.55,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":33.73,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.42}
+{"index":246,"Rank":252,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","STS17 (ar-ar)":79.1,"STS17 (en-ar)":80.85,"STS17 (en-de)":83.28,"STS17 (en-tr)":74.9,"STS17 (es-en)":86.11,"STS17 (es-es)":85.14,"STS17 (fr-en)":81.17,"STS17 (it-en)":84.24,"STS17 (ko-ko)":83.41,"STS17 (nl-en)":82.51,"STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":74.3,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":33.64,"STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.82}
+{"index":250,"Rank":253,"Model":"sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":76.8,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.01}
+{"index":251,"Rank":254,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":252,"Rank":255,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":27.95,"STS22 (de)":8.16,"STS22 (de-en)":21.55,"STS22 (de-fr)":17.5,"STS22 (de-pl)":25.53,"STS22 (es)":45.31,"STS22 (es-en)":42.77,"STS22 (es-it)":32.83,"STS22 (fr)":42.0,"STS22 (fr-pl)":39.44,"STS22 (it)":39.69,"STS22 (pl)":9.71,"STS22 (pl-en)":42.08,"STS22 (ru)":60.06,"STS22 (tr)":15.46,"STS22 (zh-en)":31.25,"STSBenchmark":""}
+{"index":255,"Rank":256,"Model":"text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":74.1,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":256,"Rank":257,"Model":"gte-base<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.73}
+{"index":257,"Rank":258,"Model":"gte-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":86.07}
+{"index":258,"Rank":259,"Model":"gte-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.57}
+{"index":259,"Rank":260,"Model":"ALL_862873<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":44.39}
+{"index":260,"Rank":261,"Model":"tst<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":68.04}
+{"index":261,"Rank":262,"Model":"tst2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":75.32}
+{"index":262,"Rank":263,"Model":"universal-sentence-encoder-4<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":77.08}
+{"index":263,"Rank":264,"Model":"universal-sentence-encoder-large-5<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.93}
+{"index":264,"Rank":265,"Model":"universal-sentence-encoder-multilingual-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":77.91,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":80.28}
+{"index":265,"Rank":266,"Model":"universal-sentence-encoder-multilingual-large-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":71.11,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":81.24}
+{"index":266,"Rank":267,"Model":"b1ade-embed<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":88.77}
+{"index":267,"Rank":268,"Model":"b1ade-embed-kd<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":82.69}
+{"index":268,"Rank":269,"Model":"b1ade-embed-kd_3<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":0.8}
+{"index":269,"Rank":270,"Model":"jina-embeddings-v2-base-en-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.84}
+{"index":270,"Rank":271,"Model":"xlm-roberta-base<\/a>","Model Size (Million Parameters)":279,"Memory Usage (GB, fp32)":1.04,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":56.72,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":271,"Rank":272,"Model":"xlm-roberta-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":55.49,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":272,"Rank":273,"Model":"bilingual-embedding-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":80.31,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":273,"Rank":274,"Model":"new_model<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.92}
+{"index":274,"Rank":275,"Model":"snowflake-arctic-embed-m-v1.5-Q8_0-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":""}
+{"index":275,"Rank":276,"Model":"gte-large-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.4}
+{"index":276,"Rank":277,"Model":"gte-large-sparse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.45}
+{"index":277,"Rank":278,"Model":"gte-small-quant<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":85.13}
+{"index":278,"Rank":279,"Model":"bi-cse<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":70.01,"STSBenchmark":85.99}
+{"index":279,"Rank":280,"Model":"text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.78}
+{"index":280,"Rank":281,"Model":"text-similarity-babbage-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.32}
+{"index":281,"Rank":282,"Model":"text-similarity-curie-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.02}
+{"index":282,"Rank":283,"Model":"text-similarity-davinci-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.08}
+{"index":283,"Rank":284,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":81.09,"STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":64.5,"STS22 (zh-en)":"","STSBenchmark":83.17}
+{"index":284,"Rank":285,"Model":"text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":82.34}
+{"index":285,"Rank":286,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":83.56}
+{"index":286,"Rank":287,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":84.24}
+{"index":287,"Rank":288,"Model":"elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","STS17 (ar-ar)":"","STS17 (en-ar)":"","STS17 (en-de)":"","STS17 (en-tr)":"","STS17 (es-en)":"","STS17 (es-es)":"","STS17 (fr-en)":"","STS17 (it-en)":"","STS17 (ko-ko)":"","STS17 (nl-en)":"","STS22 (ar)":"","STS22 (de)":"","STS22 (de-en)":"","STS22 (de-fr)":"","STS22 (de-pl)":"","STS22 (es)":"","STS22 (es-en)":"","STS22 (es-it)":"","STS22 (fr)":"","STS22 (fr-pl)":"","STS22 (it)":"","STS22 (pl)":"","STS22 (pl-en)":"","STS22 (ru)":"","STS22 (tr)":"","STS22 (zh-en)":"","STSBenchmark":79.54}
diff --git a/all_data_tasks/43/default.jsonl b/all_data_tasks/43/default.jsonl
index dd8ff771c0e30041e69501eef76afd269ec19f9d..a0ab7cfc551a9aa72877ad841f5d74a5de03d7e3 100644
--- a/all_data_tasks/43/default.jsonl
+++ b/all_data_tasks/43/default.jsonl
@@ -1,14 +1,36 @@
-{"index":4,"Rank":1,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":22.38,"BrightRetrieval (aops)":15.1,"BrightRetrieval (biology)":32.09,"BrightRetrieval (earth_science)":40.66,"BrightRetrieval (economics)":16.18,"BrightRetrieval (leetcode)":31.07,"BrightRetrieval (pony)":1.25,"BrightRetrieval (psychology)":26.58,"BrightRetrieval (robotics)":12.82,"BrightRetrieval (stackoverflow)":13.95,"BrightRetrieval (sustainable_living)":20.82,"BrightRetrieval (theoremqa_questions)":29.9,"BrightRetrieval (theoremqa_theorems)":28.15}
-{"index":3,"Rank":2,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":21.75,"BrightRetrieval (aops)":14.36,"BrightRetrieval (biology)":30.92,"BrightRetrieval (earth_science)":36.22,"BrightRetrieval (economics)":17.72,"BrightRetrieval (leetcode)":25.46,"BrightRetrieval (pony)":9.79,"BrightRetrieval (psychology)":24.61,"BrightRetrieval (robotics)":13.47,"BrightRetrieval (stackoverflow)":19.85,"BrightRetrieval (sustainable_living)":14.93,"BrightRetrieval (theoremqa_questions)":26.97,"BrightRetrieval (theoremqa_theorems)":26.66}
-{"index":7,"Rank":3,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":20.43,"BrightRetrieval (aops)":8.91,"BrightRetrieval (biology)":25.04,"BrightRetrieval (earth_science)":32.77,"BrightRetrieval (economics)":19.0,"BrightRetrieval (leetcode)":29.85,"BrightRetrieval (pony)":21.98,"BrightRetrieval (psychology)":19.92,"BrightRetrieval (robotics)":17.31,"BrightRetrieval (stackoverflow)":11.62,"BrightRetrieval (sustainable_living)":18.04,"BrightRetrieval (theoremqa_questions)":23.34,"BrightRetrieval (theoremqa_theorems)":17.41}
-{"index":0,"Rank":4,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":19.73,"BrightRetrieval (aops)":9.33,"BrightRetrieval (biology)":22.98,"BrightRetrieval (earth_science)":34.38,"BrightRetrieval (economics)":19.5,"BrightRetrieval (leetcode)":29.64,"BrightRetrieval (pony)":3.59,"BrightRetrieval (psychology)":27.86,"BrightRetrieval (robotics)":15.98,"BrightRetrieval (stackoverflow)":17.93,"BrightRetrieval (sustainable_living)":17.25,"BrightRetrieval (theoremqa_questions)":21.51,"BrightRetrieval (theoremqa_theorems)":16.77}
-{"index":10,"Rank":5,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":18.64,"BrightRetrieval (aops)":8.26,"BrightRetrieval (biology)":21.91,"BrightRetrieval (earth_science)":34.35,"BrightRetrieval (economics)":22.81,"BrightRetrieval (leetcode)":27.5,"BrightRetrieval (pony)":5.02,"BrightRetrieval (psychology)":27.43,"BrightRetrieval (robotics)":17.39,"BrightRetrieval (stackoverflow)":19.06,"BrightRetrieval (sustainable_living)":18.82,"BrightRetrieval (theoremqa_questions)":14.59,"BrightRetrieval (theoremqa_theorems)":6.5}
-{"index":8,"Rank":6,"Model":"SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":18.0,"BrightRetrieval (aops)":7.43,"BrightRetrieval (biology)":19.49,"BrightRetrieval (earth_science)":26.63,"BrightRetrieval (economics)":17.84,"BrightRetrieval (leetcode)":27.35,"BrightRetrieval (pony)":1.97,"BrightRetrieval (psychology)":18.97,"BrightRetrieval (robotics)":16.7,"BrightRetrieval (stackoverflow)":12.72,"BrightRetrieval (sustainable_living)":19.79,"BrightRetrieval (theoremqa_questions)":23.05,"BrightRetrieval (theoremqa_theorems)":24.05}
-{"index":1,"Rank":7,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":17.57,"BrightRetrieval (aops)":7.45,"BrightRetrieval (biology)":23.55,"BrightRetrieval (earth_science)":25.09,"BrightRetrieval (economics)":19.85,"BrightRetrieval (leetcode)":30.6,"BrightRetrieval (pony)":1.48,"BrightRetrieval (psychology)":24.79,"BrightRetrieval (robotics)":11.21,"BrightRetrieval (stackoverflow)":15.03,"BrightRetrieval (sustainable_living)":15.58,"BrightRetrieval (theoremqa_questions)":26.06,"BrightRetrieval (theoremqa_theorems)":10.13}
-{"index":13,"Rank":8,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":17.43,"BrightRetrieval (aops)":8.45,"BrightRetrieval (biology)":23.67,"BrightRetrieval (earth_science)":26.27,"BrightRetrieval (economics)":19.98,"BrightRetrieval (leetcode)":23.65,"BrightRetrieval (pony)":2.45,"BrightRetrieval (psychology)":27.52,"BrightRetrieval (robotics)":12.93,"BrightRetrieval (stackoverflow)":12.49,"BrightRetrieval (sustainable_living)":20.32,"BrightRetrieval (theoremqa_questions)":22.22,"BrightRetrieval (theoremqa_theorems)":9.25}
-{"index":11,"Rank":9,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":17.43,"BrightRetrieval (aops)":7.1,"BrightRetrieval (biology)":18.84,"BrightRetrieval (earth_science)":25.96,"BrightRetrieval (economics)":15.49,"BrightRetrieval (leetcode)":28.72,"BrightRetrieval (pony)":4.81,"BrightRetrieval (psychology)":15.79,"BrightRetrieval (robotics)":16.37,"BrightRetrieval (stackoverflow)":9.83,"BrightRetrieval (sustainable_living)":18.51,"BrightRetrieval (theoremqa_questions)":23.94,"BrightRetrieval (theoremqa_theorems)":23.78}
-{"index":6,"Rank":10,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":16.24,"BrightRetrieval (aops)":6.46,"BrightRetrieval (biology)":18.98,"BrightRetrieval (earth_science)":27.45,"BrightRetrieval (economics)":20.18,"BrightRetrieval (leetcode)":26.78,"BrightRetrieval (pony)":1.77,"BrightRetrieval (psychology)":21.82,"BrightRetrieval (robotics)":16.21,"BrightRetrieval (stackoverflow)":16.47,"BrightRetrieval (sustainable_living)":17.69,"BrightRetrieval (theoremqa_questions)":15.07,"BrightRetrieval (theoremqa_theorems)":6.04}
-{"index":12,"Rank":11,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":14.8,"BrightRetrieval (aops)":5.32,"BrightRetrieval (biology)":15.52,"BrightRetrieval (earth_science)":20.11,"BrightRetrieval (economics)":16.64,"BrightRetrieval (leetcode)":26.4,"BrightRetrieval (pony)":6.95,"BrightRetrieval (psychology)":22.63,"BrightRetrieval (robotics)":8.36,"BrightRetrieval (stackoverflow)":9.48,"BrightRetrieval (sustainable_living)":15.34,"BrightRetrieval (theoremqa_questions)":18.49,"BrightRetrieval (theoremqa_theorems)":12.38}
-{"index":2,"Rank":12,"Model":"bm25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":14.29,"BrightRetrieval (aops)":6.2,"BrightRetrieval (biology)":19.19,"BrightRetrieval (earth_science)":27.06,"BrightRetrieval (economics)":14.87,"BrightRetrieval (leetcode)":24.37,"BrightRetrieval (pony)":7.93,"BrightRetrieval (psychology)":12.51,"BrightRetrieval (robotics)":13.53,"BrightRetrieval (stackoverflow)":16.55,"BrightRetrieval (sustainable_living)":15.22,"BrightRetrieval (theoremqa_questions)":9.78,"BrightRetrieval (theoremqa_theorems)":4.25}
-{"index":9,"Rank":13,"Model":"instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":14.12,"BrightRetrieval (aops)":7.94,"BrightRetrieval (biology)":15.61,"BrightRetrieval (earth_science)":21.52,"BrightRetrieval (economics)":15.99,"BrightRetrieval (leetcode)":20.0,"BrightRetrieval (pony)":1.32,"BrightRetrieval (psychology)":21.94,"BrightRetrieval (robotics)":11.45,"BrightRetrieval (stackoverflow)":11.21,"BrightRetrieval (sustainable_living)":13.16,"BrightRetrieval (theoremqa_questions)":20.07,"BrightRetrieval (theoremqa_theorems)":9.29}
-{"index":5,"Rank":14,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":13.47,"BrightRetrieval (aops)":6.08,"BrightRetrieval (biology)":11.96,"BrightRetrieval (earth_science)":24.15,"BrightRetrieval (economics)":16.59,"BrightRetrieval (leetcode)":26.68,"BrightRetrieval (pony)":5.64,"BrightRetrieval (psychology)":17.44,"BrightRetrieval (robotics)":12.21,"BrightRetrieval (stackoverflow)":9.51,"BrightRetrieval (sustainable_living)":13.27,"BrightRetrieval (theoremqa_questions)":12.56,"BrightRetrieval (theoremqa_theorems)":5.51}
+{"index":13,"Rank":1,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":35.2,"ARCChallenge":26.68,"AlphaNLI":34.0,"HellaSwag":39.45,"PIQA":44.35,"Quail":11.69,"RARbCode":84.0,"RARbMath":82.35,"SIQA":7.23,"SpartQA":9.29,"TempReasonL1":7.15,"TempReasonL2Fact":58.38,"TempReasonL2Pure":11.22,"TempReasonL3Fact":44.29,"TempReasonL3Pure":14.15,"WinoGrande":53.74}
+{"index":32,"Rank":2,"Model":"text-embedding-3-large-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":31.13,"ARCChallenge":21.22,"AlphaNLI":34.23,"HellaSwag":31.4,"PIQA":37.52,"Quail":13.6,"RARbCode":89.41,"RARbMath":87.73,"SIQA":4.99,"SpartQA":7.45,"TempReasonL1":2.07,"TempReasonL2Fact":39.77,"TempReasonL2Pure":11.04,"TempReasonL3Fact":37.04,"TempReasonL3Pure":15.51,"WinoGrande":33.92}
+{"index":12,"Rank":3,"Model":"GritLM-7B-noinstruct<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":30.57,"ARCChallenge":16.57,"AlphaNLI":29.56,"HellaSwag":36.03,"PIQA":35.8,"Quail":8.68,"RARbCode":83.14,"RARbMath":83.01,"SIQA":5.73,"SpartQA":1.56,"TempReasonL1":2.57,"TempReasonL2Fact":48.25,"TempReasonL2Pure":8.98,"TempReasonL3Fact":34.11,"TempReasonL3Pure":12.44,"WinoGrande":52.12}
+{"index":33,"Rank":4,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":29.95,"ARCChallenge":23.98,"AlphaNLI":37.27,"HellaSwag":34.12,"PIQA":41.96,"Quail":10.15,"RARbCode":89.64,"RARbMath":90.08,"SIQA":3.44,"SpartQA":7.51,"TempReasonL1":2.13,"TempReasonL2Fact":28.65,"TempReasonL2Pure":10.34,"TempReasonL3Fact":25.52,"TempReasonL3Pure":15.28,"WinoGrande":29.11}
+{"index":16,"Rank":5,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":28.41,"ARCChallenge":17.81,"AlphaNLI":26.12,"HellaSwag":34.85,"PIQA":39.37,"Quail":7.01,"RARbCode":78.46,"RARbMath":72.16,"SIQA":5.42,"SpartQA":9.92,"TempReasonL1":3.31,"TempReasonL2Fact":36.9,"TempReasonL2Pure":9.18,"TempReasonL3Fact":30.18,"TempReasonL3Pure":14.31,"WinoGrande":41.21}
+{"index":17,"Rank":6,"Model":"e5-mistral-7b-instruct-noinstruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":28.04,"ARCChallenge":20.48,"AlphaNLI":18.88,"HellaSwag":32.25,"PIQA":32.8,"Quail":6.25,"RARbCode":79.84,"RARbMath":76.19,"SIQA":5.08,"SpartQA":10.87,"TempReasonL1":3.04,"TempReasonL2Fact":35.63,"TempReasonL2Pure":9.32,"TempReasonL3Fact":30.41,"TempReasonL3Pure":14.39,"WinoGrande":45.18}
+{"index":10,"Rank":7,"Model":"Cohere-embed-english-v3.0-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":25.41,"ARCChallenge":10.1,"AlphaNLI":18.75,"HellaSwag":29.02,"PIQA":27.89,"Quail":7.77,"RARbCode":56.56,"RARbMath":72.05,"SIQA":5.03,"SpartQA":3.33,"TempReasonL1":1.43,"TempReasonL2Fact":40.46,"TempReasonL2Pure":2.39,"TempReasonL3Fact":33.87,"TempReasonL3Pure":7.52,"WinoGrande":65.02}
+{"index":19,"Rank":8,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":24.69,"ARCChallenge":10.83,"AlphaNLI":13.59,"HellaSwag":27.35,"PIQA":28.82,"Quail":4.85,"RARbCode":58.92,"RARbMath":67.32,"SIQA":5.36,"SpartQA":5.64,"TempReasonL1":1.14,"TempReasonL2Fact":42.97,"TempReasonL2Pure":2.05,"TempReasonL3Fact":38.22,"TempReasonL3Pure":8.31,"WinoGrande":54.99}
+{"index":35,"Rank":9,"Model":"text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":24.2,"ARCChallenge":14.63,"AlphaNLI":30.61,"HellaSwag":30.94,"PIQA":33.69,"Quail":6.11,"RARbCode":72.03,"RARbMath":71.07,"SIQA":3.03,"SpartQA":6.63,"TempReasonL1":2.35,"TempReasonL2Fact":25.68,"TempReasonL2Pure":2.76,"TempReasonL3Fact":22.09,"TempReasonL3Pure":9.79,"WinoGrande":31.53}
+{"index":11,"Rank":10,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":23.65,"ARCChallenge":9.89,"AlphaNLI":15.1,"HellaSwag":26.35,"PIQA":28.49,"Quail":4.1,"RARbCode":57.19,"RARbMath":72.26,"SIQA":4.26,"SpartQA":3.75,"TempReasonL1":1.5,"TempReasonL2Fact":35.91,"TempReasonL2Pure":1.89,"TempReasonL3Fact":27.51,"TempReasonL3Pure":8.53,"WinoGrande":58.01}
+{"index":18,"Rank":11,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":23.26,"ARCChallenge":9.61,"AlphaNLI":16.44,"HellaSwag":24.79,"PIQA":25.09,"Quail":3.52,"RARbCode":52.16,"RARbMath":65.35,"SIQA":3.72,"SpartQA":7.91,"TempReasonL1":0.72,"TempReasonL2Fact":38.76,"TempReasonL2Pure":1.63,"TempReasonL3Fact":35.85,"TempReasonL3Pure":7.11,"WinoGrande":56.18}
+{"index":31,"Rank":12,"Model":"text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":22.57,"ARCChallenge":13.3,"AlphaNLI":25.65,"HellaSwag":29.29,"PIQA":31.02,"Quail":5.83,"RARbCode":83.39,"RARbMath":73.21,"SIQA":3.14,"SpartQA":4.23,"TempReasonL1":1.68,"TempReasonL2Fact":19.93,"TempReasonL2Pure":2.6,"TempReasonL3Fact":18.02,"TempReasonL3Pure":7.58,"WinoGrande":19.65}
+{"index":34,"Rank":13,"Model":"text-embedding-3-small-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":22.09,"ARCChallenge":13.76,"AlphaNLI":21.14,"HellaSwag":27.2,"PIQA":29.59,"Quail":6.64,"RARbCode":72.14,"RARbMath":64.31,"SIQA":2.98,"SpartQA":3.58,"TempReasonL1":2.29,"TempReasonL2Fact":26.34,"TempReasonL2Pure":3.17,"TempReasonL3Fact":22.72,"TempReasonL3Pure":9.98,"WinoGrande":25.49}
+{"index":7,"Rank":14,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":21.48,"ARCChallenge":9.02,"AlphaNLI":24.73,"HellaSwag":25.67,"PIQA":22.93,"Quail":7.51,"RARbCode":38.8,"RARbMath":69.19,"SIQA":4.89,"SpartQA":7.49,"TempReasonL1":0.99,"TempReasonL2Fact":33.23,"TempReasonL2Pure":0.68,"TempReasonL3Fact":30.05,"TempReasonL3Pure":5.28,"WinoGrande":41.72}
+{"index":6,"Rank":15,"Model":"bge-m3-instruct<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":20.83,"ARCChallenge":9.03,"AlphaNLI":24.69,"HellaSwag":25.55,"PIQA":19.03,"Quail":7.08,"RARbCode":39.58,"RARbMath":64.51,"SIQA":4.77,"SpartQA":7.0,"TempReasonL1":0.8,"TempReasonL2Fact":34.99,"TempReasonL2Pure":0.62,"TempReasonL3Fact":32.47,"TempReasonL3Pure":7.01,"WinoGrande":35.33}
+{"index":20,"Rank":16,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":20.04,"ARCChallenge":7.14,"AlphaNLI":13.0,"HellaSwag":23.73,"PIQA":21.08,"Quail":2.38,"RARbCode":46.96,"RARbMath":63.91,"SIQA":2.57,"SpartQA":5.43,"TempReasonL1":0.8,"TempReasonL2Fact":36.76,"TempReasonL2Pure":0.62,"TempReasonL3Fact":32.42,"TempReasonL3Pure":6.36,"WinoGrande":37.46}
+{"index":24,"Rank":17,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":19.61,"ARCChallenge":9.48,"AlphaNLI":28.19,"HellaSwag":24.21,"PIQA":25.28,"Quail":3.92,"RARbCode":44.27,"RARbMath":68.19,"SIQA":1.56,"SpartQA":1.65,"TempReasonL1":1.53,"TempReasonL2Fact":17.65,"TempReasonL2Pure":0.46,"TempReasonL3Fact":14.16,"TempReasonL3Pure":6.33,"WinoGrande":47.33}
+{"index":30,"Rank":18,"Model":"text-embedding-ada-002-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":19.56,"ARCChallenge":11.85,"AlphaNLI":10.62,"HellaSwag":24.8,"PIQA":23.87,"Quail":5.79,"RARbCode":82.36,"RARbMath":67.26,"SIQA":2.64,"SpartQA":4.75,"TempReasonL1":1.44,"TempReasonL2Fact":19.38,"TempReasonL2Pure":2.43,"TempReasonL3Fact":17.58,"TempReasonL3Pure":7.31,"WinoGrande":11.36}
+{"index":1,"Rank":19,"Model":"dragon-plus<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":19.1,"ARCChallenge":8.91,"AlphaNLI":32.1,"HellaSwag":27.69,"PIQA":28.01,"Quail":4.09,"RARbCode":17.58,"RARbMath":45.09,"SIQA":2.0,"SpartQA":10.34,"TempReasonL1":1.82,"TempReasonL2Fact":17.45,"TempReasonL2Pure":0.55,"TempReasonL3Fact":15.71,"TempReasonL3Pure":7.97,"WinoGrande":67.18}
+{"index":26,"Rank":20,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":18.03,"ARCChallenge":11.8,"AlphaNLI":22.41,"HellaSwag":26.27,"PIQA":29.03,"Quail":3.41,"RARbCode":53.21,"RARbMath":71.85,"SIQA":2.38,"SpartQA":0.22,"TempReasonL1":1.77,"TempReasonL2Fact":11.2,"TempReasonL2Pure":1.15,"TempReasonL3Fact":9.42,"TempReasonL3Pure":5.59,"WinoGrande":20.8}
+{"index":5,"Rank":21,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":17.7,"ARCChallenge":9.99,"AlphaNLI":13.13,"HellaSwag":28.5,"PIQA":27.99,"Quail":1.83,"RARbCode":48.12,"RARbMath":57.36,"SIQA":1.04,"SpartQA":2.99,"TempReasonL1":1.46,"TempReasonL2Fact":24.25,"TempReasonL2Pure":2.35,"TempReasonL3Fact":20.64,"TempReasonL3Pure":6.67,"WinoGrande":19.18}
+{"index":22,"Rank":22,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":17.35,"ARCChallenge":10.23,"AlphaNLI":25.35,"HellaSwag":24.08,"PIQA":26.44,"Quail":3.08,"RARbCode":42.44,"RARbMath":66.36,"SIQA":2.09,"SpartQA":2.67,"TempReasonL1":1.66,"TempReasonL2Fact":10.31,"TempReasonL2Pure":0.63,"TempReasonL3Fact":11.11,"TempReasonL3Pure":6.63,"WinoGrande":27.2}
+{"index":0,"Rank":23,"Model":"dragon-plus-instruct<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":16.73,"ARCChallenge":8.24,"AlphaNLI":25.18,"HellaSwag":24.06,"PIQA":26.35,"Quail":4.2,"RARbCode":12.84,"RARbMath":36.15,"SIQA":1.75,"SpartQA":10.82,"TempReasonL1":1.54,"TempReasonL2Fact":16.11,"TempReasonL2Pure":0.57,"TempReasonL3Fact":14.81,"TempReasonL3Pure":7.46,"WinoGrande":60.84}
+{"index":15,"Rank":24,"Model":"contriever<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":16.09,"ARCChallenge":8.62,"AlphaNLI":31.77,"HellaSwag":17.73,"PIQA":24.64,"Quail":4.97,"RARbCode":9.28,"RARbMath":30.76,"SIQA":1.27,"SpartQA":10.94,"TempReasonL1":1.93,"TempReasonL2Fact":22.68,"TempReasonL2Pure":1.12,"TempReasonL3Fact":20.62,"TempReasonL3Pure":7.8,"WinoGrande":47.15}
+{"index":23,"Rank":25,"Model":"all-MiniLM-L6-v2-instruct<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":15.95,"ARCChallenge":9.4,"AlphaNLI":15.09,"HellaSwag":20.51,"PIQA":24.68,"Quail":3.46,"RARbCode":42.47,"RARbMath":62.39,"SIQA":1.53,"SpartQA":0.57,"TempReasonL1":1.05,"TempReasonL2Fact":16.57,"TempReasonL2Pure":0.49,"TempReasonL3Fact":14.01,"TempReasonL3Pure":6.27,"WinoGrande":20.73}
+{"index":3,"Rank":26,"Model":"bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":14.93,"ARCChallenge":9.66,"AlphaNLI":10.99,"HellaSwag":26.64,"PIQA":25.69,"Quail":1.42,"RARbCode":46.47,"RARbMath":46.86,"SIQA":0.94,"SpartQA":3.37,"TempReasonL1":1.07,"TempReasonL2Fact":17.23,"TempReasonL2Pure":1.29,"TempReasonL3Fact":13.36,"TempReasonL3Pure":5.2,"WinoGrande":13.76}
+{"index":4,"Rank":27,"Model":"bge-large-en-v1.5-instruct<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":14.55,"ARCChallenge":8.86,"AlphaNLI":0.86,"HellaSwag":26.24,"PIQA":23.26,"Quail":2.72,"RARbCode":45.25,"RARbMath":49.82,"SIQA":0.59,"SpartQA":2.34,"TempReasonL1":1.17,"TempReasonL2Fact":21.19,"TempReasonL2Pure":2.1,"TempReasonL3Fact":17.59,"TempReasonL3Pure":5.99,"WinoGrande":10.31}
+{"index":9,"Rank":28,"Model":"bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":14.15,"ARCChallenge":8.95,"AlphaNLI":11.64,"HellaSwag":25.44,"PIQA":23.92,"Quail":1.75,"RARbCode":42.36,"RARbMath":44.98,"SIQA":0.77,"SpartQA":3.55,"TempReasonL1":1.41,"TempReasonL2Fact":17.56,"TempReasonL2Pure":1.05,"TempReasonL3Fact":13.88,"TempReasonL3Pure":4.76,"WinoGrande":10.28}
+{"index":25,"Rank":29,"Model":"all-mpnet-base-v2-instruct<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":13.84,"ARCChallenge":10.35,"AlphaNLI":1.96,"HellaSwag":13.01,"PIQA":27.18,"Quail":3.02,"RARbCode":48.95,"RARbMath":69.21,"SIQA":1.29,"SpartQA":1.01,"TempReasonL1":1.52,"TempReasonL2Fact":7.28,"TempReasonL2Pure":1.03,"TempReasonL3Fact":7.03,"TempReasonL3Pure":5.16,"WinoGrande":9.66}
+{"index":2,"Rank":30,"Model":"bge-base-en-v1.5-instruct<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":13.52,"ARCChallenge":8.85,"AlphaNLI":4.13,"HellaSwag":24.03,"PIQA":23.03,"Quail":1.25,"RARbCode":46.32,"RARbMath":45.62,"SIQA":0.24,"SpartQA":2.67,"TempReasonL1":0.8,"TempReasonL2Fact":16.56,"TempReasonL2Pure":1.33,"TempReasonL3Fact":12.68,"TempReasonL3Pure":5.08,"WinoGrande":10.27}
+{"index":8,"Rank":31,"Model":"bge-small-en-v1.5-instruct<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Average":12.6,"ARCChallenge":7.72,"AlphaNLI":1.26,"HellaSwag":23.41,"PIQA":20.79,"Quail":2.01,"RARbCode":41.52,"RARbMath":46.5,"SIQA":0.98,"SpartQA":2.86,"TempReasonL1":1.27,"TempReasonL2Fact":16.72,"TempReasonL2Pure":1.1,"TempReasonL3Fact":12.81,"TempReasonL3Pure":4.63,"WinoGrande":5.35}
+{"index":28,"Rank":32,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":12.24,"ARCChallenge":7.19,"AlphaNLI":21.87,"HellaSwag":17.53,"PIQA":18.65,"Quail":2.98,"RARbCode":11.02,"RARbMath":30.93,"SIQA":1.21,"SpartQA":5.69,"TempReasonL1":1.94,"TempReasonL2Fact":5.34,"TempReasonL2Pure":0.33,"TempReasonL3Fact":6.79,"TempReasonL3Pure":3.19,"WinoGrande":49.01}
+{"index":27,"Rank":33,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":11.55,"ARCChallenge":6.19,"AlphaNLI":20.89,"HellaSwag":16.98,"PIQA":15.79,"Quail":2.96,"RARbCode":8.48,"RARbMath":30.02,"SIQA":0.88,"SpartQA":4.94,"TempReasonL1":1.43,"TempReasonL2Fact":6.21,"TempReasonL2Pure":0.22,"TempReasonL3Fact":6.77,"TempReasonL3Pure":4.9,"WinoGrande":46.52}
+{"index":21,"Rank":34,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":9.31,"ARCChallenge":3.78,"AlphaNLI":13.11,"HellaSwag":5.59,"PIQA":6.53,"Quail":1.91,"RARbCode":2.31,"RARbMath":27.19,"SIQA":1.07,"SpartQA":1.56,"TempReasonL1":1.56,"TempReasonL2Fact":7.06,"TempReasonL2Pure":0.14,"TempReasonL3Fact":8.74,"TempReasonL3Pure":4.73,"WinoGrande":54.3}
+{"index":14,"Rank":35,"Model":"contriever-instruct<\/a>","Model Size (Million Parameters)":438,"Memory Usage (GB, fp32)":1.63,"Average":"","ARCChallenge":7.63,"AlphaNLI":27.09,"HellaSwag":"","PIQA":21.73,"Quail":4.92,"RARbCode":7.12,"RARbMath":21.83,"SIQA":0.88,"SpartQA":10.56,"TempReasonL1":1.8,"TempReasonL2Fact":22.03,"TempReasonL2Pure":0.94,"TempReasonL3Fact":20.82,"TempReasonL3Pure":7.15,"WinoGrande":26.3}
+{"index":29,"Rank":36,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","ARCChallenge":3.85,"AlphaNLI":14.15,"HellaSwag":"","PIQA":"","Quail":"","RARbCode":"","RARbMath":"","SIQA":"","SpartQA":"","TempReasonL1":"","TempReasonL2Fact":"","TempReasonL2Pure":"","TempReasonL3Fact":"","TempReasonL3Pure":"","WinoGrande":""}
diff --git a/all_data_tasks/44/default.jsonl b/all_data_tasks/44/default.jsonl
new file mode 100644
index 0000000000000000000000000000000000000000..61446a9b3c6fae64f0751a18e208be7bfaa05f74
--- /dev/null
+++ b/all_data_tasks/44/default.jsonl
@@ -0,0 +1,13 @@
+{"index":4,"Rank":1,"Model":"gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Average":22.38,"BrightRetrieval (aops)":15.1,"BrightRetrieval (biology)":32.09,"BrightRetrieval (earth_science)":40.66,"BrightRetrieval (economics)":16.18,"BrightRetrieval (leetcode)":31.07,"BrightRetrieval (pony)":1.25,"BrightRetrieval (psychology)":26.58,"BrightRetrieval (robotics)":12.82,"BrightRetrieval (stackoverflow)":13.95,"BrightRetrieval (sustainable_living)":20.82,"BrightRetrieval (theoremqa_questions)":29.9,"BrightRetrieval (theoremqa_theorems)":28.15}
+{"index":3,"Rank":2,"Model":"gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Average":21.75,"BrightRetrieval (aops)":14.36,"BrightRetrieval (biology)":30.92,"BrightRetrieval (earth_science)":36.22,"BrightRetrieval (economics)":17.72,"BrightRetrieval (leetcode)":25.46,"BrightRetrieval (pony)":9.79,"BrightRetrieval (psychology)":24.61,"BrightRetrieval (robotics)":13.47,"BrightRetrieval (stackoverflow)":19.85,"BrightRetrieval (sustainable_living)":14.93,"BrightRetrieval (theoremqa_questions)":26.97,"BrightRetrieval (theoremqa_theorems)":26.66}
+{"index":7,"Rank":3,"Model":"GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Average":20.43,"BrightRetrieval (aops)":8.91,"BrightRetrieval (biology)":25.04,"BrightRetrieval (earth_science)":32.77,"BrightRetrieval (economics)":19.0,"BrightRetrieval (leetcode)":29.85,"BrightRetrieval (pony)":21.98,"BrightRetrieval (psychology)":19.92,"BrightRetrieval (robotics)":17.31,"BrightRetrieval (stackoverflow)":11.62,"BrightRetrieval (sustainable_living)":18.04,"BrightRetrieval (theoremqa_questions)":23.34,"BrightRetrieval (theoremqa_theorems)":17.41}
+{"index":0,"Rank":4,"Model":"google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"Average":19.73,"BrightRetrieval (aops)":9.33,"BrightRetrieval (biology)":22.98,"BrightRetrieval (earth_science)":34.38,"BrightRetrieval (economics)":19.5,"BrightRetrieval (leetcode)":29.64,"BrightRetrieval (pony)":3.59,"BrightRetrieval (psychology)":27.86,"BrightRetrieval (robotics)":15.98,"BrightRetrieval (stackoverflow)":17.93,"BrightRetrieval (sustainable_living)":17.25,"BrightRetrieval (theoremqa_questions)":21.51,"BrightRetrieval (theoremqa_theorems)":16.77}
+{"index":10,"Rank":5,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":18.64,"BrightRetrieval (aops)":8.26,"BrightRetrieval (biology)":21.91,"BrightRetrieval (earth_science)":34.35,"BrightRetrieval (economics)":22.81,"BrightRetrieval (leetcode)":27.5,"BrightRetrieval (pony)":5.02,"BrightRetrieval (psychology)":27.43,"BrightRetrieval (robotics)":17.39,"BrightRetrieval (stackoverflow)":19.06,"BrightRetrieval (sustainable_living)":18.82,"BrightRetrieval (theoremqa_questions)":14.59,"BrightRetrieval (theoremqa_theorems)":6.5}
+{"index":8,"Rank":6,"Model":"SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":18.0,"BrightRetrieval (aops)":7.43,"BrightRetrieval (biology)":19.49,"BrightRetrieval (earth_science)":26.63,"BrightRetrieval (economics)":17.84,"BrightRetrieval (leetcode)":27.35,"BrightRetrieval (pony)":1.97,"BrightRetrieval (psychology)":18.97,"BrightRetrieval (robotics)":16.7,"BrightRetrieval (stackoverflow)":12.72,"BrightRetrieval (sustainable_living)":19.79,"BrightRetrieval (theoremqa_questions)":23.05,"BrightRetrieval (theoremqa_theorems)":24.05}
+{"index":1,"Rank":7,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":17.57,"BrightRetrieval (aops)":7.45,"BrightRetrieval (biology)":23.55,"BrightRetrieval (earth_science)":25.09,"BrightRetrieval (economics)":19.85,"BrightRetrieval (leetcode)":30.6,"BrightRetrieval (pony)":1.48,"BrightRetrieval (psychology)":24.79,"BrightRetrieval (robotics)":11.21,"BrightRetrieval (stackoverflow)":15.03,"BrightRetrieval (sustainable_living)":15.58,"BrightRetrieval (theoremqa_questions)":26.06,"BrightRetrieval (theoremqa_theorems)":10.13}
+{"index":12,"Rank":8,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":17.43,"BrightRetrieval (aops)":8.45,"BrightRetrieval (biology)":23.67,"BrightRetrieval (earth_science)":26.27,"BrightRetrieval (economics)":19.98,"BrightRetrieval (leetcode)":23.65,"BrightRetrieval (pony)":2.45,"BrightRetrieval (psychology)":27.52,"BrightRetrieval (robotics)":12.93,"BrightRetrieval (stackoverflow)":12.49,"BrightRetrieval (sustainable_living)":20.32,"BrightRetrieval (theoremqa_questions)":22.22,"BrightRetrieval (theoremqa_theorems)":9.25}
+{"index":11,"Rank":9,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":17.43,"BrightRetrieval (aops)":7.1,"BrightRetrieval (biology)":18.84,"BrightRetrieval (earth_science)":25.96,"BrightRetrieval (economics)":15.49,"BrightRetrieval (leetcode)":28.72,"BrightRetrieval (pony)":4.81,"BrightRetrieval (psychology)":15.79,"BrightRetrieval (robotics)":16.37,"BrightRetrieval (stackoverflow)":9.83,"BrightRetrieval (sustainable_living)":18.51,"BrightRetrieval (theoremqa_questions)":23.94,"BrightRetrieval (theoremqa_theorems)":23.78}
+{"index":6,"Rank":10,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":16.24,"BrightRetrieval (aops)":6.46,"BrightRetrieval (biology)":18.98,"BrightRetrieval (earth_science)":27.45,"BrightRetrieval (economics)":20.18,"BrightRetrieval (leetcode)":26.78,"BrightRetrieval (pony)":1.77,"BrightRetrieval (psychology)":21.82,"BrightRetrieval (robotics)":16.21,"BrightRetrieval (stackoverflow)":16.47,"BrightRetrieval (sustainable_living)":17.69,"BrightRetrieval (theoremqa_questions)":15.07,"BrightRetrieval (theoremqa_theorems)":6.04}
+{"index":2,"Rank":11,"Model":"bm25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":14.29,"BrightRetrieval (aops)":6.2,"BrightRetrieval (biology)":19.19,"BrightRetrieval (earth_science)":27.06,"BrightRetrieval (economics)":14.87,"BrightRetrieval (leetcode)":24.37,"BrightRetrieval (pony)":7.93,"BrightRetrieval (psychology)":12.51,"BrightRetrieval (robotics)":13.53,"BrightRetrieval (stackoverflow)":16.55,"BrightRetrieval (sustainable_living)":15.22,"BrightRetrieval (theoremqa_questions)":9.78,"BrightRetrieval (theoremqa_theorems)":4.25}
+{"index":9,"Rank":12,"Model":"instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":14.12,"BrightRetrieval (aops)":7.94,"BrightRetrieval (biology)":15.61,"BrightRetrieval (earth_science)":21.52,"BrightRetrieval (economics)":15.99,"BrightRetrieval (leetcode)":20.0,"BrightRetrieval (pony)":1.32,"BrightRetrieval (psychology)":21.94,"BrightRetrieval (robotics)":11.45,"BrightRetrieval (stackoverflow)":11.21,"BrightRetrieval (sustainable_living)":13.16,"BrightRetrieval (theoremqa_questions)":20.07,"BrightRetrieval (theoremqa_theorems)":9.29}
+{"index":5,"Rank":13,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":13.47,"BrightRetrieval (aops)":6.08,"BrightRetrieval (biology)":11.96,"BrightRetrieval (earth_science)":24.15,"BrightRetrieval (economics)":16.59,"BrightRetrieval (leetcode)":26.68,"BrightRetrieval (pony)":5.64,"BrightRetrieval (psychology)":17.44,"BrightRetrieval (robotics)":12.21,"BrightRetrieval (stackoverflow)":9.51,"BrightRetrieval (sustainable_living)":13.27,"BrightRetrieval (theoremqa_questions)":12.56,"BrightRetrieval (theoremqa_theorems)":5.51}
diff --git a/boards_data/bright/data_tasks/Retrieval/default.jsonl b/boards_data/bright/data_tasks/Retrieval/default.jsonl
index dd8ff771c0e30041e69501eef76afd269ec19f9d..61446a9b3c6fae64f0751a18e208be7bfaa05f74 100644
--- a/boards_data/bright/data_tasks/Retrieval/default.jsonl
+++ b/boards_data/bright/data_tasks/Retrieval/default.jsonl
@@ -5,10 +5,9 @@
 {"index":10,"Rank":5,"Model":"instructor-xl<\/a>","Model Size (Million Parameters)":1241,"Memory Usage (GB, fp32)":4.62,"Average":18.64,"BrightRetrieval (aops)":8.26,"BrightRetrieval (biology)":21.91,"BrightRetrieval (earth_science)":34.35,"BrightRetrieval (economics)":22.81,"BrightRetrieval (leetcode)":27.5,"BrightRetrieval (pony)":5.02,"BrightRetrieval (psychology)":27.43,"BrightRetrieval (robotics)":17.39,"BrightRetrieval (stackoverflow)":19.06,"BrightRetrieval (sustainable_living)":18.82,"BrightRetrieval (theoremqa_questions)":14.59,"BrightRetrieval (theoremqa_theorems)":6.5}
 {"index":8,"Rank":6,"Model":"SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":18.0,"BrightRetrieval (aops)":7.43,"BrightRetrieval (biology)":19.49,"BrightRetrieval (earth_science)":26.63,"BrightRetrieval (economics)":17.84,"BrightRetrieval (leetcode)":27.35,"BrightRetrieval (pony)":1.97,"BrightRetrieval (psychology)":18.97,"BrightRetrieval (robotics)":16.7,"BrightRetrieval (stackoverflow)":12.72,"BrightRetrieval (sustainable_living)":19.79,"BrightRetrieval (theoremqa_questions)":23.05,"BrightRetrieval (theoremqa_theorems)":24.05}
 {"index":1,"Rank":7,"Model":"voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":17.57,"BrightRetrieval (aops)":7.45,"BrightRetrieval (biology)":23.55,"BrightRetrieval (earth_science)":25.09,"BrightRetrieval (economics)":19.85,"BrightRetrieval (leetcode)":30.6,"BrightRetrieval (pony)":1.48,"BrightRetrieval (psychology)":24.79,"BrightRetrieval (robotics)":11.21,"BrightRetrieval (stackoverflow)":15.03,"BrightRetrieval (sustainable_living)":15.58,"BrightRetrieval (theoremqa_questions)":26.06,"BrightRetrieval (theoremqa_theorems)":10.13}
-{"index":13,"Rank":8,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":17.43,"BrightRetrieval (aops)":8.45,"BrightRetrieval (biology)":23.67,"BrightRetrieval (earth_science)":26.27,"BrightRetrieval (economics)":19.98,"BrightRetrieval (leetcode)":23.65,"BrightRetrieval (pony)":2.45,"BrightRetrieval (psychology)":27.52,"BrightRetrieval (robotics)":12.93,"BrightRetrieval (stackoverflow)":12.49,"BrightRetrieval (sustainable_living)":20.32,"BrightRetrieval (theoremqa_questions)":22.22,"BrightRetrieval (theoremqa_theorems)":9.25}
+{"index":12,"Rank":8,"Model":"text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":17.43,"BrightRetrieval (aops)":8.45,"BrightRetrieval (biology)":23.67,"BrightRetrieval (earth_science)":26.27,"BrightRetrieval (economics)":19.98,"BrightRetrieval (leetcode)":23.65,"BrightRetrieval (pony)":2.45,"BrightRetrieval (psychology)":27.52,"BrightRetrieval (robotics)":12.93,"BrightRetrieval (stackoverflow)":12.49,"BrightRetrieval (sustainable_living)":20.32,"BrightRetrieval (theoremqa_questions)":22.22,"BrightRetrieval (theoremqa_theorems)":9.25}
 {"index":11,"Rank":9,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":17.43,"BrightRetrieval (aops)":7.1,"BrightRetrieval (biology)":18.84,"BrightRetrieval (earth_science)":25.96,"BrightRetrieval (economics)":15.49,"BrightRetrieval (leetcode)":28.72,"BrightRetrieval (pony)":4.81,"BrightRetrieval (psychology)":15.79,"BrightRetrieval (robotics)":16.37,"BrightRetrieval (stackoverflow)":9.83,"BrightRetrieval (sustainable_living)":18.51,"BrightRetrieval (theoremqa_questions)":23.94,"BrightRetrieval (theoremqa_theorems)":23.78}
 {"index":6,"Rank":10,"Model":"Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":16.24,"BrightRetrieval (aops)":6.46,"BrightRetrieval (biology)":18.98,"BrightRetrieval (earth_science)":27.45,"BrightRetrieval (economics)":20.18,"BrightRetrieval (leetcode)":26.78,"BrightRetrieval (pony)":1.77,"BrightRetrieval (psychology)":21.82,"BrightRetrieval (robotics)":16.21,"BrightRetrieval (stackoverflow)":16.47,"BrightRetrieval (sustainable_living)":17.69,"BrightRetrieval (theoremqa_questions)":15.07,"BrightRetrieval (theoremqa_theorems)":6.04}
-{"index":12,"Rank":11,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":14.8,"BrightRetrieval (aops)":5.32,"BrightRetrieval (biology)":15.52,"BrightRetrieval (earth_science)":20.11,"BrightRetrieval (economics)":16.64,"BrightRetrieval (leetcode)":26.4,"BrightRetrieval (pony)":6.95,"BrightRetrieval (psychology)":22.63,"BrightRetrieval (robotics)":8.36,"BrightRetrieval (stackoverflow)":9.48,"BrightRetrieval (sustainable_living)":15.34,"BrightRetrieval (theoremqa_questions)":18.49,"BrightRetrieval (theoremqa_theorems)":12.38}
-{"index":2,"Rank":12,"Model":"bm25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":14.29,"BrightRetrieval (aops)":6.2,"BrightRetrieval (biology)":19.19,"BrightRetrieval (earth_science)":27.06,"BrightRetrieval (economics)":14.87,"BrightRetrieval (leetcode)":24.37,"BrightRetrieval (pony)":7.93,"BrightRetrieval (psychology)":12.51,"BrightRetrieval (robotics)":13.53,"BrightRetrieval (stackoverflow)":16.55,"BrightRetrieval (sustainable_living)":15.22,"BrightRetrieval (theoremqa_questions)":9.78,"BrightRetrieval (theoremqa_theorems)":4.25}
-{"index":9,"Rank":13,"Model":"instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":14.12,"BrightRetrieval (aops)":7.94,"BrightRetrieval (biology)":15.61,"BrightRetrieval (earth_science)":21.52,"BrightRetrieval (economics)":15.99,"BrightRetrieval (leetcode)":20.0,"BrightRetrieval (pony)":1.32,"BrightRetrieval (psychology)":21.94,"BrightRetrieval (robotics)":11.45,"BrightRetrieval (stackoverflow)":11.21,"BrightRetrieval (sustainable_living)":13.16,"BrightRetrieval (theoremqa_questions)":20.07,"BrightRetrieval (theoremqa_theorems)":9.29}
-{"index":5,"Rank":14,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":13.47,"BrightRetrieval (aops)":6.08,"BrightRetrieval (biology)":11.96,"BrightRetrieval (earth_science)":24.15,"BrightRetrieval (economics)":16.59,"BrightRetrieval (leetcode)":26.68,"BrightRetrieval (pony)":5.64,"BrightRetrieval (psychology)":17.44,"BrightRetrieval (robotics)":12.21,"BrightRetrieval (stackoverflow)":9.51,"BrightRetrieval (sustainable_living)":13.27,"BrightRetrieval (theoremqa_questions)":12.56,"BrightRetrieval (theoremqa_theorems)":5.51}
+{"index":2,"Rank":11,"Model":"bm25<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":14.29,"BrightRetrieval (aops)":6.2,"BrightRetrieval (biology)":19.19,"BrightRetrieval (earth_science)":27.06,"BrightRetrieval (economics)":14.87,"BrightRetrieval (leetcode)":24.37,"BrightRetrieval (pony)":7.93,"BrightRetrieval (psychology)":12.51,"BrightRetrieval (robotics)":13.53,"BrightRetrieval (stackoverflow)":16.55,"BrightRetrieval (sustainable_living)":15.22,"BrightRetrieval (theoremqa_questions)":9.78,"BrightRetrieval (theoremqa_theorems)":4.25}
+{"index":9,"Rank":12,"Model":"instructor-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Average":14.12,"BrightRetrieval (aops)":7.94,"BrightRetrieval (biology)":15.61,"BrightRetrieval (earth_science)":21.52,"BrightRetrieval (economics)":15.99,"BrightRetrieval (leetcode)":20.0,"BrightRetrieval (pony)":1.32,"BrightRetrieval (psychology)":21.94,"BrightRetrieval (robotics)":11.45,"BrightRetrieval (stackoverflow)":11.21,"BrightRetrieval (sustainable_living)":13.16,"BrightRetrieval (theoremqa_questions)":20.07,"BrightRetrieval (theoremqa_theorems)":9.29}
+{"index":5,"Rank":13,"Model":"bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Average":13.47,"BrightRetrieval (aops)":6.08,"BrightRetrieval (biology)":11.96,"BrightRetrieval (earth_science)":24.15,"BrightRetrieval (economics)":16.59,"BrightRetrieval (leetcode)":26.68,"BrightRetrieval (pony)":5.64,"BrightRetrieval (psychology)":17.44,"BrightRetrieval (robotics)":12.21,"BrightRetrieval (stackoverflow)":9.51,"BrightRetrieval (sustainable_living)":13.27,"BrightRetrieval (theoremqa_questions)":12.56,"BrightRetrieval (theoremqa_theorems)":5.51}
diff --git a/boards_data/en/data_tasks/Classification/default.jsonl b/boards_data/en/data_tasks/Classification/default.jsonl
index 76e7093fa08cbae86587f5c3ce9ad475cc060bd8..afc224fb9c48b6cd64517260dcaa0590d8262551 100644
--- a/boards_data/en/data_tasks/Classification/default.jsonl
+++ b/boards_data/en/data_tasks/Classification/default.jsonl
@@ -205,7 +205,7 @@
 {"index":107,"Rank":238,"Model":"LLaMA-embeeding<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":84.82,"AmazonPolarityClassification":76.88,"AmazonReviewsClassification (en)":36.72,"Banking77Classification":"","EmotionClassification":41.93,"ImdbClassification":"","MassiveIntentClassification (en)":65.91,"MassiveScenarioClassification (en)":67.62,"MTOPDomainClassification (en)":87.95,"MTOPIntentClassification (en)":78.43,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":56.28}
 {"index":108,"Rank":239,"Model":"yiyouliao<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":74.78,"AmazonPolarityClassification":71.89,"AmazonReviewsClassification (en)":36.7,"Banking77Classification":81.37,"EmotionClassification":42.6,"ImdbClassification":63.96,"MassiveIntentClassification (en)":68.56,"MassiveScenarioClassification (en)":74.15,"MTOPDomainClassification (en)":90.19,"MTOPIntentClassification (en)":69.5,"ToxicConversationsClassification":69.85,"TweetSentimentExtractionClassification":""}
 {"index":112,"Rank":240,"Model":"snowflake-arctic-embed-m-v1.5<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","AmazonCounterfactualClassification (en)":68.3,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":46.27,"Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":66.69,"MassiveScenarioClassification (en)":73.06,"MTOPDomainClassification (en)":91.36,"MTOPIntentClassification (en)":60.64,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
-{"index":121,"Rank":243,"Model":"EminEmbed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":62.97,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":48.48,"Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":66.09,"MassiveScenarioClassification (en)":72.55,"MTOPDomainClassification (en)":90.15,"MTOPIntentClassification (en)":57.92,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
+{"index":121,"Rank":243,"Model":"EminEmbed<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Average":"","AmazonCounterfactualClassification (en)":67.79,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":48.48,"Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":66.09,"MassiveScenarioClassification (en)":72.55,"MTOPDomainClassification (en)":90.15,"MTOPIntentClassification (en)":57.92,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
 {"index":140,"Rank":248,"Model":"slx-v0.1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.08,"Average":"","AmazonCounterfactualClassification (en)":61.46,"AmazonPolarityClassification":"","AmazonReviewsClassification (en)":30.3,"Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":67.94,"MassiveScenarioClassification (en)":73.91,"MTOPDomainClassification (en)":91.97,"MTOPIntentClassification (en)":63.3,"ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
 {"index":142,"Rank":249,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":"","AmazonCounterfactualClassification (en)":"","AmazonPolarityClassification":"","AmazonReviewsClassification (en)":"","Banking77Classification":"","EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":60.48,"MassiveScenarioClassification (en)":65.43,"MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
 {"index":212,"Rank":272,"Model":"fin-mpnet-base<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Average":"","AmazonCounterfactualClassification (en)":"","AmazonPolarityClassification":"","AmazonReviewsClassification (en)":29.13,"Banking77Classification":80.25,"EmotionClassification":"","ImdbClassification":"","MassiveIntentClassification (en)":"","MassiveScenarioClassification (en)":"","MTOPDomainClassification (en)":"","MTOPIntentClassification (en)":"","ToxicConversationsClassification":"","TweetSentimentExtractionClassification":""}
diff --git a/boards_data/ru/data_overall/default.jsonl b/boards_data/ru/data_overall/default.jsonl
index 4a02a822a1e38dd6625d5db72c9547f35e1f17f6..784ae127e10a0ea5cb8af1b637cc29558861c83b 100644
--- a/boards_data/ru/data_overall/default.jsonl
+++ b/boards_data/ru/data_overall/default.jsonl
@@ -1,25 +1,25 @@
-{"index":13,"Rank":1,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (16 datasets)":67.64,"Classification Average (7 datasets)":64.57,"Clustering Average (3 datasets)":59.98,"PairClassification Average (1 datasets)":59.38,"Reranking Average (1 datasets)":74.61,"Retrieval Average (2 datasets)":77.96,"STS Average (2 datasets)":80.15}
-{"index":11,"Rank":2,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Embedding Dimensions":1024,"Max Tokens":8192,"Average (16 datasets)":64.23,"Classification Average (7 datasets)":59.36,"Clustering Average (3 datasets)":53.61,"PairClassification Average (1 datasets)":64.99,"Reranking Average (1 datasets)":73.08,"Retrieval Average (2 datasets)":76.78,"STS Average (2 datasets)":79.85}
-{"index":4,"Rank":3,"Model":"ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Embedding Dimensions":1024,"Max Tokens":514,"Average (16 datasets)":64.14,"Classification Average (7 datasets)":60.84,"Clustering Average (3 datasets)":56.06,"PairClassification Average (1 datasets)":60.79,"Reranking Average (1 datasets)":70.87,"Retrieval Average (2 datasets)":72.82,"STS Average (2 datasets)":77.42}
-{"index":15,"Rank":4,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Embedding Dimensions":1024,"Max Tokens":514,"Average (16 datasets)":63.36,"Classification Average (7 datasets)":58.92,"Clustering Average (3 datasets)":52.55,"PairClassification Average (1 datasets)":58.4,"Reranking Average (1 datasets)":75.58,"Retrieval Average (2 datasets)":77.39,"STS Average (2 datasets)":77.48}
-{"index":0,"Rank":5,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Embedding Dimensions":1024,"Max Tokens":8192,"Average (16 datasets)":62.69,"Classification Average (7 datasets)":57.43,"Clustering Average (3 datasets)":52.51,"PairClassification Average (1 datasets)":60.6,"Reranking Average (1 datasets)":74.02,"Retrieval Average (2 datasets)":77.1,"STS Average (2 datasets)":77.39}
-{"index":10,"Rank":6,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":61.26,"Classification Average (7 datasets)":57.86,"Clustering Average (3 datasets)":53.42,"PairClassification Average (1 datasets)":60.02,"Reranking Average (1 datasets)":64.42,"Retrieval Average (2 datasets)":67.34,"STS Average (2 datasets)":77.91}
-{"index":23,"Rank":7,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":60.74,"Classification Average (7 datasets)":56.55,"Clustering Average (3 datasets)":53.22,"PairClassification Average (1 datasets)":57.81,"Reranking Average (1 datasets)":68.65,"Retrieval Average (2 datasets)":67.54,"STS Average (2 datasets)":77.37}
-{"index":14,"Rank":8,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (16 datasets)":60.04,"Classification Average (7 datasets)":56.19,"Clustering Average (3 datasets)":50.27,"PairClassification Average (1 datasets)":54.96,"Reranking Average (1 datasets)":72.01,"Retrieval Average (2 datasets)":69.91,"STS Average (2 datasets)":74.9}
-{"index":16,"Rank":9,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (16 datasets)":59.64,"Classification Average (7 datasets)":55.09,"Clustering Average (3 datasets)":51.65,"PairClassification Average (1 datasets)":55.14,"Reranking Average (1 datasets)":71.46,"Retrieval Average (2 datasets)":69.27,"STS Average (2 datasets)":74.27}
-{"index":24,"Rank":10,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Embedding Dimensions":312,"Max Tokens":2048,"Average (16 datasets)":55.92,"Classification Average (7 datasets)":53.46,"Clustering Average (3 datasets)":49.57,"PairClassification Average (1 datasets)":56.09,"Reranking Average (1 datasets)":62.15,"Retrieval Average (2 datasets)":51.5,"STS Average (2 datasets)":75.32}
-{"index":22,"Rank":11,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (16 datasets)":55.42,"Classification Average (7 datasets)":54.11,"Clustering Average (3 datasets)":49.18,"PairClassification Average (1 datasets)":64.57,"Reranking Average (1 datasets)":58.77,"Retrieval Average (2 datasets)":44.4,"STS Average (2 datasets)":74.1}
-{"index":5,"Rank":12,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Embedding Dimensions":1024,"Max Tokens":514,"Average (16 datasets)":52.61,"Classification Average (7 datasets)":55.44,"Clustering Average (3 datasets)":52.65,"PairClassification Average (1 datasets)":51.97,"Reranking Average (1 datasets)":56.13,"Retrieval Average (2 datasets)":25.6,"STS Average (2 datasets)":68.19}
-{"index":17,"Rank":13,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":52.02,"Classification Average (7 datasets)":52.35,"Clustering Average (3 datasets)":47.75,"PairClassification Average (1 datasets)":55.71,"Reranking Average (1 datasets)":55.13,"Retrieval Average (2 datasets)":36.38,"STS Average (2 datasets)":69.54}
-{"index":21,"Rank":14,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (16 datasets)":51.89,"Classification Average (7 datasets)":51.38,"Clustering Average (3 datasets)":47.75,"PairClassification Average (1 datasets)":58.56,"Reranking Average (1 datasets)":52.8,"Retrieval Average (2 datasets)":37.26,"STS Average (2 datasets)":70.71}
-{"index":7,"Rank":15,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":51.44,"Classification Average (7 datasets)":52.73,"Clustering Average (3 datasets)":46.84,"PairClassification Average (1 datasets)":55.61,"Reranking Average (1 datasets)":54.83,"Retrieval Average (2 datasets)":31.88,"STS Average (2 datasets)":69.6}
-{"index":6,"Rank":16,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Embedding Dimensions":1024,"Max Tokens":512,"Average (16 datasets)":48.98,"Classification Average (7 datasets)":55.21,"Clustering Average (3 datasets)":51.94,"PairClassification Average (1 datasets)":50.17,"Reranking Average (1 datasets)":46.81,"Retrieval Average (2 datasets)":11.78,"STS Average (2 datasets)":60.44}
-{"index":9,"Rank":17,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Embedding Dimensions":2048,"Max Tokens":514,"Average (16 datasets)":46.28,"Classification Average (7 datasets)":51.37,"Clustering Average (3 datasets)":41.23,"PairClassification Average (1 datasets)":51.87,"Reranking Average (1 datasets)":46.09,"Retrieval Average (2 datasets)":12.4,"STS Average (2 datasets)":67.28}
-{"index":3,"Rank":18,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":46.28,"Classification Average (7 datasets)":51.49,"Clustering Average (3 datasets)":43.13,"PairClassification Average (1 datasets)":59.12,"Reranking Average (1 datasets)":39.89,"Retrieval Average (2 datasets)":9.68,"STS Average (2 datasets)":66.13}
-{"index":12,"Rank":19,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":45.23,"Classification Average (7 datasets)":54.23,"Clustering Average (3 datasets)":42.92,"PairClassification Average (1 datasets)":53.78,"Reranking Average (1 datasets)":34.01,"Retrieval Average (2 datasets)":7.5,"STS Average (2 datasets)":56.25}
-{"index":1,"Rank":20,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":44.18,"Classification Average (7 datasets)":52.16,"Clustering Average (3 datasets)":38.41,"PairClassification Average (1 datasets)":52.48,"Reranking Average (1 datasets)":42.58,"Retrieval Average (2 datasets)":7.37,"STS Average (2 datasets)":58.36}
-{"index":2,"Rank":21,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Embedding Dimensions":768,"Max Tokens":512,"Average (16 datasets)":40.68,"Classification Average (7 datasets)":50.66,"Clustering Average (3 datasets)":27.91,"PairClassification Average (1 datasets)":52.12,"Reranking Average (1 datasets)":41.65,"Retrieval Average (2 datasets)":7.55,"STS Average (2 datasets)":51.84}
-{"index":8,"Rank":22,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Embedding Dimensions":512,"Max Tokens":514,"Average (16 datasets)":37.07,"Classification Average (7 datasets)":42.68,"Clustering Average (3 datasets)":30.76,"PairClassification Average (1 datasets)":51.06,"Reranking Average (1 datasets)":35.44,"Retrieval Average (2 datasets)":2.02,"STS Average (2 datasets)":55.78}
-{"index":19,"Rank":23,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":384,"Max Tokens":512,"Average (16 datasets)":25.97,"Classification Average (7 datasets)":28.67,"Clustering Average (3 datasets)":13.3,"PairClassification Average (1 datasets)":45.03,"Reranking Average (1 datasets)":27.05,"Retrieval Average (2 datasets)":1.66,"STS Average (2 datasets)":49.74}
-{"index":18,"Rank":24,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Embedding Dimensions":384,"Max Tokens":512,"Average (16 datasets)":"","Classification Average (7 datasets)":28.33,"Clustering Average (3 datasets)":13.87,"PairClassification Average (1 datasets)":46.4,"Reranking Average (1 datasets)":38.51,"Retrieval Average (2 datasets)":"","STS Average (2 datasets)":50.9}
-{"index":20,"Rank":25,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":514,"Average (16 datasets)":"","Classification Average (7 datasets)":29.53,"Clustering Average (3 datasets)":15.83,"PairClassification Average (1 datasets)":44.52,"Reranking Average (1 datasets)":30.96,"Retrieval Average (2 datasets)":"","STS Average (2 datasets)":48.92}
+{"index":0,"Rank":1,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Embedding Dimensions":1024,"Max Tokens":8192,"Average (23 datasets)":61.54,"Classification Average (9 datasets)":60.46,"Clustering Average (3 datasets)":52.51,"PairClassification Average (1 datasets)":60.6,"Reranking Average (2 datasets)":69.7,"Retrieval Average (3 datasets)":74.77,"STS Average (3 datasets)":73.68,"MultilabelClassification Average (2 datasets)":34.25}
+{"index":15,"Rank":2,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Embedding Dimensions":1024,"Max Tokens":514,"Average (23 datasets)":61.45,"Classification Average (9 datasets)":61.01,"Clustering Average (3 datasets)":52.55,"PairClassification Average (1 datasets)":58.4,"Reranking Average (2 datasets)":69.64,"Retrieval Average (3 datasets)":74.04,"STS Average (3 datasets)":71.62,"MultilabelClassification Average (2 datasets)":36.01}
+{"index":14,"Rank":3,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (23 datasets)":58.34,"Classification Average (9 datasets)":58.26,"Clustering Average (3 datasets)":50.27,"PairClassification Average (1 datasets)":54.96,"Reranking Average (2 datasets)":66.24,"Retrieval Average (3 datasets)":67.14,"STS Average (3 datasets)":70.16,"MultilabelClassification Average (2 datasets)":33.65}
+{"index":16,"Rank":4,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (23 datasets)":57.33,"Classification Average (9 datasets)":56.44,"Clustering Average (3 datasets)":51.65,"PairClassification Average (1 datasets)":55.14,"Reranking Average (2 datasets)":65.29,"Retrieval Average (3 datasets)":65.85,"STS Average (3 datasets)":69.48,"MultilabelClassification Average (2 datasets)":31.99}
+{"index":5,"Rank":5,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Embedding Dimensions":1024,"Max Tokens":514,"Average (23 datasets)":48.89,"Classification Average (9 datasets)":57.52,"Clustering Average (3 datasets)":52.65,"PairClassification Average (1 datasets)":51.97,"Reranking Average (2 datasets)":40.56,"Retrieval Average (3 datasets)":19.13,"STS Average (3 datasets)":64.4,"MultilabelClassification Average (2 datasets)":32.64}
+{"index":6,"Rank":6,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Embedding Dimensions":1024,"Max Tokens":512,"Average (23 datasets)":45.55,"Classification Average (9 datasets)":57.24,"Clustering Average (3 datasets)":51.94,"PairClassification Average (1 datasets)":50.17,"Reranking Average (2 datasets)":32.8,"Retrieval Average (3 datasets)":8.51,"STS Average (3 datasets)":57.21,"MultilabelClassification Average (2 datasets)":31.9}
+{"index":9,"Rank":7,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Embedding Dimensions":2048,"Max Tokens":514,"Average (23 datasets)":42.49,"Classification Average (9 datasets)":52.17,"Clustering Average (3 datasets)":41.23,"PairClassification Average (1 datasets)":51.87,"Reranking Average (2 datasets)":30.95,"Retrieval Average (3 datasets)":8.89,"STS Average (3 datasets)":61.6,"MultilabelClassification Average (2 datasets)":29.44}
+{"index":1,"Rank":8,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Embedding Dimensions":768,"Max Tokens":512,"Average (23 datasets)":"","Classification Average (9 datasets)":55.15,"Clustering Average (3 datasets)":38.41,"PairClassification Average (1 datasets)":52.48,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":56.2,"MultilabelClassification Average (2 datasets)":29.32}
+{"index":2,"Rank":9,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Embedding Dimensions":768,"Max Tokens":512,"Average (23 datasets)":"","Classification Average (9 datasets)":51.6,"Clustering Average (3 datasets)":27.91,"PairClassification Average (1 datasets)":52.12,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":46.22,"MultilabelClassification Average (2 datasets)":26.2}
+{"index":3,"Rank":10,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Embedding Dimensions":768,"Max Tokens":512,"Average (23 datasets)":"","Classification Average (9 datasets)":51.27,"Clustering Average (3 datasets)":43.13,"PairClassification Average (1 datasets)":59.12,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":61.18,"MultilabelClassification Average (2 datasets)":27.8}
+{"index":4,"Rank":11,"Model":"ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Embedding Dimensions":1024,"Max Tokens":514,"Average (23 datasets)":"","Classification Average (9 datasets)":"","Clustering Average (3 datasets)":56.06,"PairClassification Average (1 datasets)":60.79,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":"","MultilabelClassification Average (2 datasets)":38.88}
+{"index":7,"Rank":12,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Embedding Dimensions":768,"Max Tokens":512,"Average (23 datasets)":"","Classification Average (9 datasets)":54.98,"Clustering Average (3 datasets)":46.84,"PairClassification Average (1 datasets)":55.61,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":65.91,"MultilabelClassification Average (2 datasets)":31.27}
+{"index":8,"Rank":13,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Embedding Dimensions":512,"Max Tokens":514,"Average (23 datasets)":"","Classification Average (9 datasets)":44.55,"Clustering Average (3 datasets)":30.76,"PairClassification Average (1 datasets)":51.06,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":53.15,"MultilabelClassification Average (2 datasets)":27.96}
+{"index":10,"Rank":14,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Embedding Dimensions":768,"Max Tokens":512,"Average (23 datasets)":"","Classification Average (9 datasets)":59.88,"Clustering Average (3 datasets)":53.42,"PairClassification Average (1 datasets)":60.02,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":73.07,"MultilabelClassification Average (2 datasets)":36.98}
+{"index":11,"Rank":15,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Embedding Dimensions":1024,"Max Tokens":8192,"Average (23 datasets)":"","Classification Average (9 datasets)":61.92,"Clustering Average (3 datasets)":53.61,"PairClassification Average (1 datasets)":64.99,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":75.38,"MultilabelClassification Average (2 datasets)":35.88}
+{"index":12,"Rank":16,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Embedding Dimensions":768,"Max Tokens":512,"Average (23 datasets)":"","Classification Average (9 datasets)":56.18,"Clustering Average (3 datasets)":42.92,"PairClassification Average (1 datasets)":53.78,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":53.39,"MultilabelClassification Average (2 datasets)":28.9}
+{"index":13,"Rank":17,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average (23 datasets)":"","Classification Average (9 datasets)":67.52,"Clustering Average (3 datasets)":59.98,"PairClassification Average (1 datasets)":59.38,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":"","MultilabelClassification Average (2 datasets)":33.37}
+{"index":17,"Rank":18,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Embedding Dimensions":768,"Max Tokens":512,"Average (23 datasets)":"","Classification Average (9 datasets)":54.7,"Clustering Average (3 datasets)":47.75,"PairClassification Average (1 datasets)":55.71,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":65.52,"MultilabelClassification Average (2 datasets)":31.42}
+{"index":18,"Rank":19,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Embedding Dimensions":384,"Max Tokens":512,"Average (23 datasets)":"","Classification Average (9 datasets)":28.15,"Clustering Average (3 datasets)":13.87,"PairClassification Average (1 datasets)":46.4,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":37.66,"MultilabelClassification Average (2 datasets)":25.96}
+{"index":19,"Rank":20,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":384,"Max Tokens":512,"Average (23 datasets)":"","Classification Average (9 datasets)":28.75,"Clustering Average (3 datasets)":13.3,"PairClassification Average (1 datasets)":45.03,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":1.23,"STS Average (3 datasets)":38.07,"MultilabelClassification Average (2 datasets)":25.27}
+{"index":20,"Rank":21,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":514,"Average (23 datasets)":"","Classification Average (9 datasets)":28.82,"Clustering Average (3 datasets)":15.83,"PairClassification Average (1 datasets)":44.52,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":37.89,"MultilabelClassification Average (2 datasets)":26.9}
+{"index":21,"Rank":22,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average (23 datasets)":"","Classification Average (9 datasets)":53.77,"Clustering Average (3 datasets)":47.75,"PairClassification Average (1 datasets)":58.56,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":66.17,"MultilabelClassification Average (2 datasets)":31.3}
+{"index":22,"Rank":23,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average (23 datasets)":"","Classification Average (9 datasets)":56.88,"Clustering Average (3 datasets)":49.18,"PairClassification Average (1 datasets)":64.57,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":68.98,"MultilabelClassification Average (2 datasets)":32.9}
+{"index":23,"Rank":24,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Embedding Dimensions":768,"Max Tokens":512,"Average (23 datasets)":"","Classification Average (9 datasets)":59.23,"Clustering Average (3 datasets)":53.22,"PairClassification Average (1 datasets)":57.81,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":72.54,"MultilabelClassification Average (2 datasets)":36.32}
+{"index":24,"Rank":25,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Embedding Dimensions":312,"Max Tokens":2048,"Average (23 datasets)":"","Classification Average (9 datasets)":55.01,"Clustering Average (3 datasets)":49.57,"PairClassification Average (1 datasets)":56.09,"Reranking Average (2 datasets)":"","Retrieval Average (3 datasets)":"","STS Average (3 datasets)":70.23,"MultilabelClassification Average (2 datasets)":31.7}
diff --git a/boards_data/ru/data_tasks/Classification/default.jsonl b/boards_data/ru/data_tasks/Classification/default.jsonl
index 346574ffc075324f912aac6963c0502e7c699ab1..7dc310344150232bb0ea8382a118fb9520db31fe 100644
--- a/boards_data/ru/data_tasks/Classification/default.jsonl
+++ b/boards_data/ru/data_tasks/Classification/default.jsonl
@@ -1,25 +1,25 @@
-{"index":13,"Rank":1,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":64.57,"GeoreviewClassification (rus-Cyrl)":50.25,"HeadlineClassification (rus-Cyrl)":85.68,"InappropriatenessClassification (rus-Cyrl)":67.19,"KinopoiskClassification (rus-Cyrl)":65.49,"RuReviewsClassification (rus-Cyrl)":67.68,"RuSciBenchGRNTIClassification (rus-Cyrl)":64.59,"RuSciBenchOECDClassification (rus-Cyrl)":51.13}
-{"index":4,"Rank":2,"Model":"ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":60.84,"GeoreviewClassification (rus-Cyrl)":49.7,"HeadlineClassification (rus-Cyrl)":78.0,"InappropriatenessClassification (rus-Cyrl)":61.32,"KinopoiskClassification (rus-Cyrl)":63.27,"RuReviewsClassification (rus-Cyrl)":67.96,"RuSciBenchGRNTIClassification (rus-Cyrl)":59.33,"RuSciBenchOECDClassification (rus-Cyrl)":46.33}
-{"index":11,"Rank":3,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":59.36,"GeoreviewClassification (rus-Cyrl)":50.98,"HeadlineClassification (rus-Cyrl)":70.09,"InappropriatenessClassification (rus-Cyrl)":60.76,"KinopoiskClassification (rus-Cyrl)":63.33,"RuReviewsClassification (rus-Cyrl)":68.52,"RuSciBenchGRNTIClassification (rus-Cyrl)":57.67,"RuSciBenchOECDClassification (rus-Cyrl)":44.2}
-{"index":15,"Rank":4,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":58.92,"GeoreviewClassification (rus-Cyrl)":49.69,"HeadlineClassification (rus-Cyrl)":77.19,"InappropriatenessClassification (rus-Cyrl)":61.6,"KinopoiskClassification (rus-Cyrl)":56.59,"RuReviewsClassification (rus-Cyrl)":65.28,"RuSciBenchGRNTIClassification (rus-Cyrl)":58.2,"RuSciBenchOECDClassification (rus-Cyrl)":43.91}
-{"index":10,"Rank":5,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":57.86,"GeoreviewClassification (rus-Cyrl)":47.23,"HeadlineClassification (rus-Cyrl)":74.88,"InappropriatenessClassification (rus-Cyrl)":61.94,"KinopoiskClassification (rus-Cyrl)":55.69,"RuReviewsClassification (rus-Cyrl)":66.44,"RuSciBenchGRNTIClassification (rus-Cyrl)":55.55,"RuSciBenchOECDClassification (rus-Cyrl)":43.28}
-{"index":0,"Rank":6,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":57.43,"GeoreviewClassification (rus-Cyrl)":48.27,"HeadlineClassification (rus-Cyrl)":70.32,"InappropriatenessClassification (rus-Cyrl)":59.87,"KinopoiskClassification (rus-Cyrl)":58.23,"RuReviewsClassification (rus-Cyrl)":66.91,"RuSciBenchGRNTIClassification (rus-Cyrl)":55.81,"RuSciBenchOECDClassification (rus-Cyrl)":42.57}
-{"index":23,"Rank":7,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":56.55,"GeoreviewClassification (rus-Cyrl)":46.04,"HeadlineClassification (rus-Cyrl)":69.98,"InappropriatenessClassification (rus-Cyrl)":61.39,"KinopoiskClassification (rus-Cyrl)":53.59,"RuReviewsClassification (rus-Cyrl)":64.58,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.67,"RuSciBenchOECDClassification (rus-Cyrl)":43.58}
-{"index":14,"Rank":8,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":56.19,"GeoreviewClassification (rus-Cyrl)":46.05,"HeadlineClassification (rus-Cyrl)":75.64,"InappropriatenessClassification (rus-Cyrl)":58.78,"KinopoiskClassification (rus-Cyrl)":50.89,"RuReviewsClassification (rus-Cyrl)":62.99,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.28,"RuSciBenchOECDClassification (rus-Cyrl)":42.69}
-{"index":5,"Rank":9,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":55.44,"GeoreviewClassification (rus-Cyrl)":39.67,"HeadlineClassification (rus-Cyrl)":77.19,"InappropriatenessClassification (rus-Cyrl)":64.64,"KinopoiskClassification (rus-Cyrl)":50.33,"RuReviewsClassification (rus-Cyrl)":58.29,"RuSciBenchGRNTIClassification (rus-Cyrl)":54.19,"RuSciBenchOECDClassification (rus-Cyrl)":43.8}
-{"index":6,"Rank":10,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":55.21,"GeoreviewClassification (rus-Cyrl)":39.97,"HeadlineClassification (rus-Cyrl)":79.26,"InappropriatenessClassification (rus-Cyrl)":62.52,"KinopoiskClassification (rus-Cyrl)":49.51,"RuReviewsClassification (rus-Cyrl)":58.27,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.9,"RuSciBenchOECDClassification (rus-Cyrl)":43.04}
-{"index":16,"Rank":11,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":55.09,"GeoreviewClassification (rus-Cyrl)":44.66,"HeadlineClassification (rus-Cyrl)":73.94,"InappropriatenessClassification (rus-Cyrl)":59.16,"KinopoiskClassification (rus-Cyrl)":49.96,"RuReviewsClassification (rus-Cyrl)":61.18,"RuSciBenchGRNTIClassification (rus-Cyrl)":54.99,"RuSciBenchOECDClassification (rus-Cyrl)":41.72}
-{"index":12,"Rank":12,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":54.23,"GeoreviewClassification (rus-Cyrl)":40.19,"HeadlineClassification (rus-Cyrl)":78.75,"InappropriatenessClassification (rus-Cyrl)":61.33,"KinopoiskClassification (rus-Cyrl)":48.78,"RuReviewsClassification (rus-Cyrl)":55.66,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.53,"RuSciBenchOECDClassification (rus-Cyrl)":41.34}
-{"index":22,"Rank":13,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":54.11,"GeoreviewClassification (rus-Cyrl)":42.33,"HeadlineClassification (rus-Cyrl)":70.35,"InappropriatenessClassification (rus-Cyrl)":59.32,"KinopoiskClassification (rus-Cyrl)":44.31,"RuReviewsClassification (rus-Cyrl)":62.33,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.01,"RuSciBenchOECDClassification (rus-Cyrl)":44.14}
-{"index":24,"Rank":14,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":53.46,"GeoreviewClassification (rus-Cyrl)":41.36,"HeadlineClassification (rus-Cyrl)":68.9,"InappropriatenessClassification (rus-Cyrl)":59.11,"KinopoiskClassification (rus-Cyrl)":50.47,"RuReviewsClassification (rus-Cyrl)":60.66,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.93,"RuSciBenchOECDClassification (rus-Cyrl)":40.79}
-{"index":7,"Rank":15,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":52.73,"GeoreviewClassification (rus-Cyrl)":40.89,"HeadlineClassification (rus-Cyrl)":68.75,"InappropriatenessClassification (rus-Cyrl)":58.48,"KinopoiskClassification (rus-Cyrl)":49.85,"RuReviewsClassification (rus-Cyrl)":58.01,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.8,"RuSciBenchOECDClassification (rus-Cyrl)":40.36}
-{"index":17,"Rank":16,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":52.35,"GeoreviewClassification (rus-Cyrl)":40.86,"HeadlineClassification (rus-Cyrl)":68.75,"InappropriatenessClassification (rus-Cyrl)":58.52,"KinopoiskClassification (rus-Cyrl)":46.77,"RuReviewsClassification (rus-Cyrl)":58.01,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.04,"RuSciBenchOECDClassification (rus-Cyrl)":40.48}
-{"index":1,"Rank":17,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":52.16,"GeoreviewClassification (rus-Cyrl)":38.95,"HeadlineClassification (rus-Cyrl)":75.59,"InappropriatenessClassification (rus-Cyrl)":60.68,"KinopoiskClassification (rus-Cyrl)":49.67,"RuReviewsClassification (rus-Cyrl)":54.05,"RuSciBenchGRNTIClassification (rus-Cyrl)":48.53,"RuSciBenchOECDClassification (rus-Cyrl)":37.65}
-{"index":3,"Rank":18,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":51.49,"GeoreviewClassification (rus-Cyrl)":38.05,"HeadlineClassification (rus-Cyrl)":67.64,"InappropriatenessClassification (rus-Cyrl)":58.27,"KinopoiskClassification (rus-Cyrl)":45.86,"RuReviewsClassification (rus-Cyrl)":58.34,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.18,"RuSciBenchOECDClassification (rus-Cyrl)":40.11}
-{"index":21,"Rank":19,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":51.38,"GeoreviewClassification (rus-Cyrl)":38.24,"HeadlineClassification (rus-Cyrl)":68.3,"InappropriatenessClassification (rus-Cyrl)":58.18,"KinopoiskClassification (rus-Cyrl)":41.45,"RuReviewsClassification (rus-Cyrl)":58.88,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.19,"RuSciBenchOECDClassification (rus-Cyrl)":41.41}
-{"index":9,"Rank":20,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":51.37,"GeoreviewClassification (rus-Cyrl)":39.64,"HeadlineClassification (rus-Cyrl)":74.19,"InappropriatenessClassification (rus-Cyrl)":58.57,"KinopoiskClassification (rus-Cyrl)":49.06,"RuReviewsClassification (rus-Cyrl)":56.99,"RuSciBenchGRNTIClassification (rus-Cyrl)":45.63,"RuSciBenchOECDClassification (rus-Cyrl)":35.48}
-{"index":2,"Rank":21,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":50.66,"GeoreviewClassification (rus-Cyrl)":37.22,"HeadlineClassification (rus-Cyrl)":75.23,"InappropriatenessClassification (rus-Cyrl)":57.34,"KinopoiskClassification (rus-Cyrl)":49.91,"RuReviewsClassification (rus-Cyrl)":50.74,"RuSciBenchGRNTIClassification (rus-Cyrl)":48.03,"RuSciBenchOECDClassification (rus-Cyrl)":36.13}
-{"index":8,"Rank":22,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":42.68,"GeoreviewClassification (rus-Cyrl)":33.45,"HeadlineClassification (rus-Cyrl)":57.65,"InappropriatenessClassification (rus-Cyrl)":54.5,"KinopoiskClassification (rus-Cyrl)":41.36,"RuReviewsClassification (rus-Cyrl)":49.56,"RuSciBenchGRNTIClassification (rus-Cyrl)":35.71,"RuSciBenchOECDClassification (rus-Cyrl)":26.51}
-{"index":20,"Rank":23,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":29.53,"GeoreviewClassification (rus-Cyrl)":25.93,"HeadlineClassification (rus-Cyrl)":28.53,"InappropriatenessClassification (rus-Cyrl)":51.82,"KinopoiskClassification (rus-Cyrl)":34.18,"RuReviewsClassification (rus-Cyrl)":42.33,"RuSciBenchGRNTIClassification (rus-Cyrl)":13.29,"RuSciBenchOECDClassification (rus-Cyrl)":10.62}
-{"index":19,"Rank":24,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":28.67,"GeoreviewClassification (rus-Cyrl)":27.08,"HeadlineClassification (rus-Cyrl)":27.77,"InappropriatenessClassification (rus-Cyrl)":51.73,"KinopoiskClassification (rus-Cyrl)":33.93,"RuReviewsClassification (rus-Cyrl)":41.79,"RuSciBenchGRNTIClassification (rus-Cyrl)":10.08,"RuSciBenchOECDClassification (rus-Cyrl)":8.3}
-{"index":18,"Rank":25,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":28.33,"GeoreviewClassification (rus-Cyrl)":23.49,"HeadlineClassification (rus-Cyrl)":28.49,"InappropriatenessClassification (rus-Cyrl)":50.85,"KinopoiskClassification (rus-Cyrl)":34.17,"RuReviewsClassification (rus-Cyrl)":42.49,"RuSciBenchGRNTIClassification (rus-Cyrl)":10.49,"RuSciBenchOECDClassification (rus-Cyrl)":8.31}
+{"index":13,"Rank":1,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":67.52,"GeoreviewClassification (rus-Cyrl)":50.25,"HeadlineClassification (rus-Cyrl)":85.68,"InappropriatenessClassification (rus-Cyrl)":67.19,"KinopoiskClassification (rus-Cyrl)":65.49,"RuReviewsClassification (rus-Cyrl)":67.68,"RuSciBenchGRNTIClassification (rus-Cyrl)":64.59,"RuSciBenchOECDClassification (rus-Cyrl)":51.13,"MassiveIntentClassification (rus-Cyrl)":76.08,"MassiveScenarioClassification (rus-Cyrl)":79.61}
+{"index":11,"Rank":2,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":61.92,"GeoreviewClassification (rus-Cyrl)":50.98,"HeadlineClassification (rus-Cyrl)":70.09,"InappropriatenessClassification (rus-Cyrl)":60.76,"KinopoiskClassification (rus-Cyrl)":63.33,"RuReviewsClassification (rus-Cyrl)":68.52,"RuSciBenchGRNTIClassification (rus-Cyrl)":57.67,"RuSciBenchOECDClassification (rus-Cyrl)":44.2,"MassiveIntentClassification (rus-Cyrl)":68.85,"MassiveScenarioClassification (rus-Cyrl)":72.9}
+{"index":15,"Rank":3,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":61.01,"GeoreviewClassification (rus-Cyrl)":49.69,"HeadlineClassification (rus-Cyrl)":77.19,"InappropriatenessClassification (rus-Cyrl)":61.6,"KinopoiskClassification (rus-Cyrl)":56.59,"RuReviewsClassification (rus-Cyrl)":65.28,"RuSciBenchGRNTIClassification (rus-Cyrl)":58.2,"RuSciBenchOECDClassification (rus-Cyrl)":43.91,"MassiveIntentClassification (rus-Cyrl)":65.76,"MassiveScenarioClassification (rus-Cyrl)":70.85}
+{"index":0,"Rank":4,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":60.46,"GeoreviewClassification (rus-Cyrl)":48.27,"HeadlineClassification (rus-Cyrl)":70.32,"InappropriatenessClassification (rus-Cyrl)":59.87,"KinopoiskClassification (rus-Cyrl)":58.23,"RuReviewsClassification (rus-Cyrl)":66.91,"RuSciBenchGRNTIClassification (rus-Cyrl)":55.81,"RuSciBenchOECDClassification (rus-Cyrl)":42.57,"MassiveIntentClassification (rus-Cyrl)":68.75,"MassiveScenarioClassification (rus-Cyrl)":73.42}
+{"index":10,"Rank":5,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":59.88,"GeoreviewClassification (rus-Cyrl)":47.23,"HeadlineClassification (rus-Cyrl)":74.88,"InappropriatenessClassification (rus-Cyrl)":61.94,"KinopoiskClassification (rus-Cyrl)":55.69,"RuReviewsClassification (rus-Cyrl)":66.44,"RuSciBenchGRNTIClassification (rus-Cyrl)":55.55,"RuSciBenchOECDClassification (rus-Cyrl)":43.28,"MassiveIntentClassification (rus-Cyrl)":65.57,"MassiveScenarioClassification (rus-Cyrl)":68.33}
+{"index":23,"Rank":6,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":59.23,"GeoreviewClassification (rus-Cyrl)":46.04,"HeadlineClassification (rus-Cyrl)":69.98,"InappropriatenessClassification (rus-Cyrl)":61.39,"KinopoiskClassification (rus-Cyrl)":53.59,"RuReviewsClassification (rus-Cyrl)":64.58,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.67,"RuSciBenchOECDClassification (rus-Cyrl)":43.58,"MassiveIntentClassification (rus-Cyrl)":66.08,"MassiveScenarioClassification (rus-Cyrl)":71.13}
+{"index":14,"Rank":7,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":58.26,"GeoreviewClassification (rus-Cyrl)":46.05,"HeadlineClassification (rus-Cyrl)":75.64,"InappropriatenessClassification (rus-Cyrl)":58.78,"KinopoiskClassification (rus-Cyrl)":50.89,"RuReviewsClassification (rus-Cyrl)":62.99,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.28,"RuSciBenchOECDClassification (rus-Cyrl)":42.69,"MassiveIntentClassification (rus-Cyrl)":62.78,"MassiveScenarioClassification (rus-Cyrl)":68.21}
+{"index":5,"Rank":8,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":57.52,"GeoreviewClassification (rus-Cyrl)":39.67,"HeadlineClassification (rus-Cyrl)":77.19,"InappropriatenessClassification (rus-Cyrl)":64.64,"KinopoiskClassification (rus-Cyrl)":50.33,"RuReviewsClassification (rus-Cyrl)":58.29,"RuSciBenchGRNTIClassification (rus-Cyrl)":54.19,"RuSciBenchOECDClassification (rus-Cyrl)":43.8,"MassiveIntentClassification (rus-Cyrl)":61.42,"MassiveScenarioClassification (rus-Cyrl)":68.13}
+{"index":6,"Rank":9,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":57.24,"GeoreviewClassification (rus-Cyrl)":39.97,"HeadlineClassification (rus-Cyrl)":79.26,"InappropriatenessClassification (rus-Cyrl)":62.52,"KinopoiskClassification (rus-Cyrl)":49.51,"RuReviewsClassification (rus-Cyrl)":58.27,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.9,"RuSciBenchOECDClassification (rus-Cyrl)":43.04,"MassiveIntentClassification (rus-Cyrl)":61.09,"MassiveScenarioClassification (rus-Cyrl)":67.6}
+{"index":22,"Rank":10,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":56.88,"GeoreviewClassification (rus-Cyrl)":42.33,"HeadlineClassification (rus-Cyrl)":70.35,"InappropriatenessClassification (rus-Cyrl)":59.32,"KinopoiskClassification (rus-Cyrl)":44.31,"RuReviewsClassification (rus-Cyrl)":62.33,"RuSciBenchGRNTIClassification (rus-Cyrl)":56.01,"RuSciBenchOECDClassification (rus-Cyrl)":44.14,"MassiveIntentClassification (rus-Cyrl)":63.23,"MassiveScenarioClassification (rus-Cyrl)":69.92}
+{"index":16,"Rank":11,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":56.44,"GeoreviewClassification (rus-Cyrl)":44.66,"HeadlineClassification (rus-Cyrl)":73.94,"InappropriatenessClassification (rus-Cyrl)":59.16,"KinopoiskClassification (rus-Cyrl)":49.96,"RuReviewsClassification (rus-Cyrl)":61.18,"RuSciBenchGRNTIClassification (rus-Cyrl)":54.99,"RuSciBenchOECDClassification (rus-Cyrl)":41.72,"MassiveIntentClassification (rus-Cyrl)":58.43,"MassiveScenarioClassification (rus-Cyrl)":63.89}
+{"index":12,"Rank":12,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":56.18,"GeoreviewClassification (rus-Cyrl)":40.19,"HeadlineClassification (rus-Cyrl)":78.75,"InappropriatenessClassification (rus-Cyrl)":61.33,"KinopoiskClassification (rus-Cyrl)":48.78,"RuReviewsClassification (rus-Cyrl)":55.66,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.53,"RuSciBenchOECDClassification (rus-Cyrl)":41.34,"MassiveIntentClassification (rus-Cyrl)":61.32,"MassiveScenarioClassification (rus-Cyrl)":64.71}
+{"index":1,"Rank":13,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":55.15,"GeoreviewClassification (rus-Cyrl)":38.95,"HeadlineClassification (rus-Cyrl)":75.59,"InappropriatenessClassification (rus-Cyrl)":60.68,"KinopoiskClassification (rus-Cyrl)":49.67,"RuReviewsClassification (rus-Cyrl)":54.05,"RuSciBenchGRNTIClassification (rus-Cyrl)":48.53,"RuSciBenchOECDClassification (rus-Cyrl)":37.65,"MassiveIntentClassification (rus-Cyrl)":63.12,"MassiveScenarioClassification (rus-Cyrl)":68.08}
+{"index":24,"Rank":14,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":55.01,"GeoreviewClassification (rus-Cyrl)":41.36,"HeadlineClassification (rus-Cyrl)":68.9,"InappropriatenessClassification (rus-Cyrl)":59.11,"KinopoiskClassification (rus-Cyrl)":50.47,"RuReviewsClassification (rus-Cyrl)":60.66,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.93,"RuSciBenchOECDClassification (rus-Cyrl)":40.79,"MassiveIntentClassification (rus-Cyrl)":57.98,"MassiveScenarioClassification (rus-Cyrl)":62.9}
+{"index":7,"Rank":15,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":54.98,"GeoreviewClassification (rus-Cyrl)":40.89,"HeadlineClassification (rus-Cyrl)":68.75,"InappropriatenessClassification (rus-Cyrl)":58.48,"KinopoiskClassification (rus-Cyrl)":49.85,"RuReviewsClassification (rus-Cyrl)":58.01,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.8,"RuSciBenchOECDClassification (rus-Cyrl)":40.36,"MassiveIntentClassification (rus-Cyrl)":60.53,"MassiveScenarioClassification (rus-Cyrl)":65.15}
+{"index":17,"Rank":16,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":54.7,"GeoreviewClassification (rus-Cyrl)":40.86,"HeadlineClassification (rus-Cyrl)":68.75,"InappropriatenessClassification (rus-Cyrl)":58.52,"KinopoiskClassification (rus-Cyrl)":46.77,"RuReviewsClassification (rus-Cyrl)":58.01,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.04,"RuSciBenchOECDClassification (rus-Cyrl)":40.48,"MassiveIntentClassification (rus-Cyrl)":60.64,"MassiveScenarioClassification (rus-Cyrl)":65.23}
+{"index":21,"Rank":17,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":53.77,"GeoreviewClassification (rus-Cyrl)":38.24,"HeadlineClassification (rus-Cyrl)":68.3,"InappropriatenessClassification (rus-Cyrl)":58.18,"KinopoiskClassification (rus-Cyrl)":41.45,"RuReviewsClassification (rus-Cyrl)":58.88,"RuSciBenchGRNTIClassification (rus-Cyrl)":53.19,"RuSciBenchOECDClassification (rus-Cyrl)":41.41,"MassiveIntentClassification (rus-Cyrl)":59.06,"MassiveScenarioClassification (rus-Cyrl)":65.25}
+{"index":9,"Rank":18,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":52.17,"GeoreviewClassification (rus-Cyrl)":39.64,"HeadlineClassification (rus-Cyrl)":74.19,"InappropriatenessClassification (rus-Cyrl)":58.57,"KinopoiskClassification (rus-Cyrl)":49.06,"RuReviewsClassification (rus-Cyrl)":56.99,"RuSciBenchGRNTIClassification (rus-Cyrl)":45.63,"RuSciBenchOECDClassification (rus-Cyrl)":35.48,"MassiveIntentClassification (rus-Cyrl)":50.83,"MassiveScenarioClassification (rus-Cyrl)":59.15}
+{"index":2,"Rank":19,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":51.6,"GeoreviewClassification (rus-Cyrl)":37.22,"HeadlineClassification (rus-Cyrl)":75.23,"InappropriatenessClassification (rus-Cyrl)":57.34,"KinopoiskClassification (rus-Cyrl)":49.91,"RuReviewsClassification (rus-Cyrl)":50.74,"RuSciBenchGRNTIClassification (rus-Cyrl)":48.03,"RuSciBenchOECDClassification (rus-Cyrl)":36.13,"MassiveIntentClassification (rus-Cyrl)":53.02,"MassiveScenarioClassification (rus-Cyrl)":56.79}
+{"index":3,"Rank":20,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":51.27,"GeoreviewClassification (rus-Cyrl)":38.05,"HeadlineClassification (rus-Cyrl)":67.64,"InappropriatenessClassification (rus-Cyrl)":58.27,"KinopoiskClassification (rus-Cyrl)":45.86,"RuReviewsClassification (rus-Cyrl)":58.34,"RuSciBenchGRNTIClassification (rus-Cyrl)":52.18,"RuSciBenchOECDClassification (rus-Cyrl)":40.11,"MassiveIntentClassification (rus-Cyrl)":49.1,"MassiveScenarioClassification (rus-Cyrl)":51.91}
+{"index":8,"Rank":21,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":44.55,"GeoreviewClassification (rus-Cyrl)":33.45,"HeadlineClassification (rus-Cyrl)":57.65,"InappropriatenessClassification (rus-Cyrl)":54.5,"KinopoiskClassification (rus-Cyrl)":41.36,"RuReviewsClassification (rus-Cyrl)":49.56,"RuSciBenchGRNTIClassification (rus-Cyrl)":35.71,"RuSciBenchOECDClassification (rus-Cyrl)":26.51,"MassiveIntentClassification (rus-Cyrl)":50.1,"MassiveScenarioClassification (rus-Cyrl)":52.15}
+{"index":20,"Rank":22,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":28.82,"GeoreviewClassification (rus-Cyrl)":25.93,"HeadlineClassification (rus-Cyrl)":28.53,"InappropriatenessClassification (rus-Cyrl)":51.82,"KinopoiskClassification (rus-Cyrl)":34.18,"RuReviewsClassification (rus-Cyrl)":42.33,"RuSciBenchGRNTIClassification (rus-Cyrl)":13.29,"RuSciBenchOECDClassification (rus-Cyrl)":10.62,"MassiveIntentClassification (rus-Cyrl)":23.98,"MassiveScenarioClassification (rus-Cyrl)":28.71}
+{"index":19,"Rank":23,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":28.75,"GeoreviewClassification (rus-Cyrl)":27.08,"HeadlineClassification (rus-Cyrl)":27.77,"InappropriatenessClassification (rus-Cyrl)":51.73,"KinopoiskClassification (rus-Cyrl)":33.93,"RuReviewsClassification (rus-Cyrl)":41.79,"RuSciBenchGRNTIClassification (rus-Cyrl)":10.08,"RuSciBenchOECDClassification (rus-Cyrl)":8.3,"MassiveIntentClassification (rus-Cyrl)":27.58,"MassiveScenarioClassification (rus-Cyrl)":30.46}
+{"index":18,"Rank":24,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":28.15,"GeoreviewClassification (rus-Cyrl)":23.49,"HeadlineClassification (rus-Cyrl)":28.49,"InappropriatenessClassification (rus-Cyrl)":50.85,"KinopoiskClassification (rus-Cyrl)":34.17,"RuReviewsClassification (rus-Cyrl)":42.49,"RuSciBenchGRNTIClassification (rus-Cyrl)":10.49,"RuSciBenchOECDClassification (rus-Cyrl)":8.31,"MassiveIntentClassification (rus-Cyrl)":26.29,"MassiveScenarioClassification (rus-Cyrl)":28.77}
+{"index":4,"Rank":25,"Model":"ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":"","GeoreviewClassification (rus-Cyrl)":49.7,"HeadlineClassification (rus-Cyrl)":78.0,"InappropriatenessClassification (rus-Cyrl)":61.32,"KinopoiskClassification (rus-Cyrl)":63.27,"RuReviewsClassification (rus-Cyrl)":67.96,"RuSciBenchGRNTIClassification (rus-Cyrl)":59.33,"RuSciBenchOECDClassification (rus-Cyrl)":46.33,"MassiveIntentClassification (rus-Cyrl)":"","MassiveScenarioClassification (rus-Cyrl)":""}
diff --git a/boards_data/ru/data_tasks/Clustering/default.jsonl b/boards_data/ru/data_tasks/Clustering/default.jsonl
index 639ccd83dc5ddfac5549c8f21a8d3b2614186cda..215b1424011c3b76cb70dcb565fae6d28a81c201 100644
--- a/boards_data/ru/data_tasks/Clustering/default.jsonl
+++ b/boards_data/ru/data_tasks/Clustering/default.jsonl
@@ -11,8 +11,8 @@
 {"index":14,"Rank":11,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":50.27,"GeoreviewClusteringP2P (rus-Cyrl)":54.46,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":51.56,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":44.78}
 {"index":24,"Rank":12,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":49.57,"GeoreviewClusteringP2P (rus-Cyrl)":59.71,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":47.55,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.44}
 {"index":22,"Rank":13,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":49.18,"GeoreviewClusteringP2P (rus-Cyrl)":56.18,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":48.47,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":42.9}
-{"index":17,"Rank":14,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":47.75,"GeoreviewClusteringP2P (rus-Cyrl)":52.19,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":49.09,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.97}
-{"index":21,"Rank":15,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":47.75,"GeoreviewClusteringP2P (rus-Cyrl)":53.35,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":48.22,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.68}
+{"index":21,"Rank":14,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":47.75,"GeoreviewClusteringP2P (rus-Cyrl)":53.35,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":48.22,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.68}
+{"index":17,"Rank":15,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":47.75,"GeoreviewClusteringP2P (rus-Cyrl)":52.19,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":49.09,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.97}
 {"index":7,"Rank":16,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":46.84,"GeoreviewClusteringP2P (rus-Cyrl)":51.89,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":47.48,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.16}
 {"index":3,"Rank":17,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":43.13,"GeoreviewClusteringP2P (rus-Cyrl)":41.82,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":46.29,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":41.28}
 {"index":12,"Rank":18,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":42.92,"GeoreviewClusteringP2P (rus-Cyrl)":58.79,"RuSciBenchGRNTIClusteringP2P (rus-Cyrl)":36.66,"RuSciBenchOECDClusteringP2P (rus-Cyrl)":33.31}
diff --git a/boards_data/ru/data_tasks/MultilabelClassification/default.jsonl b/boards_data/ru/data_tasks/MultilabelClassification/default.jsonl
new file mode 100644
index 0000000000000000000000000000000000000000..1b9fdd8b44a1b6505710a548b5391a8770332ef9
--- /dev/null
+++ b/boards_data/ru/data_tasks/MultilabelClassification/default.jsonl
@@ -0,0 +1,25 @@
+{"index":4,"Rank":1,"Model":"ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":38.88,"CEDRClassification (rus-Cyrl)":44.69,"SensitiveTopicsClassification (rus-Cyrl)":33.07}
+{"index":10,"Rank":2,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":36.98,"CEDRClassification (rus-Cyrl)":46.47,"SensitiveTopicsClassification (rus-Cyrl)":27.5}
+{"index":23,"Rank":3,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":36.32,"CEDRClassification (rus-Cyrl)":45.11,"SensitiveTopicsClassification (rus-Cyrl)":27.52}
+{"index":15,"Rank":4,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":36.01,"CEDRClassification (rus-Cyrl)":44.84,"SensitiveTopicsClassification (rus-Cyrl)":27.17}
+{"index":11,"Rank":5,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":35.88,"CEDRClassification (rus-Cyrl)":45.48,"SensitiveTopicsClassification (rus-Cyrl)":26.29}
+{"index":0,"Rank":6,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":34.25,"CEDRClassification (rus-Cyrl)":43.47,"SensitiveTopicsClassification (rus-Cyrl)":25.03}
+{"index":14,"Rank":7,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":33.65,"CEDRClassification (rus-Cyrl)":42.32,"SensitiveTopicsClassification (rus-Cyrl)":24.98}
+{"index":13,"Rank":8,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":33.37,"CEDRClassification (rus-Cyrl)":40.8,"SensitiveTopicsClassification (rus-Cyrl)":25.94}
+{"index":22,"Rank":9,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":32.9,"CEDRClassification (rus-Cyrl)":39.98,"SensitiveTopicsClassification (rus-Cyrl)":25.83}
+{"index":5,"Rank":10,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":32.64,"CEDRClassification (rus-Cyrl)":36.81,"SensitiveTopicsClassification (rus-Cyrl)":28.47}
+{"index":16,"Rank":11,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":31.99,"CEDRClassification (rus-Cyrl)":40.07,"SensitiveTopicsClassification (rus-Cyrl)":23.91}
+{"index":6,"Rank":12,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":31.9,"CEDRClassification (rus-Cyrl)":35.84,"SensitiveTopicsClassification (rus-Cyrl)":27.97}
+{"index":24,"Rank":13,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":31.7,"CEDRClassification (rus-Cyrl)":38.95,"SensitiveTopicsClassification (rus-Cyrl)":24.44}
+{"index":17,"Rank":14,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":31.42,"CEDRClassification (rus-Cyrl)":40.61,"SensitiveTopicsClassification (rus-Cyrl)":22.23}
+{"index":21,"Rank":15,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":31.3,"CEDRClassification (rus-Cyrl)":37.76,"SensitiveTopicsClassification (rus-Cyrl)":24.84}
+{"index":7,"Rank":16,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":31.27,"CEDRClassification (rus-Cyrl)":40.75,"SensitiveTopicsClassification (rus-Cyrl)":21.79}
+{"index":9,"Rank":17,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":29.44,"CEDRClassification (rus-Cyrl)":36.87,"SensitiveTopicsClassification (rus-Cyrl)":22.02}
+{"index":1,"Rank":18,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":29.32,"CEDRClassification (rus-Cyrl)":36.19,"SensitiveTopicsClassification (rus-Cyrl)":22.45}
+{"index":12,"Rank":19,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":28.9,"CEDRClassification (rus-Cyrl)":34.14,"SensitiveTopicsClassification (rus-Cyrl)":23.67}
+{"index":8,"Rank":20,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":27.96,"CEDRClassification (rus-Cyrl)":37.39,"SensitiveTopicsClassification (rus-Cyrl)":18.54}
+{"index":3,"Rank":21,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":27.8,"CEDRClassification (rus-Cyrl)":35.55,"SensitiveTopicsClassification (rus-Cyrl)":20.05}
+{"index":20,"Rank":22,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":26.9,"CEDRClassification (rus-Cyrl)":35.98,"SensitiveTopicsClassification (rus-Cyrl)":17.83}
+{"index":2,"Rank":23,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":26.2,"CEDRClassification (rus-Cyrl)":33.59,"SensitiveTopicsClassification (rus-Cyrl)":18.8}
+{"index":18,"Rank":24,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":25.96,"CEDRClassification (rus-Cyrl)":33.86,"SensitiveTopicsClassification (rus-Cyrl)":18.05}
+{"index":19,"Rank":25,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":25.27,"CEDRClassification (rus-Cyrl)":32.72,"SensitiveTopicsClassification (rus-Cyrl)":17.82}
diff --git a/boards_data/ru/data_tasks/Reranking/default.jsonl b/boards_data/ru/data_tasks/Reranking/default.jsonl
index 94b26645ae88603d8786540ad94907d69e1570f6..11bca3023892126f3f107fe108c1fa324f7d3ed2 100644
--- a/boards_data/ru/data_tasks/Reranking/default.jsonl
+++ b/boards_data/ru/data_tasks/Reranking/default.jsonl
@@ -1,25 +1,25 @@
-{"index":15,"Rank":1,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"RuBQReranking (rus-Cyrl)":75.58}
-{"index":13,"Rank":2,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"RuBQReranking (rus-Cyrl)":74.61}
-{"index":0,"Rank":3,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"RuBQReranking (rus-Cyrl)":74.02}
-{"index":11,"Rank":4,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"RuBQReranking (rus-Cyrl)":73.08}
-{"index":14,"Rank":5,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"RuBQReranking (rus-Cyrl)":72.01}
-{"index":16,"Rank":6,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"RuBQReranking (rus-Cyrl)":71.46}
-{"index":4,"Rank":7,"Model":"ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"RuBQReranking (rus-Cyrl)":70.87}
-{"index":23,"Rank":8,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"RuBQReranking (rus-Cyrl)":68.65}
-{"index":10,"Rank":9,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"RuBQReranking (rus-Cyrl)":64.42}
-{"index":24,"Rank":10,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"RuBQReranking (rus-Cyrl)":62.15}
-{"index":22,"Rank":11,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"RuBQReranking (rus-Cyrl)":58.77}
-{"index":5,"Rank":12,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"RuBQReranking (rus-Cyrl)":56.13}
-{"index":17,"Rank":13,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"RuBQReranking (rus-Cyrl)":55.13}
-{"index":7,"Rank":14,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"RuBQReranking (rus-Cyrl)":54.83}
-{"index":21,"Rank":15,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"RuBQReranking (rus-Cyrl)":52.8}
-{"index":6,"Rank":16,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"RuBQReranking (rus-Cyrl)":46.81}
-{"index":9,"Rank":17,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"RuBQReranking (rus-Cyrl)":46.09}
-{"index":1,"Rank":18,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"RuBQReranking (rus-Cyrl)":42.58}
-{"index":2,"Rank":19,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"RuBQReranking (rus-Cyrl)":41.65}
-{"index":3,"Rank":20,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"RuBQReranking (rus-Cyrl)":39.89}
-{"index":18,"Rank":21,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"RuBQReranking (rus-Cyrl)":38.51}
-{"index":8,"Rank":22,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"RuBQReranking (rus-Cyrl)":35.44}
-{"index":12,"Rank":23,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"RuBQReranking (rus-Cyrl)":34.01}
-{"index":20,"Rank":24,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"RuBQReranking (rus-Cyrl)":30.96}
-{"index":19,"Rank":25,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"RuBQReranking (rus-Cyrl)":27.05}
+{"index":0,"Rank":1,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":69.7,"RuBQReranking (rus-Cyrl)":74.02,"MIRACLReranking (rus-Cyrl)":65.38}
+{"index":15,"Rank":2,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":69.64,"RuBQReranking (rus-Cyrl)":75.58,"MIRACLReranking (rus-Cyrl)":63.71}
+{"index":14,"Rank":3,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":66.24,"RuBQReranking (rus-Cyrl)":72.01,"MIRACLReranking (rus-Cyrl)":60.47}
+{"index":16,"Rank":4,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":65.29,"RuBQReranking (rus-Cyrl)":71.46,"MIRACLReranking (rus-Cyrl)":59.12}
+{"index":5,"Rank":5,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":40.56,"RuBQReranking (rus-Cyrl)":56.13,"MIRACLReranking (rus-Cyrl)":24.99}
+{"index":6,"Rank":6,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":32.8,"RuBQReranking (rus-Cyrl)":46.81,"MIRACLReranking (rus-Cyrl)":18.8}
+{"index":9,"Rank":7,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":30.95,"RuBQReranking (rus-Cyrl)":46.09,"MIRACLReranking (rus-Cyrl)":15.81}
+{"index":1,"Rank":8,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":"","RuBQReranking (rus-Cyrl)":42.58,"MIRACLReranking (rus-Cyrl)":""}
+{"index":2,"Rank":9,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":"","RuBQReranking (rus-Cyrl)":41.65,"MIRACLReranking (rus-Cyrl)":""}
+{"index":3,"Rank":10,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":"","RuBQReranking (rus-Cyrl)":39.89,"MIRACLReranking (rus-Cyrl)":""}
+{"index":4,"Rank":11,"Model":"ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":"","RuBQReranking (rus-Cyrl)":70.87,"MIRACLReranking (rus-Cyrl)":""}
+{"index":7,"Rank":12,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":"","RuBQReranking (rus-Cyrl)":54.83,"MIRACLReranking (rus-Cyrl)":""}
+{"index":8,"Rank":13,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":"","RuBQReranking (rus-Cyrl)":35.44,"MIRACLReranking (rus-Cyrl)":""}
+{"index":10,"Rank":14,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","RuBQReranking (rus-Cyrl)":64.42,"MIRACLReranking (rus-Cyrl)":""}
+{"index":11,"Rank":15,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":"","RuBQReranking (rus-Cyrl)":73.08,"MIRACLReranking (rus-Cyrl)":""}
+{"index":12,"Rank":16,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","RuBQReranking (rus-Cyrl)":34.01,"MIRACLReranking (rus-Cyrl)":""}
+{"index":13,"Rank":17,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","RuBQReranking (rus-Cyrl)":74.61,"MIRACLReranking (rus-Cyrl)":""}
+{"index":17,"Rank":18,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","RuBQReranking (rus-Cyrl)":55.13,"MIRACLReranking (rus-Cyrl)":""}
+{"index":18,"Rank":19,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","RuBQReranking (rus-Cyrl)":38.51,"MIRACLReranking (rus-Cyrl)":""}
+{"index":19,"Rank":20,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":"","RuBQReranking (rus-Cyrl)":27.05,"MIRACLReranking (rus-Cyrl)":""}
+{"index":20,"Rank":21,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","RuBQReranking (rus-Cyrl)":30.96,"MIRACLReranking (rus-Cyrl)":""}
+{"index":21,"Rank":22,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","RuBQReranking (rus-Cyrl)":52.8,"MIRACLReranking (rus-Cyrl)":""}
+{"index":22,"Rank":23,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","RuBQReranking (rus-Cyrl)":58.77,"MIRACLReranking (rus-Cyrl)":""}
+{"index":23,"Rank":24,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":"","RuBQReranking (rus-Cyrl)":68.65,"MIRACLReranking (rus-Cyrl)":""}
+{"index":24,"Rank":25,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","RuBQReranking (rus-Cyrl)":62.15,"MIRACLReranking (rus-Cyrl)":""}
diff --git a/boards_data/ru/data_tasks/Retrieval/default.jsonl b/boards_data/ru/data_tasks/Retrieval/default.jsonl
index d175d7f27f48e60cc35a62157c26d2d8e9c3a16a..f317b880eb804efe197d9065ce2f3655cd62ed9c 100644
--- a/boards_data/ru/data_tasks/Retrieval/default.jsonl
+++ b/boards_data/ru/data_tasks/Retrieval/default.jsonl
@@ -1,25 +1,25 @@
-{"index":13,"Rank":1,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":77.96,"RiaNewsRetrieval (rus-Cyrl)":81.94,"RuBQRetrieval (rus-Cyrl)":73.98}
-{"index":15,"Rank":2,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":77.39,"RiaNewsRetrieval (rus-Cyrl)":80.67,"RuBQRetrieval (rus-Cyrl)":74.11}
-{"index":0,"Rank":3,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":77.1,"RiaNewsRetrieval (rus-Cyrl)":82.98,"RuBQRetrieval (rus-Cyrl)":71.21}
-{"index":11,"Rank":4,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":76.78,"RiaNewsRetrieval (rus-Cyrl)":83.53,"RuBQRetrieval (rus-Cyrl)":70.03}
-{"index":4,"Rank":5,"Model":"ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":72.82,"RiaNewsRetrieval (rus-Cyrl)":78.86,"RuBQRetrieval (rus-Cyrl)":66.77}
-{"index":14,"Rank":6,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":69.91,"RiaNewsRetrieval (rus-Cyrl)":70.24,"RuBQRetrieval (rus-Cyrl)":69.58}
-{"index":16,"Rank":7,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":69.27,"RiaNewsRetrieval (rus-Cyrl)":70.01,"RuBQRetrieval (rus-Cyrl)":68.53}
-{"index":23,"Rank":8,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":67.54,"RiaNewsRetrieval (rus-Cyrl)":69.36,"RuBQRetrieval (rus-Cyrl)":65.71}
-{"index":10,"Rank":9,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":67.34,"RiaNewsRetrieval (rus-Cyrl)":77.83,"RuBQRetrieval (rus-Cyrl)":56.86}
-{"index":24,"Rank":10,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":51.5,"RiaNewsRetrieval (rus-Cyrl)":51.27,"RuBQRetrieval (rus-Cyrl)":51.73}
-{"index":22,"Rank":11,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":44.4,"RiaNewsRetrieval (rus-Cyrl)":51.75,"RuBQRetrieval (rus-Cyrl)":37.04}
-{"index":21,"Rank":12,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":37.26,"RiaNewsRetrieval (rus-Cyrl)":44.82,"RuBQRetrieval (rus-Cyrl)":29.7}
-{"index":17,"Rank":13,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":36.38,"RiaNewsRetrieval (rus-Cyrl)":42.75,"RuBQRetrieval (rus-Cyrl)":30.02}
-{"index":7,"Rank":14,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":31.88,"RiaNewsRetrieval (rus-Cyrl)":34.73,"RuBQRetrieval (rus-Cyrl)":29.03}
-{"index":5,"Rank":15,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":25.6,"RiaNewsRetrieval (rus-Cyrl)":21.4,"RuBQRetrieval (rus-Cyrl)":29.8}
-{"index":9,"Rank":16,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":12.4,"RiaNewsRetrieval (rus-Cyrl)":13.92,"RuBQRetrieval (rus-Cyrl)":10.87}
-{"index":6,"Rank":17,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":11.78,"RiaNewsRetrieval (rus-Cyrl)":11.11,"RuBQRetrieval (rus-Cyrl)":12.45}
-{"index":3,"Rank":18,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":9.68,"RiaNewsRetrieval (rus-Cyrl)":6.72,"RuBQRetrieval (rus-Cyrl)":12.63}
-{"index":2,"Rank":19,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":7.55,"RiaNewsRetrieval (rus-Cyrl)":5.58,"RuBQRetrieval (rus-Cyrl)":9.52}
-{"index":12,"Rank":20,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":7.5,"RiaNewsRetrieval (rus-Cyrl)":4.84,"RuBQRetrieval (rus-Cyrl)":10.15}
-{"index":1,"Rank":21,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":7.37,"RiaNewsRetrieval (rus-Cyrl)":4.14,"RuBQRetrieval (rus-Cyrl)":10.6}
-{"index":8,"Rank":22,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":2.02,"RiaNewsRetrieval (rus-Cyrl)":0.79,"RuBQRetrieval (rus-Cyrl)":3.24}
-{"index":19,"Rank":23,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":1.66,"RiaNewsRetrieval (rus-Cyrl)":0.67,"RuBQRetrieval (rus-Cyrl)":2.64}
-{"index":18,"Rank":24,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","RiaNewsRetrieval (rus-Cyrl)":"","RuBQRetrieval (rus-Cyrl)":8.84}
-{"index":20,"Rank":25,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","RiaNewsRetrieval (rus-Cyrl)":"","RuBQRetrieval (rus-Cyrl)":4.75}
+{"index":0,"Rank":1,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":74.77,"RiaNewsRetrieval (rus-Cyrl)":82.98,"RuBQRetrieval (rus-Cyrl)":71.21,"MIRACLRetrieval (rus-Cyrl)":70.11}
+{"index":15,"Rank":2,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":74.04,"RiaNewsRetrieval (rus-Cyrl)":80.67,"RuBQRetrieval (rus-Cyrl)":74.11,"MIRACLRetrieval (rus-Cyrl)":67.33}
+{"index":14,"Rank":3,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":67.14,"RiaNewsRetrieval (rus-Cyrl)":70.24,"RuBQRetrieval (rus-Cyrl)":69.58,"MIRACLRetrieval (rus-Cyrl)":61.6}
+{"index":16,"Rank":4,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":65.85,"RiaNewsRetrieval (rus-Cyrl)":70.01,"RuBQRetrieval (rus-Cyrl)":68.53,"MIRACLRetrieval (rus-Cyrl)":59.01}
+{"index":5,"Rank":5,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":19.13,"RiaNewsRetrieval (rus-Cyrl)":21.4,"RuBQRetrieval (rus-Cyrl)":29.8,"MIRACLRetrieval (rus-Cyrl)":6.2}
+{"index":9,"Rank":6,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":8.89,"RiaNewsRetrieval (rus-Cyrl)":13.92,"RuBQRetrieval (rus-Cyrl)":10.87,"MIRACLRetrieval (rus-Cyrl)":1.89}
+{"index":6,"Rank":7,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":8.51,"RiaNewsRetrieval (rus-Cyrl)":11.11,"RuBQRetrieval (rus-Cyrl)":12.45,"MIRACLRetrieval (rus-Cyrl)":1.98}
+{"index":19,"Rank":8,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":1.23,"RiaNewsRetrieval (rus-Cyrl)":0.67,"RuBQRetrieval (rus-Cyrl)":2.64,"MIRACLRetrieval (rus-Cyrl)":0.39}
+{"index":1,"Rank":9,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":"","RiaNewsRetrieval (rus-Cyrl)":4.14,"RuBQRetrieval (rus-Cyrl)":10.6,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":2,"Rank":10,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":"","RiaNewsRetrieval (rus-Cyrl)":5.58,"RuBQRetrieval (rus-Cyrl)":9.52,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":3,"Rank":11,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":"","RiaNewsRetrieval (rus-Cyrl)":6.72,"RuBQRetrieval (rus-Cyrl)":12.63,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":4,"Rank":12,"Model":"ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":"","RiaNewsRetrieval (rus-Cyrl)":78.86,"RuBQRetrieval (rus-Cyrl)":66.77,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":7,"Rank":13,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":"","RiaNewsRetrieval (rus-Cyrl)":34.73,"RuBQRetrieval (rus-Cyrl)":29.03,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":8,"Rank":14,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":"","RiaNewsRetrieval (rus-Cyrl)":0.79,"RuBQRetrieval (rus-Cyrl)":3.24,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":10,"Rank":15,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","RiaNewsRetrieval (rus-Cyrl)":77.83,"RuBQRetrieval (rus-Cyrl)":56.86,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":11,"Rank":16,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":"","RiaNewsRetrieval (rus-Cyrl)":83.53,"RuBQRetrieval (rus-Cyrl)":70.03,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":12,"Rank":17,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":"","RiaNewsRetrieval (rus-Cyrl)":4.84,"RuBQRetrieval (rus-Cyrl)":10.15,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":13,"Rank":18,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","RiaNewsRetrieval (rus-Cyrl)":81.94,"RuBQRetrieval (rus-Cyrl)":73.98,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":17,"Rank":19,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":"","RiaNewsRetrieval (rus-Cyrl)":42.75,"RuBQRetrieval (rus-Cyrl)":30.02,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":18,"Rank":20,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":"","RiaNewsRetrieval (rus-Cyrl)":"","RuBQRetrieval (rus-Cyrl)":8.84,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":20,"Rank":21,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":"","RiaNewsRetrieval (rus-Cyrl)":"","RuBQRetrieval (rus-Cyrl)":4.75,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":21,"Rank":22,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":"","RiaNewsRetrieval (rus-Cyrl)":44.82,"RuBQRetrieval (rus-Cyrl)":29.7,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":22,"Rank":23,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":"","RiaNewsRetrieval (rus-Cyrl)":51.75,"RuBQRetrieval (rus-Cyrl)":37.04,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":23,"Rank":24,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":"","RiaNewsRetrieval (rus-Cyrl)":69.36,"RuBQRetrieval (rus-Cyrl)":65.71,"MIRACLRetrieval (rus-Cyrl)":""}
+{"index":24,"Rank":25,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":"","RiaNewsRetrieval (rus-Cyrl)":51.27,"RuBQRetrieval (rus-Cyrl)":51.73,"MIRACLRetrieval (rus-Cyrl)":""}
diff --git a/boards_data/ru/data_tasks/STS/default.jsonl b/boards_data/ru/data_tasks/STS/default.jsonl
index dcf6aaa1f46c1df70e63da7188351aef04f4bf31..2a7e044b28007213f0bb5bc5b0200bc087ba67cc 100644
--- a/boards_data/ru/data_tasks/STS/default.jsonl
+++ b/boards_data/ru/data_tasks/STS/default.jsonl
@@ -1,25 +1,25 @@
-{"index":13,"Rank":1,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":80.15,"RUParaPhraserSTS (rus-Cyrl)":76.17,"RuSTSBenchmarkSTS (rus-Cyrl)":84.13}
-{"index":11,"Rank":2,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":79.85,"RUParaPhraserSTS (rus-Cyrl)":76.36,"RuSTSBenchmarkSTS (rus-Cyrl)":83.35}
-{"index":10,"Rank":3,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":77.91,"RUParaPhraserSTS (rus-Cyrl)":73.56,"RuSTSBenchmarkSTS (rus-Cyrl)":82.26}
-{"index":15,"Rank":4,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":77.48,"RUParaPhraserSTS (rus-Cyrl)":71.82,"RuSTSBenchmarkSTS (rus-Cyrl)":83.15}
-{"index":4,"Rank":5,"Model":"ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":77.42,"RUParaPhraserSTS (rus-Cyrl)":76.16,"RuSTSBenchmarkSTS (rus-Cyrl)":78.69}
-{"index":0,"Rank":6,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":77.39,"RUParaPhraserSTS (rus-Cyrl)":74.9,"RuSTSBenchmarkSTS (rus-Cyrl)":79.87}
-{"index":23,"Rank":7,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":77.37,"RUParaPhraserSTS (rus-Cyrl)":72.97,"RuSTSBenchmarkSTS (rus-Cyrl)":81.77}
-{"index":24,"Rank":8,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":75.32,"RUParaPhraserSTS (rus-Cyrl)":72.15,"RuSTSBenchmarkSTS (rus-Cyrl)":78.48}
-{"index":14,"Rank":9,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":74.9,"RUParaPhraserSTS (rus-Cyrl)":70.17,"RuSTSBenchmarkSTS (rus-Cyrl)":79.64}
-{"index":16,"Rank":10,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":74.27,"RUParaPhraserSTS (rus-Cyrl)":70.46,"RuSTSBenchmarkSTS (rus-Cyrl)":78.08}
-{"index":22,"Rank":11,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":74.1,"RUParaPhraserSTS (rus-Cyrl)":65.74,"RuSTSBenchmarkSTS (rus-Cyrl)":82.46}
-{"index":21,"Rank":12,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":70.71,"RUParaPhraserSTS (rus-Cyrl)":61.87,"RuSTSBenchmarkSTS (rus-Cyrl)":79.55}
-{"index":7,"Rank":13,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":69.6,"RUParaPhraserSTS (rus-Cyrl)":65.87,"RuSTSBenchmarkSTS (rus-Cyrl)":73.32}
-{"index":17,"Rank":14,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":69.54,"RUParaPhraserSTS (rus-Cyrl)":65.74,"RuSTSBenchmarkSTS (rus-Cyrl)":73.34}
-{"index":5,"Rank":15,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":68.19,"RUParaPhraserSTS (rus-Cyrl)":65.17,"RuSTSBenchmarkSTS (rus-Cyrl)":71.22}
-{"index":9,"Rank":16,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":67.28,"RUParaPhraserSTS (rus-Cyrl)":65.14,"RuSTSBenchmarkSTS (rus-Cyrl)":69.43}
-{"index":3,"Rank":17,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":66.13,"RUParaPhraserSTS (rus-Cyrl)":66.24,"RuSTSBenchmarkSTS (rus-Cyrl)":66.03}
-{"index":6,"Rank":18,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":60.44,"RUParaPhraserSTS (rus-Cyrl)":62.06,"RuSTSBenchmarkSTS (rus-Cyrl)":58.82}
-{"index":1,"Rank":19,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":58.36,"RUParaPhraserSTS (rus-Cyrl)":55.01,"RuSTSBenchmarkSTS (rus-Cyrl)":61.72}
-{"index":12,"Rank":20,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":56.25,"RUParaPhraserSTS (rus-Cyrl)":54.03,"RuSTSBenchmarkSTS (rus-Cyrl)":58.47}
-{"index":8,"Rank":21,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":55.78,"RUParaPhraserSTS (rus-Cyrl)":53.41,"RuSTSBenchmarkSTS (rus-Cyrl)":58.16}
-{"index":2,"Rank":22,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":51.84,"RUParaPhraserSTS (rus-Cyrl)":49.72,"RuSTSBenchmarkSTS (rus-Cyrl)":53.95}
-{"index":18,"Rank":23,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":50.9,"RUParaPhraserSTS (rus-Cyrl)":45.47,"RuSTSBenchmarkSTS (rus-Cyrl)":56.33}
-{"index":19,"Rank":24,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":49.74,"RUParaPhraserSTS (rus-Cyrl)":43.93,"RuSTSBenchmarkSTS (rus-Cyrl)":55.56}
-{"index":20,"Rank":25,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":48.92,"RUParaPhraserSTS (rus-Cyrl)":42.15,"RuSTSBenchmarkSTS (rus-Cyrl)":55.68}
+{"index":11,"Rank":1,"Model":"USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Average":75.38,"RUParaPhraserSTS (rus-Cyrl)":76.36,"RuSTSBenchmarkSTS (rus-Cyrl)":83.35,"STS22 (rus-Cyrl)":66.42}
+{"index":0,"Rank":2,"Model":"bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Average":73.68,"RUParaPhraserSTS (rus-Cyrl)":74.9,"RuSTSBenchmarkSTS (rus-Cyrl)":79.87,"STS22 (rus-Cyrl)":66.26}
+{"index":10,"Rank":3,"Model":"USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":73.07,"RUParaPhraserSTS (rus-Cyrl)":73.56,"RuSTSBenchmarkSTS (rus-Cyrl)":82.26,"STS22 (rus-Cyrl)":63.39}
+{"index":23,"Rank":4,"Model":"LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Average":72.54,"RUParaPhraserSTS (rus-Cyrl)":72.97,"RuSTSBenchmarkSTS (rus-Cyrl)":81.77,"STS22 (rus-Cyrl)":62.89}
+{"index":15,"Rank":5,"Model":"multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Average":71.62,"RUParaPhraserSTS (rus-Cyrl)":71.82,"RuSTSBenchmarkSTS (rus-Cyrl)":83.15,"STS22 (rus-Cyrl)":59.89}
+{"index":24,"Rank":6,"Model":"rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":70.23,"RUParaPhraserSTS (rus-Cyrl)":72.15,"RuSTSBenchmarkSTS (rus-Cyrl)":78.48,"STS22 (rus-Cyrl)":60.06}
+{"index":14,"Rank":7,"Model":"multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":70.16,"RUParaPhraserSTS (rus-Cyrl)":70.17,"RuSTSBenchmarkSTS (rus-Cyrl)":79.64,"STS22 (rus-Cyrl)":60.67}
+{"index":16,"Rank":8,"Model":"multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":69.48,"RUParaPhraserSTS (rus-Cyrl)":70.46,"RuSTSBenchmarkSTS (rus-Cyrl)":78.08,"STS22 (rus-Cyrl)":59.9}
+{"index":22,"Rank":9,"Model":"paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Average":68.98,"RUParaPhraserSTS (rus-Cyrl)":65.74,"RuSTSBenchmarkSTS (rus-Cyrl)":82.46,"STS22 (rus-Cyrl)":58.74}
+{"index":21,"Rank":10,"Model":"paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Average":66.17,"RUParaPhraserSTS (rus-Cyrl)":61.87,"RuSTSBenchmarkSTS (rus-Cyrl)":79.55,"STS22 (rus-Cyrl)":57.08}
+{"index":7,"Rank":11,"Model":"LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Average":65.91,"RUParaPhraserSTS (rus-Cyrl)":65.87,"RuSTSBenchmarkSTS (rus-Cyrl)":73.32,"STS22 (rus-Cyrl)":58.53}
+{"index":17,"Rank":12,"Model":"LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Average":65.52,"RUParaPhraserSTS (rus-Cyrl)":65.74,"RuSTSBenchmarkSTS (rus-Cyrl)":73.34,"STS22 (rus-Cyrl)":57.49}
+{"index":5,"Rank":13,"Model":"sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":64.4,"RUParaPhraserSTS (rus-Cyrl)":65.17,"RuSTSBenchmarkSTS (rus-Cyrl)":71.22,"STS22 (rus-Cyrl)":56.82}
+{"index":9,"Rank":14,"Model":"rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Average":61.6,"RUParaPhraserSTS (rus-Cyrl)":65.14,"RuSTSBenchmarkSTS (rus-Cyrl)":69.43,"STS22 (rus-Cyrl)":50.23}
+{"index":3,"Rank":15,"Model":"rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":61.18,"RUParaPhraserSTS (rus-Cyrl)":66.24,"RuSTSBenchmarkSTS (rus-Cyrl)":66.03,"STS22 (rus-Cyrl)":51.27}
+{"index":6,"Rank":16,"Model":"sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Average":57.21,"RUParaPhraserSTS (rus-Cyrl)":62.06,"RuSTSBenchmarkSTS (rus-Cyrl)":58.82,"STS22 (rus-Cyrl)":50.75}
+{"index":1,"Rank":17,"Model":"distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Average":56.2,"RUParaPhraserSTS (rus-Cyrl)":55.01,"RuSTSBenchmarkSTS (rus-Cyrl)":61.72,"STS22 (rus-Cyrl)":51.87}
+{"index":12,"Rank":18,"Model":"deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Average":53.39,"RUParaPhraserSTS (rus-Cyrl)":54.03,"RuSTSBenchmarkSTS (rus-Cyrl)":58.47,"STS22 (rus-Cyrl)":47.67}
+{"index":8,"Rank":19,"Model":"rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Average":53.15,"RUParaPhraserSTS (rus-Cyrl)":53.41,"RuSTSBenchmarkSTS (rus-Cyrl)":58.16,"STS22 (rus-Cyrl)":47.88}
+{"index":2,"Rank":20,"Model":"rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Average":46.22,"RUParaPhraserSTS (rus-Cyrl)":49.72,"RuSTSBenchmarkSTS (rus-Cyrl)":53.95,"STS22 (rus-Cyrl)":34.98}
+{"index":19,"Rank":21,"Model":"all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Average":38.07,"RUParaPhraserSTS (rus-Cyrl)":43.93,"RuSTSBenchmarkSTS (rus-Cyrl)":55.56,"STS22 (rus-Cyrl)":14.72}
+{"index":20,"Rank":22,"Model":"all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Average":37.89,"RUParaPhraserSTS (rus-Cyrl)":42.15,"RuSTSBenchmarkSTS (rus-Cyrl)":55.68,"STS22 (rus-Cyrl)":15.83}
+{"index":18,"Rank":23,"Model":"all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Average":37.66,"RUParaPhraserSTS (rus-Cyrl)":45.47,"RuSTSBenchmarkSTS (rus-Cyrl)":56.33,"STS22 (rus-Cyrl)":11.19}
+{"index":4,"Rank":24,"Model":"ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Average":"","RUParaPhraserSTS (rus-Cyrl)":76.16,"RuSTSBenchmarkSTS (rus-Cyrl)":78.69,"STS22 (rus-Cyrl)":""}
+{"index":13,"Rank":25,"Model":"e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Average":"","RUParaPhraserSTS (rus-Cyrl)":76.17,"RuSTSBenchmarkSTS (rus-Cyrl)":84.13,"STS22 (rus-Cyrl)":""}
diff --git a/config.yaml b/config.yaml
index f69b40fd9611ad195ae019dfd403510c1aab120c..f6eca18dedf670715e1d1de2b0206b0b01c2ca32 100644
--- a/config.yaml
+++ b/config.yaml
@@ -43,6 +43,11 @@ tasks:
     metric: spearman
     metric_description: "Spearman correlation based on the model's similarity metric (usually cosine)"
     task_description: "Summarization is the task of generating a summary of a text."
+  MultilabelClassification:
+    icon: "🏷️"
+    metric: accuracy
+    metric_description: "Accuracy"
+    task_description: "Multilabel classification is the task of assigning multiple labels to a text."
   InstructionRetrieval:
     icon: "🔎📋"
     metric: "p-MRR"
@@ -382,6 +387,8 @@ boards:
         - RuReviewsClassification (rus-Cyrl)
         - RuSciBenchGRNTIClassification (rus-Cyrl)
         - RuSciBenchOECDClassification (rus-Cyrl)
+        - MassiveIntentClassification (rus-Cyrl)
+        - MassiveScenarioClassification (rus-Cyrl)
       Clustering:
         - GeoreviewClusteringP2P (rus-Cyrl)
         - RuSciBenchGRNTIClusteringP2P (rus-Cyrl)
@@ -390,12 +397,18 @@ boards:
         - TERRa (rus-Cyrl)
       Reranking:
         - RuBQReranking (rus-Cyrl)
+        - MIRACLReranking (rus-Cyrl)
       Retrieval:
         - RiaNewsRetrieval (rus-Cyrl)
         - RuBQRetrieval (rus-Cyrl)
+        - MIRACLRetrieval (rus-Cyrl)
       STS:
         - RUParaPhraserSTS (rus-Cyrl)
         - RuSTSBenchmarkSTS (rus-Cyrl)
+        - STS22 (rus-Cyrl)
+      MultilabelClassification:
+        - CEDRClassification (rus-Cyrl)
+        - SensitiveTopicsClassification (rus-Cyrl)
   se:
     title: Swedish
     language_long: Swedish
diff --git a/refresh.py b/refresh.py
index 9ca11d5cdc535e9a045f66df5644ff3106506015..e0ed17f3d9cc966bbe16ccec1887f25b2f6a528a 100644
--- a/refresh.py
+++ b/refresh.py
@@ -161,6 +161,8 @@ def filter_metric_external(x, task, metrics) -> bool:
     # This is a hack for the passkey and needle retrieval test, which reports ndcg_at_1 (i.e. accuracy), rather than the ndcg_at_10 that is commonly used for retrieval tasks.
     if x["mteb_dataset_name"] in ["LEMBNeedleRetrieval", "LEMBPasskeyRetrieval"]:
         return bool(x["mteb_task"] == task and x["metric"] == "ndcg_at_1")
+    elif x["mteb_dataset_name"] == "MIRACLReranking":
+        return bool(x["mteb_task"] == task and x["metric"] in ["NDCG@10(MIRACL)"])
     else:
         return bool(x["mteb_task"] == task and x["metric"] in metrics)
 
@@ -534,7 +536,7 @@ def get_mteb_average(task_dict: dict) -> tuple[Any, dict]:
         rank=False,
     )
     # Debugging:
     # DATA_OVERALL.to_csv("overall.csv")
     DATA_OVERALL.insert(
         1,
         f"Average ({len(all_tasks)} datasets)",
@@ -608,6 +610,6 @@ def refresh_leaderboard() -> tuple[list, dict]:
         leave=True,
     )
     for board, board_config in pbar_tasks:
         boards_data[board] = {"data_overall": None, "data_tasks": {}}
         pbar_tasks.set_description(f"Fetching leaderboard results for {board!r}")
         pbar_tasks.refresh()