Each record in this dataset describes one full fine-tuning run of a Hugging Face checkpoint and has the fields `Model_name` (string), `Train_size` (int64), `Test_size` (int64), `arg` (dict of training arguments), `lora` (null), `Parameters` (int64), `Trainable_parameters` (int64), `r` (null), `Memory Allocation` (string), `Training Time` (string), and `Performance` (dict of classification metrics). All runs share the same split sizes (`Train_size` = 50,775, `Test_size` = 12,652), the `lora` and `r` fields are `null` throughout (no LoRA configuration was recorded), and `Parameters` equals `Trainable_parameters` for every model, i.e. all weights were updated during fine-tuning (see the parameter-count sketch below).
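The `Parameters` / `Trainable_parameters` counts can be obtained generically as in the sketch below. The checkpoint, the `AutoModelForSequenceClassification` head, and `num_labels` are assumptions made for illustration only; the exact totals in the table depend on each model's classification head and the number of target classes, which the card does not state.

```python
# Sketch: counting total vs. trainable parameters (the two per-run columns).
# Checkpoint and num_labels are placeholders, not taken from the card.
from transformers import AutoModelForSequenceClassification

model = AutoModelForSequenceClassification.from_pretrained(
    "google-bert/bert-base-uncased",   # placeholder: any checkpoint from the results table
    num_labels=2,                      # placeholder: the card does not state the label count
)

total = sum(p.numel() for p in model.parameters())
trainable = sum(p.numel() for p in model.parameters() if p.requires_grad)
print(f"Parameters: {total:,}  Trainable_parameters: {trainable:,}")
```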
Every run was trained with the identical arguments recorded in the `arg` field:

```json
{
  "auto_find_batch_size": true,
  "gradient_accumulation_steps": 4,
  "learning_rate": 0.00005,
  "logging_steps": 1,
  "lr_scheduler_type": "linear",
  "num_train_epochs": 1,
  "optim": "adamw_8bit",
  "output_dir": "outputs",
  "report_to": "none",
  "save_strategy": "no",
  "save_total_limit": 0,
  "seed": 3407,
  "warmup_steps": 5,
  "weight_decay": 0.01
}
```
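These field names match `transformers.TrainingArguments` parameters, so a comparable run can be sketched as follows. Only the hyperparameters are taken from the `arg` dict above; the checkpoint, the tiny placeholder dataset, the tokenization step, and `num_labels` are assumptions, and `optim="adamw_8bit"` / `auto_find_batch_size=True` additionally require `bitsandbytes`, a CUDA device, and `accelerate`.

```python
# Minimal sketch, not the original training script: only the hyperparameters come
# from the `arg` dict above; checkpoint, dataset, and num_labels are placeholders.
from datasets import Dataset
from transformers import (AutoModelForSequenceClassification, AutoTokenizer,
                          Trainer, TrainingArguments)

model_name = "FacebookAI/roberta-base"  # placeholder: any checkpoint from the results table
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name, num_labels=2)

# Tiny toy dataset so the sketch is self-contained; the real 50,775 / 12,652 splits
# are not included in this card.
raw = Dataset.from_dict({"text": ["a positive example", "a negative example"] * 8,
                         "label": [1, 0] * 8})
encoded = raw.map(lambda b: tokenizer(b["text"], truncation=True, padding="max_length",
                                      max_length=64), batched=True)

args = TrainingArguments(
    output_dir="outputs",
    num_train_epochs=1,
    learning_rate=5e-5,
    gradient_accumulation_steps=4,
    lr_scheduler_type="linear",
    warmup_steps=5,
    weight_decay=0.01,
    optim="adamw_8bit",           # needs bitsandbytes + a CUDA device
    auto_find_batch_size=True,    # needs accelerate
    logging_steps=1,
    save_strategy="no",
    save_total_limit=0,
    report_to="none",
    seed=3407,
)

trainer = Trainer(model=model, args=args, train_dataset=encoded, eval_dataset=encoded)
trainer.train()
```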
Per-model results:

| Model | Parameters (all trainable) | Memory Allocation | Training Time | Accuracy | F1 (macro) | F1 (weighted) | Precision | Recall |
|---|---|---|---|---|---|---|---|---|
| google-t5/t5-large | 738,731,021 | 7375.84 | 1537.37 | 0.9052323743281695 | 0.9013383020683862 | 0.9054479602946873 | 0.9022590107045835 | 0.9006419800608593 |
| RUCAIBox/mvp | 407,356,429 | 4020.82 | 792.58 | 0.902782168827063 | 0.8984511371599229 | 0.9029306955968787 | 0.8991251858933664 | 0.8980192081833427 |
| facebook/bart-large-mnli | 407,354,381 | 3802.02 | 753.74 | 0.9034935188112552 | 0.8993653479985605 | 0.9036737653282353 | 0.8999061312461678 | 0.8990265892986149 |
| google/flan-t5-base | 223,504,141 | 3021.62 | 843.62 | 0.8942459690167562 | 0.8890799745949748 | 0.8943831395036189 | 0.8901872032400588 | 0.8881740747405792 |
| facebook/bart-large | 407,354,381 | 3852.37 | 761.91 | 0.9035725576983876 | 0.8988251057596799 | 0.9038110012738504 | 0.8998869801912316 | 0.8981101341905647 |
| FacebookAI/roberta-base | 124,655,629 | 2059.77 | 240.64 | 0.8910844135314575 | 0.8854480532471855 | 0.89111636453489 | 0.8865684564931843 | 0.8845284091868102 |
| google-bert/bert-base-uncased | 109,492,237 | 1174.0 | 239.07 | 0.8925861523869744 | 0.8884670370416861 | 0.8927434196774904 | 0.8894476885442704 | 0.887771950922641 |
| google/rembert | 575,935,373 | 5313.94 | 929.52 | 0.9016756244072084 | 0.8980959690011386 | 0.9019092188725625 | 0.8995002934922369 | 0.8970123334894192 |
| FacebookAI/xlm-roberta-large | 559,903,757 | 5961.86 | 634.22 | 0.8989092633575719 | 0.8944985914086196 | 0.8991340288782671 | 0.8951417860849619 | 0.8942740325327818 |
| FacebookAI/roberta-large | 355,373,069 | 3342.35 | 610.48 | 0.8980398355991147 | 0.8939876034698826 | 0.8982815154877998 | 0.8938544565380185 | 0.8945658739294429 |
| google-bert/bert-large-uncased | 335,155,213 | 3102.98 | 611.5 | 0.8946411634524186 | 0.8902450938100885 | 0.8949261744250797 | 0.8916197995679069 | 0.8893450540850714 |
| answerdotai/ModernBERT-large | 395,844,621 | 3838.76 | 707.87 | 0.874486247233639 | 0.8655635850850784 | 0.8746576186428053 | 0.866043951439592 | 0.865388449241492 |
| microsoft/deberta-large | 406,225,933 | 4623.97 | 1046.79 | 0.9013594688586785 | 0.8972469158394862 | 0.9015860784281406 | 0.897239329011824 | 0.8975236121194432 |
| albert/albert-xxlarge-v2 | 222,648,845 | 5295.56 | 2968.8 | 0.9044419854568447 | 0.899875329648251 | 0.9046316917695428 | 0.9000605679613464 | 0.8998927286198609 |
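The `Performance` dict holds standard classification metrics. A minimal sketch of computing an equivalent dict with scikit-learn is below; the labels and predictions are toy placeholders, and macro averaging for `precision` and `recall` is an assumption (it is consistent with the reported `f1_macro`, but the card does not state the averaging mode).

```python
# Sketch of the metric computation; y_true / y_pred are toy placeholders, and
# macro averaging for precision/recall is an assumption not stated by the card.
from sklearn.metrics import accuracy_score, f1_score, precision_score, recall_score

y_true = [0, 2, 1, 1, 0, 2, 2, 1]   # placeholder gold labels
y_pred = [0, 2, 1, 0, 0, 2, 1, 1]   # placeholder model predictions

performance = {
    "accuracy":    accuracy_score(y_true, y_pred),
    "f1_macro":    f1_score(y_true, y_pred, average="macro"),
    "f1_weighted": f1_score(y_true, y_pred, average="weighted"),
    "precision":   precision_score(y_true, y_pred, average="macro"),
    "recall":      recall_score(y_true, y_pred, average="macro"),
}
print(performance)
```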