add results for keeeeenw/MicroLlama-text-embedding (#46)
keeeeenw authored Nov 14, 2024 · 1 parent db0aadb · commit 9ebd921
Showing 24 changed files with 2,140 additions and 0 deletions.
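
For context, result files like the ones below are typically produced by running the model through the mteb benchmark harness (each file records mteb_version 1.19.2). What follows is a minimal sketch, assuming the standard mteb Python API and the three Amazon classification tasks shown in this diff; the exact invocation behind this commit is not recorded here, and the output folder layout is an assumption.

```python
import mteb
from sentence_transformers import SentenceTransformer

# Embedding model under evaluation.
model = SentenceTransformer("keeeeenw/MicroLlama-text-embedding")

# The three classification tasks whose result files appear below.
tasks = mteb.get_tasks(tasks=[
    "AmazonCounterfactualClassification",
    "AmazonPolarityClassification",
    "AmazonReviewsClassification",
])

evaluation = mteb.MTEB(tasks=tasks)
# Writes one JSON result file per task into the output folder,
# including per-experiment scores like those shown in this diff.
evaluation.run(model, output_folder="results")
```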
@@ -0,0 +1,179 @@
{
"dataset_revision": "e8379541af4e31359cca9fbcf4b00f2671dba205",
"evaluation_time": 11.987384557723999,
"kg_co2_emissions": null,
"mteb_version": "1.19.2",
"scores": {
"test": [
{
"accuracy": 0.7337331334332833,
"ap": 0.21853937939763662,
"ap_weighted": 0.21853937939763662,
"f1": 0.6047021327420932,
"f1_weighted": 0.7829868030563938,
"hf_subset": "en-ext",
"languages": [
"eng-Latn"
],
"main_score": 0.7337331334332833,
"scores_per_experiment": [
{
"accuracy": 0.802848575712144,
"ap": 0.253603822178596,
"ap_weighted": 0.253603822178596,
"f1": 0.6575129710707296,
"f1_weighted": 0.834123326078018
},
{
"accuracy": 0.6866566716641679,
"ap": 0.20668512763849248,
"ap_weighted": 0.20668512763849248,
"f1": 0.5748936494625295,
"f1_weighted": 0.7474400697036555
},
{
"accuracy": 0.732383808095952,
"ap": 0.2165878487007201,
"ap_weighted": 0.2165878487007201,
"f1": 0.6032788114308563,
"f1_weighted": 0.7824310801277565
},
{
"accuracy": 0.7338830584707646,
"ap": 0.2313181006683483,
"ap_weighted": 0.2313181006683483,
"f1": 0.6113675623997276,
"f1_weighted": 0.7840996903717105
},
{
"accuracy": 0.704647676161919,
"ap": 0.21231496276527717,
"ap_weighted": 0.21231496276527717,
"f1": 0.5869943077701609,
"f1_weighted": 0.7614914384186111
},
{
"accuracy": 0.7158920539730135,
"ap": 0.20174624917756948,
"ap_weighted": 0.20174624917756948,
"f1": 0.5871186601237535,
"f1_weighted": 0.7696484640228388
},
{
"accuracy": 0.6911544227886057,
"ap": 0.20893594445031208,
"ap_weighted": 0.20893594445031208,
"f1": 0.5783455674760023,
"f1_weighted": 0.7509921634326823
},
{
"accuracy": 0.767616191904048,
"ap": 0.21283962335379072,
"ap_weighted": 0.21283962335379072,
"f1": 0.6185055350553506,
"f1_weighted": 0.8073075178278019
},
{
"accuracy": 0.7563718140929535,
"ap": 0.23614888342057133,
"ap_weighted": 0.23614888342057133,
"f1": 0.6252922340292213,
"f1_weighted": 0.8007295427076767
},
{
"accuracy": 0.7458770614692654,
"ap": 0.20521323162268876,
"ap_weighted": 0.20521323162268876,
"f1": 0.6037120286026,
"f1_weighted": 0.7916047378731865
}
]
},
{
"accuracy": 0.7423880597014926,
"ap": 0.37447869256341476,
"ap_weighted": 0.37447869256341476,
"f1": 0.6838136251649617,
"f1_weighted": 0.7661970582344355,
"hf_subset": "en",
"languages": [
"eng-Latn"
],
"main_score": 0.7423880597014926,
"scores_per_experiment": [
{
"accuracy": 0.6805970149253732,
"ap": 0.3160067410365431,
"ap_weighted": 0.3160067410365431,
"f1": 0.6259951377802819,
"f1_weighted": 0.713016879480271
},
{
"accuracy": 0.7970149253731343,
"ap": 0.4395383163598705,
"ap_weighted": 0.4395383163598705,
"f1": 0.7389440872783947,
"f1_weighted": 0.8139216250716027
},
{
"accuracy": 0.6895522388059702,
"ap": 0.3361540452298984,
"ap_weighted": 0.3361540452298984,
"f1": 0.6402647420210843,
"f1_weighted": 0.7213506238284771
},
{
"accuracy": 0.7253731343283583,
"ap": 0.3523102253901136,
"ap_weighted": 0.3523102253901136,
"f1": 0.6662479424759595,
"f1_weighted": 0.7517907732411321
},
{
"accuracy": 0.7895522388059701,
"ap": 0.40282679552395184,
"ap_weighted": 0.40282679552395184,
"f1": 0.7194029850746269,
"f1_weighted": 0.8048384940966806
},
{
"accuracy": 0.7417910447761195,
"ap": 0.36612848992794356,
"ap_weighted": 0.36612848992794356,
"f1": 0.6807335608495777,
"f1_weighted": 0.7657555862489942
},
{
"accuracy": 0.7865671641791044,
"ap": 0.41328064488817207,
"ap_weighted": 0.41328064488817207,
"f1": 0.7229795957312084,
"f1_weighted": 0.8038011780575062
},
{
"accuracy": 0.7820895522388059,
"ap": 0.3990137475187229,
"ap_weighted": 0.3990137475187229,
"f1": 0.7144975249836556,
"f1_weighted": 0.7990912278060524
},
{
"accuracy": 0.7059701492537314,
"ap": 0.35855036478570557,
"ap_weighted": 0.35855036478570557,
"f1": 0.6588515348967043,
"f1_weighted": 0.7360579391443631
},
{
"accuracy": 0.7253731343283583,
"ap": 0.3609775549732255,
"ap_weighted": 0.3609775549732255,
"f1": 0.6702191405581236,
"f1_weighted": 0.7523462553692759
}
]
}
]
},
"task_name": "AmazonCounterfactualClassification"
}
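
Each result file stores ten independent runs under scores_per_experiment, and main_score is the mean accuracy across them (for the en-ext subset above, the ten accuracies average to exactly 0.7337331334332833). A quick sanity check, assuming the file has been saved locally as AmazonCounterfactualClassification.json (the local path is an assumption):

```python
import json
from statistics import mean

# Path is an assumption: wherever this result file was saved locally.
with open("AmazonCounterfactualClassification.json") as f:
    result = json.load(f)

for subset in result["scores"]["test"]:
    runs = [r["accuracy"] for r in subset["scores_per_experiment"]]
    # main_score should equal the mean accuracy of the ten runs.
    assert abs(mean(runs) - subset["main_score"]) < 1e-9
    print(subset["hf_subset"], round(subset["main_score"], 4))
```

Running this against the file above prints en-ext 0.7337 and en 0.7424, matching the main_score fields.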
@@ -0,0 +1,95 @@
{
"dataset_revision": "e2d317d38cd51312af73b3d32a06d1a08b442046",
"evaluation_time": 1085.8549246788025,
"kg_co2_emissions": null,
"mteb_version": "1.19.2",
"scores": {
"test": [
{
"accuracy": 0.66766525,
"ap": 0.6179460240681391,
"ap_weighted": 0.6179460240681391,
"f1": 0.664339976948822,
"f1_weighted": 0.664339976948822,
"hf_subset": "default",
"languages": [
"eng-Latn"
],
"main_score": 0.66766525,
"scores_per_experiment": [
{
"accuracy": 0.7112825,
"ap": 0.6579411226560989,
"ap_weighted": 0.6579411226560989,
"f1": 0.709725973236081,
"f1_weighted": 0.709725973236081
},
{
"accuracy": 0.634495,
"ap": 0.5858987399082332,
"ap_weighted": 0.5858987399082332,
"f1": 0.6344119180519443,
"f1_weighted": 0.6344119180519444
},
{
"accuracy": 0.64797,
"ap": 0.5934658581495289,
"ap_weighted": 0.5934658581495289,
"f1": 0.6466131140766969,
"f1_weighted": 0.6466131140766967
},
{
"accuracy": 0.691885,
"ap": 0.6494519509882285,
"ap_weighted": 0.6494519509882285,
"f1": 0.6842047319740125,
"f1_weighted": 0.6842047319740125
},
{
"accuracy": 0.736555,
"ap": 0.6802131805554019,
"ap_weighted": 0.6802131805554019,
"f1": 0.7359401246910472,
"f1_weighted": 0.7359401246910472
},
{
"accuracy": 0.65907,
"ap": 0.6066532921967269,
"ap_weighted": 0.6066532921967269,
"f1": 0.6586877623003314,
"f1_weighted": 0.6586877623003313
},
{
"accuracy": 0.63907,
"ap": 0.5840529066642646,
"ap_weighted": 0.5840529066642646,
"f1": 0.6288309727480417,
"f1_weighted": 0.6288309727480416
},
{
"accuracy": 0.696175,
"ap": 0.6592946865656213,
"ap_weighted": 0.6592946865656213,
"f1": 0.685333224077756,
"f1_weighted": 0.685333224077756
},
{
"accuracy": 0.6200525,
"ap": 0.5740974159153149,
"ap_weighted": 0.5740974159153149,
"f1": 0.6199965644229164,
"f1_weighted": 0.6199965644229164
},
{
"accuracy": 0.6400975,
"ap": 0.5883910870819724,
"ap_weighted": 0.5883910870819724,
"f1": 0.6396553839093924,
"f1_weighted": 0.6396553839093924
}
]
}
]
},
"task_name": "AmazonPolarityClassification"
}
@@ -0,0 +1,73 @@
{
"dataset_revision": "1399c76144fd37290681b995c656ef9b2e06e26d",
"evaluation_time": 13.751401424407959,
"kg_co2_emissions": null,
"mteb_version": "1.19.2",
"scores": {
"test": [
{
"accuracy": 0.33256,
"f1": 0.329834021844663,
"f1_weighted": 0.329834021844663,
"hf_subset": "en",
"languages": [
"eng-Latn"
],
"main_score": 0.33256,
"scores_per_experiment": [
{
"accuracy": 0.3684,
"f1": 0.3585932746395291,
"f1_weighted": 0.3585932746395291
},
{
"accuracy": 0.3446,
"f1": 0.3442223672743794,
"f1_weighted": 0.3442223672743794
},
{
"accuracy": 0.3298,
"f1": 0.3285528766931936,
"f1_weighted": 0.32855287669319355
},
{
"accuracy": 0.3204,
"f1": 0.32472725024209953,
"f1_weighted": 0.32472725024209953
},
{
"accuracy": 0.3722,
"f1": 0.3597359132663452,
"f1_weighted": 0.35973591326634513
},
{
"accuracy": 0.3306,
"f1": 0.3258214584304595,
"f1_weighted": 0.3258214584304595
},
{
"accuracy": 0.2882,
"f1": 0.27992069615771664,
"f1_weighted": 0.2799206961577166
},
{
"accuracy": 0.351,
"f1": 0.3518605260392184,
"f1_weighted": 0.35186052603921836
},
{
"accuracy": 0.3332,
"f1": 0.3356947366029444,
"f1_weighted": 0.3356947366029444
},
{
"accuracy": 0.2872,
"f1": 0.2892111191007444,
"f1_weighted": 0.2892111191007444
}
]
}
]
},
"task_name": "AmazonReviewsClassification"
}
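
With all 24 files from this commit in place, the headline numbers can be pulled into one quick summary. A sketch, assuming the files sit under a local results/ tree and that a "test" split is present (both the path pattern and the presence of other JSON files alongside the results are assumptions):

```python
import glob
import json

# Path pattern is an assumption about the local checkout layout.
for path in sorted(glob.glob("results/**/*.json", recursive=True)):
    with open(path) as f:
        result = json.load(f)
    if "scores" not in result:
        # Skip any non-result JSON that may live alongside the files.
        continue
    for subset in result["scores"].get("test", []):
        print(f'{result["task_name"]:40s} {subset["hf_subset"]:8s} '
              f'{subset["main_score"]:.4f}')
```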