add jasper results
DunZhang committed Dec 11, 2024
1 parent 2a8b9de commit c9a748c
Showing 68 changed files with 6,342 additions and 0 deletions.
@@ -0,0 +1,179 @@
{
"dataset_revision": "e8379541af4e31359cca9fbcf4b00f2671dba205",
"task_name": "AmazonCounterfactualClassification",
"mteb_version": "1.21.0",
"scores": {
"test": [
{
"accuracy": 0.957271,
"f1": 0.892545,
"f1_weighted": 0.958563,
"ap": 0.671563,
"ap_weighted": 0.671563,
"scores_per_experiment": [
{
"accuracy": 0.956522,
"f1": 0.890448,
"f1_weighted": 0.957797,
"ap": 0.665797,
"ap_weighted": 0.665797
},
{
"accuracy": 0.957271,
"f1": 0.892652,
"f1_weighted": 0.958582,
"ap": 0.671812,
"ap_weighted": 0.671812
},
{
"accuracy": 0.958771,
"f1": 0.895191,
"f1_weighted": 0.959811,
"ap": 0.678327,
"ap_weighted": 0.678327
},
{
"accuracy": 0.955022,
"f1": 0.88926,
"f1_weighted": 0.956814,
"ap": 0.663741,
"ap_weighted": 0.663741
},
{
"accuracy": 0.95952,
"f1": 0.897401,
"f1_weighted": 0.960597,
"ap": 0.684391,
"ap_weighted": 0.684391
},
{
"accuracy": 0.957271,
"f1": 0.892652,
"f1_weighted": 0.958582,
"ap": 0.671812,
"ap_weighted": 0.671812
},
{
"accuracy": 0.957271,
"f1": 0.892652,
"f1_weighted": 0.958582,
"ap": 0.671812,
"ap_weighted": 0.671812
},
{
"accuracy": 0.957271,
"f1": 0.89202,
"f1_weighted": 0.958467,
"ap": 0.669921,
"ap_weighted": 0.669921
},
{
"accuracy": 0.958771,
"f1": 0.895191,
"f1_weighted": 0.959811,
"ap": 0.678327,
"ap_weighted": 0.678327
},
{
"accuracy": 0.955022,
"f1": 0.887983,
"f1_weighted": 0.956582,
"ap": 0.659695,
"ap_weighted": 0.659695
}
],
"main_score": 0.957271,
"hf_subset": "en-ext",
"languages": [
"eng-Latn"
]
},
{
"accuracy": 0.937761,
"f1": 0.907582,
"f1_weighted": 0.93974,
"ap": 0.748876,
"ap_weighted": 0.748876,
"scores_per_experiment": [
{
"accuracy": 0.935821,
"f1": 0.905149,
"f1_weighted": 0.937995,
"ap": 0.74288,
"ap_weighted": 0.74288
},
{
"accuracy": 0.938806,
"f1": 0.908639,
"f1_weighted": 0.940608,
"ap": 0.751214,
"ap_weighted": 0.751214
},
{
"accuracy": 0.940299,
"f1": 0.911095,
"f1_weighted": 0.942124,
"ap": 0.757028,
"ap_weighted": 0.757028
},
{
"accuracy": 0.940299,
"f1": 0.911095,
"f1_weighted": 0.942124,
"ap": 0.757028,
"ap_weighted": 0.757028
},
{
"accuracy": 0.934328,
"f1": 0.903186,
"f1_weighted": 0.936623,
"ap": 0.738283,
"ap_weighted": 0.738283
},
{
"accuracy": 0.943284,
"f1": 0.914666,
"f1_weighted": 0.944759,
"ap": 0.765872,
"ap_weighted": 0.765872
},
{
"accuracy": 0.940299,
"f1": 0.910638,
"f1_weighted": 0.941989,
"ap": 0.756036,
"ap_weighted": 0.756036
},
{
"accuracy": 0.941791,
"f1": 0.912191,
"f1_weighted": 0.943237,
"ap": 0.760014,
"ap_weighted": 0.760014
},
{
"accuracy": 0.935821,
"f1": 0.905149,
"f1_weighted": 0.937995,
"ap": 0.74288,
"ap_weighted": 0.74288
},
{
"accuracy": 0.926866,
"f1": 0.894018,
"f1_weighted": 0.929948,
"ap": 0.71752,
"ap_weighted": 0.71752
}
],
"main_score": 0.937761,
"hf_subset": "en",
"languages": [
"eng-Latn"
]
}
]
},
"evaluation_time": 33.89958691596985,
"kg_co2_emissions": null
}
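Result files in this format are what the `mteb` library writes out when a model is evaluated. A minimal sketch of reproducing a file like the one above, assuming the jasper checkpoint is published on Hugging Face as `infgrad/jasper_en_vision_language_v1` (the exact model ID is an assumption; the commit itself does not name the checkpoint):

```python
import mteb
from sentence_transformers import SentenceTransformer

# Assumed model ID -- not stated anywhere in this commit.
model = SentenceTransformer("infgrad/jasper_en_vision_language_v1",
                            trust_remote_code=True)

tasks = mteb.get_tasks(tasks=["AmazonCounterfactualClassification"])
evaluation = mteb.MTEB(tasks=tasks)
# Writes one JSON file per task (dataset revision, per-experiment
# scores, aggregate scores, evaluation time) under the output folder.
evaluation.run(model, output_folder="results")
```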
@@ -0,0 +1,95 @@
{
"dataset_revision": "e2d317d38cd51312af73b3d32a06d1a08b442046",
"task_name": "AmazonPolarityClassification",
"mteb_version": "1.21.0",
"scores": {
"test": [
{
"accuracy": 0.975809,
"f1": 0.975808,
"f1_weighted": 0.975808,
"ap": 0.963911,
"ap_weighted": 0.963911,
"scores_per_experiment": [
{
"accuracy": 0.975982,
"f1": 0.975982,
"f1_weighted": 0.975982,
"ap": 0.96371,
"ap_weighted": 0.96371
},
{
"accuracy": 0.975827,
"f1": 0.975827,
"f1_weighted": 0.975827,
"ap": 0.962876,
"ap_weighted": 0.962876
},
{
"accuracy": 0.976105,
"f1": 0.976105,
"f1_weighted": 0.976105,
"ap": 0.965518,
"ap_weighted": 0.965518
},
{
"accuracy": 0.975648,
"f1": 0.975647,
"f1_weighted": 0.975647,
"ap": 0.965885,
"ap_weighted": 0.965885
},
{
"accuracy": 0.975555,
"f1": 0.975554,
"f1_weighted": 0.975554,
"ap": 0.966314,
"ap_weighted": 0.966314
},
{
"accuracy": 0.976137,
"f1": 0.976137,
"f1_weighted": 0.976137,
"ap": 0.964252,
"ap_weighted": 0.964252
},
{
"accuracy": 0.97609,
"f1": 0.97609,
"f1_weighted": 0.97609,
"ap": 0.964401,
"ap_weighted": 0.964401
},
{
"accuracy": 0.976093,
"f1": 0.976092,
"f1_weighted": 0.976092,
"ap": 0.964176,
"ap_weighted": 0.964176
},
{
"accuracy": 0.975413,
"f1": 0.975412,
"f1_weighted": 0.975412,
"ap": 0.961334,
"ap_weighted": 0.961334
},
{
"accuracy": 0.975237,
"f1": 0.975237,
"f1_weighted": 0.975237,
"ap": 0.96064,
"ap_weighted": 0.96064
}
],
"main_score": 0.975809,
"hf_subset": "default",
"languages": [
"eng-Latn"
]
}
]
},
"evaluation_time": 556.4878182411194,
"kg_co2_emissions": null
}
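The aggregate numbers at the top of each block appear to be the mean of the ten `scores_per_experiment` entries; this holds to six decimals for both files above (e.g. the en-ext accuracies average to 0.9572712, reported as 0.957271), though that is an observation about these files rather than a documented guarantee. A small sketch that checks it, assuming the file has been saved locally under an illustrative name:

```python
import json
from statistics import mean

# Illustrative path; the actual layout in the results repo may differ.
with open("AmazonCounterfactualClassification.json") as f:
    result = json.load(f)

for block in result["scores"]["test"]:
    per_run = [run["accuracy"] for run in block["scores_per_experiment"]]
    # e.g. en-ext: mean(per_run) = 0.9572712 -> reported 0.957271
    assert abs(mean(per_run) - block["accuracy"]) < 1e-6
    print(block["hf_subset"], block["main_score"])
```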
@@ -0,0 +1,73 @@
{
"dataset_revision": "1399c76144fd37290681b995c656ef9b2e06e26d",
"task_name": "AmazonReviewsClassification",
"mteb_version": "1.21.0",
"scores": {
"test": [
{
"accuracy": 0.62918,
"f1": 0.606961,
"f1_weighted": 0.606961,
"scores_per_experiment": [
{
"accuracy": 0.6372,
"f1": 0.620473,
"f1_weighted": 0.620473
},
{
"accuracy": 0.6306,
"f1": 0.609873,
"f1_weighted": 0.609873
},
{
"accuracy": 0.6298,
"f1": 0.605227,
"f1_weighted": 0.605227
},
{
"accuracy": 0.627,
"f1": 0.60998,
"f1_weighted": 0.60998
},
{
"accuracy": 0.6356,
"f1": 0.613666,
"f1_weighted": 0.613666
},
{
"accuracy": 0.6442,
"f1": 0.630082,
"f1_weighted": 0.630082
},
{
"accuracy": 0.6482,
"f1": 0.638119,
"f1_weighted": 0.638119
},
{
"accuracy": 0.6486,
"f1": 0.639602,
"f1_weighted": 0.639602
},
{
"accuracy": 0.6296,
"f1": 0.607217,
"f1_weighted": 0.607217
},
{
"accuracy": 0.561,
"f1": 0.495375,
"f1_weighted": 0.495375
}
],
"main_score": 0.62918,
"hf_subset": "en",
"languages": [
"eng-Latn"
]
}
]
},
"evaluation_time": 19.412888526916504,
"kg_co2_emissions": null
}
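The per-experiment lists also make run-to-run spread visible: in the AmazonReviewsClassification file above, nine runs sit between 0.6270 and 0.6486 accuracy while the last run drops to 0.5610, which pulls the mean down to 0.62918. A quick sketch for summarizing that spread (again with an illustrative file path):

```python
import json
from statistics import mean, stdev

# Illustrative path, as above.
with open("AmazonReviewsClassification.json") as f:
    result = json.load(f)

runs = [r["accuracy"]
        for r in result["scores"]["test"][0]["scores_per_experiment"]]
print(f"mean={mean(runs):.5f} stdev={stdev(runs):.4f} "
      f"min={min(runs):.4f} max={max(runs):.4f}")
# The single 0.5610 run inflates the standard deviation well beyond
# what the other nine runs (0.6270-0.6486) would give on their own.
```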