| results (dict) | versions (dict) | config_general (dict) |
|---|---|---|
| {"daily": {"daily": 7}, "quarterly": {"quarterly": 7}, "harness\|arc_challenge\|25": {"acc": 0.29948805460750855, "acc_stderr": 0.013385021637313567, "acc_norm": 0.3506825938566553, "acc_norm_stderr": 0.013944635930726089}, "harness\|hellaswag\|10": {"acc": 0.33330013941445... | {"all": 0, "harness\|arc_challenge\|25": 0, "harness\|hellaswag\|10": 0, "harness\|mmlu_world_religions\|5": 1, "harness\|mmlu_management\|5": 1, "harness\|mmlu_miscellaneous\|5": 1, "harness\|mmlu_anatomy\|5": 1, "harness\|mmlu_abstract_algebra\|5": 1, "harness\|mmlu_conceptual_physics\|5": 1, "harness\|mmlu_virol... | {"model_name": "01-ai/Yi-1.5-9B-32K", "model_sha": "c0239dbc923b8a2b5ca849763bdd592d39c60850", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null} |
| {"daily": {"daily": 9}, "quarterly": {"quarterly": 9}, "harness\|arc_challenge\|25": {"acc": 0.257679180887372, "acc_stderr": 0.012780770562768416, "acc_norm": 0.3122866894197952, "acc_norm_stderr": 0.013542598541688065}, "harness\|hellaswag\|10": {"acc": 0.3229436367257518... | {"all": 0, "harness\|arc_challenge\|25": 0, "harness\|hellaswag\|10": 0, "harness\|mmlu_world_religions\|5": 1, "harness\|mmlu_management\|5": 1, "harness\|mmlu_miscellaneous\|5": 1, "harness\|mmlu_anatomy\|5": 1, "harness\|mmlu_abstract_algebra\|5": 1, "harness\|mmlu_conceptual_physics\|5": 1, "harness\|mmlu_virol... | {"model_name": "BioMistral/BioMistral-7B", "model_sha": "9a11e1ffa817c211cbb52ee1fb312dc6b61b40a5", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null} |
| {"daily": {"daily": 10}, "quarterly": {"quarterly": 10}, "harness\|arc_challenge\|25": {"acc": 0.2235494880546075, "acc_stderr": 0.012174896631202605, "acc_norm": 0.2815699658703072, "acc_norm_stderr": 0.013143376735009015}, "harness\|hellaswag\|10": {"acc": 0.3345947022505... | {"all": 0, "harness\|arc_challenge\|25": 0, "harness\|hellaswag\|10": 0, "harness\|mmlu_world_religions\|5": 1, "harness\|mmlu_management\|5": 1, "harness\|mmlu_miscellaneous\|5": 1, "harness\|mmlu_anatomy\|5": 1, "harness\|mmlu_abstract_algebra\|5": 1, "harness\|mmlu_conceptual_physics\|5": 1, "harness\|mmlu_virol... | {"model_name": "EleutherAI/polyglot-ko-1.3b", "model_sha": "557e162cf6e944fdbae05bab2e45d066a125eacb", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null} |
| {"daily": {"daily": 8}, "quarterly": {"quarterly": 8}, "harness\|arc_challenge\|25": {"acc": 0.33532423208191126, "acc_stderr": 0.01379618294778556, "acc_norm": 0.3848122866894198, "acc_norm_stderr": 0.014218371065251112}, "harness\|hellaswag\|10": {"acc": 0.354809798844851... | {"all": 0, "harness\|arc_challenge\|25": 0, "harness\|hellaswag\|10": 0, "harness\|mmlu_world_religions\|5": 1, "harness\|mmlu_management\|5": 1, "harness\|mmlu_miscellaneous\|5": 1, "harness\|mmlu_anatomy\|5": 1, "harness\|mmlu_abstract_algebra\|5": 1, "harness\|mmlu_conceptual_physics\|5": 1, "harness\|mmlu_virol... | {"model_name": "HuggingFaceH4/zephyr-7b-beta", "model_sha": "3bac358730f8806e5c3dc7c7e19eb36e045bf720", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null} |
| {"daily": {"daily": 6}, "quarterly": {"quarterly": 6}, "harness\|arc_challenge\|25": {"acc": 0.42918088737201365, "acc_stderr": 0.014464085894870651, "acc_norm": 0.46501706484641636, "acc_norm_stderr": 0.014575583922019672}, "harness\|hellaswag\|10": {"acc": 0.4456283608842... | {"all": 0, "harness\|arc_challenge\|25": 0, "harness\|hellaswag\|10": 0, "harness\|mmlu_world_religions\|5": 1, "harness\|mmlu_management\|5": 1, "harness\|mmlu_miscellaneous\|5": 1, "harness\|mmlu_anatomy\|5": 1, "harness\|mmlu_abstract_algebra\|5": 1, "harness\|mmlu_conceptual_physics\|5": 1, "harness\|mmlu_virol... | {"model_name": "nlpai-lab/KULLM3", "model_sha": "5a6bcd0fc7f240460eb6d57016f7b4060bc1f43b", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null} |
| {"daily": {"daily": 4}, "quarterly": {"quarterly": 4}, "harness\|arc_challenge\|25": {"acc": 0.7465870307167235, "acc_stderr": 0.012710896778378604, "acc_norm": 0.7807167235494881, "acc_norm_stderr": 0.012091245787615728}, "harness\|hellaswag\|10": {"acc": 0.638518223461461... | {"all": 0, "harness\|arc_challenge\|25": 0, "harness\|hellaswag\|10": 0, "harness\|mmlu_world_religions\|5": 1, "harness\|mmlu_management\|5": 1, "harness\|mmlu_miscellaneous\|5": 1, "harness\|mmlu_anatomy\|5": 1, "harness\|mmlu_abstract_algebra\|5": 1, "harness\|mmlu_conceptual_physics\|5": 1, "harness\|mmlu_virol... | {"model_name": "x2bee/POLAR-14B-DPO-v1.3", "model_sha": "337edbed4c86db2da27e3b0e07086134f8d27a09", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null} |
| {"daily": {"daily": 7}, "quarterly": {"quarterly": 7}, "harness\|arc_challenge\|25": {"acc": 0.7363481228668942, "acc_stderr": 0.012875929151297058, "acc_norm": 0.7491467576791809, "acc_norm_stderr": 0.012668198621315433}, "harness\|hellaswag\|10": {"acc": 0.722863971320454... | {"all": 0, "harness\|arc_challenge\|25": 0, "harness\|hellaswag\|10": 0, "harness\|mmlu_world_religions\|5": 1, "harness\|mmlu_management\|5": 1, "harness\|mmlu_miscellaneous\|5": 1, "harness\|mmlu_anatomy\|5": 1, "harness\|mmlu_abstract_algebra\|5": 1, "harness\|mmlu_conceptual_physics\|5": 1, "harness\|mmlu_virol... | {"model_name": "x2bee/POLAR-14B-DPO-v1.4", "model_sha": "a6e64075fafaa3d5e393ff89c3cb26f9615e6de9", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null} |
| {"daily": {"daily": 5}, "quarterly": {"quarterly": 5}, "harness\|arc_challenge\|25": {"acc": 0.6638225255972696, "acc_stderr": 0.013804855026205756, "acc_norm": 0.7278156996587031, "acc_norm_stderr": 0.013006600406423709}, "harness\|hellaswag\|10": {"acc": 0.456482772356104... | {"all": 0, "harness\|arc_challenge\|25": 0, "harness\|hellaswag\|10": 0, "harness\|mmlu_world_religions\|5": 1, "harness\|mmlu_management\|5": 1, "harness\|mmlu_miscellaneous\|5": 1, "harness\|mmlu_anatomy\|5": 1, "harness\|mmlu_abstract_algebra\|5": 1, "harness\|mmlu_conceptual_physics\|5": 1, "harness\|mmlu_virol... | {"model_name": "x2bee/POLAR-14B-HES-DPO-v1.5", "model_sha": "f0bc8e2566ba28c8232d7c690098e634ea894e8d", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null} |
| {"daily": {"daily": 3}, "quarterly": {"quarterly": 3}, "harness\|arc_challenge\|25": {"acc": 0.6646757679180887, "acc_stderr": 0.013796182947785564, "acc_norm": 0.7244027303754266, "acc_norm_stderr": 0.01305716965576184}, "harness\|hellaswag\|10": {"acc": 0.4603664608643696... | {"all": 0, "harness\|arc_challenge\|25": 0, "harness\|hellaswag\|10": 0, "harness\|mmlu_world_religions\|5": 1, "harness\|mmlu_management\|5": 1, "harness\|mmlu_miscellaneous\|5": 1, "harness\|mmlu_anatomy\|5": 1, "harness\|mmlu_abstract_algebra\|5": 1, "harness\|mmlu_conceptual_physics\|5": 1, "harness\|mmlu_virol... | {"model_name": "x2bee/POLAR-14B-SON-SFT-v0.1", "model_sha": "01286a13088332c1eda4279b5bcfa7a0a33e145f", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null} |
| {"daily": {"daily": 2}, "quarterly": {"quarterly": 2}, "harness\|arc_challenge\|25": {"acc": 0.7465870307167235, "acc_stderr": 0.012710896778378602, "acc_norm": 0.7687713310580204, "acc_norm_stderr": 0.012320858834772264}, "harness\|hellaswag\|10": {"acc": 0.681736705835491... | {"all": 0, "harness\|arc_challenge\|25": 0, "harness\|hellaswag\|10": 0, "harness\|mmlu_world_religions\|5": 1, "harness\|mmlu_management\|5": 1, "harness\|mmlu_miscellaneous\|5": 1, "harness\|mmlu_anatomy\|5": 1, "harness\|mmlu_abstract_algebra\|5": 1, "harness\|mmlu_conceptual_physics\|5": 1, "harness\|mmlu_virol... | {"model_name": "x2bee/POLAR-14B-v0.2", "model_sha": "8d905623a3972e11260420130039c62e115cbbaa", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null} |
| {"daily": {"daily": 1}, "quarterly": {"quarterly": 1}, "harness\|arc_challenge\|25": {"acc": 0.75, "acc_stderr": 0.012653835621466646, "acc_norm": 0.7798634812286689, "acc_norm_stderr": 0.012108124883460988}, "harness\|hellaswag\|10": {"acc": 0.6500697072296355, "acc_st... | {"all": 0, "harness\|arc_challenge\|25": 0, "harness\|hellaswag\|10": 0, "harness\|mmlu_world_religions\|5": 1, "harness\|mmlu_management\|5": 1, "harness\|mmlu_miscellaneous\|5": 1, "harness\|mmlu_anatomy\|5": 1, "harness\|mmlu_abstract_algebra\|5": 1, "harness\|mmlu_conceptual_physics\|5": 1, "harness\|mmlu_virol... | {"model_name": "x2bee/POLAR-14B-v0.5", "model_sha": "74a1ef65a8d650e5358be229def31688738d8c6a", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null} |
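Each row pairs three dicts: `results` (per-task metrics keyed in lighteval's `harness|<task>|<num_fewshot>` form), `versions` (task version numbers), and `config_general` (the evaluated model's name, revision SHA, dtype, and run settings). Below is a minimal sketch of how a row like this could be read back, assuming the data is published as a Hugging Face dataset with those three columns; the repo id is a placeholder, not a real dataset.

```python
# Minimal sketch, not an official loader. Assumes columns named "results",
# "versions", and "config_general"; the repo id below is a placeholder.
import json

from datasets import load_dataset

ds = load_dataset("your-org/your-eval-results", split="train")  # hypothetical repo id

for row in ds:
    # Columns may arrive as nested dicts or as JSON-encoded strings.
    results = row["results"]
    config = row["config_general"]
    if isinstance(results, str):
        results = json.loads(results)
    if isinstance(config, str):
        config = json.loads(config)

    # Task keys follow lighteval's "harness|<task>|<num_fewshot>" convention.
    arc = results.get("harness|arc_challenge|25", {})
    print(config.get("model_name"), arc.get("acc_norm"))
```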