Clémentine committed
Commit · ea04e0b
Parent(s): 6852f97
fix updater for adapter weights/merges + add some flags
src/leaderboard/filter_models.py
CHANGED
@@ -114,7 +114,9 @@ FLAGGED_MODELS = {
     "cloudyu/Mixtral_7Bx4_MOE_24B":"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard/discussions/540",
     "macadeliccc/laser-dolphin-mixtral-2x7b-dpo":"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard/discussions/540",
     "macadeliccc/polyglot-math-4x7b":"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard/discussions/540",
-
+    # Other - contamination mostly
+    "DopeorNope/COKAL-v1-70B": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard/discussions/566",
+    "CultriX/MistralTrix-v1": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard/discussions/556",
 }

 # Models which have been requested by orgs to not be submitted on the leaderboard
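For reference, FLAGGED_MODELS maps a model ID to the leaderboard discussion in which the flag was raised. The following is a minimal sketch of how such a mapping could be applied to leaderboard entries; the flag_models helper, its signature, and the row layout are assumptions made for illustration, not this Space's actual implementation.

# Sketch only: the row format (a dict with a "model" key) and the helper name
# are assumptions; only the FLAGGED_MODELS shape mirrors the diff above.
FLAGGED_MODELS = {
    "macadeliccc/polyglot-math-4x7b": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard/discussions/540",
    # Other - contamination mostly
    "DopeorNope/COKAL-v1-70B": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard/discussions/566",
    "CultriX/MistralTrix-v1": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard/discussions/556",
}

def flag_models(rows):
    """Mark every row whose model ID is flagged and attach the discussion link."""
    for row in rows:
        link = FLAGGED_MODELS.get(row["model"])
        row["flagged"] = link is not None
        if link is not None:
            row["flag_discussion"] = link
    return rows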
src/scripts/update_all_request_files.py
CHANGED
@@ -32,7 +32,8 @@ def update_models(file_path, models):
         # Is the model still on the hub?
         model_name = model_id
         if model_cfg.card_data is not None and model_cfg.card_data.base_model is not None:
-
+            if isinstance(model_cfg.card_data.base_model, str):
+                model_name = model_cfg.card_data.base_model  # for adapters, we look at the parent model
         still_on_hub, _, _ = is_model_on_hub(
             model_name=model_name, revision=data.get("revision"), trust_remote_code=True, test_tokenizer=False, token=H4_TOKEN
         )
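The added isinstance check matters because card_data.base_model is a plain string for adapter repos (a single parent model) but can be a list for merges (several parents); only the string case should redirect the Hub check to the parent. A minimal sketch of that decision, under the assumption that these are the two shapes base_model takes, with a hypothetical helper name that simply mirrors the logic in the diff:

# Sketch only: resolve_checked_model is a hypothetical helper; the two shapes of
# base_model (str for adapters, list for merges) are assumptions based on common
# model-card metadata, not a guarantee from this repository.
def resolve_checked_model(model_id, base_model):
    """Pick the repo whose presence on the Hub should be verified."""
    if isinstance(base_model, str):
        return base_model  # adapter: check the parent model instead
    return model_id  # merge (list of parents) or no base model: check the repo itself

print(resolve_checked_model("org/adapter-model", "org/parent-model"))  # org/parent-model
print(resolve_checked_model("org/merged-model", ["org/a", "org/b"]))   # org/merged-model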