ECREAM-LLM Leaderboard



The eCream-LLM leaderboard , developed within the eCream Project (enabling Clinical Research in Emergency and Acute care Medicine), is designed to evaluate Large Language Models (LLMs) on several tasks pertaining to the medical domain. Its distinguishing features are:
(i) all tasks are implemented for six languages including English, Italian, Slovak, Slovenian, Polish and Greek;
(ii) the leaderboard includes generative tasks, allowing for a more natural interaction with LLMs;
(iii) all tasks are evaluated against multiple prompts, this way mitigating the model sensitivity to specific prompts and allowing a fairer evaluation.


Generative tasks: 🏷️NER (Named Entity Recognition), 🔗REL (Relation Extraction), 😃RML(CRF RML)
Multiple-choice task: 🏥DIA (CRF Diagnosis), 📝HIS (CRF History)

Language:
N-Shot:
{
  • "headers": [
    • "Rank",
    • "Size",
    • "FS",
    • "IS_FS",
    • "LANG",
    • "Model",
    • "Avg. Comb. Perf. ⬆️",
    • "REL",
    • "REL Prompt Average",
    • "REL Prompt Std",
    • "REL Best Prompt",
    • "REL Best Prompt Id",
    • "NER",
    • "NER Prompt Average",
    • "NER Prompt Std",
    • "NER Best Prompt",
    • "NER Best Prompt Id",
    • "RML",
    • "RML Prompt Average",
    • "RML Prompt Std",
    • "RML Best Prompt",
    • "RML Best Prompt Id",
    • "DIA",
    • "DIA Prompt Average",
    • "DIA Prompt Std",
    • "DIA Best Prompt",
    • "DIA Best Prompt Id",
    • "HIS",
    • "HIS Prompt Average",
    • "HIS Prompt Std",
    • "HIS Best Prompt",
    • "HIS Best Prompt Id",
    • "Architecture",
    • "Hub License",
    • "#Params (B)",
    • "Hub ❤️",
    • "Available on the hub",
    • "Model sha"
    ],
  • "data": [
    • [
      • 1,
      • "🔵🔵",
      • "🔟",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-27b-text-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">medgemma-27b-text-it</a> 🔵🔵🏆",
      • 64.26,
      • 60.5,
      • 59.97,
      • 1.18,
      • 61.33,
      • 2,
      • 68.03,
      • 67.91,
      • 0.66,
      • 68.29,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • 2,
      • "🔵🔵",
      • "🔟",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-27b-text-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">medgemma-27b-text-it</a>",
      • 62.99,
      • 57.57,
      • 56.8,
      • 2.53,
      • 58.67,
      • 2,
      • 68.41,
      • 68.39,
      • 0.06,
      • 68.46,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • 3,
      • "🔵🔵",
      • "🔟",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-27b-text-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">medgemma-27b-text-it</a>",
      • 62.14,
      • 55.22,
      • 54.69,
      • 1.35,
      • 55.9,
      • 2,
      • 69.05,
      • 68.86,
      • 1.05,
      • 69.47,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • 4,
      • "🔵🔵",
      • "🔟",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-32B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen2.5-32B-Instruct</a>",
      • 62.04,
      • 56.81,
      • 56.43,
      • 0.79,
      • 57.33,
      • 1,
      • 67.27,
      • 67.2,
      • 0.4,
      • 67.43,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • 5,
      • "🔵🔵",
      • "🔟",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen2.5-14B-Instruct-1M</a>",
      • 61.83,
      • 59.23,
      • 58.99,
      • 0.57,
      • 59.59,
      • 3,
      • 64.43,
      • 64.19,
      • 0.58,
      • 64.86,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen2ForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • 6,
      • "🔵🔵",
      • "🔟",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-3-27b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">gemma-3-27b-it</a>",
      • 61.57,
      • 56.77,
      • 55.92,
      • 2.08,
      • 57.95,
      • 1,
      • 66.36,
      • 66.18,
      • 0.47,
      • 66.72,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • 7,
      • "🔵🔵",
      • "🔟",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-27b-text-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">medgemma-27b-text-it</a>",
      • 61.55,
      • 51.72,
      • 51.57,
      • 0.4,
      • 51.88,
      • 2,
      • 71.39,
      • 71.38,
      • 0.09,
      • 71.43,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • 8,
      • "🔵🔵",
      • "🔟",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-32B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen2.5-32B-Instruct</a>",
      • 61.44,
      • 58.33,
      • 57.27,
      • 3.43,
      • 59.92,
      • 1,
      • 64.55,
      • 63.74,
      • 2.09,
      • 66.15,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • 9,
      • "🔵🔵",
      • "🔟",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen2.5-14B-Instruct-1M</a>",
      • 61.44,
      • 58.83,
      • 58.37,
      • 0.97,
      • 59.49,
      • 1,
      • 64.05,
      • 63.71,
      • 1.67,
      • 64.67,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen2ForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • 10,
      • "🔵🔵",
      • "🔟",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen2.5-14B-Instruct-1M</a>",
      • 61.21,
      • 61,
      • 59.44,
      • 4.56,
      • 63.75,
      • 3,
      • 61.43,
      • 60.92,
      • 2.32,
      • 62.26,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen2ForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • 11,
      • "🔵🔵",
      • "🔟",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen2.5-14B-Instruct-1M</a>",
      • 60.84,
      • 61.05,
      • 60.56,
      • 1.14,
      • 61.83,
      • 3,
      • 60.64,
      • 60.28,
      • 1.57,
      • 61.19,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen2ForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • 12,
      • "🔵🔵",
      • "🔟",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-3-27b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">gemma-3-27b-it</a>",
      • 60.33,
      • 54.83,
      • 54.05,
      • 2.79,
      • 55.81,
      • 3,
      • 65.83,
      • 65.7,
      • 0.33,
      • 66.08,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • 13,
      • "🔵🔵",
      • "🔟",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-32B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen2.5-32B-Instruct</a>",
      • 60.26,
      • 58.7,
      • 58.4,
      • 1.13,
      • 59.13,
      • 1,
      • 61.83,
      • 61.74,
      • 0.38,
      • 61.96,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • 14,
      • "🔵🔵",
      • "🔟",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-3-27b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">gemma-3-27b-it</a>",
      • 59.62,
      • 51.08,
      • 50.95,
      • 0.31,
      • 51.21,
      • 1,
      • 68.17,
      • 67.86,
      • 0.85,
      • 68.85,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • 15,
      • "🔵🔵",
      • "🔟",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-30B-A3B-Instruct-2507" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen3-30B-A3B-Instruct-2507</a>",
      • 59.6,
      • 54.53,
      • 53.56,
      • 2.65,
      • 55.76,
      • 2,
      • 64.67,
      • 63.91,
      • 3.87,
      • 66.15,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen3MoeForCausalLM",
      • "?",
      • 31,
      • 0,
      • true,
      • ""
      ],
    • [
      • 16,
      • "🔵🔵",
      • "🔟",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-32B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen2.5-32B-Instruct</a>",
      • 59.58,
      • 59.08,
      • 58.88,
      • 0.44,
      • 59.38,
      • 3,
      • 60.07,
      • 60.07,
      • 0.02,
      • 60.08,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • 17,
      • "🔵🔵",
      • "🔟",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-3-27b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">gemma-3-27b-it</a>",
      • 59.56,
      • 50.72,
      • 49.99,
      • 2.56,
      • 51.49,
      • 1,
      • 68.41,
      • 68.06,
      • 0.97,
      • 69.18,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • 18,
      • "🔵🔵",
      • "🔟",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-30B-A3B-Instruct-2507" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen3-30B-A3B-Instruct-2507</a>",
      • 56.8,
      • 51.82,
      • 51.66,
      • 0.55,
      • 52,
      • 2,
      • 61.77,
      • 61.18,
      • 2.73,
      • 62.76,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen3MoeForCausalLM",
      • "?",
      • 31,
      • 0,
      • true,
      • ""
      ],
    • [
      • 19,
      • "🔵🔵",
      • "🔟",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/unsloth/phi-4" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">phi-4</a>",
      • 56.64,
      • 58.21,
      • 57.18,
      • 2.77,
      • 59.72,
      • 3,
      • 55.07,
      • 54.74,
      • 1.3,
      • 55.49,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "LlamaForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • 20,
      • "🔵",
      • "🔟",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-2-9b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">gemma-2-9b-it</a> 🔵🏆",
      • 56.44,
      • 52.9,
      • 51.62,
      • 2.49,
      • 54.44,
      • 3,
      • 59.98,
      • 59.43,
      • 2.42,
      • 60.83,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 10,
      • 0,
      • false,
      • ""
      ],
    • [
      • 21,
      • "🔵🔵",
      • "🔟",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-30B-A3B-Instruct-2507" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen3-30B-A3B-Instruct-2507</a>",
      • 56.4,
      • 52.3,
      • 51.06,
      • 2.37,
      • 53.73,
      • 3,
      • 60.51,
      • 60.3,
      • 0.96,
      • 60.85,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen3MoeForCausalLM",
      • "?",
      • 31,
      • 0,
      • true,
      • ""
      ],
    • [
      • 22,
      • "🔵🔵",
      • "🔟",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">DeepSeek-R1-Distill-Qwen-32B</a>",
      • 56.37,
      • 50.74,
      • 50.23,
      • 1.41,
      • 51.29,
      • 2,
      • 61.99,
      • 61.89,
      • 0.43,
      • 62.14,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • 23,
      • "🔵🔵",
      • "🔟",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-30B-A3B-Instruct-2507" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen3-30B-A3B-Instruct-2507</a>",
      • 56.23,
      • 51.84,
      • 51.49,
      • 1.16,
      • 52.23,
      • 3,
      • 60.62,
      • 59.99,
      • 2.86,
      • 61.64,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen3MoeForCausalLM",
      • "?",
      • 31,
      • 0,
      • true,
      • ""
      ],
    • [
      • 24,
      • "🔵",
      • "🔟",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-2-9b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">gemma-2-9b-it</a>",
      • 56.07,
      • 50.78,
      • 50.07,
      • 2.2,
      • 51.53,
      • 1,
      • 61.37,
      • 61.35,
      • 0.11,
      • 61.41,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 10,
      • 0,
      • false,
      • ""
      ],
    • [
      • 25,
      • "🔵",
      • "🔟",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-2-9b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">gemma-2-9b-it</a>",
      • 55.8,
      • 49.28,
      • 48.84,
      • 0.86,
      • 49.72,
      • 3,
      • 62.32,
      • 61.56,
      • 3.63,
      • 63.65,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 10,
      • 0,
      • false,
      • ""
      ],
    • [
      • 26,
      • "🔵🔵",
      • "🔟",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/unsloth/phi-4" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">phi-4</a>",
      • 55.78,
      • 54.58,
      • 52.91,
      • 3.72,
      • 56.78,
      • 3,
      • 56.97,
      • 56.82,
      • 0.61,
      • 57.17,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "LlamaForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • 27,
      • "🔵🔵",
      • "🔟",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">DeepSeek-R1-Distill-Qwen-32B</a>",
      • 55.28,
      • 50.21,
      • 49.11,
      • 2.32,
      • 51.37,
      • 1,
      • 60.35,
      • 60.26,
      • 0.2,
      • 60.49,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • 28,
      • "🔵🔵",
      • "🔟",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">DeepSeek-R1-Distill-Qwen-32B</a>",
      • 55.22,
      • 47.26,
      • 46.46,
      • 1.78,
      • 47.99,
      • 1,
      • 63.18,
      • 63.02,
      • 0.79,
      • 63.47,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • 29,
      • "🔵🔵",
      • "🔟",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/unsloth/phi-4" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">phi-4</a>",
      • 55.05,
      • 54.29,
      • 53.09,
      • 2.41,
      • 55.79,
      • 3,
      • 55.81,
      • 55.77,
      • 0.16,
      • 55.86,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "LlamaForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • 30,
      • "🔵",
      • "🔟",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-2-9b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">gemma-2-9b-it</a>",
      • 54.99,
      • 50.98,
      • 50.33,
      • 1.96,
      • 51.68,
      • 1,
      • 58.99,
      • 58.93,
      • 0.27,
      • 59.08,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 10,
      • 0,
      • false,
      • ""
      ],
    • [
      • 31,
      • "🔵🔵",
      • "🔟",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/unsloth/phi-4" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">phi-4</a>",
      • 54.5,
      • 53.6,
      • 52.14,
      • 2.89,
      • 55.41,
      • 3,
      • 55.4,
      • 55.24,
      • 0.65,
      • 55.61,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "LlamaForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • 32,
      • "🔵🔵",
      • "🔟",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">DeepSeek-R1-Distill-Qwen-32B</a>",
      • 52,
      • 44.99,
      • 44.15,
      • 1.85,
      • 45.69,
      • 3,
      • 59.02,
      • 58.84,
      • 0.76,
      • 59.28,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • 33,
      • "🔵🔵",
      • "🔟",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-27b-text-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">medgemma-27b-text-it</a>",
      • 50.54,
      • 55.51,
      • 55.4,
      • 0.4,
      • 55.65,
      • 3,
      • 63.7,
      • 63.24,
      • 1.49,
      • 64.55,
      • 3,
      • 36.51,
      • 35.49,
      • 1.8,
      • 37.11,
      • 1,
      • 43,
      • 33.27,
      • 25.64,
      • 54.8,
      • 1,
      • 54,
      • 49.41,
      • 11.62,
      • 61.24,
      • 3,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • 34,
      • "🔵🔵",
      • "🔟",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-27b-text-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">medgemma-27b-text-it</a>",
      • 50.37,
      • 60.64,
      • 59.6,
      • 2.57,
      • 62.35,
      • 2,
      • 71.69,
      • 71.33,
      • 1.82,
      • 72.62,
      • 1,
      • 27.8,
      • 22.82,
      • 7.26,
      • 29.92,
      • 2,
      • 48.22,
      • 40.74,
      • 30.72,
      • 58.98,
      • 1,
      • 43.5,
      • 35.17,
      • 30.46,
      • 52.85,
      • 3,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • 35,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-27b-text-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">medgemma-27b-text-it</a> 🔵🔵🎖️",
      • 50.34,
      • 42.75,
      • 41.99,
      • 1.32,
      • 43.32,
      • 2,
      • 57.94,
      • 55.85,
      • 4.69,
      • 61.26,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • 36,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-3-27b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">gemma-3-27b-it</a>",
      • 50.11,
      • 46.28,
      • 44.73,
      • 4.5,
      • 47.69,
      • 3,
      • 53.95,
      • 51.51,
      • 4.94,
      • 57.21,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • 37,
      • "🔵🔵",
      • "🔟",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/unsloth/phi-4" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">phi-4</a>",
      • 49.7,
      • 57.53,
      • 57.05,
      • 1.07,
      • 58.2,
      • 2,
      • 68.02,
      • 67.59,
      • 1.27,
      • 68.97,
      • 3,
      • 16.72,
      • 12.64,
      • 5.25,
      • 17.59,
      • 1,
      • 57.51,
      • 56.92,
      • 1.36,
      • 58.35,
      • 1,
      • 48.74,
      • 46.56,
      • 6.92,
      • 51.02,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • 38,
      • "🔵🔵",
      • "🔟",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/unsloth/phi-4" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">phi-4</a>",
      • 49.37,
      • 54.79,
      • 53.64,
      • 3.93,
      • 56.26,
      • 2,
      • 60.44,
      • 59.83,
      • 2.37,
      • 61.41,
      • 3,
      • 34.71,
      • 28.78,
      • 8.25,
      • 38.41,
      • 1,
      • 51.15,
      • 47.15,
      • 7.92,
      • 56.3,
      • 2,
      • 45.77,
      • 41.38,
      • 7.65,
      • 50.2,
      • 2,
      • "LlamaForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • 39,
      • "🔵🔵",
      • "🔟",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/tiiuae/Falcon3-10B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Falcon3-10B-Instruct</a>",
      • 48.76,
      • 54.74,
      • 53.96,
      • 2.35,
      • 55.71,
      • 2,
      • 42.78,
      • 42.44,
      • 1.05,
      • 43.04,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "LlamaForCausalLM",
      • "?",
      • 11,
      • 0,
      • true,
      • ""
      ],
    • [
      • 40,
      • "🔵🔵",
      • "🔟",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-30B-A3B-Instruct-2507" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen3-30B-A3B-Instruct-2507</a>",
      • 48.59,
      • 60.14,
      • 59.81,
      • 1.25,
      • 60.65,
      • 3,
      • 67.11,
      • 66.73,
      • 1.96,
      • 67.93,
      • 1,
      • 24.14,
      • 19.73,
      • 5.16,
      • 25.66,
      • 2,
      • 50.98,
      • 47.94,
      • 5.82,
      • 54.64,
      • 2,
      • 40.56,
      • 30.69,
      • 17.36,
      • 50.71,
      • 2,
      • "Qwen3MoeForCausalLM",
      • "?",
      • 31,
      • 0,
      • true,
      • ""
      ],
    • [
      • 41,
      • "🔵",
      • "🅾️",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-2-9b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">gemma-2-9b-it</a> 🔵🎖️",
      • 47.41,
      • 40.76,
      • 40.08,
      • 1.01,
      • 41.24,
      • 1,
      • 54.06,
      • 52.92,
      • 4.46,
      • 55.49,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 10,
      • 0,
      • false,
      • ""
      ],
    • [
      • 42,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-32B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen2.5-32B-Instruct</a>",
      • 46.82,
      • 42.66,
      • 41.03,
      • 2.8,
      • 43.93,
      • 1,
      • 50.98,
      • 45.07,
      • 25.45,
      • 59.76,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • 43,
      • "🔵🔵",
      • "🔟",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-30B-A3B-Instruct-2507" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen3-30B-A3B-Instruct-2507</a>",
      • 46.51,
      • 53.07,
      • 52.58,
      • 1.07,
      • 53.64,
      • 3,
      • 59.98,
      • 59.07,
      • 2.83,
      • 61.43,
      • 3,
      • 34.99,
      • 33.51,
      • 2.01,
      • 35.81,
      • 2,
      • 35.76,
      • 31.95,
      • 9.3,
      • 38.1,
      • 1,
      • 48.74,
      • 42.56,
      • 13.86,
      • 56.9,
      • 3,
      • "Qwen3MoeForCausalLM",
      • "?",
      • 31,
      • 0,
      • true,
      • ""
      ],
    • [
      • 44,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-27b-text-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">medgemma-27b-text-it</a>",
      • 46.32,
      • 42.23,
      • 42.01,
      • 0.33,
      • 42.39,
      • 2,
      • 50.41,
      • 48.63,
      • 3.25,
      • 52.38,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • 45,
      • "🔵🔵",
      • "🔟",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-3-27b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">gemma-3-27b-it</a>",
      • 45.43,
      • 56.22,
      • 55.18,
      • 2.95,
      • 57.64,
      • 3,
      • 62.32,
      • 61.87,
      • 1.1,
      • 63.08,
      • 2,
      • 33.3,
      • 33.05,
      • 0.36,
      • 33.42,
      • 3,
      • 35.72,
      • 29.03,
      • 17.99,
      • 40.22,
      • 1,
      • 39.59,
      • 30.34,
      • 15.78,
      • 48.21,
      • 2,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • 46,
      • "🔵",
      • "🅾️",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-2-9b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">gemma-2-9b-it</a>",
      • 45.32,
      • 42.38,
      • 41.06,
      • 2.03,
      • 43.4,
      • 2,
      • 48.26,
      • 47.75,
      • 1.73,
      • 48.75,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 10,
      • 0,
      • false,
      • ""
      ],
    • [
      • 47,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-3-27b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">gemma-3-27b-it</a>",
      • 45.01,
      • 43.51,
      • 43,
      • 0.79,
      • 43.91,
      • 2,
      • 46.51,
      • 45.08,
      • 2.38,
      • 47.83,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • 48,
      • "🔵🔵",
      • "🔟",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/tiiuae/Falcon3-10B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Falcon3-10B-Instruct</a>",
      • 44.86,
      • 44.91,
      • 42.61,
      • 4.77,
      • 46.95,
      • 2,
      • 44.8,
      • 44.02,
      • 2.48,
      • 45.45,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "LlamaForCausalLM",
      • "?",
      • 11,
      • 0,
      • true,
      • ""
      ],
    • [
      • 49,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-3-27b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">gemma-3-27b-it</a>",
      • 44.3,
      • 43.5,
      • 43.07,
      • 0.66,
      • 43.84,
      • 1,
      • 45.09,
      • 45.08,
      • 0.03,
      • 45.11,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • 50,
      • "🔵🔵",
      • "🔟",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen2.5-14B-Instruct-1M</a>",
      • 43.53,
      • 60.07,
      • 59.53,
      • 1.68,
      • 60.93,
      • 3,
      • 66.18,
      • 65.69,
      • 2.12,
      • 67.19,
      • 1,
      • 18.81,
      • 15.57,
      • 4.26,
      • 19.6,
      • 3,
      • 35.65,
      • 24.96,
      • 16.69,
      • 44.07,
      • 1,
      • 36.92,
      • 23.39,
      • 23.98,
      • 51.03,
      • 2,
      • "Qwen2ForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • 51,
      • "🔵",
      • "🅾️",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-2-9b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">gemma-2-9b-it</a>",
      • 43.37,
      • 40.7,
      • 40.58,
      • 0.36,
      • 40.79,
      • 1,
      • 46.03,
      • 44.87,
      • 3.82,
      • 47.07,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 10,
      • 0,
      • false,
      • ""
      ],
    • [
      • 52,
      • "🔵",
      • "🔟",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Mistral-7B-Instruct-v0.2</a>",
      • 43.31,
      • 36.62,
      • 32.96,
      • 6.06,
      • 38.95,
      • 1,
      • 50.01,
      • 49.56,
      • 0.78,
      • 50.46,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "MistralForCausalLM",
      • "?",
      • 8,
      • 0,
      • true,
      • ""
      ],
    • [
      • 53,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-27b-text-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">medgemma-27b-text-it</a>",
      • 43.31,
      • 43.83,
      • 43.32,
      • 0.89,
      • 44.24,
      • 2,
      • 42.78,
      • 42.45,
      • 0.5,
      • 43.03,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • 54,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-30B-A3B-Instruct-2507" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen3-30B-A3B-Instruct-2507</a>",
      • 43.2,
      • 41.23,
      • 41.19,
      • 0.06,
      • 41.26,
      • 2,
      • 45.17,
      • 45.01,
      • 0.26,
      • 45.31,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen3MoeForCausalLM",
      • "?",
      • 31,
      • 0,
      • true,
      • ""
      ],
    • [
      • 55,
      • "🔵🔵",
      • "🔟",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">DeepSeek-R1-Distill-Qwen-32B</a>",
      • 43.18,
      • 52.46,
      • 52.21,
      • 0.45,
      • 52.73,
      • 3,
      • 59.87,
      • 59.63,
      • 0.53,
      • 60.24,
      • 1,
      • 24.05,
      • 17.68,
      • 7.67,
      • 26.33,
      • 3,
      • 28.74,
      • 23.39,
      • 8.2,
      • 31.17,
      • 1,
      • 50.79,
      • 48.28,
      • 7.9,
      • 53.71,
      • 3,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • 56,
      • "🔵🔵",
      • "🔟",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen2.5-14B-Instruct-1M</a>",
      • 43.13,
      • 62.25,
      • 61.63,
      • 1.56,
      • 63.32,
      • 1,
      • 60.87,
      • 59.93,
      • 3.1,
      • 62.43,
      • 3,
      • 31,
      • 28.43,
      • 6.19,
      • 32.22,
      • 2,
      • 26.38,
      • 16.58,
      • 12.4,
      • 30.73,
      • 1,
      • 35.17,
      • 23.7,
      • 17.86,
      • 44.29,
      • 2,
      • "Qwen2ForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • 57,
      • "🔵🔵",
      • "🔟",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-3-27b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">gemma-3-27b-it</a>",
      • 43.01,
      • 57.08,
      • 56.15,
      • 3.41,
      • 58.37,
      • 2,
      • 71.42,
      • 71.15,
      • 1.12,
      • 72.12,
      • 3,
      • 23.93,
      • 21.09,
      • 3.31,
      • 24.87,
      • 2,
      • 46.09,
      • 37.73,
      • 18.17,
      • 57.32,
      • 1,
      • 16.51,
      • 10.43,
      • 9.29,
      • 17.83,
      • 3,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • 58,
      • "🔵🔵",
      • "🔟",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Nemo-Instruct-2407" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Mistral-Nemo-Instruct-2407</a>",
      • 42.76,
      • 46.6,
      • 43.13,
      • 7.72,
      • 50.08,
      • 2,
      • 57.95,
      • 57.62,
      • 0.87,
      • 58.41,
      • 2,
      • 26.65,
      • 25.24,
      • 1.82,
      • 27.18,
      • 2,
      • 49.41,
      • 43.29,
      • 13.78,
      • 57.78,
      • 3,
      • 33.18,
      • 22.51,
      • 16.16,
      • 40.43,
      • 2,
      • "MistralForCausalLM",
      • "?",
      • 13,
      • 0,
      • true,
      • ""
      ],
    • [
      • 59,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-30B-A3B-Instruct-2507" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen3-30B-A3B-Instruct-2507</a>",
      • 42.48,
      • 42.06,
      • 41.86,
      • 0.34,
      • 42.2,
      • 2,
      • 42.9,
      • 42.36,
      • 1.67,
      • 43.32,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen3MoeForCausalLM",
      • "?",
      • 31,
      • 0,
      • true,
      • ""
      ],
    • [
      • 60,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-30B-A3B-Instruct-2507" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen3-30B-A3B-Instruct-2507</a>",
      • 41.21,
      • 37.91,
      • 37.77,
      • 0.38,
      • 37.99,
      • 2,
      • 44.52,
      • 43.68,
      • 1.33,
      • 45.21,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen3MoeForCausalLM",
      • "?",
      • 31,
      • 0,
      • true,
      • ""
      ],
    • [
      • 61,
      • "🔵",
      • "🅾️",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-2-9b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">gemma-2-9b-it</a>",
      • 41.19,
      • 41.09,
      • 38.91,
      • 3.3,
      • 42.71,
      • 2,
      • 41.29,
      • 40.92,
      • 0.55,
      • 41.55,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 10,
      • 0,
      • false,
      • ""
      ],
    • [
      • 62,
      • "🔵🔵",
      • "🔟",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Nemo-Instruct-2407" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Mistral-Nemo-Instruct-2407</a>",
      • 40.66,
      • 30.66,
      • 25.49,
      • 6.83,
      • 33.23,
      • 3,
      • 50.65,
      • 50.5,
      • 0.54,
      • 50.81,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "MistralForCausalLM",
      • "?",
      • 13,
      • 0,
      • true,
      • ""
      ],
    • [
      • 63,
      • "🔵",
      • "🔟",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-2-9b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">gemma-2-9b-it</a>",
      • 39.77,
      • 53.52,
      • 53.03,
      • 1.26,
      • 54.09,
      • 3,
      • 60.26,
      • 59.19,
      • 2.81,
      • 62,
      • 1,
      • 33.24,
      • 32,
      • 2.25,
      • 33.88,
      • 2,
      • 28.07,
      • 21.2,
      • 14.11,
      • 31.18,
      • 1,
      • 23.78,
      • 16.25,
      • 8.88,
      • 26.49,
      • 2,
      • "?",
      • "?",
      • 10,
      • 0,
      • false,
      • ""
      ],
    • [
      • 64,
      • "🔵🔵",
      • "🔟",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Nemo-Instruct-2407" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Mistral-Nemo-Instruct-2407</a>",
      • 39.46,
      • 38.76,
      • 35.56,
      • 7.44,
      • 40.99,
      • 2,
      • 64.47,
      • 64.41,
      • 0.14,
      • 64.57,
      • 3,
      • 12.95,
      • 12.68,
      • 0.43,
      • 12.99,
      • 2,
      • 50.67,
      • 46.12,
      • 9.76,
      • 56.59,
      • 2,
      • 30.45,
      • 17.63,
      • 18.04,
      • 38.46,
      • 2,
      • "MistralForCausalLM",
      • "?",
      • 13,
      • 0,
      • true,
      • ""
      ],
    • [
      • 65,
      • "🔵",
      • "🔟",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-2-9b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">gemma-2-9b-it</a>",
      • 39.26,
      • 52.81,
      • 52.09,
      • 2.2,
      • 53.65,
      • 2,
      • 67.7,
      • 67.07,
      • 1.79,
      • 69.1,
      • 1,
      • 17.4,
      • 15.09,
      • 3.74,
      • 17.9,
      • 1,
      • 35.15,
      • 27.47,
      • 16.27,
      • 40.35,
      • 2,
      • 23.24,
      • 14.12,
      • 10.76,
      • 26.53,
      • 2,
      • "?",
      • "?",
      • 10,
      • 0,
      • false,
      • ""
      ],
    • [
      • 66,
      • "🔵🔵",
      • "🔟",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">DeepSeek-R1-Distill-Qwen-32B</a>",
      • 39.25,
      • 55.37,
      • 55.3,
      • 0.14,
      • 55.46,
      • 1,
      • 68.99,
      • 68.64,
      • 1.62,
      • 69.82,
      • 1,
      • 11.47,
      • 5.7,
      • 5.74,
      • 12.28,
      • 3,
      • 31.12,
      • 29.65,
      • 1.97,
      • 31.81,
      • 3,
      • 29.26,
      • 28.3,
      • 1.77,
      • 29.67,
      • 2,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • 67,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">DeepSeek-R1-Distill-Qwen-32B</a>",
      • 39.2,
      • 42.42,
      • 41.31,
      • 1.77,
      • 43.27,
      • 2,
      • 35.98,
      • 33.79,
      • 3.03,
      • 37.28,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • 68,
      • "🔵🔵",
      • "🔟",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/tiiuae/Falcon3-10B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Falcon3-10B-Instruct</a>",
      • 38.88,
      • 36.85,
      • 31.33,
      • 8.76,
      • 40.63,
      • 3,
      • 40.92,
      • 40.5,
      • 1.22,
      • 41.21,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "LlamaForCausalLM",
      • "?",
      • 11,
      • 0,
      • true,
      • ""
      ],
    • [
      • 69,
      • "🔵",
      • "🔟",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MediPhi-Instruct</a>",
      • 38.28,
      • 18.56,
      • 17.19,
      • 2.5,
      • 18.88,
      • 2,
      • 52.81,
      • 52.16,
      • 1.47,
      • 53.57,
      • 1,
      • 32.38,
      • 28.56,
      • 5.35,
      • 34.38,
      • 2,
      • 42.65,
      • 34.36,
      • 15.38,
      • 51.44,
      • 3,
      • 45,
      • 41.73,
      • 8.73,
      • 48.01,
      • 3,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • 70,
      • "🔵",
      • "🔟",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MediPhi-Instruct</a>",
      • 38.2,
      • 28.17,
      • 26.79,
      • 3.23,
      • 28.73,
      • 1,
      • 57.47,
      • 57.15,
      • 0.82,
      • 57.9,
      • 3,
      • 14.88,
      • 11.76,
      • 6.07,
      • 15.45,
      • 1,
      • 51.38,
      • 47.64,
      • 12.98,
      • 56.17,
      • 1,
      • 39.1,
      • 37.35,
      • 3.91,
      • 40.29,
      • 2,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • 71,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-32B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen2.5-32B-Instruct</a>",
      • 38.04,
      • 41.02,
      • 40.31,
      • 1.05,
      • 41.52,
      • 2,
      • 35.05,
      • 33.75,
      • 3.52,
      • 35.78,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • 72,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen2.5-14B-Instruct-1M</a>",
      • 37.45,
      • 37.81,
      • 37.78,
      • 0.05,
      • 37.83,
      • 2,
      • 37.1,
      • 33.98,
      • 8.86,
      • 39.1,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen2ForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • 73,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-3-27b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">gemma-3-27b-it</a>",
      • 37.23,
      • 43.71,
      • 43.69,
      • 0.08,
      • 43.73,
      • 1,
      • 30.74,
      • 28.41,
      • 5.92,
      • 31.83,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • 74,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-32B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen2.5-32B-Instruct</a>",
      • 37.09,
      • 41.28,
      • 40.48,
      • 1.2,
      • 41.86,
      • 2,
      • 32.9,
      • 31.84,
      • 2.78,
      • 33.44,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • 75,
      • "🔵🔵",
      • "🔟",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Nemo-Instruct-2407" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Mistral-Nemo-Instruct-2407</a>",
      • 36.89,
      • 19.82,
      • 19.06,
      • 0.82,
      • 20.01,
      • 3,
      • 53.96,
      • 53.75,
      • 0.4,
      • 54.21,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "MistralForCausalLM",
      • "?",
      • 13,
      • 0,
      • true,
      • ""
      ],
    • [
      • 76,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-30B-A3B-Instruct-2507" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen3-30B-A3B-Instruct-2507</a>",
      • 36.63,
      • 39.65,
      • 39.42,
      • 0.65,
      • 39.8,
      • 1,
      • 33.6,
      • 32.87,
      • 0.96,
      • 33.98,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen3MoeForCausalLM",
      • "?",
      • 31,
      • 0,
      • true,
      • ""
      ],
    • [
      • 77,
      • "🔵🔵",
      • "🔟",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Nemo-Instruct-2407" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Mistral-Nemo-Instruct-2407</a>",
      • 36.6,
      • 19.89,
      • 17.25,
      • 3.34,
      • 20.57,
      • 2,
      • 53.31,
      • 53.27,
      • 0.07,
      • 53.35,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "MistralForCausalLM",
      • "?",
      • 13,
      • 0,
      • true,
      • ""
      ],
    • [
      • 78,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">DeepSeek-R1-Distill-Qwen-32B</a>",
      • 36.46,
      • 38.49,
      • 34.85,
      • 9.38,
      • 41.02,
      • 3,
      • 34.43,
      • 34.21,
      • 0.58,
      • 34.55,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • 79,
      • "🔵🔵",
      • "🔟",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/tiiuae/Falcon3-10B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Falcon3-10B-Instruct</a>",
      • 36.36,
      • 36.93,
      • 35.91,
      • 2.79,
      • 37.55,
      • 2,
      • 35.79,
      • 34.48,
      • 1.79,
      • 36.55,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "LlamaForCausalLM",
      • "?",
      • 11,
      • 0,
      • true,
      • ""
      ],
    • [
      • 80,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-27b-text-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">medgemma-27b-text-it</a>",
      • 36.36,
      • 44.9,
      • 44.4,
      • 0.79,
      • 45.31,
      • 2,
      • 27.82,
      • 23.36,
      • 11,
      • 29.71,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • 81,
      • "🔵🔵",
      • "🔟",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Nemo-Instruct-2407" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Mistral-Nemo-Instruct-2407</a>",
      • 35.64,
      • 20.94,
      • 18.32,
      • 5.17,
      • 21.66,
      • 2,
      • 50.35,
      • 50.3,
      • 0.09,
      • 50.4,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "MistralForCausalLM",
      • "?",
      • 13,
      • 0,
      • true,
      • ""
      ],
    • [
      • 82,
      • "🔵",
      • "🔟",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MediPhi-Instruct</a>",
      • 35.3,
      • 25.8,
      • 22.91,
      • 6.63,
      • 26.86,
      • 2,
      • 44.79,
      • 44.47,
      • 0.51,
      • 45.06,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • 83,
      • "🔵🔵",
      • "🔟",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/tiiuae/Falcon3-10B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Falcon3-10B-Instruct</a>",
      • 34.9,
      • 53.4,
      • 51.45,
      • 7.03,
      • 55.86,
      • 2,
      • 58.1,
      • 57.3,
      • 2.71,
      • 59.28,
      • 3,
      • 24.74,
      • 16.53,
      • 12.29,
      • 27.92,
      • 1,
      • 15.62,
      • 10.81,
      • 5.07,
      • 16.58,
      • 2,
      • 22.61,
      • 11.21,
      • 13.55,
      • 26.78,
      • 2,
      • "LlamaForCausalLM",
      • "?",
      • 11,
      • 0,
      • true,
      • ""
      ],
    • [
      • 84,
      • "🔵",
      • "🔟",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Clinical" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MediPhi-Clinical</a>",
      • 34.62,
      • 11.69,
      • 11.26,
      • 0.43,
      • 11.75,
      • 1,
      • 50.28,
      • 50.08,
      • 0.42,
      • 50.49,
      • 3,
      • 32.68,
      • 31.89,
      • 1.29,
      • 33.07,
      • 2,
      • 35.53,
      • 28.79,
      • 10.87,
      • 40.02,
      • 3,
      • 42.91,
      • 37.22,
      • 10.15,
      • 48.2,
      • 3,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • 85,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">DeepSeek-R1-Distill-Qwen-32B</a>",
      • 34.34,
      • 41.16,
      • 41.16,
      • 0.01,
      • 41.16,
      • 1,
      • 27.52,
      • 26.04,
      • 3.57,
      • 28.1,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • 86,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-32B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen2.5-32B-Instruct</a>",
      • 34.07,
      • 43.43,
      • 40.74,
      • 4.3,
      • 45.69,
      • 2,
      • 24.71,
      • 24.28,
      • 1.01,
      • 24.86,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • 87,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">DeepSeek-R1-Distill-Qwen-32B</a>",
      • 33.89,
      • 40.37,
      • 39.59,
      • 1.14,
      • 40.91,
      • 2,
      • 27.42,
      • 25.21,
      • 5.33,
      • 28.29,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • 88,
      • "🔵",
      • "🔟",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-4b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">medgemma-4b-it</a>",
      • 33.59,
      • 18.86,
      • 17.53,
      • 1.63,
      • 19.17,
      • 2,
      • 55.32,
      • 54.54,
      • 1.56,
      • 56.33,
      • 1,
      • 13.2,
      • 10.96,
      • 2.48,
      • 13.55,
      • 2,
      • 43.55,
      • 35.24,
      • 30.05,
      • 52.89,
      • 2,
      • 37,
      • 28.91,
      • 24.58,
      • 43.14,
      • 1,
      • "?",
      • "?",
      • 5,
      • 0,
      • false,
      • ""
      ],
    • [
      • 89,
      • "🔵",
      • "🔟",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-4b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">medgemma-4b-it</a>",
      • 33.35,
      • 13.64,
      • 11.97,
      • 2.16,
      • 13.91,
      • 3,
      • 49.67,
      • 49.3,
      • 0.88,
      • 50.05,
      • 2,
      • 26.64,
      • 26.46,
      • 0.34,
      • 26.71,
      • 2,
      • 33.02,
      • 24.89,
      • 21.52,
      • 38,
      • 2,
      • 43.8,
      • 42.27,
      • 3.76,
      • 45.05,
      • 1,
      • "?",
      • "?",
      • 5,
      • 0,
      • false,
      • ""
      ],
    • [
      • 90,
      • "🔵",
      • "🔟",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Mistral-7B-Instruct-v0.2</a>",
      • 33.13,
      • 42.71,
      • 39.58,
      • 7.63,
      • 45.3,
      • 2,
      • 52.03,
      • 51.76,
      • 0.49,
      • 52.32,
      • 2,
      • 13.6,
      • 9.17,
      • 6.74,
      • 14.34,
      • 2,
      • 37.96,
      • 23.6,
      • 29.36,
      • 56.95,
      • 2,
      • 19.36,
      • 11.82,
      • 8.32,
      • 21.41,
      • 2,
      • "MistralForCausalLM",
      • "?",
      • 8,
      • 0,
      • true,
      • ""
      ],
    • [
      • 91,
      • "🔵",
      • "🔟",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-4b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">medgemma-4b-it</a>",
      • 32.88,
      • 15.81,
      • 14.53,
      • 2.18,
      • 16.05,
      • 2,
      • 49.96,
      • 49.53,
      • 0.74,
      • 50.39,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 5,
      • 0,
      • false,
      • ""
      ],
    • [
      • 92,
      • "🔵",
      • "🔟",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-4b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">medgemma-4b-it</a>",
      • 32.77,
      • 14.65,
      • 12.6,
      • 2.12,
      • 15.01,
      • 3,
      • 50.89,
      • 50.63,
      • 0.94,
      • 51.17,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 5,
      • 0,
      • false,
      • ""
      ],
    • [
      • 93,
      • "🔵🔵",
      • "🔟",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-32B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen2.5-32B-Instruct</a>",
      • 32.75,
      • 57.08,
      • 56.41,
      • 1.43,
      • 58.01,
      • 1,
      • 70.47,
      • 70.05,
      • 1.27,
      • 71.52,
      • 2,
      • 9.98,
      • 7.62,
      • 3.15,
      • 10.25,
      • 3,
      • 20.35,
      • 10.86,
      • 11.29,
      • 23.22,
      • 1,
      • 5.87,
      • 3.53,
      • 2.2,
      • 6.02,
      • 2,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • 94,
      • "🔵",
      • "🔟",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Clinical" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MediPhi-Clinical</a>",
      • 32.66,
      • 19.84,
      • 14.99,
      • 5.8,
      • 21.14,
      • 1,
      • 52.78,
      • 52.57,
      • 0.55,
      • 53.01,
      • 2,
      • 15.89,
      • 12.99,
      • 4.22,
      • 16.46,
      • 2,
      • 47.01,
      • 41.28,
      • 11.09,
      • 53.65,
      • 3,
      • 27.77,
      • 20.52,
      • 9.78,
      • 31.03,
      • 2,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • 95,
      • "🔵",
      • "🔟",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-4b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">medgemma-4b-it</a>",
      • 31.78,
      • 11.57,
      • 10.55,
      • 1,
      • 11.71,
      • 1,
      • 51.99,
      • 51.93,
      • 0.12,
      • 52.06,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 5,
      • 0,
      • false,
      • ""
      ],
    • [
      • 96,
      • "🔵🔵",
      • "🔟",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-32B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen2.5-32B-Instruct</a>",
      • 31.6,
      • 64.55,
      • 64.4,
      • 0.61,
      • 64.82,
      • 1,
      • 59.8,
      • 58.95,
      • 2.64,
      • 61.13,
      • 3,
      • 14.15,
      • 9.31,
      • 6.84,
      • 15.01,
      • 1,
      • 5.32,
      • 2.86,
      • 2.74,
      • 5.46,
      • 3,
      • 14.17,
      • 6.59,
      • 7.78,
      • 15.57,
      • 2,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • 97,
      • "🔵",
      • "🔟",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Mistral-7B-Instruct-v0.2</a>",
      • 31.47,
      • 20.95,
      • 20.18,
      • 0.86,
      • 21.15,
      • 3,
      • 42,
      • 41.94,
      • 0.17,
      • 42.04,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "MistralForCausalLM",
      • "?",
      • 8,
      • 0,
      • true,
      • ""
      ],
    • [
      • 98,
      • "🔵",
      • "🔟",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Mistral-7B-Instruct-v0.2</a>",
      • 31.27,
      • 22.56,
      • 21.32,
      • 1.74,
      • 22.93,
      • 3,
      • 39.97,
      • 39.51,
      • 1.36,
      • 40.29,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "MistralForCausalLM",
      • "?",
      • 8,
      • 0,
      • true,
      • ""
      ],
    • [
      • 99,
      • "🔵",
      • "🔟",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Mistral-7B-Instruct-v0.2</a>",
      • 29.99,
      • 39.17,
      • 35.92,
      • 8.67,
      • 41.48,
      • 3,
      • 47.8,
      • 47.53,
      • 0.45,
      • 48.05,
      • 3,
      • 19.7,
      • 12.22,
      • 10.49,
      • 21.78,
      • 3,
      • 14.04,
      • 6.4,
      • 7.84,
      • 15.43,
      • 3,
      • 29.26,
      • 13.77,
      • 22.11,
      • 39.29,
      • 2,
      • "MistralForCausalLM",
      • "?",
      • 8,
      • 0,
      • true,
      • ""
      ],
    • [
      • 100,
      • "🔵",
      • "🔟",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MediPhi-Instruct</a>",
      • 29.25,
      • 19.3,
      • 15.5,
      • 4.42,
      • 20.27,
      • 3,
      • 39.19,
      • 38.37,
      • 2.36,
      • 39.73,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • 101,
      • "🔵",
      • "🔟",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Mistral-7B-Instruct-v0.2</a>",
      • 29.21,
      • 22.3,
      • 18.62,
      • 7.03,
      • 23.43,
      • 2,
      • 36.12,
      • 35.48,
      • 0.87,
      • 36.48,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "MistralForCausalLM",
      • "?",
      • 8,
      • 0,
      • true,
      • ""
      ],
    • [
      • 102,
      • "🔵",
      • "🅾️",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Clinical" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MediPhi-Clinical</a>",
      • 29,
      • 29.48,
      • 26.93,
      • 5.11,
      • 30.61,
      • 3,
      • 28.52,
      • 28.3,
      • 0.27,
      • 28.61,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • 103,
      • "🔵",
      • "🔟",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-4b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">medgemma-4b-it</a>",
      • 28.98,
      • 10.88,
      • 10.35,
      • 0.52,
      • 10.95,
      • 1,
      • 47.07,
      • 46.54,
      • 1.77,
      • 47.56,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 5,
      • 0,
      • false,
      • ""
      ],
    • [
      • 104,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/unsloth/phi-4" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">phi-4</a>",
      • 28.71,
      • 30.52,
      • 27.21,
      • 8.46,
      • 32.09,
      • 1,
      • 26.89,
      • 22.4,
      • 10.91,
      • 28.7,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "LlamaForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • 105,
      • "🔵",
      • "🔟",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MediPhi-Instruct</a>",
      • 28.66,
      • 14.49,
      • 13.13,
      • 2.14,
      • 14.73,
      • 3,
      • 42.83,
      • 42.26,
      • 1.76,
      • 43.27,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • 106,
      • "🔵",
      • "🔟",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Clinical" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MediPhi-Clinical</a>",
      • 28.32,
      • 15.92,
      • 13.66,
      • 2.35,
      • 16.36,
      • 3,
      • 40.72,
      • 39.86,
      • 1.26,
      • 41.32,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • 107,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen2.5-14B-Instruct-1M</a>",
      • 27.33,
      • 41.34,
      • 39.57,
      • 2.67,
      • 42.66,
      • 2,
      • 13.32,
      • 12.9,
      • 0.85,
      • 13.39,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen2ForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • 108,
      • "🔵",
      • "🅾️",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-4b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">medgemma-4b-it</a>",
      • 27.15,
      • 27.3,
      • 20.53,
      • 11.7,
      • 30.24,
      • 2,
      • 27,
      • 26.88,
      • 0.29,
      • 27.05,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 5,
      • 0,
      • false,
      • ""
      ],
    • [
      • 109,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen2.5-14B-Instruct-1M</a>",
      • 27.03,
      • 42.18,
      • 41.16,
      • 1.54,
      • 42.94,
      • 2,
      • 11.88,
      • 9.55,
      • 4.58,
      • 12.2,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen2ForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • 110,
      • "🔵🔵",
      • "🔟",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/tiiuae/Falcon3-10B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Falcon3-10B-Instruct</a>",
      • 25.31,
      • 53.89,
      • 52.26,
      • 5.27,
      • 55.97,
      • 3,
      • 57.07,
      • 56.25,
      • 1.95,
      • 58.21,
      • 1,
      • 6.98,
      • 4.06,
      • 2.88,
      • 7.21,
      • 1,
      • 7.56,
      • 5.43,
      • 3.13,
      • 7.74,
      • 3,
      • 1.05,
      • 0.74,
      • 0.31,
      • 1.05,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 11,
      • 0,
      • true,
      • ""
      ],
    • [
      • 111,
      • "🔵",
      • "🔟",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/Henrychur/MMed-Llama-3-8B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MMed-Llama-3-8B</a>",
      • 25.16,
      • 10.5,
      • 10.03,
      • 0.5,
      • 10.55,
      • 2,
      • 39.82,
      • 39.67,
      • 0.44,
      • 39.92,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "LlamaForCausalLM",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • 112,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen2.5-14B-Instruct-1M</a>",
      • 24.78,
      • 42.67,
      • 40.22,
      • 3.82,
      • 44.64,
      • 2,
      • 6.89,
      • 5.86,
      • 1.92,
      • 6.97,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Qwen2ForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • 113,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-3-27b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">gemma-3-27b-it</a>",
      • 24.54,
      • 48.34,
      • 47.37,
      • 3.01,
      • 49.27,
      • 3,
      • 62.74,
      • 60.65,
      • 5.85,
      • 66.97,
      • 2,
      • 11.58,
      • 6.15,
      • 6.17,
      • 12.34,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0.07,
      • 0.02,
      • 0.04,
      • 0.07,
      • 3,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • 114,
      • "🔵",
      • "🔟",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Clinical" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MediPhi-Clinical</a>",
      • 24.26,
      • 8.22,
      • 7.48,
      • 0.78,
      • 8.29,
      • 1,
      • 40.3,
      • 40.21,
      • 0.27,
      • 40.36,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • 115,
      • "🔵",
      • "🔟",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Clinical" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MediPhi-Clinical</a>",
      • 23.94,
      • 7.16,
      • 6.13,
      • 1.08,
      • 7.24,
      • 3,
      • 40.72,
      • 40.24,
      • 1.41,
      • 41.06,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • 116,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-3-27b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">gemma-3-27b-it</a>",
      • 23.82,
      • 46.86,
      • 46.23,
      • 1.06,
      • 47.43,
      • 3,
      • 56.32,
      • 54.9,
      • 3.2,
      • 58.3,
      • 2,
      • 14.6,
      • 9.24,
      • 8.19,
      • 15.59,
      • 1,
      • 1.3,
      • 0.44,
      • 0.76,
      • 1.31,
      • 3,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • 117,
      • "🔵",
      • "🅾️",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MediPhi-Instruct</a>",
      • 23.55,
      • 30.49,
      • 28.8,
      • 2.25,
      • 31.26,
      • 2,
      • 16.61,
      • 15.67,
      • 0.98,
      • 16.8,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • 118,
      • "🔵",
      • "🔟",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/Henrychur/MMed-Llama-3-8B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MMed-Llama-3-8B</a>",
      • 23.49,
      • 7.85,
      • 7.62,
      • 0.38,
      • 7.87,
      • 1,
      • 39.14,
      • 37.2,
      • 2.81,
      • 40.45,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "LlamaForCausalLM",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • 119,
      • "🔵",
      • "🔟",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/Henrychur/MMed-Llama-3-8B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MMed-Llama-3-8B</a>",
      • 23.07,
      • 10.28,
      • 8.85,
      • 1.56,
      • 10.45,
      • 2,
      • 35.86,
      • 35.07,
      • 1.09,
      • 36.32,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "LlamaForCausalLM",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • 120,
      • "🔵",
      • "🅾️",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Clinical" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MediPhi-Clinical</a>",
      • 21.91,
      • 14.77,
      • 10.62,
      • 8.53,
      • 15.54,
      • 1,
      • 29.04,
      • 27.1,
      • 2.4,
      • 29.87,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • 121,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-27b-text-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">medgemma-27b-text-it</a>",
      • 21.71,
      • 47.22,
      • 45.21,
      • 4.43,
      • 49.16,
      • 2,
      • 56.78,
      • 53.52,
      • 9.96,
      • 62.12,
      • 2,
      • 4.58,
      • 1.8,
      • 2.55,
      • 4.72,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • 122,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/unsloth/phi-4" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">phi-4</a>",
      • 21.63,
      • 33.1,
      • 32.77,
      • 0.43,
      • 33.26,
      • 2,
      • 10.16,
      • 5.67,
      • 4.35,
      • 10.7,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "LlamaForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • 123,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-27b-text-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">medgemma-27b-text-it</a>",
      • 21.58,
      • 47.61,
      • 46.81,
      • 2.09,
      • 48.36,
      • 1,
      • 54.17,
      • 50.11,
      • 11.04,
      • 60.35,
      • 2,
      • 6.04,
      • 3.17,
      • 3.12,
      • 6.23,
      • 1,
      • 0.09,
      • 0.03,
      • 0.05,
      • 0.09,
      • 3,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • 124,
      • "🔵",
      • "🅾️",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-2-9b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">gemma-2-9b-it</a>",
      • 21.52,
      • 44.53,
      • 42.98,
      • 2.52,
      • 45.85,
      • 1,
      • 62.85,
      • 61.58,
      • 3.95,
      • 65.24,
      • 2,
      • 0.24,
      • 0.08,
      • 0.14,
      • 0.24,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "?",
      • "?",
      • 10,
      • 0,
      • false,
      • ""
      ],
    • [
      • 125,
      • "🔵",
      • "🅾️",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Mistral-7B-Instruct-v0.2</a>",
      • 21.29,
      • 12.55,
      • 10.06,
      • 2.48,
      • 12.92,
      • 2,
      • 30.03,
      • 29.53,
      • 1.23,
      • 30.24,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "MistralForCausalLM",
      • "?",
      • 8,
      • 0,
      • true,
      • ""
      ],
    • [
      • 126,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/unsloth/phi-4" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">phi-4</a>",
      • 20.93,
      • 38.23,
      • 38.14,
      • 0.15,
      • 38.29,
      • 2,
      • 3.63,
      • 2.79,
      • 0.75,
      • 3.66,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "LlamaForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • 127,
      • "🔵",
      • "🔟",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Clinical" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MediPhi-Clinical</a>",
      • 20.5,
      • 7.04,
      • 6.06,
      • 1.56,
      • 7.11,
      • 3,
      • 33.97,
      • 33.84,
      • 0.16,
      • 34.03,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • 128,
      • "🔵",
      • "🅾️",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-2-9b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">gemma-2-9b-it</a>",
      • 20.26,
      • 42.95,
      • 42.11,
      • 1.47,
      • 43.6,
      • 1,
      • 49.58,
      • 46.04,
      • 11.62,
      • 53.7,
      • 3,
      • 7.52,
      • 2.67,
      • 4.57,
      • 7.94,
      • 2,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 1.25,
      • 0.46,
      • 0.69,
      • 1.26,
      • 2,
      • "?",
      • "?",
      • 10,
      • 0,
      • false,
      • ""
      ],
    • [
      • 129,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/tiiuae/Falcon3-10B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Falcon3-10B-Instruct</a>",
      • 19.31,
      • 14.2,
      • 9.63,
      • 7.37,
      • 15.01,
      • 1,
      • 24.43,
      • 24.14,
      • 0.66,
      • 24.52,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "LlamaForCausalLM",
      • "?",
      • 11,
      • 0,
      • true,
      • ""
      ],
    • [
      • 130,
      • "🔵",
      • "🅾️",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/epfl-llm/meditron-7b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">meditron-7b</a>",
      • 19.22,
      • 14.06,
      • 5.92,
      • 8.37,
      • 15.56,
      • 1,
      • 24.39,
      • 24.26,
      • 0.15,
      • 24.43,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 7,
      • 0,
      • false,
      • ""
      ],
    • [
      • 131,
      • "🔵",
      • "🅾️",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-4b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">medgemma-4b-it</a>",
      • 19.07,
      • 13.73,
      • 12.12,
      • 1.62,
      • 13.99,
      • 2,
      • 24.42,
      • 24.27,
      • 0.35,
      • 24.47,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 5,
      • 0,
      • false,
      • ""
      ],
    • [
      • 132,
      • "🔵",
      • "🔟",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/epfl-llm/meditron-7b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">meditron-7b</a>",
      • 19.03,
      • 5.34,
      • 5.1,
      • 0.42,
      • 5.35,
      • 3,
      • 32.72,
      • 32.22,
      • 0.65,
      • 32.97,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 7,
      • 0,
      • false,
      • ""
      ],
    • [
      • 133,
      • "🔵",
      • "🔟",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MediPhi-Instruct</a>",
      • 18.66,
      • 7.69,
      • 6.76,
      • 1.01,
      • 7.77,
      • 3,
      • 29.64,
      • 28.81,
      • 1.02,
      • 29.99,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • 134,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Nemo-Instruct-2407" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Mistral-Nemo-Instruct-2407</a>",
      • 18.51,
      • 29.71,
      • 23.26,
      • 8.75,
      • 32.87,
      • 3,
      • 7.31,
      • 7.17,
      • 0.26,
      • 7.32,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "MistralForCausalLM",
      • "?",
      • 13,
      • 0,
      • true,
      • ""
      ],
    • [
      • 135,
      • "🔵",
      • "🅾️",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MediPhi-Instruct</a>",
      • 18.49,
      • 16.77,
      • 12.21,
      • 9.61,
      • 17.76,
      • 1,
      • 20.2,
      • 17.88,
      • 2.54,
      • 20.81,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • 136,
      • "🔵",
      • "🅾️",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-4b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">medgemma-4b-it</a>",
      • 18.27,
      • 10.81,
      • 10.12,
      • 0.67,
      • 10.89,
      • 2,
      • 25.73,
      • 25.69,
      • 0.09,
      • 25.74,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 5,
      • 0,
      • false,
      • ""
      ],
    • [
      • 137,
      • "🔵",
      • "🔟",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/epfl-llm/meditron-7b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">meditron-7b</a>",
      • 18.12,
      • 5.27,
      • 5.02,
      • 0.26,
      • 5.28,
      • 3,
      • 30.98,
      • 30.51,
      • 1.17,
      • 31.19,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 7,
      • 0,
      • false,
      • ""
      ],
    • [
      • 138,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-30B-A3B-Instruct-2507" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen3-30B-A3B-Instruct-2507</a>",
      • 17.76,
      • 42.83,
      • 41.41,
      • 2.2,
      • 43.94,
      • 1,
      • 45.95,
      • 44.45,
      • 4.01,
      • 47.29,
      • 3,
      • 0.03,
      • 0.01,
      • 0.02,
      • 0.03,
      • 1,
      • 0.01,
      • 0,
      • 0.01,
      • 0.01,
      • 3,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "Qwen3MoeForCausalLM",
      • "?",
      • 31,
      • 0,
      • true,
      • ""
      ],
    • [
      • 139,
      • "🔵",
      • "🅾️",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-4b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">medgemma-4b-it</a>",
      • 17.73,
      • 12.95,
      • 11.73,
      • 1.31,
      • 13.14,
      • 2,
      • 22.5,
      • 22.31,
      • 0.42,
      • 22.55,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 5,
      • 0,
      • false,
      • ""
      ],
    • [
      • 140,
      • "🔵",
      • "🅾️",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Mistral-7B-Instruct-v0.2</a>",
      • 17.57,
      • 15.9,
      • 10.76,
      • 5.36,
      • 16.95,
      • 2,
      • 19.24,
      • 18.26,
      • 1.05,
      • 19.47,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "MistralForCausalLM",
      • "?",
      • 8,
      • 0,
      • true,
      • ""
      ],
    • [
      • 141,
      • "🔵",
      • "🔟",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/epfl-llm/meditron-7b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">meditron-7b</a>",
      • 17.22,
      • 4.43,
      • 4.04,
      • 0.36,
      • 4.45,
      • 1,
      • 30.01,
      • 29.93,
      • 0.2,
      • 30.04,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 7,
      • 0,
      • false,
      • ""
      ],
    • [
      • 142,
      • "🔵",
      • "🔟",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/Henrychur/MMed-Llama-3-8B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MMed-Llama-3-8B</a>",
      • 17.12,
      • 20.75,
      • 16.81,
      • 4.98,
      • 21.85,
      • 3,
      • 22.2,
      • 21.42,
      • 1.31,
      • 22.43,
      • 2,
      • 18.77,
      • 17.79,
      • 1.49,
      • 19,
      • 3,
      • 21.94,
      • 15,
      • 8.77,
      • 24.15,
      • 1,
      • 1.93,
      • 1.47,
      • 0.69,
      • 1.94,
      • 3,
      • "LlamaForCausalLM",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • 143,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-30B-A3B-Instruct-2507" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen3-30B-A3B-Instruct-2507</a>",
      • 16.89,
      • 40.56,
      • 39.6,
      • 1.7,
      • 41.23,
      • 2,
      • 43.89,
      • 35.72,
      • 23.61,
      • 53.16,
      • 2,
      • 0.02,
      • 0.01,
      • 0.01,
      • 0.02,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "Qwen3MoeForCausalLM",
      • "?",
      • 31,
      • 0,
      • true,
      • ""
      ],
    • [
      • 144,
      • "🔵",
      • "🅾️",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Clinical" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MediPhi-Clinical</a>",
      • 16.79,
      • 3.91,
      • 3.04,
      • 1.58,
      • 3.95,
      • 1,
      • 29.66,
      • 28.92,
      • 1.84,
      • 29.98,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • 145,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-32B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen2.5-32B-Instruct</a>",
      • 16.73,
      • 46.98,
      • 46.58,
      • 0.72,
      • 47.34,
      • 1,
      • 36.04,
      • 32.79,
      • 4.58,
      • 38.04,
      • 1,
      • 0.57,
      • 0.16,
      • 0.28,
      • 0.57,
      • 2,
      • 0.06,
      • 0.01,
      • 0.03,
      • 0.06,
      • 3,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • 146,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/tiiuae/Falcon3-10B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Falcon3-10B-Instruct</a>",
      • 16.69,
      • 2.58,
      • 1.82,
      • 0.68,
      • 2.6,
      • 2,
      • 30.8,
      • 28.71,
      • 2.66,
      • 31.78,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "LlamaForCausalLM",
      • "?",
      • 11,
      • 0,
      • true,
      • ""
      ],
    • [
      • 147,
      • "🔵",
      • "🅾️",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MediPhi-Instruct</a>",
      • 16.38,
      • 14.29,
      • 13.25,
      • 2.09,
      • 14.46,
      • 1,
      • 18.47,
      • 17.92,
      • 0.59,
      • 18.6,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • 148,
      • "🔵",
      • "🅾️",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Clinical" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MediPhi-Clinical</a>",
      • 15.98,
      • 13.56,
      • 9.77,
      • 3.82,
      • 14.18,
      • 3,
      • 18.41,
      • 17.17,
      • 1.32,
      • 18.69,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • 149,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">DeepSeek-R1-Distill-Qwen-32B</a>",
      • 15.96,
      • 45.38,
      • 44.52,
      • 2,
      • 46.11,
      • 2,
      • 34.44,
      • 32.2,
      • 4.76,
      • 35.68,
      • 2,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • 150,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-32B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen2.5-32B-Instruct</a>",
      • 15.68,
      • 44.44,
      • 43.7,
      • 1.85,
      • 45.05,
      • 1,
      • 33.73,
      • 27.34,
      • 10.57,
      • 37.58,
      • 1,
      • 0.17,
      • 0.04,
      • 0.08,
      • 0.17,
      • 2,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0.08,
      • 0.03,
      • 0.05,
      • 0.08,
      • 3,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • 151,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">DeepSeek-R1-Distill-Qwen-32B</a>",
      • 15.44,
      • 44.64,
      • 44.3,
      • 1.03,
      • 44.92,
      • 2,
      • 32.58,
      • 28.77,
      • 8.01,
      • 34.59,
      • 2,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • 152,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Nemo-Instruct-2407" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Mistral-Nemo-Instruct-2407</a>",
      • 15.38,
      • 22.16,
      • 19.05,
      • 7,
      • 23.09,
      • 1,
      • 8.59,
      • 8.42,
      • 0.32,
      • 8.61,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "MistralForCausalLM",
      • "?",
      • 13,
      • 0,
      • true,
      • ""
      ],
    • [
      • 153,
      • "🔵",
      • "🅾️",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MediPhi-Instruct</a>",
      • 15.26,
      • 17.76,
      • 11.84,
      • 6.5,
      • 19.16,
      • 3,
      • 12.77,
      • 11.59,
      • 2.33,
      • 12.94,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • 154,
      • "🔵",
      • "🅾️",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Mistral-7B-Instruct-v0.2</a>",
      • 14.88,
      • 8.31,
      • 7.82,
      • 0.46,
      • 8.35,
      • 2,
      • 21.46,
      • 21.44,
      • 0.02,
      • 21.46,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "MistralForCausalLM",
      • "?",
      • 8,
      • 0,
      • true,
      • ""
      ],
    • [
      • 155,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen2.5-14B-Instruct-1M</a>",
      • 14.48,
      • 41.31,
      • 40.75,
      • 1.38,
      • 41.72,
      • 3,
      • 31.08,
      • 25,
      • 11.73,
      • 34.25,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0.02,
      • 0.01,
      • 0.01,
      • 0.02,
      • 3,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • 156,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/tiiuae/Falcon3-10B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Falcon3-10B-Instruct</a>",
      • 14.25,
      • 8.36,
      • 5.06,
      • 3.22,
      • 8.67,
      • 3,
      • 20.14,
      • 15.85,
      • 9.44,
      • 21.3,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "LlamaForCausalLM",
      • "?",
      • 11,
      • 0,
      • true,
      • ""
      ],
    • [
      • 157,
      • "🔵",
      • "🅾️",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Mistral-7B-Instruct-v0.2</a>",
      • 14.12,
      • 9.54,
      • 5.91,
      • 3.51,
      • 9.94,
      • 3,
      • 18.7,
      • 17.05,
      • 1.77,
      • 19.09,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "MistralForCausalLM",
      • "?",
      • 8,
      • 0,
      • true,
      • ""
      ],
    • [
      • 158,
      • "🔵",
      • "🔟",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/Henrychur/MMed-Llama-3-8B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MMed-Llama-3-8B</a>",
      • 13.61,
      • 17.07,
      • 13.31,
      • 4.16,
      • 17.89,
      • 3,
      • 39.15,
      • 37.53,
      • 3.96,
      • 40.23,
      • 2,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 11.73,
      • 10.43,
      • 1.96,
      • 11.9,
      • 3,
      • 0.1,
      • 0.07,
      • 0.04,
      • 0.1,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • 159,
      • "🔵",
      • "🔟",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/Henrychur/MMed-Llama-3-8B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MMed-Llama-3-8B</a>",
      • 13.4,
      • 10.44,
      • 9.7,
      • 1.29,
      • 10.53,
      • 2,
      • 16.35,
      • 14.74,
      • 3.34,
      • 16.67,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "LlamaForCausalLM",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • 160,
      • "🔵",
      • "🅾️",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Clinical" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MediPhi-Clinical</a>",
      • 13.33,
      • 36.62,
      • 32.48,
      • 8.66,
      • 39.29,
      • 2,
      • 29.96,
      • 27.86,
      • 2.94,
      • 30.89,
      • 2,
      • 0.03,
      • 0.01,
      • 0.02,
      • 0.03,
      • 2,
      • 0.03,
      • 0.01,
      • 0.02,
      • 0.03,
      • 3,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • 161,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Nemo-Instruct-2407" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Mistral-Nemo-Instruct-2407</a>",
      • 13.28,
      • 36.12,
      • 34.72,
      • 2.27,
      • 36.94,
      • 1,
      • 26.3,
      • 22.71,
      • 5.1,
      • 27.67,
      • 1,
      • 3.75,
      • 1.29,
      • 2.21,
      • 3.85,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0.24,
      • 0.08,
      • 0.14,
      • 0.24,
      • 2,
      • "MistralForCausalLM",
      • "?",
      • 13,
      • 0,
      • true,
      • ""
      ],
    • [
      • 162,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/unsloth/phi-4" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">phi-4</a>",
      • 13.21,
      • 26.43,
      • 20.11,
      • 10.03,
      • 29.01,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "LlamaForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • 163,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/tiiuae/Falcon3-10B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Falcon3-10B-Instruct</a>",
      • 13.16,
      • 36.29,
      • 32.8,
      • 9.73,
      • 38.48,
      • 3,
      • 28.95,
      • 26.58,
      • 3.66,
      • 29.96,
      • 3,
      • 0.55,
      • 0.18,
      • 0.32,
      • 0.55,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 11,
      • 0,
      • true,
      • ""
      ],
    • [
      • 164,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/unsloth/phi-4" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">phi-4</a>",
      • 13.05,
      • 36.82,
      • 35.89,
      • 2.06,
      • 37.37,
      • 2,
      • 28.42,
      • 17.17,
      • 17.14,
      • 34.28,
      • 3,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • 165,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen2.5-14B-Instruct-1M</a>",
      • 13,
      • 41.08,
      • 40.16,
      • 2.16,
      • 41.73,
      • 1,
      • 23.86,
      • 21.37,
      • 3.88,
      • 24.67,
      • 1,
      • 0.07,
      • 0.02,
      • 0.04,
      • 0.07,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • 166,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Nemo-Instruct-2407" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Mistral-Nemo-Instruct-2407</a>",
      • 12.71,
      • 20.59,
      • 18.23,
      • 2.6,
      • 21.23,
      • 1,
      • 4.82,
      • 4.68,
      • 0.25,
      • 4.83,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "MistralForCausalLM",
      • "?",
      • 13,
      • 0,
      • true,
      • ""
      ],
    • [
      • 167,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/tiiuae/Falcon3-10B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Falcon3-10B-Instruct</a>",
      • 12.61,
      • 0.58,
      • 0.51,
      • 0.06,
      • 0.58,
      • 2,
      • 24.63,
      • 22.97,
      • 3.85,
      • 25.19,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "LlamaForCausalLM",
      • "?",
      • 11,
      • 0,
      • true,
      • ""
      ],
    • [
      • 168,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Nemo-Instruct-2407" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Mistral-Nemo-Instruct-2407</a>",
      • 12.57,
      • 16.79,
      • 15.96,
      • 1.73,
      • 16.96,
      • 1,
      • 8.35,
      • 7.38,
      • 0.92,
      • 8.44,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "MistralForCausalLM",
      • "?",
      • 13,
      • 0,
      • true,
      • ""
      ],
    • [
      • 169,
      • "🔵",
      • "🅾️",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Mistral-7B-Instruct-v0.2</a>",
      • 11.81,
      • 34.37,
      • 30.08,
      • 11.39,
      • 36.88,
      • 1,
      • 24.66,
      • 22.78,
      • 2.17,
      • 25.29,
      • 1,
      • 0.02,
      • 0.01,
      • 0.01,
      • 0.02,
      • 2,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "MistralForCausalLM",
      • "?",
      • 8,
      • 0,
      • true,
      • ""
      ],
    • [
      • 170,
      • "🔵",
      • "🅾️",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-4b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">medgemma-4b-it</a>",
      • 11.78,
      • 31.25,
      • 28.52,
      • 6.56,
      • 32.57,
      • 2,
      • 27.06,
      • 26.25,
      • 1.17,
      • 27.37,
      • 3,
      • 0.61,
      • 0.39,
      • 0.34,
      • 0.61,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "?",
      • "?",
      • 5,
      • 0,
      • false,
      • ""
      ],
    • [
      • 171,
      • "🔵",
      • "🅾️",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MediPhi-Instruct</a>",
      • 11.73,
      • 35.96,
      • 29.82,
      • 16.02,
      • 40.06,
      • 2,
      • 22.14,
      • 15.99,
      • 8.25,
      • 24.1,
      • 2,
      • 0.45,
      • 0.15,
      • 0.26,
      • 0.45,
      • 2,
      • 0.12,
      • 0.04,
      • 0.07,
      • 0.12,
      • 3,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • 172,
      • "🔵",
      • "🅾️",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MediPhi-Instruct</a>",
      • 11.24,
      • 30.53,
      • 26.23,
      • 8.1,
      • 32.61,
      • 3,
      • 25.28,
      • 20.23,
      • 10.08,
      • 27.17,
      • 3,
      • 0.38,
      • 0.13,
      • 0.22,
      • 0.38,
      • 2,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • 173,
      • "🔵",
      • "🅾️",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/Henrychur/MMed-Llama-3-8B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MMed-Llama-3-8B</a>",
      • 11.18,
      • 17.82,
      • 12.78,
      • 5.39,
      • 19,
      • 2,
      • 4.55,
      • 4.38,
      • 0.16,
      • 4.56,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "LlamaForCausalLM",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • 174,
      • "🔵",
      • "🅾️",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-4b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">medgemma-4b-it</a>",
      • 10.75,
      • 22.9,
      • 17.68,
      • 9.48,
      • 24.61,
      • 2,
      • 30.85,
      • 29.29,
      • 2.73,
      • 31.57,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "?",
      • "?",
      • 5,
      • 0,
      • false,
      • ""
      ],
    • [
      • 175,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Nemo-Instruct-2407" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Mistral-Nemo-Instruct-2407</a>",
      • 10.6,
      • 27.11,
      • 23.65,
      • 4.94,
      • 28.49,
      • 1,
      • 25.6,
      • 19.6,
      • 7.56,
      • 27.92,
      • 1,
      • 0.29,
      • 0.1,
      • 0.17,
      • 0.29,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "MistralForCausalLM",
      • "?",
      • 13,
      • 0,
      • true,
      • ""
      ],
    • [
      • 176,
      • "🔵",
      • "🅾️",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Clinical" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MediPhi-Clinical</a>",
      • 9.63,
      • 13.85,
      • 7.91,
      • 6.72,
      • 14.89,
      • 1,
      • 33.67,
      • 33.08,
      • 0.86,
      • 33.97,
      • 1,
      • 0.64,
      • 0.21,
      • 0.37,
      • 0.64,
      • 2,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • 177,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/unsloth/phi-4" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">phi-4</a>",
      • 9.44,
      • 41.65,
      • 40.9,
      • 1.12,
      • 42.19,
      • 2,
      • 5.55,
      • 2.75,
      • 2.87,
      • 5.72,
      • 3,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • 178,
      • "🔵🔵",
      • "🅾️",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/tiiuae/Falcon3-10B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Falcon3-10B-Instruct</a>",
      • 9.23,
      • 22.74,
      • 18.65,
      • 4.78,
      • 24.04,
      • 1,
      • 23.38,
      • 20.11,
      • 6.52,
      • 24.44,
      • 3,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 11,
      • 0,
      • true,
      • ""
      ],
    • [
      • 179,
      • "🔵",
      • "🔟",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/epfl-llm/meditron-7b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">meditron-7b</a>",
      • 8.85,
      • 9.61,
      • 8.96,
      • 0.68,
      • 9.68,
      • 3,
      • 34.65,
      • 32.88,
      • 2.87,
      • 35.63,
      • 2,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "?",
      • "?",
      • 7,
      • 0,
      • false,
      • ""
      ],
    • [
      • 180,
      • "🔵",
      • "🅾️",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/epfl-llm/meditron-7b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">meditron-7b</a>",
      • 8.78,
      • 5.89,
      • 4.44,
      • 2.66,
      • 5.98,
      • 1,
      • 11.68,
      • 9.51,
      • 4.26,
      • 11.97,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 7,
      • 0,
      • false,
      • ""
      ],
    • [
      • 181,
      • "🔵",
      • "🅾️",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/Henrychur/MMed-Llama-3-8B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MMed-Llama-3-8B</a>",
      • 8.38,
      • 31.26,
      • 26.04,
      • 11.48,
      • 33.94,
      • 2,
      • 10.65,
      • 9.18,
      • 2.51,
      • 10.83,
      • 3,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • 182,
      • "🔵",
      • "🅾️",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/Henrychur/MMed-Llama-3-8B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MMed-Llama-3-8B</a>",
      • 8.31,
      • 10.43,
      • 8.63,
      • 3.1,
      • 10.65,
      • 3,
      • 6.19,
      • 6.11,
      • 0.16,
      • 6.2,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "LlamaForCausalLM",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • 183,
      • "🔵",
      • "🅾️",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/Henrychur/MMed-Llama-3-8B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MMed-Llama-3-8B</a>",
      • 8.1,
      • 12.41,
      • 8.91,
      • 3.59,
      • 12.93,
      • 2,
      • 3.79,
      • 3.79,
      • 0.01,
      • 3.79,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "LlamaForCausalLM",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • 184,
      • "🔵",
      • "🅾️",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Mistral-7B-Instruct-v0.2</a>",
      • 7.92,
      • 12.69,
      • 5.62,
      • 7.11,
      • 13.82,
      • 1,
      • 26.89,
      • 24.33,
      • 3.81,
      • 27.88,
      • 1,
      • 0.02,
      • 0.01,
      • 0.01,
      • 0.02,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "MistralForCausalLM",
      • "?",
      • 8,
      • 0,
      • true,
      • ""
      ],
    • [
      • 185,
      • "🔵",
      • "🔟",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/HiTZ/Medical-mT5-large" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Medical-mT5-large</a>",
      • 7.92,
      • 0.4,
      • 0.32,
      • 0.09,
      • 0.4,
      • 1,
      • 15.43,
      • 15.16,
      • 0.28,
      • 15.48,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "MT5ForConditionalGeneration",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • 186,
      • "🔵",
      • "🔟",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/HiTZ/Medical-mT5-large" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Medical-mT5-large</a>",
      • 7.78,
      • 0.93,
      • 0.8,
      • 0.11,
      • 0.93,
      • 3,
      • 14.63,
      • 14.22,
      • 0.84,
      • 14.7,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "MT5ForConditionalGeneration",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • 187,
      • "🔵",
      • "🔟",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/HiTZ/Medical-mT5-large" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Medical-mT5-large</a>",
      • 7.58,
      • 0.38,
      • 0.27,
      • 0.09,
      • 0.38,
      • 1,
      • 14.79,
      • 14.43,
      • 0.72,
      • 14.85,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "MT5ForConditionalGeneration",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • 188,
      • "🔵",
      • "🔟",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/HiTZ/Medical-mT5-large" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Medical-mT5-large</a>",
      • 7.39,
      • 0.24,
      • 0.1,
      • 0.12,
      • 0.24,
      • 1,
      • 14.54,
      • 14.48,
      • 0.12,
      • 14.55,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "MT5ForConditionalGeneration",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • 189,
      • "🔵",
      • "🅾️",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/epfl-llm/meditron-7b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">meditron-7b</a>",
      • 6.37,
      • 0.76,
      • 0.25,
      • 0.44,
      • 0.76,
      • 3,
      • 11.98,
      • 11.61,
      • 0.36,
      • 12.03,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 7,
      • 0,
      • false,
      • ""
      ],
    • [
      • 190,
      • "🔵",
      • "🅾️",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/epfl-llm/meditron-7b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">meditron-7b</a>",
      • 5.05,
      • 17.64,
      • 10.72,
      • 9.69,
      • 19.29,
      • 2,
      • 7.63,
      • 6.39,
      • 1.23,
      • 7.73,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "?",
      • "?",
      • 7,
      • 0,
      • false,
      • ""
      ],
    • [
      • 191,
      • "🔵",
      • "🅾️",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/Henrychur/MMed-Llama-3-8B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MMed-Llama-3-8B</a>",
      • 5.01,
      • 20.73,
      • 14.13,
      • 8.03,
      • 22.66,
      • 2,
      • 4.34,
      • 4.16,
      • 0.28,
      • 4.35,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • 192,
      • "🔵",
      • "🅾️",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/epfl-llm/meditron-7b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">meditron-7b</a>",
      • 4.51,
      • 0.36,
      • 0.34,
      • 0.03,
      • 0.36,
      • 1,
      • 8.66,
      • 7.78,
      • 1.66,
      • 8.74,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 7,
      • 0,
      • false,
      • ""
      ],
    • [
      • 193,
      • "🔵",
      • "🔟",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/epfl-llm/meditron-7b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">meditron-7b</a>",
      • 4.33,
      • 7.2,
      • 6.92,
      • 0.3,
      • 7.22,
      • 1,
      • 14.44,
      • 12.45,
      • 3.83,
      • 14.79,
      • 2,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "?",
      • "?",
      • 7,
      • 0,
      • false,
      • ""
      ],
    • [
      • 194,
      • "🔵",
      • "🅾️",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/HiTZ/Medical-mT5-large" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Medical-mT5-large</a>",
      • 4.33,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 8.65,
      • 7.12,
      • 2.92,
      • 8.8,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "MT5ForConditionalGeneration",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • 195,
      • "🔵",
      • "🅾️",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/HiTZ/Medical-mT5-large" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Medical-mT5-large</a>",
      • 4.26,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 8.51,
      • 7.7,
      • 1.55,
      • 8.59,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "MT5ForConditionalGeneration",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • 196,
      • "🔵",
      • "🅾️",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/HiTZ/Medical-mT5-large" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Medical-mT5-large</a>",
      • 3.86,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 7.72,
      • 7.11,
      • 1.14,
      • 7.77,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "MT5ForConditionalGeneration",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • 197,
      • "🔵",
      • "🔟",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/HiTZ/Medical-mT5-large" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Medical-mT5-large</a>",
      • 3.65,
      • 0.64,
      • 0.48,
      • 0.15,
      • 0.64,
      • 2,
      • 17.6,
      • 16.93,
      • 0.79,
      • 17.74,
      • 2,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "MT5ForConditionalGeneration",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • 198,
      • "🔵",
      • "🅾️",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/Henrychur/MMed-Llama-3-8B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">MMed-Llama-3-8B</a>",
      • 3.32,
      • 2.77,
      • 1.74,
      • 0.92,
      • 2.8,
      • 2,
      • 3.87,
      • 3.85,
      • 0.04,
      • 3.87,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "LlamaForCausalLM",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • 199,
      • "🔵",
      • "🔟",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/HiTZ/Medical-mT5-large" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Medical-mT5-large</a>",
      • 2.9,
      • 0.49,
      • 0.31,
      • 0.17,
      • 0.49,
      • 3,
      • 14.01,
      • 13.17,
      • 1,
      • 14.15,
      • 2,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "MT5ForConditionalGeneration",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • 200,
      • "🔵",
      • "🅾️",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/epfl-llm/meditron-7b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">meditron-7b</a>",
      • 2.63,
      • 4.88,
      • 3.13,
      • 2.72,
      • 4.97,
      • 3,
      • 8.28,
      • 6.12,
      • 2.21,
      • 8.48,
      • 3,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "?",
      • "?",
      • 7,
      • 0,
      • false,
      • ""
      ],
    • [
      • 201,
      • "🔵",
      • "🅾️",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/HiTZ/Medical-mT5-large" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Medical-mT5-large</a>",
      • 2.15,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 4.3,
      • 3.08,
      • 1.11,
      • 4.36,
      • 2,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "MT5ForConditionalGeneration",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • 202,
      • "🔵",
      • "🅾️",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/HiTZ/Medical-mT5-large" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Medical-mT5-large</a>",
      • 1.82,
      • 0,
      • 0,
      • 0,
      • 0,
      • 2,
      • 9.1,
      • 8.12,
      • 0.94,
      • 9.2,
      • 2,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "MT5ForConditionalGeneration",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • 203,
      • "🔵",
      • "🅾️",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/HiTZ/Medical-mT5-large" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Medical-mT5-large</a>",
      • 1.81,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 9.06,
      • 5.78,
      • 3.2,
      • 9.4,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "MT5ForConditionalGeneration",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • 204,
      • "🔵",
      • "🔟",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/epfl-llm/meditron-7b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">meditron-7b</a>",
      • 0,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "n/a",
      • "?",
      • "?",
      • 7,
      • 0,
      • false,
      • ""
      ]
    ],
  • "metadata": null
}

***This project has been funded by the European Union under:

   Horizon Europe eCREAM Project (Grant Agreement No.101057726)