{
  "apiVersion": "v1",
  "methodology": "https://sourcescore.org/methodology/",
  "canonical": "https://sourcescore.org/claims/ad79b14fafb362cd/",
  "claim": {
    "vertical": "ai-ml",
    "subject": "Mixtral 8x7B",
    "predicate": "architecture",
    "object": "Sparse Mixture-of-Experts (8 experts × 7B params, 2 experts routed per token)",
    "confidence": 1,
    "sources": [
      {
        "url": "https://mistral.ai/news/mixtral-of-experts/",
        "title": "Mixtral of experts",
        "publisher": "Mistral AI",
        "publishedDate": "2023-12-11",
        "accessedDate": "2026-05-16",
        "type": "official-blog",
        "excerpt": "Mixtral has 8 experts in each layer … At every layer, for every token, a router network chooses two of these experts to process the token and combine their output additively."
      },
      {
        "url": "https://arxiv.org/abs/2401.04088",
        "title": "Mixtral of Experts",
        "publisher": "Mistral AI / arXiv",
        "publishedDate": "2024-01-08",
        "accessedDate": "2026-05-16",
        "type": "preprint"
      }
    ],
    "publishedAt": "2026-05-16T00:00:00Z",
    "lastVerified": "2026-05-16",
    "methodologyVersion": "veritas-v0.1",
    "tags": [
      "mixtral",
      "moe",
      "architecture",
      "mistral"
    ],
    "id": "ad79b14fafb362cd",
    "statement": "Mixtral 8x7B architecture: Sparse Mixture-of-Experts (8 experts × 7B params, 2 experts routed per token)."
  },
  "signature": {
    "algorithm": "HMAC-SHA256",
    "signedBy": "did:web:sourcescore.org",
    "signedAt": "2026-05-16T00:00:00.000Z",
    "signature": "dca87e570cdc0b448bf0b4712e015ea400b3428a3737851fd0d1653d101cb2db"
  },
  "citedAs": "Mixtral 8x7B architecture: Sparse Mixture-of-Experts (8 experts × 7B params, 2 experts routed per token). — SourceScore Claim ad79b14fafb362cd (verified 2026-05-16, signed dca87e57…). https://sourcescore.org/claims/ad79b14fafb362cd/"
}
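
A minimal verification sketch in Python follows, under two assumptions the record itself does not specify: that the HMAC-SHA256 tag in signature.signature is computed over the canonical JSON serialization of the claim object (sorted keys, compact separators, UTF-8), and that the verifier holds the signer's shared secret. Note that HMAC is a symmetric MAC, not a public-key signature, so unlike e.g. Ed25519 it cannot be checked from the did:web identifier alone.

# Hypothetical sketch: verify a SourceScore claim record (assumptions noted above).
import hashlib
import hmac
import json

def verify_claim(record: dict, secret: bytes) -> bool:
    # Assumption: the signer MAC'd the canonical JSON of the "claim" object
    # (sorted keys, compact separators, UTF-8). Any canonicalization mismatch
    # with the signer will make verification fail.
    payload = json.dumps(
        record["claim"], sort_keys=True, separators=(",", ":"), ensure_ascii=False
    ).encode("utf-8")
    expected = hmac.new(secret, payload, hashlib.sha256).hexdigest()
    # Compare in constant time against the stored hex tag.
    return hmac.compare_digest(expected, record["signature"]["signature"])

# Usage (secret obtained out of band):
#   with open("claim.json") as f:
#       record = json.load(f)
#   assert verify_claim(record, secret=b"...shared key...")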