{
  "apiVersion": "v1",
  "methodology": "https://sourcescore.org/methodology/",
  "canonical": "https://sourcescore.org/claims/f068236101568ad7/",
  "claim": {
    "vertical": "ai-ml",
    "subject": "Mixture of Experts (MoE) revival",
    "predicate": "popularized_in",
    "object": "Shazeer et al. 2017 — outrageously large neural networks via sparse gating",
    "confidence": 1,
    "sources": [
      {
        "url": "https://arxiv.org/abs/1701.06538",
        "title": "Outrageously Large Neural Networks: The Sparsely-Gated Mixture-of-Experts Layer",
        "publisher": "arXiv (Shazeer, Mirhoseini, Maziarz, Davis, Le, Hinton, Dean / Google Brain)",
        "publishedDate": "2017-01-23",
        "accessedDate": "2026-05-16",
        "type": "preprint",
        "excerpt": "The capacity of a neural network to absorb information is limited by its number of parameters. Conditional computation, where parts of the network are active on a per-example basis, has been proposed in theory as a way of dramatically increasing model capacity without a proportional increase in computation."
      },
      {
        "url": "https://openreview.net/forum?id=B1ckMDqlg",
        "title": "Outrageously Large Neural Networks — ICLR 2017 OpenReview",
        "publisher": "ICLR 2017",
        "publishedDate": "2017-01-23",
        "accessedDate": "2026-05-16",
        "type": "peer-reviewed"
      }
    ],
    "publishedAt": "2026-05-16T00:00:00Z",
    "lastVerified": "2026-05-16",
    "methodologyVersion": "veritas-v0.1",
    "tags": [
      "moe",
      "mixture-of-experts",
      "shazeer",
      "google",
      "foundational",
      "iclr",
      "2017",
      "popularized_in"
    ],
    "id": "f068236101568ad7",
    "statement": "Mixture of Experts (MoE) revival popularized in: Shazeer et al. 2017 — outrageously large neural networks via sparse gating."
  },
  "signature": {
    "algorithm": "HMAC-SHA256",
    "signedBy": "did:web:sourcescore.org",
    "signedAt": "2026-05-17T00:00:00.000Z",
    "signature": "8adc43721fa157a7fb4ad916b9a2f0a50243eb3f4f5ba88d1f44ab6becbd9ba4"
  },
  "citedAs": "Mixture of Experts (MoE) revival popularized in: Shazeer et al. 2017 — outrageously large neural networks via sparse gating. — SourceScore Claim f068236101568ad7 (verified 2026-05-16, signed 8adc4372…). https://sourcescore.org/claims/f068236101568ad7/"
}