{
  "apiVersion": "v1",
  "methodology": "https://sourcescore.org/methodology/",
  "canonical": "https://sourcescore.org/claims/3d9c14b9379038c9/",
  "claim": {
    "vertical": "ai-ml",
    "subject": "Switch Transformer",
    "predicate": "introduced_in_paper",
    "object": "Switch Transformers: Scaling to Trillion Parameter Models with Simple and Efficient Sparsity (Fedus et al., 2021)",
    "confidence": 1,
    "sources": [
      {
        "url": "https://arxiv.org/abs/2101.03961",
        "title": "Switch Transformers: Scaling to Trillion Parameter Models with Simple and Efficient Sparsity",
        "publisher": "arXiv (Fedus, Zoph, Shazeer)",
        "publishedDate": "2021-01-11",
        "accessedDate": "2026-05-16",
        "type": "preprint",
        "excerpt": "We simplify the Mixture of Experts (MoE) routing algorithm and design intuitive improved models with reduced communication and computational costs."
      },
      {
        "url": "https://www.jmlr.org/papers/v23/21-0998.html",
        "title": "Switch Transformers: Scaling to Trillion Parameter Models with Simple and Efficient Sparsity",
        "publisher": "Journal of Machine Learning Research",
        "publishedDate": "2022-12-01",
        "accessedDate": "2026-05-16",
        "type": "peer-reviewed"
      }
    ],
    "publishedAt": "2026-05-16T00:00:00Z",
    "lastVerified": "2026-05-16",
    "methodologyVersion": "veritas-v0.1",
    "tags": [
      "switch-transformer",
      "moe",
      "foundational",
      "fedus",
      "2021",
      "google"
    ],
    "id": "3d9c14b9379038c9",
    "statement": "Switch Transformer introduced in paper: Switch Transformers: Scaling to Trillion Parameter Models with Simple and Efficient Sparsity (Fedus et al., 2021)."
  },
  "signature": {
    "algorithm": "HMAC-SHA256",
    "signedBy": "did:web:sourcescore.org",
    "signedAt": "2026-05-16T00:00:00.000Z",
    "signature": "ad460a5dca9be060f3e275a878932053d0ae1187f6034b837db8827abbe95b49"
  },
  "citedAs": "Switch Transformer introduced in paper: Switch Transformers: Scaling to Trillion Parameter Models with Simple and Efficient Sparsity (Fedus et al., 2021). — SourceScore Claim 3d9c14b9379038c9 (verified 2026-05-16, signed ad460a5d…). https://sourcescore.org/claims/3d9c14b9379038c9/"
}