{
  "apiVersion": "v1",
  "methodology": "https://sourcescore.org/methodology/",
  "canonical": "https://sourcescore.org/claims/245af747a3d21061/",
  "claim": {
    "vertical": "ai-ml",
    "subject": "DistilBERT",
    "predicate": "introduced_in",
    "object": "Sanh et al. 2019 — a smaller, faster, cheaper BERT via knowledge distillation",
    "confidence": 1,
    "sources": [
      {
        "url": "https://arxiv.org/abs/1910.01108",
        "title": "DistilBERT, a distilled version of BERT: smaller, faster, cheaper and lighter",
        "publisher": "arXiv (Sanh, Debut, Chaumond, Wolf / Hugging Face)",
        "publishedDate": "2019-10-02",
        "accessedDate": "2026-05-16",
        "type": "preprint",
        "excerpt": "We introduce a method to pre-train a smaller general-purpose language representation model, called DistilBERT, which can then be fine-tuned with good performances on a wide range of tasks like its larger counterparts. We show that it is possible to reduce the size of a BERT model by 40%, while retaining 97% of its language understanding capabilities and being 60% faster."
      },
      {
        "url": "https://huggingface.co/docs/transformers/model_doc/distilbert",
        "title": "DistilBERT — Hugging Face Transformers documentation",
        "publisher": "Hugging Face",
        "publishedDate": "2019-10-02",
        "accessedDate": "2026-05-16",
        "type": "official-blog"
      }
    ],
    "publishedAt": "2026-05-16T00:00:00Z",
    "lastVerified": "2026-05-16",
    "methodologyVersion": "veritas-v0.1",
    "tags": [
      "distilbert",
      "bert",
      "knowledge-distillation",
      "hugging-face",
      "foundational",
      "2019",
      "introduced_in"
    ],
    "id": "245af747a3d21061",
    "statement": "DistilBERT introduced in: Sanh et al. 2019 — a smaller, faster, cheaper BERT via knowledge distillation."
  },
  "signature": {
    "algorithm": "HMAC-SHA256",
    "signedBy": "did:web:sourcescore.org",
    "signedAt": "2026-05-17T00:00:00.000Z",
    "signature": "43b8ebd002088f974d533a74ec633c2e970dccf0662474ab7104ebda0a5ecabc"
  },
  "citedAs": "DistilBERT introduced in: Sanh et al. 2019 — a smaller, faster, cheaper BERT via knowledge distillation. — SourceScore Claim 245af747a3d21061 (verified 2026-05-16, signed 43b8ebd0…). https://sourcescore.org/claims/245af747a3d21061/"
}