{
  "apiVersion": "v1",
  "methodology": "https://sourcescore.org/methodology/",
  "canonical": "https://sourcescore.org/claims/f14acb906ba6c12f/",
  "claim": {
    "vertical": "ai-ml",
    "subject": "Knowledge Distillation",
    "predicate": "popularized_in",
    "object": "Hinton, Vinyals, Dean 2015 — distilling the knowledge in a neural network",
    "confidence": 1,
    "sources": [
      {
        "url": "https://arxiv.org/abs/1503.02531",
        "title": "Distilling the Knowledge in a Neural Network",
        "publisher": "arXiv (Hinton, Vinyals, Dean / Google)",
        "publishedDate": "2015-03-09",
        "accessedDate": "2026-05-16",
        "type": "preprint",
        "excerpt": "A very simple way to improve the performance of almost any machine learning algorithm is to train many different models on the same data and then to average their predictions. Unfortunately, making predictions using a whole ensemble of models is cumbersome and may be too computationally expensive. We show that it is possible to compress the knowledge in an ensemble into a single model which is much easier to deploy."
      },
      {
        "url": "https://arxiv.org/pdf/1503.02531",
        "title": "Distilling the Knowledge in a Neural Network (PDF)",
        "publisher": "arXiv",
        "publishedDate": "2015-03-09",
        "accessedDate": "2026-05-16",
        "type": "preprint"
      }
    ],
    "publishedAt": "2026-05-16T00:00:00Z",
    "lastVerified": "2026-05-16",
    "methodologyVersion": "veritas-v0.1",
    "tags": [
      "knowledge-distillation",
      "hinton",
      "google",
      "compression",
      "foundational",
      "2015",
      "popularized_in"
    ],
    "id": "f14acb906ba6c12f",
    "statement": "Knowledge Distillation popularized in: Hinton, Vinyals, Dean 2015 — distilling the knowledge in a neural network."
  },
  "signature": {
    "algorithm": "HMAC-SHA256",
    "signedBy": "did:web:sourcescore.org",
    "signedAt": "2026-05-17T00:00:00.000Z",
    "signature": "5795ad458a17f21ca595292b931e76a630d5fdaee398d932d23485b75d71ddfb"
  },
  "citedAs": "Knowledge Distillation popularized in: Hinton, Vinyals, Dean 2015 — distilling the knowledge in a neural network. — SourceScore Claim f14acb906ba6c12f (verified 2026-05-16, signed 5795ad45…). https://sourcescore.org/claims/f14acb906ba6c12f/"
}