{
  "apiVersion": "v1",
  "methodology": "https://sourcescore.org/methodology/",
  "canonical": "https://sourcescore.org/claims/4c1ee70007dc89c1/",
  "claim": {
    "vertical": "ai-ml",
    "subject": "BERT (Bidirectional Encoder Representations from Transformers)",
    "predicate": "introduced_in_paper",
    "object": "BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding (Devlin et al., 2018)",
    "confidence": 1,
    "sources": [
      {
        "url": "https://arxiv.org/abs/1810.04805",
        "title": "BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding",
        "publisher": "arXiv (Devlin, Chang, Lee, Toutanova)",
        "publishedDate": "2018-10-11",
        "accessedDate": "2026-05-16",
        "type": "preprint",
        "excerpt": "We introduce a new language representation model called BERT, which stands for Bidirectional Encoder Representations from Transformers."
      },
      {
        "url": "https://aclanthology.org/N19-1423/",
        "title": "BERT (NAACL 2019 proceedings)",
        "publisher": "Association for Computational Linguistics",
        "publishedDate": "2019-06-02",
        "accessedDate": "2026-05-16",
        "type": "peer-reviewed"
      }
    ],
    "publishedAt": "2026-05-16T00:00:00Z",
    "lastVerified": "2026-05-16",
    "methodologyVersion": "veritas-v0.1",
    "tags": [
      "bert",
      "foundational",
      "devlin",
      "2018",
      "google",
      "nlp"
    ],
    "id": "4c1ee70007dc89c1",
    "statement": "BERT (Bidirectional Encoder Representations from Transformers) introduced in paper: BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding (Devlin et al., 2018)."
  },
  "signature": {
    "algorithm": "HMAC-SHA256",
    "signedBy": "did:web:sourcescore.org",
    "signedAt": "2026-05-16T00:00:00.000Z",
    "signature": "46a4f10c2ca68199f550ff862a08144461a920ad16e26cdf15c5ab911682def8"
  },
  "citedAs": "BERT (Bidirectional Encoder Representations from Transformers) introduced in paper: BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding (Devlin et al., 2018). — SourceScore Claim 4c1ee70007dc89c1 (verified 2026-05-16, signed 46a4f10c…). https://sourcescore.org/claims/4c1ee70007dc89c1/"
}
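
The record above is self-describing, but the `signature` block deserves a note: HMAC-SHA256 is a symmetric MAC, so recomputing the tag requires the same secret key SourceScore used when signing, and the canonical serialization of the signed payload is not specified in the record itself. The sketch below shows one plausible verification routine under two stated assumptions: that the signed payload is the `claim` object serialized as canonical JSON (sorted keys, compact separators), and that the verifier holds the shared key out of band. Both are assumptions, not documented SourceScore behavior.

```python
import hashlib
import hmac
import json


def verify_claim_signature(record: dict, shared_key: bytes) -> bool:
    """Recompute the HMAC-SHA256 tag over the claim payload and compare
    it against record["signature"]["signature"] in constant time."""
    # ASSUMPTION: the signed payload is the "claim" object rendered as
    # canonical JSON (sorted keys, no insignificant whitespace). The
    # actual SourceScore canonicalization scheme is not published here.
    payload = json.dumps(
        record["claim"], sort_keys=True, separators=(",", ":")
    ).encode("utf-8")
    expected = hmac.new(shared_key, payload, hashlib.sha256).hexdigest()
    # compare_digest avoids leaking the match position via timing.
    return hmac.compare_digest(expected, record["signature"]["signature"])


# Hypothetical usage, with a key obtained out of band:
# with open("claim-4c1ee70007dc89c1.json") as fh:
#     record = json.load(fh)
# print(verify_claim_signature(record, shared_key=b"..."))
```

Because the scheme is symmetric, a third party without the key cannot verify the tag locally; in practice verification would have to go through whatever endpoint SourceScore exposes for the purpose, or the scheme would need an asymmetric signature (e.g., Ed25519) keyed to the `did:web:sourcescore.org` identifier.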