{
  "apiVersion": "v1",
  "methodology": "https://sourcescore.org/methodology/",
  "canonical": "https://sourcescore.org/claims/d4fecb26a4c9cdca/",
  "claim": {
    "vertical": "ai-ml",
    "subject": "RoBERTa",
    "predicate": "introduced_in",
    "object": "Liu et al. 2019 — A Robustly Optimized BERT Pretraining Approach",
    "confidence": 1,
    "sources": [
      {
        "url": "https://arxiv.org/abs/1907.11692",
        "title": "RoBERTa: A Robustly Optimized BERT Pretraining Approach",
        "publisher": "arXiv (Liu, Ott, Goyal, Du, Joshi, Chen, Levy, Lewis, Zettlemoyer, Stoyanov / Facebook AI)",
        "publishedDate": "2019-07-26",
        "accessedDate": "2026-05-16",
        "type": "preprint",
        "excerpt": "We present a replication study of BERT pretraining (Devlin et al., 2019) that carefully measures the impact of many key hyperparameters and training data size. We find that BERT was significantly undertrained, and can match or exceed the performance of every model published after it."
      },
      {
        "url": "https://huggingface.co/docs/transformers/model_doc/roberta",
        "title": "RoBERTa — Hugging Face Transformers documentation",
        "publisher": "Hugging Face",
        "publishedDate": "2019-07-26",
        "accessedDate": "2026-05-16",
        "type": "documentation"
      }
    ],
    "publishedAt": "2026-05-16T00:00:00Z",
    "lastVerified": "2026-05-16",
    "methodologyVersion": "veritas-v0.1",
    "tags": [
      "roberta",
      "bert",
      "facebook-ai",
      "pretraining",
      "foundational",
      "2019",
      "introduced_in"
    ],
    "id": "d4fecb26a4c9cdca",
    "statement": "RoBERTa introduced in: Liu et al. 2019 — A Robustly Optimized BERT Pretraining Approach."
  },
  "signature": {
    "algorithm": "HMAC-SHA256",
    "signedBy": "did:web:sourcescore.org",
    "signedAt": "2026-05-17T00:00:00.000Z",
    "signature": "cce9d64b1333c8408b67c5c65d186b90269fbcc3cf1eaa88a530aef72ad757ca"
  },
  "citedAs": "RoBERTa introduced in: Liu et al. 2019 — A Robustly Optimized BERT Pretraining Approach. — SourceScore Claim d4fecb26a4c9cdca (verified 2026-05-16, signed cce9d64b…). https://sourcescore.org/claims/d4fecb26a4c9cdca/"
}