{
  "apiVersion": "v1",
  "methodology": "https://sourcescore.org/methodology/",
  "canonical": "https://sourcescore.org/claims/7d3e6a39b1656571/",
  "claim": {
    "vertical": "ai-ml",
    "subject": "GPT-3",
    "predicate": "introduced_in_paper",
    "object": "Language Models are Few-Shot Learners (Brown et al., 2020)",
    "confidence": 1,
    "sources": [
      {
        "url": "https://arxiv.org/abs/2005.14165",
        "title": "Language Models are Few-Shot Learners",
        "publisher": "arXiv (Brown, Mann, Ryder, Subbiah, Kaplan, Dhariwal, Neelakantan, Shyam, Sastry, Askell, et al.)",
        "publishedDate": "2020-05-28",
        "accessedDate": "2026-05-16",
        "type": "preprint",
        "excerpt": "We train GPT-3, an autoregressive language model with 175 billion parameters, 10x more than any previous non-sparse language model, and test its performance in the few-shot setting."
      },
      {
        "url": "https://papers.nips.cc/paper/2020/hash/1457c0d6bfcb4967418bfb8ac142f64a-Abstract.html",
        "title": "Language Models are Few-Shot Learners (NeurIPS 2020)",
        "publisher": "NeurIPS Foundation",
        "publishedDate": "2020-12-06",
        "accessedDate": "2026-05-16",
        "type": "peer-reviewed"
      }
    ],
    "publishedAt": "2026-05-16T00:00:00Z",
    "lastVerified": "2026-05-16",
    "methodologyVersion": "veritas-v0.1",
    "tags": [
      "gpt-3",
      "openai",
      "few-shot",
      "foundational",
      "2020",
      "neurips"
    ],
    "id": "7d3e6a39b1656571",
    "statement": "GPT-3 introduced in paper: Language Models are Few-Shot Learners (Brown et al., 2020)."
  },
  "signature": {
    "algorithm": "HMAC-SHA256",
    "signedBy": "did:web:sourcescore.org",
    "signedAt": "2026-05-16T00:00:00.000Z",
    "signature": "350767d45be16495388ee36fd0737973a66bf87a62bafb7a9503655d89a6ac8b"
  },
  "citedAs": "GPT-3 introduced in paper: Language Models are Few-Shot Learners (Brown et al., 2020). — SourceScore Claim 7d3e6a39b1656571 (verified 2026-05-16, signed 350767d4…). https://sourcescore.org/claims/7d3e6a39b1656571/"
}