{
  "apiVersion": "v1",
  "methodology": "https://sourcescore.org/methodology/",
  "canonical": "https://sourcescore.org/claims/410aec4f418f2b11/",
  "claim": {
    "vertical": "ai-ml",
    "subject": "Mixtral 8x7B",
    "predicate": "released_on",
    "object": "2023-12-11",
    "confidence": 0.95,
    "sources": [
      {
        "url": "https://mistral.ai/news/mixtral-of-experts/",
        "title": "Mixtral of experts — A high quality Sparse Mixture-of-Experts",
        "publisher": "Mistral AI",
        "publishedDate": "2023-12-11",
        "accessedDate": "2026-05-16",
        "type": "official-blog",
        "excerpt": "Today, the team is proud to release Mixtral 8x7B, a high-quality sparse mixture of experts model (SMoE) with open weights."
      },
      {
        "url": "https://huggingface.co/mistralai/Mixtral-8x7B-v0.1",
        "title": "mistralai/Mixtral-8x7B-v0.1 model card",
        "publisher": "Hugging Face",
        "publishedDate": "2023-12-11",
        "accessedDate": "2026-05-16",
        "type": "model-card"
      }
    ],
    "publishedAt": "2026-05-16T00:00:00Z",
    "lastVerified": "2026-05-16",
    "methodologyVersion": "veritas-v0.1",
    "tags": [
      "mixtral",
      "mistral",
      "release",
      "2023",
      "moe",
      "sparse"
    ],
    "id": "410aec4f418f2b11",
    "statement": "Mixtral 8x7B released on: 2023-12-11."
  },
  "signature": {
    "algorithm": "HMAC-SHA256",
    "signedBy": "did:web:sourcescore.org",
    "signedAt": "2026-05-16T00:00:00.000Z",
    "signature": "8346467e8ca776458dddd5bbc2b87fb04cb4eb32626db30d4c31480c5b2873d0"
  },
  "citedAs": "Mixtral 8x7B released on: 2023-12-11. — SourceScore Claim 410aec4f418f2b11 (verified 2026-05-16, signed 8346467e…). https://sourcescore.org/claims/410aec4f418f2b11/"
}