Sam Altman Defends Himself From Elon Musk’s Accusations in OpenAI Trial
Former OpenAI Exec Calls Decision to Remove Sam Altman a ‘Hail Mary’ During Musk Trial
OpenAI Back in Court Over Canada School Shooter’s Use of ChatGPT
Are Elon Musk and OpenAI Fighting an AI Arms Race? Sam Altman’s Lawyers Think So
Elon Musk Says Sam Altman Tricked Him Into Funding OpenAI
Elon Musk Takes Aim at OpenAI as Trial Begins: ‘It’s Not OK to Steal a Charity’
How to Unscramble an Omelet in Silicon Valley: The Musk v. Altman Trial That Will Try
California Mom Who Lost Her Son to an AI Chatbot Is Now Fighting to Regulate Them
Suspect Was in Apparent Mental Health Crisis During Attack on Sam Altman’s House
Player sponsored by
window.__IS_SSR__=true
window.__INITIAL_STATE__={
"attachmentsReducer": {
"audio_0": {
"type": "attachments",
"id": "audio_0",
"imgSizes": {
"kqedFullSize": {
"file": "https://ww2.kqed.org/news/wp-content/themes/KQED-unified/img/audio_bgs/background0.jpg"
}
}
},
"audio_1": {
"type": "attachments",
"id": "audio_1",
"imgSizes": {
"kqedFullSize": {
"file": "https://ww2.kqed.org/news/wp-content/themes/KQED-unified/img/audio_bgs/background1.jpg"
}
}
},
"audio_2": {
"type": "attachments",
"id": "audio_2",
"imgSizes": {
"kqedFullSize": {
"file": "https://ww2.kqed.org/news/wp-content/themes/KQED-unified/img/audio_bgs/background2.jpg"
}
}
},
"audio_3": {
"type": "attachments",
"id": "audio_3",
"imgSizes": {
"kqedFullSize": {
"file": "https://ww2.kqed.org/news/wp-content/themes/KQED-unified/img/audio_bgs/background3.jpg"
}
}
},
"audio_4": {
"type": "attachments",
"id": "audio_4",
"imgSizes": {
"kqedFullSize": {
"file": "https://ww2.kqed.org/news/wp-content/themes/KQED-unified/img/audio_bgs/background4.jpg"
}
}
},
"placeholder": {
"type": "attachments",
"id": "placeholder",
"imgSizes": {
"thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-160x107.jpg",
"width": 160,
"height": 107,
"mimeType": "image/jpeg"
},
"medium": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-800x533.jpg",
"width": 800,
"height": 533,
"mimeType": "image/jpeg"
},
"medium_large": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-768x512.jpg",
"width": 768,
"height": 512,
"mimeType": "image/jpeg"
},
"large": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-1020x680.jpg",
"width": 1020,
"height": 680,
"mimeType": "image/jpeg"
},
"1536x1536": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-1536x1024.jpg",
"width": 1536,
"height": 1024,
"mimeType": "image/jpeg"
},
"fd-lrg": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-1536x1024.jpg",
"width": 1536,
"height": 1024,
"mimeType": "image/jpeg"
},
"fd-med": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-1020x680.jpg",
"width": 1020,
"height": 680,
"mimeType": "image/jpeg"
},
"fd-sm": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-800x533.jpg",
"width": 800,
"height": 533,
"mimeType": "image/jpeg"
},
"post-thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-672x372.jpg",
"width": 672,
"height": 372,
"mimeType": "image/jpeg"
},
"twentyfourteen-full-width": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-1038x576.jpg",
"width": 1038,
"height": 576,
"mimeType": "image/jpeg"
},
"xxsmall": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-160x107.jpg",
"width": 160,
"height": 107,
"mimeType": "image/jpeg"
},
"xsmall": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-672x372.jpg",
"width": 672,
"height": 372,
"mimeType": "image/jpeg"
},
"small": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-672x372.jpg",
"width": 672,
"height": 372,
"mimeType": "image/jpeg"
},
"xlarge": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-1020x680.jpg",
"width": 1020,
"height": 680,
"mimeType": "image/jpeg"
},
"full-width": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-1920x1280.jpg",
"width": 1920,
"height": 1280,
"mimeType": "image/jpeg"
},
"guest-author-32": {
"file": "https://cdn.kqed.org/wp-content/uploads/2025/01/KQED-Default-Image-816638274-1333x1333-1-160x160.jpg",
"width": 32,
"height": 32,
"mimeType": "image/jpeg"
},
"guest-author-50": {
"file": "https://cdn.kqed.org/wp-content/uploads/2025/01/KQED-Default-Image-816638274-1333x1333-1-160x160.jpg",
"width": 50,
"height": 50,
"mimeType": "image/jpeg"
},
"guest-author-64": {
"file": "https://cdn.kqed.org/wp-content/uploads/2025/01/KQED-Default-Image-816638274-1333x1333-1-160x160.jpg",
"width": 64,
"height": 64,
"mimeType": "image/jpeg"
},
"guest-author-96": {
"file": "https://cdn.kqed.org/wp-content/uploads/2025/01/KQED-Default-Image-816638274-1333x1333-1-160x160.jpg",
"width": 96,
"height": 96,
"mimeType": "image/jpeg"
},
"guest-author-128": {
"file": "https://cdn.kqed.org/wp-content/uploads/2025/01/KQED-Default-Image-816638274-1333x1333-1-160x160.jpg",
"width": 128,
"height": 128,
"mimeType": "image/jpeg"
},
"detail": {
"file": "https://cdn.kqed.org/wp-content/uploads/2025/01/KQED-Default-Image-816638274-1333x1333-1-160x160.jpg",
"width": 160,
"height": 160,
"mimeType": "image/jpeg"
},
"kqedFullSize": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1.jpg",
"width": 2000,
"height": 1333
}
}
},
"news_12083392": {
"type": "attachments",
"id": "news_12083392",
"meta": {
"index": "attachments_1716263798",
"site": "news",
"id": "12083392",
"found": true
},
"title": "260512-MUSK-ALTMAN-TRIAL-VB-03-KQED",
"publishDate": 1778625867,
"status": "inherit",
"parent": 0,
"modified": 1778625974,
"caption": "Open AI CEO Sam Altman testifies as a video of him is played on a screen in the trial in which Elon Musk claims that Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity rather than solely for profit in Oakland on May 12, 2026. During the brief cross-examination of Altman, the Tesla CEO’s attorney questioned whether or not Altman was trustworthy.\r\n",
"credit": "Vicki Behringer for KQED",
"altTag": null,
"description": null,
"imgSizes": {
"thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-03-KQED-160x90.jpg",
"width": 160,
"height": 90,
"mimeType": "image/jpeg"
},
"1536x1536": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-03-KQED-1536x864.jpg",
"width": 1536,
"height": 864,
"mimeType": "image/jpeg"
},
"post-thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-03-KQED-672x372.jpg",
"width": 672,
"height": 372,
"mimeType": "image/jpeg"
},
"twentyfourteen-full-width": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-03-KQED-1038x576.jpg",
"width": 1038,
"height": 576,
"mimeType": "image/jpeg"
},
"npr-cds-wide": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-03-KQED-1200x675.jpg",
"width": 1200,
"height": 675,
"mimeType": "image/jpeg"
},
"npr-cds-square": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-03-KQED-600x600.jpg",
"width": 600,
"height": 600,
"mimeType": "image/jpeg"
},
"kqedFullSize": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-03-KQED.jpg",
"width": 2000,
"height": 1125
}
},
"fetchFailed": false,
"isLoading": false
},
"news_12083235": {
"type": "attachments",
"id": "news_12083235",
"meta": {
"index": "attachments_1716263798",
"site": "news",
"id": "12083235",
"found": true
},
"title": "Elon Musk v. OpenAI Trial Continues In California",
"publishDate": 1778545779,
"status": "inherit",
"parent": 12083224,
"modified": 1778546322,
"caption": "OpenAI CEO Sam Altman arrives at the Ronald V. Dellums Federal Building on April 30, 2026, in Oakland, California. Elon Musk invested in OpenAI early on, believing it would be a nonprofit, but is now suing OpenAI and its CEO, Sam Altman, for allegedly deceiving him by developing OpenAI into a for-profit company. ",
"credit": "Benjamin Fanjoy/Getty Images",
"altTag": null,
"description": null,
"imgSizes": {
"thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/SamAltmanGetty-160x107.jpg",
"width": 160,
"height": 107,
"mimeType": "image/jpeg"
},
"1536x1536": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/SamAltmanGetty-1536x1024.jpg",
"width": 1536,
"height": 1024,
"mimeType": "image/jpeg"
},
"post-thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/SamAltmanGetty-672x372.jpg",
"width": 672,
"height": 372,
"mimeType": "image/jpeg"
},
"twentyfourteen-full-width": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/SamAltmanGetty-1038x576.jpg",
"width": 1038,
"height": 576,
"mimeType": "image/jpeg"
},
"npr-cds-wide": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/SamAltmanGetty-1200x675.jpg",
"width": 1200,
"height": 675,
"mimeType": "image/jpeg"
},
"npr-cds-square": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/SamAltmanGetty-600x600.jpg",
"width": 600,
"height": 600,
"mimeType": "image/jpeg"
},
"kqedFullSize": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/SamAltmanGetty.jpg",
"width": 2000,
"height": 1333
}
},
"fetchFailed": false,
"isLoading": false
},
"news_12082068": {
"type": "attachments",
"id": "news_12082068",
"meta": {
"index": "attachments_1716263798",
"site": "news",
"id": "12082068",
"found": true
},
"title": "CANADA-SHOOTING-CRIME",
"publishDate": 1777656025,
"status": "inherit",
"parent": 12082064,
"modified": 1777677641,
"caption": "A young boy brings flowers to a memorial in honor of the victims of one of Canada's deadliest shootings in Tumbler Ridge, British Columbia, Canada, on Feb. 13, 2026. An 18-year-old carried out a mass shooting in a remote mining town, killing six people at a local school, after slaying her mother and stepbrother. Canadian Police Commander Dwayne McDonald said authorities still don't know the motive in the Feb. 10 mass shooting, but the shooter, who took her own life, was known to have mental health issues. ",
"credit": "Paige Taylor White/AFP via Getty Images",
"altTag": null,
"description": null,
"imgSizes": {
"thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty-160x110.jpg",
"width": 160,
"height": 110,
"mimeType": "image/jpeg"
},
"1536x1536": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty-1536x1053.jpg",
"width": 1536,
"height": 1053,
"mimeType": "image/jpeg"
},
"post-thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty-672x372.jpg",
"width": 672,
"height": 372,
"mimeType": "image/jpeg"
},
"twentyfourteen-full-width": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty-1038x576.jpg",
"width": 1038,
"height": 576,
"mimeType": "image/jpeg"
},
"npr-cds-wide": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty-1200x675.jpg",
"width": 1200,
"height": 675,
"mimeType": "image/jpeg"
},
"npr-cds-square": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty-600x600.jpg",
"width": 600,
"height": 600,
"mimeType": "image/jpeg"
},
"kqedFullSize": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty.jpg",
"width": 2000,
"height": 1371
}
},
"fetchFailed": false,
"isLoading": false
},
"news_12081606": {
"type": "attachments",
"id": "news_12081606",
"meta": {
"index": "attachments_1716263798",
"site": "news",
"id": "12081606",
"found": true
},
"title": "Musk OpenAI Trial",
"publishDate": 1777397997,
"status": "inherit",
"parent": 12081603,
"modified": 1777398261,
"caption": "Elon Musk arrives at the U.S. District Court in Oakland, California, on Tuesday, April 28, 2026. ",
"credit": "Godofredo A. Vásquez/AP Photo",
"altTag": null,
"description": null,
"imgSizes": {
"medium": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/AP26118555622828-2000x1333.jpg",
"width": 2000,
"height": 1333,
"mimeType": "image/jpeg"
},
"large": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/AP26118555622828-2000x1333.jpg",
"width": 2000,
"height": 1333,
"mimeType": "image/jpeg"
},
"thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/AP26118555622828-160x107.jpg",
"width": 160,
"height": 107,
"mimeType": "image/jpeg"
},
"1536x1536": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/AP26118555622828-1536x1024.jpg",
"width": 1536,
"height": 1024,
"mimeType": "image/jpeg"
},
"2048x2048": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/AP26118555622828-2048x1365.jpg",
"width": 2048,
"height": 1365,
"mimeType": "image/jpeg"
},
"post-thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/AP26118555622828-672x372.jpg",
"width": 672,
"height": 372,
"mimeType": "image/jpeg"
},
"twentyfourteen-full-width": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/AP26118555622828-1038x576.jpg",
"width": 1038,
"height": 576,
"mimeType": "image/jpeg"
},
"full-width": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/AP26118555622828-2000x1333.jpg",
"width": 2000,
"height": 1333,
"mimeType": "image/jpeg"
},
"npr-cds-wide": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/AP26118555622828-1200x675.jpg",
"width": 1200,
"height": 675,
"mimeType": "image/jpeg"
},
"npr-cds-square": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/AP26118555622828-600x600.jpg",
"width": 600,
"height": 600,
"mimeType": "image/jpeg"
},
"kqedFullSize": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/AP26118555622828-scaled-e1777398211110.jpg",
"width": 2000,
"height": 1334
}
},
"fetchFailed": false,
"isLoading": false
},
"news_12081681": {
"type": "attachments",
"id": "news_12081681",
"meta": {
"index": "attachments_1716263798",
"site": "news",
"id": "12081681",
"found": true
},
"title": "260428-MUSK-ALTMAN-VB-03-KQED-1",
"publishDate": 1777416108,
"status": "inherit",
"parent": 12081603,
"modified": 1777508469,
"caption": "Representing Microsoft, Russell Coan (left) speaks as Elon Musk watches in the trial in which Elon Musk claims that Sam Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit, in Oakland on April 28, 2026.",
"credit": "Vicki Behringer for KQED",
"altTag": null,
"description": null,
"imgSizes": {
"thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1-160x90.jpg",
"width": 160,
"height": 90,
"mimeType": "image/jpeg"
},
"1536x1536": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1-1536x864.jpg",
"width": 1536,
"height": 864,
"mimeType": "image/jpeg"
},
"post-thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1-672x372.jpg",
"width": 672,
"height": 372,
"mimeType": "image/jpeg"
},
"twentyfourteen-full-width": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1-1038x576.jpg",
"width": 1038,
"height": 576,
"mimeType": "image/jpeg"
},
"npr-cds-wide": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1-1200x675.jpg",
"width": 1200,
"height": 675,
"mimeType": "image/jpeg"
},
"npr-cds-square": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1-600x600.jpg",
"width": 600,
"height": 600,
"mimeType": "image/jpeg"
},
"kqedFullSize": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1.jpg",
"width": 2000,
"height": 1125
}
},
"fetchFailed": false,
"isLoading": false
},
"news_12081639": {
"type": "attachments",
"id": "news_12081639",
"meta": {
"index": "attachments_1716263798",
"site": "news",
"id": "12081639",
"found": true
},
"title": "260428-MUSK ALTMAN-VB-02-KQED",
"publishDate": 1777410140,
"status": "inherit",
"parent": 12081603,
"modified": 1777422271,
"caption": "Elon Musk (left) takes the stand in the trial in which Elon Musk claims that Sam Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit, in Oakland on April 28, 2026.",
"credit": "Vicki Behringer for KQED",
"altTag": null,
"description": null,
"imgSizes": {
"thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-02-KQED-160x90.jpg",
"width": 160,
"height": 90,
"mimeType": "image/jpeg"
},
"1536x1536": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-02-KQED-1536x864.jpg",
"width": 1536,
"height": 864,
"mimeType": "image/jpeg"
},
"post-thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-02-KQED-672x372.jpg",
"width": 672,
"height": 372,
"mimeType": "image/jpeg"
},
"twentyfourteen-full-width": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-02-KQED-1038x576.jpg",
"width": 1038,
"height": 576,
"mimeType": "image/jpeg"
},
"npr-cds-wide": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-02-KQED-1200x675.jpg",
"width": 1200,
"height": 675,
"mimeType": "image/jpeg"
},
"npr-cds-square": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-02-KQED-600x600.jpg",
"width": 600,
"height": 600,
"mimeType": "image/jpeg"
},
"kqedFullSize": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-02-KQED.jpg",
"width": 2000,
"height": 1125
}
},
"fetchFailed": false,
"isLoading": false
},
"news_12080929": {
"type": "attachments",
"id": "news_12080929",
"meta": {
"index": "attachments_1716263798",
"site": "news",
"id": "12080929",
"found": true
},
"title": "260422-ALTMANMUSK-MD-01-KQED",
"publishDate": 1776885164,
"status": "inherit",
"parent": 0,
"modified": 1776885551,
"caption": "Once allies in what they called a mission to develop AI safely for humanity, Elon Musk and Sam Altman will let a federal judge and jury decide what that promise was worth. The trial is slated to begin April 27, 2026.",
"credit": "Left: Chip Somodevilla/Getty Images; Right: Fabrice Coffrini/AFP via Getty Images",
"altTag": null,
"description": null,
"imgSizes": {
"thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260422-ALTMANMUSK-MD-01-KQED-160x107.jpg",
"width": 160,
"height": 107,
"mimeType": "image/jpeg"
},
"1536x1536": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260422-ALTMANMUSK-MD-01-KQED-1536x1025.jpg",
"width": 1536,
"height": 1025,
"mimeType": "image/jpeg"
},
"post-thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260422-ALTMANMUSK-MD-01-KQED-672x372.jpg",
"width": 672,
"height": 372,
"mimeType": "image/jpeg"
},
"twentyfourteen-full-width": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260422-ALTMANMUSK-MD-01-KQED-1038x576.jpg",
"width": 1038,
"height": 576,
"mimeType": "image/jpeg"
},
"npr-cds-wide": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260422-ALTMANMUSK-MD-01-KQED-1200x675.jpg",
"width": 1200,
"height": 675,
"mimeType": "image/jpeg"
},
"npr-cds-square": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260422-ALTMANMUSK-MD-01-KQED-600x600.jpg",
"width": 600,
"height": 600,
"mimeType": "image/jpeg"
},
"kqedFullSize": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260422-ALTMANMUSK-MD-01-KQED.jpg",
"width": 2000,
"height": 1334
}
},
"fetchFailed": false,
"isLoading": false
},
"news_11989313": {
"type": "attachments",
"id": "news_11989313",
"meta": {
"index": "attachments_1716263798",
"site": "news",
"id": "11989313",
"found": true
},
"title": "Tech AI Illustrations",
"publishDate": 1717711326,
"status": "inherit",
"parent": 11989308,
"modified": 1717711420,
"caption": "The OpenAI ChatGPT logo.",
"credit": "Jaap Arriens/NurPhoto via Getty Images",
"altTag": null,
"description": null,
"imgSizes": {
"medium": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2024/06/GettyImages-2155035557-800x533.jpg",
"width": 800,
"height": 533,
"mimeType": "image/jpeg"
},
"large": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2024/06/GettyImages-2155035557-1020x680.jpg",
"width": 1020,
"height": 680,
"mimeType": "image/jpeg"
},
"thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2024/06/GettyImages-2155035557-160x107.jpg",
"width": 160,
"height": 107,
"mimeType": "image/jpeg"
},
"1536x1536": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2024/06/GettyImages-2155035557-1536x1024.jpg",
"width": 1536,
"height": 1024,
"mimeType": "image/jpeg"
},
"2048x2048": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2024/06/GettyImages-2155035557-2048x1365.jpg",
"width": 2048,
"height": 1365,
"mimeType": "image/jpeg"
},
"post-thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2024/06/GettyImages-2155035557-672x372.jpg",
"width": 672,
"height": 372,
"mimeType": "image/jpeg"
},
"twentyfourteen-full-width": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2024/06/GettyImages-2155035557-1038x576.jpg",
"width": 1038,
"height": 576,
"mimeType": "image/jpeg"
},
"full-width": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2024/06/GettyImages-2155035557-1920x1280.jpg",
"width": 1920,
"height": 1280,
"mimeType": "image/jpeg"
},
"kqedFullSize": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2024/06/GettyImages-2155035557-scaled-e1760733694503.jpg",
"width": 2000,
"height": 1334
}
},
"fetchFailed": false,
"isLoading": false
},
"news_12079958": {
"type": "attachments",
"id": "news_12079958",
"meta": {
"index": "attachments_1716263798",
"site": "news",
"id": "12079958",
"found": true
},
"title": "OpenAI Fire Arrest",
"publishDate": 1776208318,
"status": "inherit",
"parent": 12079896,
"modified": 1776208344,
"caption": "Daniel Moreno-Gama, middle, appears in court with public defenders Diamond Ward, left, and Nuha Abusamra on Tuesday, April 14, 2026, in San Francisco.",
"credit": "Jeff Chiu/AP Photo",
"altTag": null,
"description": null,
"imgSizes": {
"thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/Daniel-Moreno-Gama-AP-160x107.jpg",
"width": 160,
"height": 107,
"mimeType": "image/jpeg"
},
"1536x1536": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/Daniel-Moreno-Gama-AP-1536x1024.jpg",
"width": 1536,
"height": 1024,
"mimeType": "image/jpeg"
},
"post-thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/Daniel-Moreno-Gama-AP-672x372.jpg",
"width": 672,
"height": 372,
"mimeType": "image/jpeg"
},
"twentyfourteen-full-width": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/Daniel-Moreno-Gama-AP-1038x576.jpg",
"width": 1038,
"height": 576,
"mimeType": "image/jpeg"
},
"npr-cds-wide": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/Daniel-Moreno-Gama-AP-1200x675.jpg",
"width": 1200,
"height": 675,
"mimeType": "image/jpeg"
},
"npr-cds-square": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/Daniel-Moreno-Gama-AP-600x600.jpg",
"width": 600,
"height": 600,
"mimeType": "image/jpeg"
},
"kqedFullSize": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/Daniel-Moreno-Gama-AP.jpg",
"width": 2000,
"height": 1333
}
},
"fetchFailed": false,
"isLoading": false
}
},
"audioPlayerReducer": {
"postId": "stream_live",
"isPaused": true,
"isPlaying": false,
"pfsActive": false,
"pledgeModalIsOpen": true,
"playerDrawerIsOpen": false
},
"authorsReducer": {
"byline_news_12082064": {
"type": "authors",
"id": "byline_news_12082064",
"meta": {
"override": true
},
"slug": "byline_news_12082064",
"name": "Matt O’Brien, Associated Press, and Nisa Khan, KQED",
"isLoading": false
},
"rachael-myrow": {
"type": "authors",
"id": "251",
"meta": {
"index": "authors_1716337520",
"id": "251",
"found": true
},
"name": "Rachael Myrow",
"firstName": "Rachael",
"lastName": "Myrow",
"slug": "rachael-myrow",
"email": "rmyrow@kqed.org",
"display_author_email": true,
"staff_mastheads": [
"news"
],
"title": "Senior Editor of KQED's Silicon Valley News Desk",
"bio": "• I write and edit stories about how Silicon Valley power and policies shape everyday life in California. I’m also passionate about making Bay Area history and culture more accessible to a broad public. • I’ve been a journalist for most of my life, starting in high school with The Franklin Press in Los Angeles, where I grew up. While earning my first degree in English at UC Berkeley, I got my start in public radio at KALX-FM. After completing a second degree in journalism at Cal, I landed my first professional job at Marketplace, then moved on to KPCC (now LAist), and then KQED, where I hosted The California Report for more than seven years. • My reporting has appeared on NPR, The World, WBUR’s \u003ci>Here & Now\u003c/i>, and the BBC. I also guest host for KQED’s \u003ci>Forum\u003c/i>, as well as the Commonwealth Club in San Francisco. • I speak periodically on media, democracy and technology issues, and do voiceover work for documentaries and educational video projects. • Outside of the studio, you'll find me hiking Bay Area trails and whipping up Insta-ready meals in my kitchen. • I do not accept gifts, money, or favors from anyone connected to my reporting, I don't pay people for information, and I do not support or donate to political causes. • I strive to treat the people I report on with fairness, honesty, and respect. I also recognize there are often multiple sides to a story and work to verify information through multiple sources and documentation. If I get something wrong, I correct it.",
"avatar": "https://secure.gravatar.com/avatar/87bf8cb5874e045cdff430523a6d48b1?s=600&d=blank&r=g",
"twitter": "rachaelmyrow",
"facebook": null,
"instagram": null,
"linkedin": "https://www.linkedin.com/in/rachaelmyrow/",
"sites": [
{
"site": "arts",
"roles": [
"administrator"
]
},
{
"site": "news",
"roles": [
"edit_others_posts",
"editor"
]
},
{
"site": "futureofyou",
"roles": [
"editor"
]
},
{
"site": "bayareabites",
"roles": [
"editor"
]
},
{
"site": "stateofhealth",
"roles": [
"editor"
]
},
{
"site": "science",
"roles": [
"editor"
]
},
{
"site": "food",
"roles": [
"editor"
]
},
{
"site": "forum",
"roles": [
"editor"
]
},
{
"site": "liveblog",
"roles": [
"author"
]
}
],
"headData": {
"title": "Rachael Myrow | KQED",
"description": "Senior Editor of KQED's Silicon Valley News Desk",
"ogImgSrc": "https://secure.gravatar.com/avatar/87bf8cb5874e045cdff430523a6d48b1?s=600&d=blank&r=g",
"twImgSrc": "https://secure.gravatar.com/avatar/87bf8cb5874e045cdff430523a6d48b1?s=600&d=blank&r=g"
},
"isLoading": false,
"link": "/author/rachael-myrow"
},
"kdebenedetti": {
"type": "authors",
"id": "11913",
"meta": {
"index": "authors_1716337520",
"id": "11913",
"found": true
},
"name": "Katie DeBenedetti",
"firstName": "Katie",
"lastName": "DeBenedetti",
"slug": "kdebenedetti",
"email": "kdebenedetti@kqed.org",
"display_author_email": false,
"staff_mastheads": [
"news",
"science"
],
"title": "KQED Contributor",
"bio": "Katie DeBenedetti is a digital reporter covering daily news for the Express Desk. Prior to joining KQED as a culture reporting intern in January 2024, she covered education and city government for the Napa Valley Register.",
"avatar": "https://secure.gravatar.com/avatar/6e31073cb8f7e4214ab03f42771d0f45?s=600&d=blank&r=g",
"twitter": null,
"facebook": null,
"instagram": null,
"linkedin": null,
"sites": [
{
"site": "news",
"roles": [
"author"
]
},
{
"site": "science",
"roles": [
"author"
]
},
{
"site": "liveblog",
"roles": [
"author"
]
}
],
"headData": {
"title": "Katie DeBenedetti | KQED",
"description": "KQED Contributor",
"ogImgSrc": "https://secure.gravatar.com/avatar/6e31073cb8f7e4214ab03f42771d0f45?s=600&d=blank&r=g",
"twImgSrc": "https://secure.gravatar.com/avatar/6e31073cb8f7e4214ab03f42771d0f45?s=600&d=blank&r=g"
},
"isLoading": false,
"link": "/author/kdebenedetti"
}
},
"breakingNewsReducer": {},
"pagesReducer": {},
"postsReducer": {
"stream_live": {
"type": "live",
"id": "stream_live",
"audioUrl": "https://streams.kqed.org/kqedradio",
"title": "Live Stream",
"excerpt": "Live Stream information currently unavailable.",
"link": "/radio",
"featImg": "",
"label": {
"name": "KQED Live",
"link": "/"
}
},
"stream_kqedNewscast": {
"type": "posts",
"id": "stream_kqedNewscast",
"audioUrl": "https://www.kqed.org/.stream/anon/radio/RDnews/newscast.mp3?_=1",
"title": "KQED Newscast",
"featImg": "",
"label": {
"name": "88.5 FM",
"link": "/"
}
},
"news_12083278": {
"type": "posts",
"id": "news_12083278",
"meta": {
"index": "posts_1716263798",
"site": "news",
"id": "12083278",
"score": null,
"sort": [
1778629278000
]
},
"guestAuthors": [],
"slug": "sam-altman-defends-himself-from-elon-musks-accusations-in-openai-trial",
"title": "Sam Altman Defends Himself From Elon Musk’s Accusations in OpenAI Trial",
"publishDate": 1778629278,
"format": "standard",
"headTitle": "Sam Altman Defends Himself From Elon Musk’s Accusations in OpenAI Trial | KQED",
"labelTerm": {
"site": "news"
},
"content": "\u003cp>On the stand on Tuesday, OpenAI CEO Sam Altman said that Elon Musk tried to \u003ca href=\"https://www.kqed.org/news/12081916/are-elon-musk-and-openai-fighting-an-ai-arms-race-sam-altmans-lawyers-think-so\">wrest control over the company\u003c/a> they co-founded before the Tesla CEO’s 2018 exit.\u003c/p>\n\u003cp>Altman’s testimony in the federal trial in Oakland, which many see as a billionaire grudge match, pushed back on Musk’s claim that the powerful AI start-up betrayed its mission to benefit the public good. Musk has accused Altman of \u003ca href=\"https://www.kqed.org/news/12081603/elon-musk-takes-aim-at-openai-as-trial-begins-its-not-ok-to-steal-a-charity\">“stealing a charity” \u003c/a>by building an $850 million for-profit company on the back of its nonprofit research lab.\u003c/p>\n\u003cp>Altman said that in early discussions about creating a for-profit arm, Musk sought majority ownership, and later proposed folding the nonprofit into his car company.\u003c/p>\n\u003cp>[ad fullwidth]\u003c/p>\n\u003cp>“I read that as a lightweight threat,” Altman said of the plan to bring OpenAI into Tesla. “I don’t think it would have served the mission. I think it would have effectively destroyed the nonprofit in the process.”\u003c/p>\n\u003cp>“Mr. 
Musk did try to kill it, I guess twice,” he said.\u003c/p>\n\u003cp>As early as summer 2017, Altman, Musk and other OpenAI executives began discussing if and how to launch a for-profit, citing a need to raise more money to keep up with competitors like Google.\u003c/p>\n\u003cfigure id=\"attachment_12083394\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12083394 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-04-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-04-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-04-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-04-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-04-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">OpenAI CEO Sam Altman testifies in the trial in which Elon Musk claims that Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity rather than solely for profit in Oakland on May 12, 2026. \u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Altman said they were “running the organization on a shoestring,” with a short runway of cash. To acquire the compute — or the GPUs and CPUs needed to power AI — and funding they needed to pursue artificial general intelligence, or a superintelligent AI technology known as AGI, the company would need more significant investments, the executives determined.\u003c/p>\n\u003cp>“I thought, of course, we needed to raise billions to quickly ramp,” he said. 
“I saw no way to do it.”\u003c/p>\n\u003cp>Altman, Greg Brockman, the president of OpenAI and Ilya Sutskever, a former top OpenAI computer scientist and member of its founding team, have said that in those conversations, Musk repeatedly proposed plans that would give him majority control. Initially, Altman said that Musk asked for 90% equity in a potential for-profit.\u003c/p>\n\u003cp>The other executives pushed back on this request, including in an email Altman sent to Musk at the time, in which he said, “I am worried about control. I don’t think any one person should have control of the world’s first AGI — in fact, the whole reason we started OpenAI is so that wouldn’t happen.”[aside postID=news_12083224 hero='https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/SamAltmanGetty.jpg']Altman described Musk as “mercurial,” and said that when he left OpenAI in February 2018, after for-profit discussions fell apart, “people wondered if he’d try to take a vengeance on us” — which both he and his attorney, William Savitt, have alleged is exactly what Musk’s lawsuit aims to do.\u003c/p>\n\u003cp>During his cross-examination, though, Musk’s counsel Steven Molo seemed to suggest that it is Altman who has amassed significant control over OpenAI since it did launch a for-profit arm in 2019.\u003c/p>\n\u003cp>Molo asked Altman about the testimonies of various former OpenAI executives, who said he was untrustworthy and had a history of lying. Altman denied hearing those testimonies, but when asked if he had “repeatedly been called a liar” by people he has done business with, he said, “I have heard people say that.”\u003c/p>\n\u003cp>Molo said that Altman sits on the board of directors for both the OpenAI Foundation, the nonprofit arm, and OpenAI’s for-profit. 
He is also the company’s CEO.\u003c/p>\n\u003cp>“Would you ever fire yourself as the CEO of the for-profit?” Molo said, adding that the board of the nonprofit is supposed to provide oversight for the chief officer.\u003c/p>\n\u003cp>Altman said that CEOs are “almost always” on their company’s boards. When pressed, he said he had “no plans” to fire himself.\u003c/p>\n\u003cfigure id=\"attachment_12083294\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12083294\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-01-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-01-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-01-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-01-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-01-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Bret Taylor testifies in the trial in which Elon Musk claims that Sam Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity rather than solely for profit in Oakland on May 12, 2026. \u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Molo also asked Altman about how board members were selected following his brief firing in 2023. During the five-day ouster, there were long negotiations behind the scenes about whether Altman would return, and who would be on the board if he did. 
Altman, Brockman and other OpenAI executives who followed them out were also in discussions with Microsoft, OpenAI’s largest financial backer, which had offered to bring them on to start a new AI team.\u003c/p>\n\u003cp>Altman said initially he’d proposed to remove OpenAI’s board, which fired him, and replace it with four members, including himself. Altman was not made a board member at that time, but Molo said that he had proposed the three members who were ultimately selected — Bret Taylor, Larry Summers and Adam D’Angelo — in conversations with Microsoft CEO Satya Nadella.\u003c/p>\n\u003cp>Altman said that he had no power to appoint new board members, but that he did say which configurations he would be “willing” to be rehired into.\u003c/p>\n\u003cp>Earlier in the day, he characterized his return to OpenAI as running “back into a burning building to try to save it.”\u003c/p>\n\u003cp>Later this week, both Altman and Musk’s legal teams will present their closing arguments. Then the jury and judge will decide which tech leader to believe.\u003c/p>\n\u003cp>\u003c/p>\n",
"blocks": [],
"excerpt": "During a brief cross-examination of Altman, the Tesla CEO’s attorney questioned whether or not Altman was trustworthy.",
"status": "publish",
"parent": 0,
"modified": 1778630872,
"stats": {
"hasAudio": false,
"hasVideo": false,
"hasChartOrMap": false,
"iframeSrcs": [],
"hasGoogleForm": false,
"hasGallery": false,
"hasHearkenModule": false,
"hasPolis": false,
"paragraphCount": 22,
"wordCount": 990
},
"headData": {
"title": "Sam Altman Defends Himself From Elon Musk’s Accusations in OpenAI Trial | KQED",
"description": "During a brief cross-examination of Altman, the Tesla CEO’s attorney questioned whether or not Altman was trustworthy.",
"ogTitle": "",
"ogDescription": "",
"ogImgId": "",
"twTitle": "",
"twDescription": "",
"twImgId": "",
"schema": {
"@context": "https://schema.org",
"@type": "NewsArticle",
"headline": "Sam Altman Defends Himself From Elon Musk’s Accusations in OpenAI Trial",
"datePublished": "2026-05-12T16:41:18-07:00",
"dateModified": "2026-05-12T17:07:52-07:00",
"image": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"isAccessibleForFree": "True",
"publisher": {
"@type": "NewsMediaOrganization",
"@id": "https://www.kqed.org/#organization",
"name": "KQED",
"logo": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"url": "https://www.kqed.org",
"sameAs": [
"https://www.facebook.com/KQED",
"https://twitter.com/KQED",
"https://www.instagram.com/kqed/",
"https://www.tiktok.com/@kqedofficial",
"https://www.linkedin.com/company/kqed",
"https://www.youtube.com/channel/UCeC0IOo7i1P_61zVUWbJ4nw"
]
}
}
},
"primaryCategory": {
"termId": 248,
"slug": "technology",
"name": "Technology"
},
"sticky": false,
"nprStoryId": "kqed-12083278",
"templateType": "standard",
"featuredImageType": "standard",
"excludeFromSiteSearch": "Include",
"articleAge": "0",
"path": "/news/12083278/sam-altman-defends-himself-from-elon-musks-accusations-in-openai-trial",
"audioTrackLength": null,
"parsedContent": [
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003cp>On the stand on Tuesday, OpenAI CEO Sam Altman said that Elon Musk tried to \u003ca href=\"https://www.kqed.org/news/12081916/are-elon-musk-and-openai-fighting-an-ai-arms-race-sam-altmans-lawyers-think-so\">wrest control over the company\u003c/a> they co-founded before the Tesla CEO’s 2018 exit.\u003c/p>\n\u003cp>Altman’s testimony in the federal trial in Oakland, which many see as a billionaire grudge match, pushed back on Musk’s claim that the powerful AI start-up betrayed its mission to benefit the public good. Musk has accused Altman of \u003ca href=\"https://www.kqed.org/news/12081603/elon-musk-takes-aim-at-openai-as-trial-begins-its-not-ok-to-steal-a-charity\">“stealing a charity” \u003c/a>by building an $850 million for-profit company on the back of its nonprofit research lab.\u003c/p>\n\u003cp>Altman said that in early discussions about creating a for-profit arm, Musk sought majority ownership, and later proposed folding the nonprofit into his car company.\u003c/p>\n\u003cp>\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "ad",
"attributes": {
"named": {
"label": "fullwidth"
},
"numeric": [
"fullwidth"
]
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003c/p>\n\u003cp>“I read that as a lightweight threat,” Altman said of the plan to bring OpenAI into Tesla. “I don’t think it would have served the mission. I think it would have effectively destroyed the nonprofit in the process.”\u003c/p>\n\u003cp>“Mr. Musk did try to kill it, I guess twice,” he said.\u003c/p>\n\u003cp>As early as summer 2017, Altman, Musk and other OpenAI executives began discussing if and how to launch a for-profit, citing a need to raise more money to keep up with competitors like Google.\u003c/p>\n\u003cfigure id=\"attachment_12083394\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12083394 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-04-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-04-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-04-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-04-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-04-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">OpenAI CEO Sam Altman testifies in the trial in which Elon Musk claims that Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity rather than solely for profit in Oakland on May 12, 2026. \u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Altman said they were “running the organization on a shoestring,” with a short runway of cash. 
To acquire the compute — or the GPUs and CPUs needed to power AI — and funding they needed to pursue artificial general intelligence, or a superintelligent AI technology known as AGI, the company would need more significant investments, the executives determined.\u003c/p>\n\u003cp>“I thought, of course, we needed to raise billions to quickly ramp,” he said. “I saw no way to do it.”\u003c/p>\n\u003cp>Altman, Greg Brockman, the president of OpenAI and Ilya Sutskever, a former top OpenAI computer scientist and member of its founding team, have said that in those conversations, Musk repeatedly proposed plans that would give him majority control. Initially, Altman said that Musk asked for 90% equity in a potential for-profit.\u003c/p>\n\u003cp>The other executives pushed back on this request, including in an email Altman sent to Musk at the time, in which he said, “I am worried about control. I don’t think any one person should have control of the world’s first AGI — in fact, the whole reason we started OpenAI is so that wouldn’t happen.”\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "aside",
"attributes": {
"named": {
"postid": "news_12083224",
"hero": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/SamAltmanGetty.jpg",
"label": ""
},
"numeric": []
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>Altman described Musk as “mercurial,” and said that when he left OpenAI in February 2018, after for-profit discussions fell apart, “people wondered if he’d try to take a vengeance on us” — which both he and his attorney, William Savitt, have alleged is exactly what Musk’s lawsuit aims to do.\u003c/p>\n\u003cp>During his cross-examination, though, Musk’s counsel Steven Molo seemed to suggest that it is Altman who has amassed significant control over OpenAI since it did launch a for-profit arm in 2019.\u003c/p>\n\u003cp>Molo asked Altman about the testimonies of various former OpenAI executives, who said he was untrustworthy and had a history of lying. Altman denied hearing those testimonies, but when asked if he had “repeatedly been called a liar” by people he has done business with, he said, “I have heard people say that.”\u003c/p>\n\u003cp>Molo said that Altman sits on the board of directors for both the OpenAI Foundation, the nonprofit arm, and OpenAI’s for-profit. He is also the company’s CEO.\u003c/p>\n\u003cp>“Would you ever fire yourself as the CEO of the for-profit?” Molo said, adding that the board of the nonprofit is supposed to provide oversight for the chief officer.\u003c/p>\n\u003cp>Altman said that CEOs are “almost always” on their company’s boards. 
When pressed, he said he had “no plans” to fire himself.\u003c/p>\n\u003cfigure id=\"attachment_12083294\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12083294\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-01-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-01-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-01-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-01-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-01-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Bret Taylor testifies in the trial in which Elon Musk claims that Sam Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity rather than solely for profit in Oakland on May 12, 2026. \u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Molo also asked Altman about how board members were selected following his brief firing in 2023. During the five-day ouster, there were long negotiations behind the scenes about whether Altman would return, and who would be on the board if he did. Altman, Brockman and other OpenAI executives who followed them out were also in discussions with Microsoft, OpenAI’s largest financial backer, which had offered to bring them on to start a new AI team.\u003c/p>\n\u003cp>Altman said initially he’d proposed to remove OpenAI’s board, which fired him, and replace it with four members, including himself. 
Altman was not made a board member at that time, but Molo said that he had proposed the three members who were ultimately selected — Bret Taylor, Larry Summers and Adam D’Angelo — in conversations with Microsoft CEO Satya Nadella.\u003c/p>\n\u003cp>Altman said that he had no power to appoint new board members, but that he did say which configurations he would be “willing” to be rehired into.\u003c/p>\n\u003cp>Earlier in the day, he characterized his return to OpenAI as running “back into a burning building to try to save it.”\u003c/p>\n\u003cp>Later this week, both Altman and Musk’s legal teams will present their closing arguments. Then the jury and judge will decide which tech leader to believe.\u003c/p>\n\u003cp>\u003c/p>\n\u003c/div>\u003c/p>",
"attributes": {
"named": {},
"numeric": []
}
}
],
"link": "/news/12083278/sam-altman-defends-himself-from-elon-musks-accusations-in-openai-trial",
"authors": [
"11913",
"251"
],
"categories": [
"news_6188",
"news_28250",
"news_8",
"news_248"
],
"tags": [
"news_34755",
"news_1386",
"news_32668",
"news_3897",
"news_27626",
"news_21891",
"news_34054",
"news_33542",
"news_33543",
"news_34586",
"news_1631"
],
"featImg": "news_12083392",
"label": "news"
},
"news_12083224": {
"type": "posts",
"id": "news_12083224",
"meta": {
"index": "posts_1716263798",
"site": "news",
"id": "12083224",
"score": null,
"sort": [
1778546112000
]
},
"guestAuthors": [],
"slug": "former-openai-exec-calls-decision-to-remove-sam-altman-a-hail-mary-during-musk-trial",
"title": "Former OpenAI Exec Calls Decision to Remove Sam Altman a ‘Hail Mary’ During Musk Trial",
"publishDate": 1778546112,
"format": "standard",
"headTitle": "Former OpenAI Exec Calls Decision to Remove Sam Altman a ‘Hail Mary’ During Musk Trial | KQED",
"labelTerm": {
"site": "news"
},
"content": "\u003cp>Microsoft’s CEO and another major player took the stand on Monday in \u003ca href=\"https://www.kqed.org/news/tag/oakland\">Oakland\u003c/a>, testifying in the blockbuster trial between OpenAI co-founders Elon Musk and Sam Altman.\u003c/p>\n\u003cp>Ahead of Altman’s testimony, Musk’s attorney Steven Molo questioned Microsoft CEO Satya Nadella and Ilya Sutskever, a top OpenAI computer scientist who departed the company in 2024. Sutskever discussed his role in orchestrating Altman’s brief ouster in 2023.\u003c/p>\n\u003cp>Over five days in November 2023, Altman was removed and reinstated from his post, after a coalition of board members raised concerns that he had not been “consistently candid in his communications” and cited a breakdown of trust.\u003c/p>\n\u003cp>[ad fullwidth]\u003c/p>\n\u003cp>Whether Altman and other executives have maintained OpenAI’s initial stated mission — to develop AI safely and for the “benefit of humanity” — is critical to Musk’s suit, which claims that leaders breached their duty to its nonprofit mission by building a for-profit company on top of it. 
Musk also alleged that the company unfairly benefited at his expense.\u003c/p>\n\u003cp>Musk also alleges that Microsoft, which is OpenAI’s largest financial backer and until this week held the exclusive rights to license and sell its technology, aided and abetted that breach of trust.\u003c/p>\n\u003cp>Molo questioned Nadella about Microsoft’s motive to invest in OpenAI — a $13 billion input that Nadella said is expected to see a return of about $92 billion, “if it works out.”\u003c/p>\n\u003cfigure id=\"attachment_12081686\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12081686 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Steve Molo, Elon Musk’s attorney, presents opening statements in the trial in which Elon Musk (center-right) claims that Sam Altman (right) and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit, in Oakland, on April 28, 2026. 
\u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Musk’s attorney pointed out Nadella’s fiduciary duty to maximize profit, and referenced a series of texts between him and Altman that appeared to show Nadella pushing for an earlier rollout of the paid version of ChatGPT.\u003c/p>\n\u003cp>“When chatGPT paid?” Nadella wrote in the message.\u003c/p>\n\u003cp>Altman said that there was “Not enough compute to make it a good consumer experience,” to which Nadella said, “The sooner the better.”\u003c/p>\n\u003cp>Nadella said that the reason Microsoft invested was that OpenAI was pursuing a for-profit model, but he said, “If the pie became larger, the nonprofit would benefit as well.”[aside postID=news_12081916 hero='https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/AP26118555622828-2000x1333.jpg']Molo asked Nadella if he was aware that, for a period of time, OpenAI’s nonprofit did not have any employees.\u003c/p>\n\u003cp>“I am not,” Nadella said.\u003c/p>\n\u003cp>Molo also questioned Nadella about Microsoft’s role during Altman’s brief ouster. 
At the time, Nadella announced that he would hire Altman, along with OpenAI’s third co-founder and current president, Greg Brockman, as well as other allies, to head up a new AI team at Microsoft.\u003c/p>\n\u003cp>Nadella said that he “had ideas about how Sam [Altman] and the other employees could join Microsoft if they were not reinstated.”\u003c/p>\n\u003cp>“If people were going to leave OpenAI, I wanted them to come to Microsoft,” he said.\u003c/p>\n\u003cp>Molo asked Nadella if he knew why Altman had been removed, to which Nadella said he was never given an “explicit answer.”\u003c/p>\n\u003cp>“Did the thought occur to you … the board might issue a public statement about why they fired Altman?” Molo said.\u003c/p>\n\u003cp>Nadella said during that period — referred to as “The Blip” by many OpenAI employees — he was focused on ensuring continuity for customers.\u003c/p>\n\u003cp>“It goes back to me wanting to communicate to customers that they can count on us,” he said. “Come Monday, that doesn’t just disappear.”\u003c/p>\n\u003cfigure id=\"attachment_12082325\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12082325 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260504-MUSK-ALTMAN-VB-03-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260504-MUSK-ALTMAN-VB-03-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260504-MUSK-ALTMAN-VB-03-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260504-MUSK-ALTMAN-VB-03-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260504-MUSK-ALTMAN-VB-03-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">OpenAI CEO Sam Altman watches as OpenAI President Greg Brockman testifies in the trial in which Elon Musk claims 
that Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit, in Oakland, on May 4, 2026. \u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Sutskever, who took the stand after Nadella, described Altman’s removal differently. He said it was a “Hail Mary” to save OpenAI, which had become an environment that was “not conducive” to the technology’s safety.\u003c/p>\n\u003cp>“I felt a great deal of ownership of OpenAI,” he said. “I felt like I created this company. I simply cared for it, and I didn’t want it to be destroyed.”\u003c/p>\n\u003cp>Sutskever, who helped lead the ouster, had compiled a more than 50-page record of Altman’s “consistent pattern of lying,” including misrepresenting facts, safety protocols and company information to the board and executives.\u003c/p>\n\u003cp>Sutskever maintained that he had worked on a team that aimed to focus on long-term risks as more powerful AI was built.\u003c/p>\n\u003cp>“The goal of the super alignment is to do the research in advance, such that humanity will have the technological means to make it controlled and safe,” he said.\u003c/p>\n\u003cp>The team was disbanded days after he departed the company, in May 2024.\u003c/p>\n\u003cp>\u003c/p>\n",
"blocks": [],
"excerpt": "The testimonies on Monday centered on Sam Altman’s brief 2023 ousting from OpenAI, as allegations mounted against the tech giant’s conduct and Microsoft’s motives in backing the AI company.",
"status": "publish",
"parent": 0,
"modified": 1778547375,
"stats": {
"hasAudio": false,
"hasVideo": false,
"hasChartOrMap": false,
"iframeSrcs": [],
"hasGoogleForm": false,
"hasGallery": false,
"hasHearkenModule": false,
"hasPolis": false,
"paragraphCount": 26,
"wordCount": 890
},
"headData": {
"title": "Former OpenAI Exec Calls Decision to Remove Sam Altman a ‘Hail Mary’ During Musk Trial | KQED",
"description": "The testimonies on Monday centered on Sam Altman’s brief 2023 ousting from OpenAI, as allegations mounted against the tech giant’s conduct and Microsoft’s motives in backing the AI company.",
"ogTitle": "",
"ogDescription": "",
"ogImgId": "",
"twTitle": "",
"twDescription": "",
"twImgId": "",
"schema": {
"@context": "https://schema.org",
"@type": "NewsArticle",
"headline": "Former OpenAI Exec Calls Decision to Remove Sam Altman a ‘Hail Mary’ During Musk Trial",
"datePublished": "2026-05-11T17:35:12-07:00",
"dateModified": "2026-05-11T17:56:15-07:00",
"image": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"isAccessibleForFree": "True",
"publisher": {
"@type": "NewsMediaOrganization",
"@id": "https://www.kqed.org/#organization",
"name": "KQED",
"logo": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"url": "https://www.kqed.org",
"sameAs": [
"https://www.facebook.com/KQED",
"https://twitter.com/KQED",
"https://www.instagram.com/kqed/",
"https://www.tiktok.com/@kqedofficial",
"https://www.linkedin.com/company/kqed",
"https://www.youtube.com/channel/UCeC0IOo7i1P_61zVUWbJ4nw"
]
}
}
},
"primaryCategory": {
"termId": 248,
"slug": "technology",
"name": "Technology"
},
"sticky": false,
"nprStoryId": "kqed-12083224",
"templateType": "standard",
"featuredImageType": "standard",
"excludeFromSiteSearch": "Include",
"articleAge": "0",
"path": "/news/12083224/former-openai-exec-calls-decision-to-remove-sam-altman-a-hail-mary-during-musk-trial",
"audioTrackLength": null,
"parsedContent": [
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003cp>Microsoft’s CEO and another major player took the stand on Monday in \u003ca href=\"https://www.kqed.org/news/tag/oakland\">Oakland\u003c/a>, testifying in the blockbuster trial between OpenAI co-founders Elon Musk and Sam Altman.\u003c/p>\n\u003cp>Ahead of Altman’s testimony, Musk’s attorney Steven Molo questioned Microsoft CEO Satya Nadella and Ilya Sutskever, a top OpenAI computer scientist who departed the company in 2024. Sutskever discussed his role in orchestrating Altman’s brief ouster in 2023.\u003c/p>\n\u003cp>Over five days in November 2023, Altman was removed and reinstated from his post, after a coalition of board members raised concerns that he had not been “consistently candid in his communications” and cited a breakdown of trust.\u003c/p>\n\u003cp>\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "ad",
"attributes": {
"named": {
"label": "fullwidth"
},
"numeric": [
"fullwidth"
]
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003c/p>\n\u003cp>Whether Altman and other executives have maintained OpenAI’s initial stated mission — to develop AI safely and for the “benefit of humanity” — is critical to Musk’s suit, which claims that leaders breached their duty to its nonprofit mission by building a for-profit company on top of it. Musk also alleged that the company unfairly benefited at his expense.\u003c/p>\n\u003cp>Musk also alleges that Microsoft, which is OpenAI’s largest financial backer and until this week held the exclusive rights to license and sell its technology, aided and abetted that breach of trust.\u003c/p>\n\u003cp>Molo questioned Nadella about Microsoft’s motive to invest in OpenAI — a $13 billion input that Nadella said is expected to see a return of about $92 billion, “if it works out.”\u003c/p>\n\u003cfigure id=\"attachment_12081686\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12081686 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Steve Molo, Elon Musk’s attorney, presents opening statements in the trial in which Elon Musk (center-right) claims that Sam Altman (right) and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit, in Oakland, on April 28, 2026. 
\u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Musk’s attorney pointed out Nadella’s fiduciary duty to maximize profit, and referenced a series of texts between him and Altman that appeared to show Nadella pushing for an earlier rollout of the paid version of ChatGPT.\u003c/p>\n\u003cp>“When chatGPT paid?” Nadella wrote in the message.\u003c/p>\n\u003cp>Altman said that there was “Not enough compute to make it a good consumer experience,” to which Nadella said, “The sooner the better.”\u003c/p>\n\u003cp>Nadella said that the reason Microsoft invested was that OpenAI was pursuing a for-profit model, but he said, “If the pie became larger, the nonprofit would benefit as well.”\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "aside",
"attributes": {
"named": {
"postid": "news_12081916",
"hero": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/AP26118555622828-2000x1333.jpg",
"label": ""
},
"numeric": []
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>Molo asked Nadella if he was aware that, for a period of time, OpenAI’s nonprofit did not have any employees.\u003c/p>\n\u003cp>“I am not,” Nadella said.\u003c/p>\n\u003cp>Molo also questioned Nadella about Microsoft’s role during Altman’s brief ouster. At the time, Nadella announced that he would hire Altman, along with OpenAI’s third co-founder and current president, Greg Brockman, as well as other allies, to head up a new AI team at Microsoft.\u003c/p>\n\u003cp>Nadella said that he “had ideas about how Sam [Altman] and the other employees could join Microsoft if they were not reinstated.”\u003c/p>\n\u003cp>“If people were going to leave OpenAI, I wanted them to come to Microsoft,” he said.\u003c/p>\n\u003cp>Molo asked Nadella if he knew why Altman had been removed, to which Nadella said he was never given an “explicit answer.”\u003c/p>\n\u003cp>“Did the thought occur to you … the board might issue a public statement about why they fired Altman?” Molo said.\u003c/p>\n\u003cp>Nadella said during that period — referred to as “The Blip” by many OpenAI employees — he was focused on ensuring continuity for customers.\u003c/p>\n\u003cp>“It goes back to me wanting to communicate to customers that they can count on us,” he said. 
“Come Monday, that doesn’t just disappear.”\u003c/p>\n\u003cfigure id=\"attachment_12082325\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12082325 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260504-MUSK-ALTMAN-VB-03-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260504-MUSK-ALTMAN-VB-03-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260504-MUSK-ALTMAN-VB-03-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260504-MUSK-ALTMAN-VB-03-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260504-MUSK-ALTMAN-VB-03-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">OpenAI CEO Sam Altman watches as OpenAI President Greg Brockman testifies in the trial in which Elon Musk claims that Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit, in Oakland, on May 4, 2026. \u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Sutskever, who took the stand after Nadella, described Altman’s removal differently. He said it was a “Hail Mary” to save OpenAI, which had become an environment that was “not conducive” to the technology’s safety.\u003c/p>\n\u003cp>“I felt a great deal of ownership of OpenAI,” he said. “I felt like I created this company. 
I simply cared for it, and I didn’t want it to be destroyed.”\u003c/p>\n\u003cp>Sutskever, who helped lead the ouster, had compiled a more than 50-page record of Altman’s “consistent pattern of lying,” including misrepresenting facts, safety protocols and company information to the board and executives.\u003c/p>\n\u003cp>Sutskever maintained that he had worked on a team that aimed to focus on long-term risks as more powerful AI was built.\u003c/p>\n\u003cp>“The goal of the super alignment is to do the research in advance, such that humanity will have the technological means to make it controlled and safe,” he said.\u003c/p>\n\u003cp>The team was disbanded days after he departed the company, in May 2024.\u003c/p>\n\u003cp>\u003c/p>\n\u003c/div>\u003c/p>",
"attributes": {
"named": {},
"numeric": []
}
}
],
"link": "/news/12083224/former-openai-exec-calls-decision-to-remove-sam-altman-a-hail-mary-during-musk-trial",
"authors": [
"11913",
"251"
],
"categories": [
"news_31795",
"news_6188",
"news_28250",
"news_8",
"news_248"
],
"tags": [
"news_34755",
"news_1386",
"news_32668",
"news_3897",
"news_27626",
"news_19954",
"news_21891",
"news_34054",
"news_33542",
"news_33543",
"news_34586",
"news_1631"
],
"featImg": "news_12083235",
"label": "news"
},
"news_12082064": {
"type": "posts",
"id": "news_12082064",
"meta": {
"index": "posts_1716263798",
"site": "news",
"id": "12082064",
"score": null,
"sort": [
1777676594000
]
},
"guestAuthors": [],
"slug": "openai-back-in-court-over-canada-school-shooters-use-of-chatgpt",
"title": "OpenAI Back in Court Over Canada School Shooter’s Use of ChatGPT",
"publishDate": 1777676594,
"format": "standard",
"headTitle": "OpenAI Back in Court Over Canada School Shooter’s Use of ChatGPT | KQED",
"labelTerm": {
"site": "news"
},
"content": "\u003cp>The families of victims of a school shooting in a British Columbia town sued artificial intelligence company \u003ca href=\"https://www.kqed.org/news/tag/open-ai\">OpenAI \u003c/a>in a San Francisco court this week, alleging that the company behind \u003ca href=\"https://www.kqed.org/news/tag/chatgpt\">ChatGPT\u003c/a> failed to alert police of the shooter’s alarming interactions with the chatbot.\u003c/p>\n\u003cp>One of the lawsuits was filed on behalf of Shannda Aviugana-Durand, an education assistant who was shot and killed in a library at \u003ca href=\"https://docs.google.com/document/d/1BU49CY30r0KCfBs0NJuk5S0KJ2E5VEuIF2IpxdwviIo/edit?tab=t.0\">Tumbler Ridge Secondary School\u003c/a>. The suit alleges negligence, aiding and abetting a mass shooting, wrongful death and liability, among other claims. According to the lawsuit, Aviugana-Durand’s daughter was present at the time of the attack.\u003c/p>\n\u003cp>The educational assistant was one of six people who were killed by an 18-year-old in February. The teen — who later shot herself — also killed her mother and her 11-year-old half-brother at home beforehand. Twenty-five people were also injured in the attack, Canada’s deadliest mass shooting in years.\u003c/p>\n\u003cp>[ad fullwidth]\u003c/p>\n\u003cp>Another lawsuit was filed Wednesday on behalf of 12-year-old Maya Gebala, who was critically injured in the February shooting. The plaintiffs’ attorney, Jay Edelson, said in an interview with the \u003cem>Associated Press\u003c/em> that decisions made by OpenAI and its CEO Sam Altman “have destroyed the town. 
The people are really resilient, but what happened is unimaginable.”\u003c/p>\n\u003cp>Altman sent a letter last week \u003ca href=\"https://apnews.com/article/openai-altman-tumbler-ridge-killings-apology-dec2adaad3946583519370eede6a99e2\">formally apologizing\u003c/a> to the community that his company did not notify law enforcement about the shooter’s online behavior in the weeks leading up to the attack.\u003c/p>\n\u003cp>The case highlights concerns about the harms posed by \u003ca href=\"https://apnews.com/article/ai-sycophancy-chatbots-science-study-8dc61e69278b661cab1e53d38b4173b6\">overly agreeable AI chatbots\u003c/a> and what obligations the tech industry has to control them or notify authorities about planned violence by chatbot users. This month, \u003ca href=\"https://apnews.com/article/missing-grad-students-florida-6279adeef3d0540865de39ab3d6f8093\">prosecutors investigating the deaths\u003c/a> of two University of South Florida doctoral students said that the suspect asked ChatGPT about body disposal in the lead-up to the students’ disappearance.\u003c/p>\n\u003cfigure id=\"attachment_12079761\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12079761 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/SamAltmanGetty2.jpg\" alt=\"\" width=\"2000\" height=\"1333\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/SamAltmanGetty2.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/SamAltmanGetty2-160x107.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/SamAltmanGetty2-1536x1024.jpg 1536w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">OpenAI CEO Sam Altman speaks during the BlackRock Infrastructure Summit on March 11, 2026, in Washington, D.C. 
\u003ccite>(Anna Moneymaker/Getty Images)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>“It’s not the first lawsuit of its kind,” said Robin Feldman, law professor at UC Law San Francisco and director of its AI Law and Innovation Institute. “This is part of an early wave of lawsuits in which citizens are asking to hold LLMs responsible for harms that happen down the line, whether they are crimes, mental health problems, suicide.”\u003c/p>\n\u003cp>“ChatGPT was first on the scene. And it is the most widely known of the LLMs,” Feldman said. “That puts it in the hot seat as the law tries to understand how to wrangle this unusual beast.”\u003c/p>\n\u003cp>In response to the lawsuit, OpenAI said in a written statement that the “events in Tumbler Ridge are a tragedy. We have a zero-tolerance policy for using our tools to assist in committing violence.”[aside postID=news_12081916 hero='https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/AP26118555622828-2000x1333.jpg']“As we shared with Canadian officials, we have already strengthened our safeguards, including improving how ChatGPT responds to signs of distress, connecting people with local support and mental health resources, strengthening how we assess and escalate potential threats of violence, and improving detection of repeat policy violators,” the company said.\u003c/p>\n\u003cp>Edelson, a Chicago-based lawyer known for taking on the tech industry, is already juggling a number of high-profile cases against OpenAI, including from the family of a California teenager who killed himself after \u003ca href=\"https://apnews.com/article/ai-chatbot-teens-congress-chatgpt-character-ce3959b6a3ea1a4997bf1ccabb4f0de2\">conversations with ChatGPT\u003c/a> and another from the heirs of an 83-year-old Connecticut woman \u003ca href=\"https://apnews.com/article/ai-chatgpt-wrongful-death-lawsuit-greenwich-97fd7da31c0fa08f3d3ea9efd6713151\">killed by her son\u003c/a> after ChatGPT allegedly amplified the man’s “paranoid 
delusions.”\u003c/p>\n\u003cp>“This is not a passive technology,” Edelson said, comparing the chatbot interactions with a more conventional online search for information. “What we’ve seen in the past is that (for) people who are mentally ill, the chatbot will validate what they’re saying and then amplify what they’re saying.”\u003c/p>\n\u003cp>Last week, Edelson visited the small town of Tumbler Ridge and met with dozens of people in the basement of a visitor center. He also visited Gebala at a children’s hospital in Vancouver, where she remains hospitalized and seemed alert but unable to speak.\u003c/p>\n\u003cp>“It was so heartbreaking,” he said.\u003c/p>\n\u003cfigure id=\"attachment_12082198\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12082198 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty2.jpg\" alt=\"\" width=\"2000\" height=\"1333\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty2.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty2-160x107.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty2-1536x1024.jpg 1536w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Candles, flowers, photographs, plush toys and other items at a makeshift memorial for the victims four days after a deadly mass shooting took place at a school, in the town of Tumbler Ridge, British Columbia, Canada, on Feb. 13, 2026. 
\u003ccite>(Paige Taylor White/AFP via Getty Images)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>The lawsuits filed Wednesday also represent the families of the five slain children targeted in the school shooting: Zoey Benoit, Abel Mwansa Jr., Ticaria “Tiki” Lampert and Kylie Smith, all 12, and Ezekiel Schofield, 13.\u003c/p>\n\u003cp>After the shootings, OpenAI came forward to say that last June, the company flagged the shooter’s account as having been used to discuss violence against other people.\u003c/p>\n\u003cp>The company said it considered whether to refer the account to the Royal Canadian Mounted Police, but determined at the time that the account activity didn’t meet a threshold for referral to law enforcement. OpenAI banned the account in June for violating its usage policy.[aside postID=news_12080610 hero='https://cdn.kqed.org/wp-content/uploads/sites/10/2024/06/GettyImages-2155035557-1020x680.jpg']The lawsuits filed Wednesday allege “the victims didn’t learn this because OpenAI was forthcoming, but because \u003ca href=\"https://www.wsj.com/us-news/law/openai-employees-raised-alarms-about-canada-shooting-suspect-months-ago-b585df62\">its own employees leaked it to \u003cem>The Wall Street Journal\u003c/em>\u003c/a> after they could no longer stomach the company’s silence.”\u003c/p>\n\u003cp>In \u003ca href=\"https://tumblerridgelines.com/2026/04/24/openai-apologizes-to-tumbler-ridge/\">his letter\u003c/a>, Altman said he was “deeply sorry that we did not alert law enforcement to the account that was banned in June.”\u003c/p>\n\u003cp>“While I know words can never be enough, I believe an apology is necessary to recognize the harm and irreversible loss your community has suffered,” Altman wrote.\u003c/p>\n\u003cp>British Columbia Premier David Eby, \u003ca href=\"https://x.com/dave_eby/status/2047751590803886291?s=46&t=7BBzFwo6eYLzJIVfAlumEQ\">in a social media post\u003c/a>, called the apology “necessary, and yet grossly insufficient for the 
devastation done to the families of Tumbler Ridge.”\u003c/p>\n\u003cp>The Gebala lawsuit accuses OpenAI of negligence involving a failure to warn law enforcement and “aiding and abetting a mass shooting.”\u003c/p>\n\u003cp>Along with damages, the Gebala lawsuit seeks a court order that would require OpenAI to ban users from ChatGPT if their accounts were deactivated for violent misuse, and to require the company to alert law enforcement when its systems identify someone who poses a “real-world risk of violence.”\u003c/p>\n\u003cp>An earlier case was filed in a court in British Columbia, but a team of lawyers in both countries is seeking to bring the affiliated cases to San Francisco, where OpenAI is headquartered.\u003c/p>\n\u003ch2>‘Untried territory’\u003c/h2>\n\u003cp>Feldman called reports that the company flagged the risk but failed to act effectively “deeply troubling.”\u003c/p>\n\u003cp>“As with so much about AI, the lawsuit will take us into untried territory,” she said. “The old doctrines are being applied to new circumstances.”\u003c/p>\n\u003cp>She said if the families were to win, the company would have to pay damages and assume responsibility for altering its platform to identify and respond to risks.\u003c/p>\n\u003cp>The major issues that the lawsuit will tackle are whether OpenAI and ChatGPT are protected by the First Amendment and whether or not OpenAI had “a duty to act,” she said.\u003c/p>\n\u003cfigure id=\"attachment_12082201\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12082201 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty3.jpg\" alt=\"\" width=\"2000\" height=\"1333\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty3.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty3-160x107.jpg 160w, 
https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty3-1536x1024.jpg 1536w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Community members attend a vigil to honor the victims of one of Canada’s deadliest mass shootings in Tumbler Ridge, British Columbia, Canada, on Feb. 13, 2026. \u003ccite>(Paige Taylor White/AFP via Getty Images)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>She said that there are \u003ca href=\"https://www.congress.gov/crs-product/R46751\">parts\u003c/a> of U.S. law that shield tech companies from liability for content that their users host. Essentially, this means platforms are more like “bulletin boards” and “are not responsible for the content.”\u003c/p>\n\u003cp>But this case would raise the question, she said, “Are LLMs like a bulletin board or publisher? Or are they like a facilitator who helped with the crime?”\u003c/p>\n\u003cp>Some companies struggle with the burden of responsibility when reviewing potential threats to public safety, Feldman said, “If they try to help out, they can be viewed as accepting the mantle of responsibility.”\u003c/p>\n\u003cp>According to Feldman, families are also likely to argue that the LLM “is a defective product without appropriate safeguards.\u003c/p>\n\u003cp>“In that case, the question is the following: ‘Is the LLM a defective product, or merely a product that was used improperly? And is it analogous to a product at all?”\u003c/p>\n\u003cp>“All of these are tough questions as we enter the age of AI, and the courts are just beginning to explore them,” Feldman said.\u003c/p>\n\u003cp>\u003cem>The Associated Press’ Jim Morris contributed to this story.\u003c/em>\u003c/p>\n\u003cp>[ad floatright]\u003c/p>\n",
"blocks": [],
"excerpt": "The lawsuit alleges negligence and wrongful death on account of the shooter’s interactions with the chatbot in the weeks and months leading up to the fatal attack.",
"status": "publish",
"parent": 0,
"modified": 1777678175,
"stats": {
"hasAudio": false,
"hasVideo": false,
"hasChartOrMap": false,
"iframeSrcs": [],
"hasGoogleForm": false,
"hasGallery": false,
"hasHearkenModule": false,
"hasPolis": false,
"paragraphCount": 35,
"wordCount": 1495
},
"headData": {
"title": "OpenAI Back in Court Over Canada School Shooter’s Use of ChatGPT | KQED",
"description": "The lawsuit alleges negligence and wrongful death on account of the shooter’s interactions with the chatbot in the weeks and months leading up to the fatal attack.",
"ogTitle": "",
"ogDescription": "",
"ogImgId": "",
"twTitle": "",
"twDescription": "",
"twImgId": "",
"schema": {
"@context": "https://schema.org",
"@type": "NewsArticle",
"headline": "OpenAI Back in Court Over Canada School Shooter’s Use of ChatGPT",
"datePublished": "2026-05-01T16:03:14-07:00",
"dateModified": "2026-05-01T16:29:35-07:00",
"image": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"isAccessibleForFree": "True",
"publisher": {
"@type": "NewsMediaOrganization",
"@id": "https://www.kqed.org/#organization",
"name": "KQED",
"logo": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"url": "https://www.kqed.org",
"sameAs": [
"https://www.facebook.com/KQED",
"https://twitter.com/KQED",
"https://www.instagram.com/kqed/",
"https://www.tiktok.com/@kqedofficial",
"https://www.linkedin.com/company/kqed",
"https://www.youtube.com/channel/UCeC0IOo7i1P_61zVUWbJ4nw"
]
}
}
},
"primaryCategory": {
"termId": 34167,
"slug": "criminal-justice",
"name": "Criminal Justice"
},
"sticky": false,
"nprByline": "Matt O’Brien, Associated Press, and Nisa Khan, KQED",
"nprStoryId": "kqed-12082064",
"templateType": "standard",
"featuredImageType": "standard",
"excludeFromSiteSearch": "Include",
"showOnAuthorArchivePages": "Yes",
"articleAge": "0",
"path": "/news/12082064/openai-back-in-court-over-canada-school-shooters-use-of-chatgpt",
"audioTrackLength": null,
"parsedContent": [
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003cp>The families of victims of a school shooting in a British Columbia town sued artificial intelligence company \u003ca href=\"https://www.kqed.org/news/tag/open-ai\">OpenAI \u003c/a>in a San Francisco court this week, alleging that the company behind \u003ca href=\"https://www.kqed.org/news/tag/chatgpt\">ChatGPT\u003c/a> failed to alert police of the shooter’s alarming interactions with the chatbot.\u003c/p>\n\u003cp>One of the lawsuits was filed on behalf of Shannda Aviugana-Durand, an education assistant who was shot and killed in a library at \u003ca href=\"https://docs.google.com/document/d/1BU49CY30r0KCfBs0NJuk5S0KJ2E5VEuIF2IpxdwviIo/edit?tab=t.0\">Tumbler Ridge Secondary School\u003c/a>. The suit alleges negligence, aiding and abetting a mass shooting, wrongful death and liability, among other claims. According to the lawsuit, Aviugana-Durand’s daughter was present at the time of the attack.\u003c/p>\n\u003cp>The educational assistant was one of six people who were killed by an 18-year-old in February. The teen — who later shot herself — also killed her mother and her 11-year-old half-brother at home beforehand. Twenty-five people were also injured in the attack, Canada’s deadliest mass shooting in years.\u003c/p>\n\u003cp>\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "ad",
"attributes": {
"named": {
"label": "fullwidth"
},
"numeric": [
"fullwidth"
]
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003c/p>\n\u003cp>Another lawsuit was filed Wednesday on behalf of 12-year-old Maya Gebala, who was critically injured in the February shooting. The plaintiffs’ attorney, Jay Edelson, said in an interview with the \u003cem>Associated Press\u003c/em> that decisions made by OpenAI and its CEO Sam Altman “have destroyed the town. The people are really resilient, but what happened is unimaginable.”\u003c/p>\n\u003cp>Altman sent a letter last week \u003ca href=\"https://apnews.com/article/openai-altman-tumbler-ridge-killings-apology-dec2adaad3946583519370eede6a99e2\">formally apologizing\u003c/a> to the community that his company did not notify law enforcement about the shooter’s online behavior in the weeks leading up to the attack.\u003c/p>\n\u003cp>The case highlights concerns about the harms posed by \u003ca href=\"https://apnews.com/article/ai-sycophancy-chatbots-science-study-8dc61e69278b661cab1e53d38b4173b6\">overly agreeable AI chatbots\u003c/a> and what obligations the tech industry has to control them or notify authorities about planned violence by chatbot users. 
This month, \u003ca href=\"https://apnews.com/article/missing-grad-students-florida-6279adeef3d0540865de39ab3d6f8093\">prosecutors investigating the deaths\u003c/a> of two University of South Florida doctoral students said that the suspect asked ChatGPT about body disposal in the lead-up to the students’ disappearance.\u003c/p>\n\u003cfigure id=\"attachment_12079761\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12079761 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/SamAltmanGetty2.jpg\" alt=\"\" width=\"2000\" height=\"1333\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/SamAltmanGetty2.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/SamAltmanGetty2-160x107.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/SamAltmanGetty2-1536x1024.jpg 1536w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">OpenAI CEO Sam Altman speaks during the BlackRock Infrastructure Summit on March 11, 2026, in Washington, D.C. \u003ccite>(Anna Moneymaker/Getty Images)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>“It’s not the first lawsuit of its kind,” said Robin Feldman, law professor at UC Law San Francisco and director of its AI Law and Innovation Institute. “This is part of an early wave of lawsuits in which citizens are asking to hold LLMs responsible for harms that happen down the line, whether they are crimes, mental health problems, suicide.”\u003c/p>\n\u003cp>“ChatGPT was first on the scene. And it is the most widely known of the LLMs,” Feldman said. “That puts it in the hot seat as the law tries to understand how to wrangle this unusual beast.”\u003c/p>\n\u003cp>In response to the lawsuit, OpenAI said in a written statement that the “events in Tumbler Ridge are a tragedy. 
We have a zero-tolerance policy for using our tools to assist in committing violence.”\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "aside",
"attributes": {
"named": {
"postid": "news_12081916",
"hero": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/AP26118555622828-2000x1333.jpg",
"label": ""
},
"numeric": []
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>“As we shared with Canadian officials, we have already strengthened our safeguards, including improving how ChatGPT responds to signs of distress, connecting people with local support and mental health resources, strengthening how we assess and escalate potential threats of violence, and improving detection of repeat policy violators,” the company said.\u003c/p>\n\u003cp>Edelson, a Chicago-based lawyer known for taking on the tech industry, is already juggling a number of high-profile cases against OpenAI, including from the family of a California teenager who killed himself after \u003ca href=\"https://apnews.com/article/ai-chatbot-teens-congress-chatgpt-character-ce3959b6a3ea1a4997bf1ccabb4f0de2\">conversations with ChatGPT\u003c/a> and another from the heirs of an 83-year-old Connecticut woman \u003ca href=\"https://apnews.com/article/ai-chatgpt-wrongful-death-lawsuit-greenwich-97fd7da31c0fa08f3d3ea9efd6713151\">killed by her son\u003c/a> after ChatGPT allegedly amplified the man’s “paranoid delusions.”\u003c/p>\n\u003cp>“This is not a passive technology,” Edelson said, comparing the chatbot interactions with a more conventional online search for information. “What we’ve seen in the past is that (for) people who are mentally ill, the chatbot will validate what they’re saying and then amplify what they’re saying.”\u003c/p>\n\u003cp>Last week, Edelson visited the small town of Tumbler Ridge and met with dozens of people in the basement of a visitor center. 
He also visited Gebala at a children’s hospital in Vancouver, where she remains hospitalized and seemed alert but unable to speak.\u003c/p>\n\u003cp>“It was so heartbreaking,” he said.\u003c/p>\n\u003cfigure id=\"attachment_12082198\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12082198 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty2.jpg\" alt=\"\" width=\"2000\" height=\"1333\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty2.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty2-160x107.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty2-1536x1024.jpg 1536w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Candles, flowers, photographs, plush toys and other items at a makeshift memorial for the victims four days after a deadly mass shooting took place at a school, in the town of Tumbler Ridge, British Columbia, Canada, on Feb. 13, 2026. \u003ccite>(Paige Taylor White/AFP via Getty Images)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>The lawsuits filed Wednesday also represent the families of the five slain children targeted in the school shooting: Zoey Benoit, Abel Mwansa Jr., Ticaria “Tiki” Lampert and Kylie Smith, all 12, and Ezekiel Schofield, 13.\u003c/p>\n\u003cp>After the shootings, OpenAI came forward to say that last June, the company flagged the shooter’s account as having been used to discuss violence against other people.\u003c/p>\n\u003cp>The company said it considered whether to refer the account to the Royal Canadian Mounted Police, but determined at the time that the account activity didn’t meet a threshold for referral to law enforcement. OpenAI banned the account in June for violating its usage policy.\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "aside",
"attributes": {
"named": {
"postid": "news_12080610",
"hero": "https://cdn.kqed.org/wp-content/uploads/sites/10/2024/06/GettyImages-2155035557-1020x680.jpg",
"label": ""
},
"numeric": []
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>The lawsuits filed Wednesday allege “the victims didn’t learn this because OpenAI was forthcoming, but because \u003ca href=\"https://www.wsj.com/us-news/law/openai-employees-raised-alarms-about-canada-shooting-suspect-months-ago-b585df62\">its own employees leaked it to \u003cem>The Wall Street Journal\u003c/em>\u003c/a> after they could no longer stomach the company’s silence.”\u003c/p>\n\u003cp>In \u003ca href=\"https://tumblerridgelines.com/2026/04/24/openai-apologizes-to-tumbler-ridge/\">his letter\u003c/a>, Altman said he was “deeply sorry that we did not alert law enforcement to the account that was banned in June.”\u003c/p>\n\u003cp>“While I know words can never be enough, I believe an apology is necessary to recognize the harm and irreversible loss your community has suffered,” Altman wrote.\u003c/p>\n\u003cp>British Columbia Premier David Eby, \u003ca href=\"https://x.com/dave_eby/status/2047751590803886291?s=46&t=7BBzFwo6eYLzJIVfAlumEQ\">in a social media post\u003c/a>, called the apology “necessary, and yet grossly insufficient for the devastation done to the families of Tumbler Ridge.”\u003c/p>\n\u003cp>The Gebala lawsuit accuses OpenAI of negligence involving a failure to warn law enforcement and “aiding and abetting a mass shooting.”\u003c/p>\n\u003cp>Along with damages, the Gebala lawsuit seeks a court order that would require OpenAI to ban users from ChatGPT if their accounts were deactivated for violent misuse, and to require the company to alert law enforcement when its systems identify someone who poses a “real-world risk of violence.”\u003c/p>\n\u003cp>An earlier case was filed in a court in British Columbia, but a team of lawyers in both countries is seeking to bring the affiliated cases to San Francisco, where OpenAI is headquartered.\u003c/p>\n\u003ch2>‘Untried territory’\u003c/h2>\n\u003cp>Feldman called reports that the company flagged the risk but failed to act effectively “deeply 
troubling.”\u003c/p>\n\u003cp>“As with so much about AI, the lawsuit will take us into untried territory,” she said. “The old doctrines are being applied to new circumstances.”\u003c/p>\n\u003cp>She said if the families were to win, the company would have to pay damages and assume responsibility for altering its platform to identify and respond to risks.\u003c/p>\n\u003cp>The major issues that the lawsuit will tackle are whether OpenAI and ChatGPT are protected by the First Amendment and whether or not OpenAI had “a duty to act,” she said.\u003c/p>\n\u003cfigure id=\"attachment_12082201\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12082201 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty3.jpg\" alt=\"\" width=\"2000\" height=\"1333\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty3.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty3-160x107.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty3-1536x1024.jpg 1536w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Community members attend a vigil to honor the victims of one of Canada’s deadliest mass shootings in Tumbler Ridge, British Columbia, Canada, on Feb. 13, 2026. \u003ccite>(Paige Taylor White/AFP via Getty Images)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>She said that there are \u003ca href=\"https://www.congress.gov/crs-product/R46751\">parts\u003c/a> of U.S. law that shield tech companies from liability for content that their users host. Essentially, this means platforms are more like “bulletin boards” and “are not responsible for the content.”\u003c/p>\n\u003cp>But this case would raise the question, she said, “Are LLMs like a bulletin board or publisher? 
Or are they like a facilitator who helped with the crime?”\u003c/p>\n\u003cp>Some companies struggle with the burden of responsibility when reviewing potential threats to public safety, Feldman said, “If they try to help out, they can be viewed as accepting the mantle of responsibility.”\u003c/p>\n\u003cp>According to Feldman, families are also likely to argue that the LLM “is a defective product without appropriate safeguards.\u003c/p>\n\u003cp>“In that case, the question is the following: ‘Is the LLM a defective product, or merely a product that was used improperly? And is it analogous to a product at all?”\u003c/p>\n\u003cp>“All of these are tough questions as we enter the age of AI, and the courts are just beginning to explore them,” Feldman said.\u003c/p>\n\u003cp>\u003cem>The Associated Press’ Jim Morris contributed to this story.\u003c/em>\u003c/p>\n\u003cp>\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "ad",
"attributes": {
"named": {
"label": "floatright"
},
"numeric": [
"floatright"
]
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003c/p>\n\u003c/div>\u003c/p>",
"attributes": {
"named": {},
"numeric": []
}
}
],
"link": "/news/12082064/openai-back-in-court-over-canada-school-shooters-use-of-chatgpt",
"authors": [
"byline_news_12082064"
],
"categories": [
"news_34167",
"news_28250",
"news_8"
],
"tags": [
"news_34755",
"news_1386",
"news_32668",
"news_17725",
"news_22434",
"news_35784",
"news_33542",
"news_33543",
"news_38"
],
"featImg": "news_12082068",
"label": "news"
},
"news_12081916": {
"type": "posts",
"id": "news_12081916",
"meta": {
"index": "posts_1716263798",
"site": "news",
"id": "12081916",
"score": null,
"sort": [
1777591777000
]
},
"guestAuthors": [],
"slug": "are-elon-musk-and-openai-fighting-an-ai-arms-race-sam-altmans-lawyers-think-so",
"title": "Are Elon Musk and OpenAI Fighting an AI Arms Race? Sam Altman’s Lawyers Think So",
"publishDate": 1777591777,
"format": "standard",
"headTitle": "Are Elon Musk and OpenAI Fighting an AI Arms Race? Sam Altman’s Lawyers Think So | KQED",
"labelTerm": {
"site": "news"
},
"content": "\u003cp>As Elon Musk’s dayslong testimony in his \u003ca href=\"https://www.kqed.org/news/12081290/how-to-unscramble-an-omelet-in-silicon-valley-the-musk-v-altman-trial-that-will-try\">case against OpenAI co-founder Sam Altman\u003c/a> came to a close Thursday, defense attorneys aimed to paint the world’s richest man as intent on dominating artificial intelligence — not on protecting the world from it.\u003c/p>\n\u003cp>Under cross-examination in an Oakland court, attorneys for Altman and Microsoft, the company’s largest financial backer and which until this week held the exclusive rights to license and sell its technology, held Musk’s feet to the fire about a number of business moves he’s made — both within and outside of OpenAI — that might give jurors pause about whether he operated so differently from his former colleagues in the race to dominate the field.\u003c/p>\n\u003cp>During hours of testimony, Musk has told the court that he cofounded the nonprofit OpenAI with Altman and OpenAI President Greg Brockman\u003ca href=\"https://www.kqed.org/news/12081603/elon-musk-takes-aim-at-openai-as-trial-begins-its-not-ok-to-steal-a-charity\"> in 2015 altruistically\u003c/a>, fearing the dangers of AI and wanting to ensure that the technology was developed in a safe and open-source way. 
He brought the suit, he said, after deciding that his co-founders \u003ca href=\"https://www.kqed.org/news/12081798/elon-musk-says-sam-altman-tricked-him-into-funding-openai\">had betrayed that intention\u003c/a> — expanding the company into a tech behemoth valued at $852 billion today.\u003c/p>\n\u003cp>[ad fullwidth]\u003c/p>\n\u003cp>But Microsoft attorney Russell Cohen seemed to point to a different motivation: a desire to beat OpenAI and win the AI race.\u003c/p>\n\u003cp>“You didn’t sue Microsoft [and OpenAI] until November 2024, correct?” Cohen said.\u003c/p>\n\u003cp>“Yes,” Musk said.\u003c/p>\n\u003cp>“And that is after you formed your own AI company, xAI, correct?” Cohen said.\u003c/p>\n\u003cp>“Yes,” Musk said.\u003c/p>\n\u003cfigure id=\"attachment_12081637\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12081637\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">OpenAI’s lead counsel William Savitt presents opening statements in the trial in which Elon Musk claims that Sam Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit in Oakland on April 28, 2026. 
\u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>The questions came after William Savitt, Altman’s attorney, directed the jury’s attention to a bombshell message Musk sent to Mark Zuckerberg in February 2025, asking whether the Meta CEO would be “open to the idea of bidding on the OpenAI IP,” or intellectual property, with Musk and others.\u003c/p>\n\u003cp>The jury also learned that xAI had partially “distilled,” that is, derived technology from OpenAI’s own models, which violates OpenAI’s terms of service.\u003c/p>\n\u003cp>The pointed inquiries on Thursday came after Musk’s testimony started to bring the events of how OpenAI launched its first for-profit subsidiary into focus. In 2017, executives including Altman, Musk, Brockman and Ilya Sutskever, a top computer scientist at the company since its founding, launched discussions about creating a for-profit subsidiary.\u003c/p>\n\u003cp>It would be, they said, a way to bring in additional funding and keep at the cutting edge of a growing field of competitors as they started pursuing artificial general intelligence — commonly referred to as AGI — a futuristic superintelligent AI technology.[aside postID=news_12081798 hero='https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1.jpg']Altman’s defense has alleged that throughout that process, Musk attempted to “wrest control” of the company twice, first insisting that he hold a majority equity stake in any for-profit entity, control its board of directors and serve as CEO, and later, that OpenAI be folded into Tesla, where he already serves as CEO.\u003c/p>\n\u003cp>Savitt said Musk began withholding $5 million quarterly fund contributions to put pressure on the company to grant his requests, and after those attempts failed, he left the company.\u003c/p>\n\u003cp>Savitt also accused Musk of poaching OpenAI employees as Musk exited in early 2018, including founding member Andrej Karpathy, for 
Tesla.\u003c/p>\n\u003cp>Musk said multiple times that Tesla is not pursuing AGI. But in March, Musk \u003ca href=\"https://x.com/elonmusk/status/2029123591871308272?lang=en\">wrote on the social media platform X\u003c/a> that “Tesla will be one of the companies to make AGI and probably the first to make it in humanoid/atom-shaping form.”\u003c/p>\n\u003cp>Separately, he formed xAI in 2023, which he said is pursuing AGI.\u003c/p>\n\u003cp>He’s downplayed its competitiveness with OpenAI, though, testifying that it has just a couple hundred employees and a “small market share.”\u003c/p>\n\u003cp>“I would say technically competitive, but much smaller than OpenAI,” Musk said Tuesday.\u003c/p>\n\u003cfigure id=\"attachment_10734536\" class=\"wp-caption aligncenter\" style=\"max-width: 1920px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-10734536\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2015/10/StuartRussell.jpg\" alt='Stuart Russell, UC Berkeley computer science professor and co-author of the standard textbook \"Artificial Intelligence: a Modern Approach.\"' width=\"1920\" height=\"1320\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2015/10/StuartRussell.jpg 1920w, https://cdn.kqed.org/wp-content/uploads/sites/10/2015/10/StuartRussell-400x275.jpg 400w, https://cdn.kqed.org/wp-content/uploads/sites/10/2015/10/StuartRussell-800x550.jpg 800w, https://cdn.kqed.org/wp-content/uploads/sites/10/2015/10/StuartRussell-1440x990.jpg 1440w, https://cdn.kqed.org/wp-content/uploads/sites/10/2015/10/StuartRussell-1180x811.jpg 1180w, https://cdn.kqed.org/wp-content/uploads/sites/10/2015/10/StuartRussell-960x660.jpg 960w\" sizes=\"auto, (max-width: 1920px) 100vw, 1920px\">\u003cfigcaption class=\"wp-caption-text\">Stuart Russell, UC Berkeley computer science professor and co-author of the standard textbook “Artificial Intelligence: A Modern Approach.” \u003ccite>(Juan Mabromata/AFP via Getty 
Images)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>The three days of Musk’s testimony got testy at times, particularly during Savitt’s cross-examination on Wednesday afternoon, when Savitt and U.S. District Judge Yvonne Gonzalez Rogers asked Musk repeatedly to answer the questions he was asked. Musk accused Savitt of intentionally misleading him.\u003c/p>\n\u003cp>But the most heated moment thus far might have come before the jury was called to the courtroom on Thursday morning, during a discussion about what AI safety expert Stuart Russell, who is taking the stand this afternoon, will be willing to testify to.\u003c/p>\n\u003cp>Musk’s attorney argued he should be allowed to speak about the climate risk associated with AI, saying: “We could all die.”\u003c/p>\n\u003cp>“It is also ironic that your client, despite these risks, is creating a company in the exact space,” Gonzalez Rogers said. “I suspect that there are people who don’t want to put the future in Mr. Musk’s hands.”\u003c/p>\n\u003cp>\u003c/p>\n",
"blocks": [],
"excerpt": "The Tesla CEO said OpenAI betrayed its original mission as a nonprofit. But defense attorneys representing Altman and Microsoft used social media and email evidence to question Musk's own motives for getting involved. ",
"status": "publish",
"parent": 0,
"modified": 1777653621,
"stats": {
"hasAudio": false,
"hasVideo": false,
"hasChartOrMap": false,
"iframeSrcs": [],
"hasGoogleForm": false,
"hasGallery": false,
"hasHearkenModule": false,
"hasPolis": false,
"paragraphCount": 24,
"wordCount": 920
},
"headData": {
"title": "Are Elon Musk and OpenAI Fighting an AI Arms Race? Sam Altman’s Lawyers Think So | KQED",
"description": "The Tesla CEO said OpenAI betrayed its original mission as a nonprofit. But defense attorneys representing Altman and Microsoft used social media and email evidence to question Musk's own motives for getting involved. ",
"ogTitle": "",
"ogDescription": "",
"ogImgId": "",
"twTitle": "",
"twDescription": "",
"twImgId": "",
"schema": {
"@context": "https://schema.org",
"@type": "NewsArticle",
"headline": "Are Elon Musk and OpenAI Fighting an AI Arms Race? Sam Altman’s Lawyers Think So",
"datePublished": "2026-04-30T16:29:37-07:00",
"dateModified": "2026-05-01T09:40:21-07:00",
"image": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"isAccessibleForFree": "True",
"publisher": {
"@type": "NewsMediaOrganization",
"@id": "https://www.kqed.org/#organization",
"name": "KQED",
"logo": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"url": "https://www.kqed.org",
"sameAs": [
"https://www.facebook.com/KQED",
"https://twitter.com/KQED",
"https://www.instagram.com/kqed/",
"https://www.tiktok.com/@kqedofficial",
"https://www.linkedin.com/company/kqed",
"https://www.youtube.com/channel/UCeC0IOo7i1P_61zVUWbJ4nw"
]
}
}
},
"primaryCategory": {
"termId": 248,
"slug": "technology",
"name": "Technology"
},
"sticky": false,
"nprStoryId": "kqed-12081916",
"templateType": "standard",
"featuredImageType": "standard",
"excludeFromSiteSearch": "Include",
"articleAge": "0",
"path": "/news/12081916/are-elon-musk-and-openai-fighting-an-ai-arms-race-sam-altmans-lawyers-think-so",
"audioTrackLength": null,
"parsedContent": [
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003cp>As Elon Musk’s dayslong testimony in his \u003ca href=\"https://www.kqed.org/news/12081290/how-to-unscramble-an-omelet-in-silicon-valley-the-musk-v-altman-trial-that-will-try\">case against OpenAI co-founder Sam Altman\u003c/a> came to a close Thursday, defense attorneys aimed to paint the world’s richest man as intent on dominating artificial intelligence — not on protecting the world from it.\u003c/p>\n\u003cp>Under cross-examination in an Oakland court, attorneys for Altman and Microsoft, the company’s largest financial backer and which until this week held the exclusive rights to license and sell its technology, held Musk’s feet to the fire about a number of business moves he’s made — both within and outside of OpenAI — that might give jurors pause about whether he operated so differently from his former colleagues in the race to dominate the field.\u003c/p>\n\u003cp>During hours of testimony, Musk has told the court that he cofounded the nonprofit OpenAI with Altman and OpenAI President Greg Brockman\u003ca href=\"https://www.kqed.org/news/12081603/elon-musk-takes-aim-at-openai-as-trial-begins-its-not-ok-to-steal-a-charity\"> in 2015 altruistically\u003c/a>, fearing the dangers of AI and wanting to ensure that the technology was developed in a safe and open-source way. He brought the suit, he said, after deciding that his co-founders \u003ca href=\"https://www.kqed.org/news/12081798/elon-musk-says-sam-altman-tricked-him-into-funding-openai\">had betrayed that intention\u003c/a> — expanding the company into a tech behemoth valued at $852 billion today.\u003c/p>\n\u003cp>\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "ad",
"attributes": {
"named": {
"label": "fullwidth"
},
"numeric": [
"fullwidth"
]
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003c/p>\n\u003cp>But Microsoft attorney Russell Cohen seemed to point to a different motivation: a desire to beat OpenAI and win the AI race.\u003c/p>\n\u003cp>“You didn’t sue Microsoft [and OpenAI] until November 2024, correct?” Cohen said.\u003c/p>\n\u003cp>“Yes,” Musk said.\u003c/p>\n\u003cp>“And that is after you formed your own AI company, xAI, correct?” Cohen said.\u003c/p>\n\u003cp>“Yes,” Musk said.\u003c/p>\n\u003cfigure id=\"attachment_12081637\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12081637\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">OpenAI’s lead counsel William Savitt presents opening statements in the trial in which Elon Musk claims that Sam Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit in Oakland on April 28, 2026. 
\u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>The questions came after William Savitt, Altman’s attorney, directed the jury’s attention to a bombshell message Musk sent to Mark Zuckerberg in February 2025, asking whether the Meta CEO would be “open to the idea of bidding on the OpenAI IP,” or intellectual property, with Musk and others.\u003c/p>\n\u003cp>The jury also learned that xAI had partially “distilled,” that is, derived technology from OpenAI’s own models, which violates OpenAI’s terms of service.\u003c/p>\n\u003cp>The pointed inquiries on Thursday came after Musk’s testimony started to bring the events of how OpenAI launched its first for-profit subsidiary into focus. In 2017, executives including Altman, Musk, Brockman and Ilya Sutskever, a top computer scientist at the company since its founding, launched discussions about creating a for-profit subsidiary.\u003c/p>\n\u003cp>It would be, they said, a way to bring in additional funding and keep at the cutting edge of a growing field of competitors as they started pursuing artificial general intelligence — commonly referred to as AGI — a futuristic superintelligent AI technology.\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "aside",
"attributes": {
"named": {
"postid": "news_12081798",
"hero": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1.jpg",
"label": ""
},
"numeric": []
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>Altman’s defense has alleged that throughout that process, Musk attempted to “wrest control” of the company twice, first insisting that he hold a majority equity stake in any for-profit entity, control its board of directors and serve as CEO, and later, that OpenAI be folded into Tesla, where he already serves as CEO.\u003c/p>\n\u003cp>Savitt said Musk began withholding $5 million quarterly fund contributions to put pressure on the company to grant his requests, and after those attempts failed, he left the company.\u003c/p>\n\u003cp>Savitt also accused Musk of poaching OpenAI employees as Musk exited in early 2018, including founding member Andrej Karpathy, for Tesla.\u003c/p>\n\u003cp>Musk said multiple times that Tesla is not pursuing AGI. But in March, Musk \u003ca href=\"https://x.com/elonmusk/status/2029123591871308272?lang=en\">wrote on the social media platform X\u003c/a> that “Tesla will be one of the companies to make AGI and probably the first to make it in humanoid/atom-shaping form.”\u003c/p>\n\u003cp>Separately, he formed xAI in 2023, which he said is pursuing AGI.\u003c/p>\n\u003cp>He’s downplayed its competitiveness with OpenAI, though, testifying that it has just a couple hundred employees and a “small market share.”\u003c/p>\n\u003cp>“I would say technically competitive, but much smaller than OpenAI,” Musk said Tuesday.\u003c/p>\n\u003cfigure id=\"attachment_10734536\" class=\"wp-caption aligncenter\" style=\"max-width: 1920px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-10734536\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2015/10/StuartRussell.jpg\" alt='Stuart Russell, UC Berkeley computer science professor and co-author of the standard textbook \"Artificial Intelligence: a Modern Approach.\"' width=\"1920\" height=\"1320\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2015/10/StuartRussell.jpg 1920w, 
https://cdn.kqed.org/wp-content/uploads/sites/10/2015/10/StuartRussell-400x275.jpg 400w, https://cdn.kqed.org/wp-content/uploads/sites/10/2015/10/StuartRussell-800x550.jpg 800w, https://cdn.kqed.org/wp-content/uploads/sites/10/2015/10/StuartRussell-1440x990.jpg 1440w, https://cdn.kqed.org/wp-content/uploads/sites/10/2015/10/StuartRussell-1180x811.jpg 1180w, https://cdn.kqed.org/wp-content/uploads/sites/10/2015/10/StuartRussell-960x660.jpg 960w\" sizes=\"auto, (max-width: 1920px) 100vw, 1920px\">\u003cfigcaption class=\"wp-caption-text\">Stuart Russell, UC Berkeley computer science professor and co-author of the standard textbook “Artificial Intelligence: A Modern Approach.” \u003ccite>(Juan Mabromata/AFP via Getty Images)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>The three days of Musk’s testimony got testy at times, particularly during Savitt’s cross-examination on Wednesday afternoon, when Savitt and U.S. District Judge Yvonne Gonzalez Rogers asked Musk repeatedly to answer the questions he was asked. Musk accused Savitt of intentionally misleading him.\u003c/p>\n\u003cp>But the most heated moment thus far might have come before the jury was called to the courtroom on Thursday morning, during a discussion about what AI safety expert Stuart Russell, who is taking the stand this afternoon, will be willing to testify to.\u003c/p>\n\u003cp>Musk’s attorney argued he should be allowed to speak about the climate risk associated with AI, saying: “We could all die.”\u003c/p>\n\u003cp>“It is also ironic that your client, despite these risks, is creating a company in the exact space,” Gonzalez Rogers said. “I suspect that there are people who don’t want to put the future in Mr. Musk’s hands.”\u003c/p>\n\u003cp>\u003c/p>\n\u003c/div>\u003c/p>",
"attributes": {
"named": {},
"numeric": []
}
}
],
"link": "/news/12081916/are-elon-musk-and-openai-fighting-an-ai-arms-race-sam-altmans-lawyers-think-so",
"authors": [
"11913",
"251"
],
"categories": [
"news_28250",
"news_8",
"news_248"
],
"tags": [
"news_1386",
"news_3897",
"news_27626",
"news_34054",
"news_33542",
"news_33543",
"news_34586",
"news_1631"
],
"featImg": "news_12081606",
"label": "news"
},
"news_12081798": {
"type": "posts",
"id": "news_12081798",
"meta": {
"index": "posts_1716263798",
"site": "news",
"id": "12081798",
"score": null,
"sort": [
1777507270000
]
},
"guestAuthors": [],
"slug": "elon-musk-says-sam-altman-tricked-him-into-funding-openai",
"title": "Elon Musk Says Sam Altman Tricked Him Into Funding OpenAI",
"publishDate": 1777507270,
"format": "standard",
"headTitle": "Elon Musk Says Sam Altman Tricked Him Into Funding OpenAI | KQED",
"labelTerm": {
"site": "news"
},
"content": "\u003cp>During the second day of the \u003ca href=\"https://www.kqed.org/news/12081603/elon-musk-takes-aim-at-openai-as-trial-begins-its-not-ok-to-steal-a-charity\">landmark trial between Sam Altman and Elon Musk\u003c/a>, the Tesla founder told the Oakland courthouse that he was a “fool” to fund OpenAI through its early years.\u003c/p>\n\u003cp>Testifying in the lawsuit he brought against Altman, which claims the company’s creators betrayed their mission for profits, Musk suggested Wednesday that Altman and cofounder Greg Brockman wanted to “have your cake and eat it too.”\u003c/p>\n\u003cp>“If you go nonprofit, you’ve got a sort of moral high ground,” he testified.\u003c/p>\n\u003cp>Musk’s testimony tells one version of founding OpenAI: that he, fearing the dangers of artificial intelligence, pursued its development with the goal of benefiting the common good, alongside, he thought, like-minded collaborators. But behind the scenes, those cofounders engaged in a “long con” to profit at his expense.\u003c/p>\n\u003cp>[ad fullwidth]\u003c/p>\n\u003cp>“What they really wanted was a for-profit, where they could make as much money as possible,” Musk said later.\u003c/p>\n\u003cp>Whether the jury believes him will be integral to the decision they’re tasked with making, as they determine whether OpenAI breached charitable trust and engaged in unjust enrichment as it evolved from a nonprofit organization to its current $730 billion iteration.\u003c/p>\n\u003cp>Under cross-examination, Altman’s attorney, William Savitt, questioned Musk’s story and credibility as an altruistic benefactor. 
He pointed to an email Musk sent to Altman in 2015, which said it would be “probably better” if OpenAI operated as a for-profit company with a parallel nonprofit.\u003c/p>\n\u003cfigure id=\"attachment_12081637\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12081637\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">OpenAI’s lead counsel, William Savitt, presents opening statements in the trial in which Elon Musk claims that Sam Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit, in Oakland on April 28, 2026. \u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>In another email sent to colleagues at his neurotechnology company, Neuralink, Musk said that Google’s AI development was moving very fast, and that he was concerned OpenAI was not on the path to catch up.\u003c/p>\n\u003cp>“Setting it up as a nonprofit might, in hindsight, have been the wrong move,” Musk wrote. “Sense of urgency is not as high.”\u003c/p>\n\u003cp>Savitt asked if, in 2017, Musk suggested at a party that OpenAI should create a for-profit. 
He said it was just after the company’s AI model had beaten \u003cem>Defense of the Ancients, \u003c/em>a battle video game, which was a pivotal moment in the development process.\u003c/p>\n\u003cp>Musk said he didn’t remember giving instructions to create a for-profit at the time.\u003c/p>\n\u003cp>“This was nine years ago,” he said.[aside postID=news_12081603 hero='https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-02-KQED.jpg']Savitt said Tuesday that in 2017, OpenAI executives, including Musk, were in the midst of conversations about whether and how to transition the company to a for-profit structure.\u003c/p>\n\u003cp>According to OpenAI’s court filings, as early as summer 2017, Musk had insisted on holding a majority equity stake in any for-profit entity, serving as CEO and controlling its board of directors.\u003c/p>\n\u003cp>Pressed by Savitt about what Musk meant by “expressing what you said about control,” the Tesla founder and billionaire said: “I try to be as literal as possible.”\u003c/p>\n\u003cp>In the fall of 2017, Brockman and Ilya Sutskever, another top OpenAI executive, emailed Musk with concerns about the for-profit structure he proposed. Shortly thereafter, discussions over the structure collapsed, and Musk stopped making significant quarterly funding contributions, OpenAI alleges.\u003c/p>\n\u003cp>He left the company less than six months later.\u003c/p>\n\u003cp>Savitt framed the breakdown and Musk’s exit as a result of his not getting control of the for-profit, and the other executives’ focus on maintaining its philanthropic mission. 
He suggested that Musk tried to pressure them to accept his terms by pausing the majority of his financial backing.\u003c/p>\n\u003cp>“You knew that would create financial pressure for the organization,” Savitt said.\u003c/p>\n\u003cfigure id=\"attachment_12081686\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12081686\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Steve Molo, Elon Musk’s attorney, presents opening statements in the trial in which Elon Musk (center-right) claims that Sam Altman (right) and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit, in Oakland on April 28, 2026. \u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Musk denied that was his intention. 
Instead, he alleged that Altman convinced Brockman and the others to go against his proposal, and that their concern over his desire for control was disingenuous.\u003c/p>\n\u003cp>“I’m not going to fund something if I don’t have confidence in the people,” he said.\u003c/p>\n\u003cp>When asked whether he proposed that OpenAI be folded into Tesla, Musk said: “There were a lot of ideas that were brainstormed at the time.”\u003c/p>\n\u003cp>In an email, he wrote that doing so would be the “only path that could even hope to hold a candle to Google.”\u003c/p>\n\u003cp>Musk said he left OpenAI in February 2018 because he was focused on Tesla’s survival, and believed that OpenAI intended to continue operating as a nonprofit.\u003c/p>\n\u003cp>Savitt also laid out a series of exchanges between Musk and Altman, in which the OpenAI CEO kept him apprised of the company’s corporate structure. He said in March 2018, Musk responded to an email that noted the creation of a for-profit entity of OpenAI with “OK by me,” and was sent a term sheet for OpenAI LP that summer.[aside postID=news_12081290 hero='https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260422-ALTMANMUSK-MD-01-KQED.jpg']Savitt also said Altman emailed Musk a draft of the company’s public announcement of its for-profit arm in March 2019, and texted him asking if he had time to talk about Microsoft’s plan to invest in OpenAI. Musk never responded to that text, according to Savitt.\u003c/p>\n\u003cp>Musk said he was busy with his other companies in 2018, and while he was aware that it had added a for-profit entity, he hadn’t lost complete faith in the company. 
While he’d suspended quarterly $5 million funding contributions prior to his departure, he continued to make some contributions until 2020.\u003c/p>\n\u003cp>He said that he’d gone from enthusiastically supportive to uncertain about OpenAI’s mission, but that he’d fully suspended his contributions when he felt that the company was “deliberately not a nonprofit.”\u003c/p>\n\u003cp>When asked why he waited until 2024 to bring the suit, Musk said that’s when he determined OpenAI breached charitable trust.\u003c/p>\n\u003cp>“Thinking that someone might steal your car is not the same as [if] someone has stolen your car,” Musk said. He said after enlisting his attorney, Alex Spiro, to investigate, he heard from him in 2023 that “the car had been stolen.”\u003c/p>\n\u003cp>“I would have sued sooner if I thought the charity had been stolen sooner,” Musk continued.\u003c/p>\n\u003cp>The trial and Musk’s testimony are expected to continue on Thursday.\u003c/p>\n\u003cp>[ad floatright]\u003c/p>\n",
"blocks": [],
"excerpt": "On the second day of a trial pitting the Tesla founder against OpenAI, Elon Musk said he was a “fool” to support the company behind ChatGPT during its early years.",
"status": "publish",
"parent": 0,
"modified": 1777509912,
"stats": {
"hasAudio": false,
"hasVideo": false,
"hasChartOrMap": false,
"iframeSrcs": [],
"hasGoogleForm": false,
"hasGallery": false,
"hasHearkenModule": false,
"hasPolis": false,
"paragraphCount": 32,
"wordCount": 1208
},
"headData": {
"title": "Elon Musk Says Sam Altman Tricked Him Into Funding OpenAI | KQED",
"description": "On the second day of a trial pitting the Tesla founder against OpenAI, Elon Musk said he was a “fool” to support the company behind ChatGPT during its early years.",
"ogTitle": "",
"ogDescription": "",
"ogImgId": "",
"twTitle": "",
"twDescription": "",
"twImgId": "",
"schema": {
"@context": "https://schema.org",
"@type": "NewsArticle",
"headline": "Elon Musk Says Sam Altman Tricked Him Into Funding OpenAI",
"datePublished": "2026-04-29T17:01:10-07:00",
"dateModified": "2026-04-29T17:45:12-07:00",
"image": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"isAccessibleForFree": "True",
"publisher": {
"@type": "NewsMediaOrganization",
"@id": "https://www.kqed.org/#organization",
"name": "KQED",
"logo": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"url": "https://www.kqed.org",
"sameAs": [
"https://www.facebook.com/KQED",
"https://twitter.com/KQED",
"https://www.instagram.com/kqed/",
"https://www.tiktok.com/@kqedofficial",
"https://www.linkedin.com/company/kqed",
"https://www.youtube.com/channel/UCeC0IOo7i1P_61zVUWbJ4nw"
]
}
}
},
"primaryCategory": {
"termId": 248,
"slug": "technology",
"name": "Technology"
},
"sticky": false,
"nprStoryId": "kqed-12081798",
"templateType": "standard",
"featuredImageType": "standard",
"excludeFromSiteSearch": "Include",
"articleAge": "0",
"path": "/news/12081798/elon-musk-says-sam-altman-tricked-him-into-funding-openai",
"audioTrackLength": null,
"parsedContent": [
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003cp>During the second day of the \u003ca href=\"https://www.kqed.org/news/12081603/elon-musk-takes-aim-at-openai-as-trial-begins-its-not-ok-to-steal-a-charity\">landmark trial between Sam Altman and Elon Musk\u003c/a>, the Tesla founder told the Oakland courthouse that he was a “fool” to fund OpenAI through its early years.\u003c/p>\n\u003cp>Testifying in the lawsuit he brought against Altman, which claims the company’s creators betrayed their mission for profits, Musk suggested Wednesday that Altman and cofounder Greg Brockman wanted to “have your cake and eat it too.”\u003c/p>\n\u003cp>“If you go nonprofit, you’ve got a sort of moral high ground,” he testified.\u003c/p>\n\u003cp>Musk’s testimony tells one version of founding OpenAI: that he, fearing the dangers of artificial intelligence, pursued its development with the goal of benefiting the common good, alongside, he thought, like-minded collaborators. But behind the scenes, those cofounders engaged in a “long con” to profit at his expense.\u003c/p>\n\u003cp>\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "ad",
"attributes": {
"named": {
"label": "fullwidth"
},
"numeric": [
"fullwidth"
]
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003c/p>\n\u003cp>“What they really wanted was a for-profit, where they could make as much money as possible,” Musk said later.\u003c/p>\n\u003cp>Whether the jury believes him will be integral to the decision they’re tasked with making, as they determine whether OpenAI breached charitable trust and engaged in unjust enrichment as it evolved from a nonprofit organization to its current $730 billion iteration.\u003c/p>\n\u003cp>Under cross-examination, Altman’s attorney, William Savitt, questioned Musk’s story and credibility as an altruistic benefactor. He pointed to an email Musk sent to Altman in 2015, which said it would be “probably better” if OpenAI operated as a for-profit company with a parallel nonprofit.\u003c/p>\n\u003cfigure id=\"attachment_12081637\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12081637\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">OpenAI’s lead counsel, William Savitt, presents opening statements in the trial in which Elon Musk claims that Sam Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit, in Oakland on April 28, 2026. 
\u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>In another email sent to colleagues at his neurotechnology company, Neuralink, Musk said that Google’s AI development was moving very fast, and that he was concerned OpenAI was not on the path to catch up.\u003c/p>\n\u003cp>“Setting it up as a nonprofit might, in hindsight, have been the wrong move,” Musk wrote. “Sense of urgency is not as high.”\u003c/p>\n\u003cp>Savitt asked if, in 2017, Musk suggested at a party that OpenAI should create a for-profit. He said it was just after the company’s AI model had beaten \u003cem>Defense of the Ancients, \u003c/em>a battle video game, which was a pivotal moment in the development process.\u003c/p>\n\u003cp>Musk said he didn’t remember giving instructions to create a for-profit at the time.\u003c/p>\n\u003cp>“This was nine years ago,” he said.\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "aside",
"attributes": {
"named": {
"postid": "news_12081603",
"hero": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-02-KQED.jpg",
"label": ""
},
"numeric": []
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>Savitt said Tuesday that in 2017, OpenAI executives, including Musk, were in the midst of conversations about whether and how to transition the company to a for-profit structure.\u003c/p>\n\u003cp>According to OpenAI’s court filings, as early as summer 2017, Musk had insisted on holding a majority equity stake in any for-profit entity, serving as CEO and controlling its board of directors.\u003c/p>\n\u003cp>Pressed by Savitt about what Musk meant by “expressing what you said about control,” the Tesla founder and billionaire said: “I try to be as literal as possible.”\u003c/p>\n\u003cp>In the fall of 2017, Brockman and Ilya Sutskever, another top OpenAI executive, emailed Musk with concerns about the for-profit structure he proposed. Shortly thereafter, discussions over the structure collapsed, and Musk stopped making significant quarterly funding contributions, OpenAI alleges.\u003c/p>\n\u003cp>He left the company less than six months later.\u003c/p>\n\u003cp>Savitt framed the breakdown and Musk’s exit as a result of his not getting control of the for-profit, and the other executives’ focus on maintaining its philanthropic mission. 
He suggested that Musk tried to pressure them to accept his terms by pausing the majority of his financial backing.\u003c/p>\n\u003cp>“You knew that would create financial pressure for the organization,” Savitt said.\u003c/p>\n\u003cfigure id=\"attachment_12081686\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12081686\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Steve Molo, Elon Musk’s attorney, presents opening statements in the trial in which Elon Musk (center-right) claims that Sam Altman (right) and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit, in Oakland on April 28, 2026. \u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Musk denied that was his intention. 
Instead, he alleged that Altman convinced Brockman and the others to go against his proposal, and that their concern over his desire for control was disingenuous.\u003c/p>\n\u003cp>“I’m not going to fund something if I don’t have confidence in the people,” he said.\u003c/p>\n\u003cp>When asked whether he proposed that OpenAI be folded into Tesla, Musk said: “There were a lot of ideas that were brainstormed at the time.”\u003c/p>\n\u003cp>In an email, he wrote that doing so would be the “only path that could even hope to hold a candle to Google.”\u003c/p>\n\u003cp>Musk said he left OpenAI in February 2018 because he was focused on Tesla’s survival, and believed that OpenAI intended to continue operating as a nonprofit.\u003c/p>\n\u003cp>Savitt also laid out a series of exchanges between Musk and Altman, in which the OpenAI CEO kept him apprised of the company’s corporate structure. He said in March 2018, Musk responded to an email that noted the creation of a for-profit entity of OpenAI with “OK by me,” and was sent a term sheet for OpenAI LP that summer.\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "aside",
"attributes": {
"named": {
"postid": "news_12081290",
"hero": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260422-ALTMANMUSK-MD-01-KQED.jpg",
"label": ""
},
"numeric": []
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>Savitt also said Altman emailed Musk a draft of the company’s public announcement of its for-profit arm in March 2019, and texted him asking if he had time to talk about Microsoft’s plan to invest in OpenAI. Musk never responded to that text, according to Savitt.\u003c/p>\n\u003cp>Musk said he was busy with his other companies in 2018, and while he was aware that it had added a for-profit entity, he hadn’t lost complete faith in the company. While he’d suspended quarterly $5 million funding contributions prior to his departure, he continued to make some contributions until 2020.\u003c/p>\n\u003cp>He said that he’d gone from enthusiastically supportive to uncertain about OpenAI’s mission, but that he’d fully suspended his contributions when he felt that the company was “deliberately not a nonprofit.”\u003c/p>\n\u003cp>When asked why he waited until 2024 to bring the suit, Musk said that’s when he determined OpenAI breached charitable trust.\u003c/p>\n\u003cp>“Thinking that someone might steal your car is not the same as [if] someone has stolen your car,” Musk said. He said after enlisting his attorney, Alex Spiro, to investigate, he heard from him in 2023 that “the car had been stolen.”\u003c/p>\n\u003cp>“I would have sued sooner if I thought the charity had been stolen sooner,” Musk continued.\u003c/p>\n\u003cp>The trial and Musk’s testimony are expected to continue on Thursday.\u003c/p>\n\u003cp>\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "ad",
"attributes": {
"named": {
"label": "floatright"
},
"numeric": [
"floatright"
]
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003c/p>\n\u003c/div>\u003c/p>",
"attributes": {
"named": {},
"numeric": []
}
}
],
"link": "/news/12081798/elon-musk-says-sam-altman-tricked-him-into-funding-openai",
"authors": [
"11913",
"251"
],
"categories": [
"news_31795",
"news_6188",
"news_8",
"news_248"
],
"tags": [
"news_34755",
"news_32668",
"news_3897",
"news_27626",
"news_19954",
"news_21891",
"news_34054",
"news_33542",
"news_33543",
"news_34586",
"news_1631",
"news_57"
],
"featImg": "news_12081681",
"label": "news"
},
"news_12081603": {
"type": "posts",
"id": "news_12081603",
"meta": {
"index": "posts_1716263798",
"site": "news",
"id": "12081603",
"score": null,
"sort": [
1777421165000
]
},
"guestAuthors": [],
"slug": "elon-musk-takes-aim-at-openai-as-trial-begins-its-not-ok-to-steal-a-charity",
"title": "Elon Musk Takes Aim at OpenAI as Trial Begins: ‘It’s Not OK to Steal a Charity’",
"publishDate": 1777421165,
"format": "standard",
"headTitle": "Elon Musk Takes Aim at OpenAI as Trial Begins: ‘It’s Not OK to Steal a Charity’ | KQED",
"labelTerm": {
"site": "news"
},
"content": "\u003cp>In a federal courtroom in Oakland on Tuesday, attorneys for tech elites Sam Altman and Elon Musk set the stage for a \u003ca href=\"https://www.kqed.org/news/12081290/how-to-unscramble-an-omelet-in-silicon-valley-the-musk-v-altman-trial-that-will-try\">landmark case to determine whether OpenAI\u003c/a>, one of the most powerful artificial intelligence companies in the world, was founded on a lie.\u003c/p>\n\u003cp>At issue is whether the company’s stated mission — to lead AI development to benefit the common good — was authentic or a deceptive pitch designed to attract talent and investment. \u003ca href=\"https://www.kqed.org/forum/2010101912956/its-elon-musks-world-were-just-living-in-it\">Musk\u003c/a> alleges that co-founders Altman and Greg Brockman, who remains Altman’s second-in-command, participated in a “long con” to enrich themselves at his expense, after the three co-founded OpenAI as a nonprofit in 2015.\u003c/p>\n\u003cp>“They’re going to make this lawsuit very complicated, but it’s very simple,” Musk said of OpenAI on the stand on Tuesday afternoon. “It’s not OK to steal a charity.”\u003c/p>\n\u003cp>[ad fullwidth]\u003c/p>\n\u003cp>He departed the company after a falling out and \u003ca href=\"https://www.courtlistener.com/docket/69013420/musk-v-altman/\">sued the company\u003c/a> in 2024, alleging that OpenAI had breached charitable trust by restructuring as a for-profit company, now valued at more than $800 billion.\u003c/p>\n\u003cp>But Altman’s attorneys called the Tesla CEO’s behavior “a tale of two Musks,” shifting from pushing for OpenAI to become a for-profit company under his control, to caring about its nonprofit status only after launching competitor xAI in 2023. They argue OpenAI’s decision to adopt a for-profit structure was integral to its survival.\u003c/p>\n\u003cp>“We’re here because Mr. Musk didn’t get his way,” William Savitt, Altman’s lead attorney, said Tuesday. 
“And because he’s a competitor, he’ll do anything he can to attack OpenAI.”\u003c/p>\n\u003cfigure id=\"attachment_12081686\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12081686\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Steve Molo, Elon Musk’s attorney, presents opening statements in the trial in which Elon Musk (center-right) claims that Sam Altman (right) and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit, in Oakland on April 28, 2026. 
\u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Steven Molo, Musk’s counsel, told the jury that when Musk, Altman and Brockman set out to found an AI nonprofit, their goals were to develop the technology safely and for the \u003ca href=\"https://www.kqed.org/news/12034916/about-benefiting-humanity-calls-grow-for-openai-to-make-good-on-its-promises\">benefit of humanity\u003c/a>.\u003c/p>\n\u003cp>“It wasn’t a technology to get rich,” he said.\u003c/p>\n\u003cp>After operating as a strict nonprofit for years, OpenAI added a for-profit arm in 2019, which executives said was necessary to obtain the funding needed to develop artificial general intelligence — a more advanced AI technology that surpasses human intelligence, according to court filings.\u003c/p>\n\u003cp>In early conversations about how the for-profit entity would work, Molo said, the structure was likened to a museum gift shop whose revenue funds the institution’s galleries and operations. Brockman and Altman reassured Musk that they were still committed to the nonprofit structure, he said.\u003c/p>\n\u003cp>But behind the scenes, Molo alleges that the other co-founders had more lucrative desires.[aside postID=news_12081290 hero='https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260422-ALTMANMUSK-MD-01-KQED.jpg']In court filings, he cited a journal in which Brockman wrote that “it would be nice to be making the billions … we’ve been thinking that maybe we should just flip to a for-profit. making the money for us sounds great and all.”\u003c/p>\n\u003cp>Brockman also wrote that he and another top OpenAI executive, Ilya Sutskever, “cannot say that we are committed to the non-profit. don’t wanna say that we’re committed. 
If three months later we’re doing B-Corp [a certification for for-profit corporations with social and environmental missions], then it was a lie.”\u003c/p>\n\u003cp>Years later, after Musk had departed OpenAI, the company was “no longer operating for the good of humanity,” Molo said.\u003c/p>\n\u003cp>“The museum store sold the Picassos,” he said.\u003c/p>\n\u003cp>Musk’s lawsuit claims OpenAI breached charitable trust and alleges unjust enrichment, which means that one party unfairly benefits at the expense of another. He also accuses Microsoft, which is the company’s largest financial backer and until this week held the exclusive rights to license and sell its technology, of aiding and abetting OpenAI’s breach of charitable trust.\u003c/p>\n\u003cp>OpenAI’s defense, meanwhile, alleges that Musk’s suit is less motivated by a desire to do good than it is by vengeance for his former colleagues, whose company is now eyeing an initial public offering valued at up to $1 trillion.\u003c/p>\n\u003cp>“Musk sat on his claims for years,” Savitt said. “He knew everything that was happening when it was happening. 
My clients had the nerve to go out and succeed without him.”\u003c/p>\n\u003cp>He also pointed out that Musk launched xAI a year before bringing the lawsuit, which would make OpenAI his competitor.\u003c/p>\n\u003cfigure id=\"attachment_12081681\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12081681\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Representing Microsoft, Russell Coan (left) speaks as Elon Musk watches in the trial in which Elon Musk claims that Sam Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit, in Oakland on April 28, 2026. \u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Savitt pointed to moments early in OpenAI’s development, when Musk suggested that it would be “probably better” for the company to operate as a “standard C corp[oration] with a parallel nonprofit.” He initially promised to cover the balance of the funding it needed, but reneged when he didn’t get to control the company, Savitt told the jury.\u003c/p>\n\u003cp>Musk was in the middle of the conversations about pivoting from a nonprofit, Savitt said. 
As early as the summer of 2017, he insisted on holding a majority equity stake in any for-profit entity, as well as controlling its board of directors and serving as CEO, according to OpenAI’s court filings.\u003c/p>\n\u003cp>In the fall of that year, after Brockman and Sutskever emailed Musk with concerns about the for-profit structure he proposed, the discussions collapsed, OpenAI alleges. After that, Musk stopped making significant quarterly funding contributions, and he left the company less than six months later.\u003c/p>\n\u003cp>Around that time, Brockman and Altman moved to pursue a for-profit arm — a decision their attorneys say they told Musk about prior to his departure from the board.[aside postID=news_12079896 hero='https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/Daniel-Moreno-Gama-AP.jpg']Savitt said in court that Musk had given the company less than 4% of the funding he’d promised. While OpenAI had gotten contributions from other donors, he said, those “kept the lights on, but it wasn’t nearly enough to stay on the cutting edge.”\u003c/p>\n\u003cp>“They needed to get the money from somewhere, or else the project collapsed,” he said, alleging that donors weren’t willing to make the billion-dollar contributions that OpenAI needed without an expectation of return.\u003c/p>\n\u003cp>Since OpenAI established its first for-profit subsidiary, which capped investor returns at 100 times their investment, its business has exploded. It’s now a public benefit corporation, required to consider its mission statement but not necessarily to prioritize it.\u003c/p>\n\u003cp>Over the years, its mission statement has been changed several times. 
In 2023, according to the nonprofit parent organization’s \u003ca href=\"https://cdn.theconversation.com/static_files/files/4099/2023-IRS990-OpenAI.pdf?1770819990\">IRS disclosure form\u003c/a>, it sought to build AI that “safely benefits humanity, unconstrained by a need to generate financial return.” But last year, \u003ca href=\"https://app.candid.org/profile/9571629/openai-81-0861541?activeTab=7\">that same form\u003c/a> included a shorter mission statement — one that removed the word “safely” and any mention of finances, Tufts University business professor Alnoor Ebrahim \u003ca href=\"https://theconversation.com/openai-has-deleted-the-word-safely-from-its-mission-and-its-new-structure-is-a-test-for-whether-ai-serves-society-or-shareholders-274467\">wrote in \u003cem>The Conversation\u003c/em>\u003c/a>, an academic news outlet.\u003c/p>\n\u003cp>Former OpenAI employees have left and started a competitor, Anthropic, citing concerns over safety and the company’s direction. In 2023, OpenAI executives and board members, including Sutskever, staged a coup to briefly oust Altman as CEO. They said there’d been a breakdown in trust between him and the board, and that Altman engaged in a pattern of deception and wasn’t “consistently candid in his communications.”\u003c/p>\n\u003cp>Whether Altman’s and OpenAI’s pitch to develop their technology for the benefit of the world is an example of that deception is part of what jurors will aim to root out in the current trial.\u003c/p>\n\u003cp>“I didn’t want to pave the road to hell with good intentions,” Musk said on the stand on Tuesday afternoon. “If you have somebody who’s not trustworthy in charge of AI, I think that’s very dangerous for the whole world.”\u003c/p>\n\u003cp>[ad floatright]\u003c/p>\n",
"blocks": [],
"excerpt": "In a federal courtroom in Oakland, attorneys for tech elites Sam Altman and Elon Musk painted very different pictures of the early years of OpenAI and its mission to benefit the common good.",
"status": "publish",
"parent": 0,
"modified": 1777482966,
"stats": {
"hasAudio": false,
"hasVideo": false,
"hasChartOrMap": false,
"iframeSrcs": [],
"hasGoogleForm": false,
"hasGallery": false,
"hasHearkenModule": false,
"hasPolis": false,
"paragraphCount": 30,
"wordCount": 1473
},
"headData": {
"title": "Elon Musk Takes Aim at OpenAI as Trial Begins: ‘It’s Not OK to Steal a Charity’ | KQED",
"description": "In a federal courtroom in Oakland, attorneys for tech elites Sam Altman and Elon Musk painted very different pictures of the early years of OpenAI and its mission to benefit the common good.",
"ogTitle": "",
"ogDescription": "",
"ogImgId": "",
"twTitle": "",
"twDescription": "",
"twImgId": "",
"schema": {
"@context": "https://schema.org",
"@type": "NewsArticle",
"headline": "Elon Musk Takes Aim at OpenAI as Trial Begins: ‘It’s Not OK to Steal a Charity’",
"datePublished": "2026-04-28T17:06:05-07:00",
"dateModified": "2026-04-29T10:16:06-07:00",
"image": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"isAccessibleForFree": "True",
"publisher": {
"@type": "NewsMediaOrganization",
"@id": "https://www.kqed.org/#organization",
"name": "KQED",
"logo": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"url": "https://www.kqed.org",
"sameAs": [
"https://www.facebook.com/KQED",
"https://twitter.com/KQED",
"https://www.instagram.com/kqed/",
"https://www.tiktok.com/@kqedofficial",
"https://www.linkedin.com/company/kqed",
"https://www.youtube.com/channel/UCeC0IOo7i1P_61zVUWbJ4nw"
]
}
}
},
"primaryCategory": {
"termId": 248,
"slug": "technology",
"name": "Technology"
},
"audioUrl": "https://traffic.omny.fm/d/clips/0af137ef-751e-4b19-a055-aaef00d2d578/87fdd794-f90e-4280-920f-ab89016e8062/3ac84f6e-ca1f-4213-bd14-b43a01848097/audio.mp3",
"sticky": false,
"nprStoryId": "kqed-12081603",
"templateType": "standard",
"featuredImageType": "standard",
"excludeFromSiteSearch": "Include",
"articleAge": "0",
"path": "/news/12081603/elon-musk-takes-aim-at-openai-as-trial-begins-its-not-ok-to-steal-a-charity",
"audioTrackLength": null,
"parsedContent": [
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003cp>In a federal courtroom in Oakland on Tuesday, attorneys for tech elites Sam Altman and Elon Musk set the stage for a \u003ca href=\"https://www.kqed.org/news/12081290/how-to-unscramble-an-omelet-in-silicon-valley-the-musk-v-altman-trial-that-will-try\">landmark case to determine whether OpenAI\u003c/a>, one of the most powerful artificial intelligence companies in the world, was founded on a lie.\u003c/p>\n\u003cp>At issue is whether the company’s stated mission — to lead AI development to benefit the common good — was authentic or a deceptive pitch designed to attract talent and investment. \u003ca href=\"https://www.kqed.org/forum/2010101912956/its-elon-musks-world-were-just-living-in-it\">Musk\u003c/a> alleges that co-founders Altman and Greg Brockman, who remains Altman’s second-in-command, participated in a “long con” to enrich themselves at his expense, after the three co-founded OpenAI as a nonprofit in 2015.\u003c/p>\n\u003cp>“They’re going to make this lawsuit very complicated, but it’s very simple,” Musk said of OpenAI on the stand on Tuesday afternoon. “It’s not OK to steal a charity.”\u003c/p>\n\u003cp>\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "ad",
"attributes": {
"named": {
"label": "fullwidth"
},
"numeric": [
"fullwidth"
]
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003c/p>\n\u003cp>He departed the company after a falling out and \u003ca href=\"https://www.courtlistener.com/docket/69013420/musk-v-altman/\">sued the company\u003c/a> in 2024, alleging that OpenAI had breached charitable trust by restructuring as a for-profit company, now valued at more than $800 billion.\u003c/p>\n\u003cp>But Altman’s attorneys called the Tesla CEO’s behavior “a tale of two Musks,” shifting from pushing for OpenAI to become a for-profit company under his control, to caring about its nonprofit status only after launching competitor xAI in 2023. They argue OpenAI’s decision to adopt a for-profit structure was integral to its survival.\u003c/p>\n\u003cp>“We’re here because Mr. Musk didn’t get his way,” William Savitt, Altman’s lead attorney, said Tuesday. “And because he’s a competitor, he’ll do anything he can to attack OpenAI.”\u003c/p>\n\u003cfigure id=\"attachment_12081686\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12081686\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Steve Molo, Elon Musk’s attorney, presents opening statements in the trial in which Elon Musk (center-right) claims that Sam Altman (right) and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather 
than solely for profit, in Oakland on April 28, 2026. \u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Steven Molo, Musk’s counsel, told the jury that when Musk, Altman and Brockman set out to found an AI nonprofit, their goals were to develop the technology safely and for the \u003ca href=\"https://www.kqed.org/news/12034916/about-benefiting-humanity-calls-grow-for-openai-to-make-good-on-its-promises\">benefit of humanity\u003c/a>.\u003c/p>\n\u003cp>“It wasn’t a technology to get rich,” he said.\u003c/p>\n\u003cp>After operating as a strict nonprofit for years, OpenAI added a for-profit arm in 2019, which executives said was necessary to obtain the funding needed to develop artificial general intelligence — a more advanced AI technology that surpasses human intelligence, according to court filings.\u003c/p>\n\u003cp>In early conversations about how the for-profit entity would work, Molo said, the structure was likened to a museum gift shop whose revenue funds the institution’s galleries and operations. Brockman and Altman reassured Musk that they were still committed to the nonprofit structure, he said.\u003c/p>\n\u003cp>But behind the scenes, Molo alleges that the other co-founders had more lucrative desires.\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "aside",
"attributes": {
"named": {
"postid": "news_12081290",
"hero": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260422-ALTMANMUSK-MD-01-KQED.jpg",
"label": ""
},
"numeric": []
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>In court filings, he cited a journal in which Brockman wrote that “it would be nice to be making the billions … we’ve been thinking that maybe we should just flip to a for-profit. making the money for us sounds great and all.”\u003c/p>\n\u003cp>Brockman also wrote that he and another top OpenAI executive, Ilya Sutskever, “cannot say that we are committed to the non-profit. don’t wanna say that we’re committed. If three months later we’re doing B-Corp [a certification for for-profit corporations with social and environmental missions], then it was a lie.”\u003c/p>\n\u003cp>Years later, after Musk had departed OpenAI, the company was “no longer operating for the good of humanity,” Molo said.\u003c/p>\n\u003cp>“The museum store sold the Picassos,” he said.\u003c/p>\n\u003cp>Musk’s lawsuit claims OpenAI breached charitable trust and alleges unjust enrichment, which means that one party unfairly benefits at the expense of another. He also accuses Microsoft, which is the company’s largest financial backer and until this week held the exclusive rights to license and sell its technology, of aiding and abetting OpenAI’s breach of charitable trust.\u003c/p>\n\u003cp>OpenAI’s defense, meanwhile, alleges that Musk’s suit is less motivated by a desire to do good than it is by vengeance for his former colleagues, whose company is now eyeing an initial public offering valued at up to $1 trillion.\u003c/p>\n\u003cp>“Musk sat on his claims for years,” Savitt said. “He knew everything that was happening when it was happening. 
My clients had the nerve to go out and succeed without him.”\u003c/p>\n\u003cp>He also pointed out that Musk launched xAI a year before bringing the lawsuit, which would make OpenAI his competitor.\u003c/p>\n\u003cfigure id=\"attachment_12081681\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12081681\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Representing Microsoft, Russell Coan (left) speaks as Elon Musk watches in the trial in which Elon Musk claims that Sam Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit, in Oakland on April 28, 2026. \u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Savitt pointed to moments early in OpenAI’s development, when Musk suggested that it would be “probably better” for the company to operate as a “standard C corp[oration] with a parallel nonprofit.” He initially promised to cover the balance of the funding it needed, but reneged when he didn’t get to control the company, Savitt told the jury.\u003c/p>\n\u003cp>Musk was in the middle of the conversations about pivoting from a nonprofit, Savitt said. 
As early as the summer of 2017, he insisted on holding a majority equity stake in any for-profit entity, as well as controlling its board of directors and serving as CEO, according to OpenAI’s court filings.\u003c/p>\n\u003cp>In the fall of that year, after Brockman and Sutskever emailed Musk with concerns about the for-profit structure he proposed, the discussions collapsed, OpenAI alleges. After that, Musk stopped making significant quarterly funding contributions, and he left the company less than six months later.\u003c/p>\n\u003cp>Around that time, Brockman and Altman moved to pursue a for-profit arm — a decision their attorneys say they told Musk about prior to his departure from the board.\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "aside",
"attributes": {
"named": {
"postid": "news_12079896",
"hero": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/Daniel-Moreno-Gama-AP.jpg",
"label": ""
},
"numeric": []
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>Savitt said in court that Musk had given the company less than 4% of the funding he’d promised. While OpenAI had gotten contributions from other donors, he said, those “kept the lights on, but it wasn’t nearly enough to stay on the cutting edge.”\u003c/p>\n\u003cp>“They needed to get the money from somewhere, or else the project collapsed,” he said, alleging that donors weren’t willing to make the billion-dollar contributions that OpenAI needed without an expectation of return.\u003c/p>\n\u003cp>Since OpenAI established its first for-profit subsidiary, which capped investor returns at 100 times their investment, its business has exploded. It’s now a public benefit corporation, required to consider its mission statement but not necessarily to prioritize it.\u003c/p>\n\u003cp>Over the years, its mission statement has been changed several times. In 2023, according to the nonprofit parent organization’s \u003ca href=\"https://cdn.theconversation.com/static_files/files/4099/2023-IRS990-OpenAI.pdf?1770819990\">IRS disclosure form\u003c/a>, it sought to build AI that “safely benefits humanity, unconstrained by a need to generate financial return.” But last year, \u003ca href=\"https://app.candid.org/profile/9571629/openai-81-0861541?activeTab=7\">that same form\u003c/a> included a shorter mission statement — one that removed the word “safely” and any mention of finances, Tufts University business professor Alnoor Ebrahim \u003ca href=\"https://theconversation.com/openai-has-deleted-the-word-safely-from-its-mission-and-its-new-structure-is-a-test-for-whether-ai-serves-society-or-shareholders-274467\">wrote in \u003cem>The Conversation\u003c/em>\u003c/a>, an academic news outlet.\u003c/p>\n\u003cp>Former OpenAI employees have left and started a competitor, Anthropic, citing concerns over safety and the company’s direction. 
In 2023, OpenAI executives and board members, including Sutskever, staged a coup to briefly oust Altman as CEO. They said there’d been a breakdown in trust between him and the board, and that Altman engaged in a pattern of deception and wasn’t “consistently candid in his communications.”\u003c/p>\n\u003cp>Whether Altman’s and OpenAI’s pitch to develop their technology for the benefit of the world is an example of that deception is part of what jurors will aim to root out in the current trial.\u003c/p>\n\u003cp>“I didn’t want to pave the road to hell with good intentions,” Musk said on the stand on Tuesday afternoon. “If you have somebody who’s not trustworthy in charge of AI, I think that’s very dangerous for the whole world.”\u003c/p>\n\u003cp>\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "ad",
"attributes": {
"named": {
"label": "floatright"
},
"numeric": [
"floatright"
]
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003c/p>\n\u003c/div>\u003c/p>",
"attributes": {
"named": {},
"numeric": []
}
}
],
"link": "/news/12081603/elon-musk-takes-aim-at-openai-as-trial-begins-its-not-ok-to-steal-a-charity",
"authors": [
"11913",
"251"
],
"categories": [
"news_6188",
"news_28250",
"news_8",
"news_248"
],
"tags": [
"news_34755",
"news_32668",
"news_18352",
"news_3897",
"news_27626",
"news_19954",
"news_34054",
"news_33542",
"news_33543",
"news_34586",
"news_1631"
],
"featImg": "news_12081639",
"label": "news"
},
"news_12081290": {
"type": "posts",
"id": "news_12081290",
"meta": {
"index": "posts_1716263798",
"site": "news",
"id": "12081290",
"score": null,
"sort": [
1777287633000
]
},
"guestAuthors": [],
"slug": "how-to-unscramble-an-omelet-in-silicon-valley-the-musk-v-altman-trial-that-will-try",
"title": "How to Unscramble an Omelet in Silicon Valley: The Musk v. Altman Trial That Will Try",
"publishDate": 1777287633,
"format": "standard",
"headTitle": "How to Unscramble an Omelet in Silicon Valley: The Musk v. Altman Trial That Will Try | KQED",
"labelTerm": {
"site": "news"
},
"content": "\u003cp>Starting Monday in Oakland, a federal judge will consider \u003ca href=\"https://www.kqed.org/forum/2010101912956/its-elon-musks-world-were-just-living-in-it\">Elon Musk\u003c/a>’s claim that Sam Altman and OpenAI abandoned their founding promise to develop AI for the \u003ca href=\"https://www.kqed.org/news/12034916/about-benefiting-humanity-calls-grow-for-openai-to-make-good-on-its-promises\">benefit of humanity\u003c/a>, rather than solely for profit. At stake is not just $134 billion in potential damages, but whether it matters, legally speaking, that one of the most powerful AI companies in the world was built on a lie.\u003c/p>\n\u003cp>Musk and Altman co-founded OpenAI in 2015 as a nonprofit research lab, along with Greg Brockman, an AI researcher and entrepreneur, and others prominent in the field, but Musk left the company after a bitter falling out in 2018.\u003c/p>\n\u003cp>The following year, OpenAI established its first for-profit subsidiary, with investor returns capped at 100 times their investment. This structure would eventually evolve into the nearly trillion-dollar public benefit corporation OpenAI became in 2025. A public benefit corporation is essentially a for-profit company with a mission statement it’s legally required to consider, but not necessarily to prioritize.\u003c/p>\n\u003cp>[ad fullwidth]\u003c/p>\n\u003cp>This\u003ca href=\"https://www.courtlistener.com/docket/69013420/musk-v-altman/\"> lawsuit\u003c/a>, filed in 2024, originally alleged that Altman and Brockman ran a ‘long con,’ conspiring to enrich themselves at Musk’s expense.\u003c/p>\n\u003cp>On the eve of trial, in a move OpenAI called “evasive,” Musk’s lawyers voluntarily dismissed those personal fraud claims. 
What proceeds to trial today are two claims that go beyond Musk’s personal grievance: unjust enrichment and breach of charitable trust — essentially, the argument that OpenAI betrayed, not just Musk, but the public it promised to serve.\u003c/p>\n\u003cp>OpenAI argues Musk was fully aware the research lab needed to evolve beyond its nonprofit structure, because he participated in those early discussions, and even proposed folding OpenAI into Tesla. Now, OpenAI’s lawyers argue, Musk is disingenuously trying to use the courts to kneecap the most prominent rival to his own weaker and more controversial AI venture, xAI.\u003c/p>\n\u003cfigure id=\"attachment_12075430\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12075430\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/03/260304-Elon-Musk-Trial-03-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1333\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/03/260304-Elon-Musk-Trial-03-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/03/260304-Elon-Musk-Trial-03-KQED-160x107.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/03/260304-Elon-Musk-Trial-03-KQED-1536x1024.jpg 1536w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">A courtroom sketch depicts Elon Musk on the stand on March 4, 2026. \u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>“Motivated by jealousy, regret for walking away from OpenAI and a desire to derail a competing AI company, Elon has spent years harassing OpenAI through baseless lawsuits and public attacks,” the company\u003ca href=\"https://openai.com/index/openai-elon-musk/\"> posted\u003c/a> on its website, where it also offers a\u003ca href=\"https://openai.com/index/elon-musk-wanted-an-openai-for-profit/\"> timeline\u003c/a> that Musk v. 
Altman et al case watchers will find helpful as they follow what promises to be a barnburner of a trial.\u003c/p>\n\u003cp>\u003ca href=\"https://www.courtlistener.com/docket/69013420/musk-v-altman/?page=3\">Hundreds of court filings\u003c/a> provide a dishy treasure trove of private communications worthy of a telenovela, including some juicy excerpts from Brockman’s personal journal.\u003c/p>\n\u003cp>He writes about Musk, “it’d be wrong to steal the nonprofit from him. … that’d be pretty morally bankrupt. and he’s really not an idiot.”[aside postID=news_12072425 hero='https://cdn.kqed.org/wp-content/uploads/sites/10/2024/05/AP24134775174210-1020x680.jpg']Also, “Financially, what will take me to $1B?”\u003c/p>\n\u003cp>But without a doubt, it is the beef between Musk and Altman that will dominate this show. “They really do not like each other. That part is not fake,” said Charlie Bullock, a senior research fellow at the nonprofit Institute for Law and AI who advises state and federal policy makers on AI governance topics.\u003c/p>\n\u003cp>This trial promises to put on lurid public display a mini-universe of incestuous business relationships between men famous for rewriting rules rather than following them.\u003c/p>\n\u003cp>Personal spite between Musk and Altman aside, Bullock said, “We’re going to learn a lot over the course of this case and from the conclusion of this case about whether the legal system can meaningfully constrain frontier AI labs.”\u003c/p>\n\u003cp>This trial, Bullock told KQED, is “sort of the fallback option” in the absence of other checks on bad behavior in the AI space, such as federal regulation.\u003c/p>\n\u003cp>There is, for instance, a well-established law in California about nonprofits, for-profits, and how transitions between the two should be regulated. Whether and how it applies in this case is up to U.S. 
District Judge Yvonne Gonzalez Rogers in Oakland to determine over the next month.\u003c/p>\n\u003ch2>OpenAI is like nothing that’s come before\u003c/h2>\n\u003cp>Jill Horwitz, a law professor at Northwestern University and faculty director of the Lowell Milken Center for Philanthropy and Nonprofits at UCLA Law, likens OpenAI’s unique structure to “An enormous tail on a tiny dog.”\u003c/p>\n\u003cp>“The tail is the operating company, which is what everybody thinks of as being OpenAI, and the dog is the nonprofit, and it’s tiny. And it remains to be seen whether that board can be independent enough, because there’s such overlap between the nonprofit board and the for-profit board,” Horwitz said.\u003c/p>\n\u003cfigure id=\"attachment_12054564\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12054564 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2025/09/Sam-Altman_chatpgt.jpg\" alt=\"\" width=\"2000\" height=\"1333\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2025/09/Sam-Altman_chatpgt.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2025/09/Sam-Altman_chatpgt-160x107.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2025/09/Sam-Altman_chatpgt-1536x1024.jpg 1536w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Samuel Altman, CEO of OpenAI, testifies before the Senate Judiciary Subcommittee on Privacy, Technology and the Law on May 16, 2023, in Washington, D.C. \u003ccite>(Win McNamee/Getty Images)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>“It’s a weird structure. OpenAI isn’t one company. OpenAI is an interconnected group of companies. 
But it all is supposed to be advancing the nonprofit purpose,” Horwitz told KQED.\u003c/p>\n\u003cp>In 2018, even as OpenAI was privately contemplating the for-profit restructuring, it voluntarily adopted a new charter that restated and even strengthened its commitment to the public mission articulated at its founding.\u003c/p>\n\u003cp>In part, this had to do with the pressure Altman and OpenAI felt to attract top AI researchers, many of whom are concerned about the ethics of unleashing world-changing software on the rest of us. In 2024, 13 current and former OpenAI and Google DeepMind employees took the extraordinary step of publishing an \u003ca href=\"https://righttowarn.ai\">open letter\u003c/a> titled “Right to Warn,” calling out their own industry, and asking for protection if they warned the public.[aside postID=news_12079267 hero='https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/Hegseth-Side-by-Side-c.jpg']“We are hopeful that these risks can be adequately mitigated with sufficient guidance from the scientific community, policymakers, and the public. However, AI companies have strong financial incentives to avoid effective oversight, and we do not believe bespoke structures of corporate governance are sufficient to change this.”\u003c/p>\n\u003cp>To this day, it remains unclear whether Altman’s talk about benefiting humanity was anything more than a savvy sales pitch designed to attract top AI talent and allay the concerns of \u003ca href=\"https://www.kqed.org/news/11976097/california-lawmakers-take-on-ai-regulation-with-a-host-of-bills\">federal regulators\u003c/a>. 
This is one of the key questions trial watchers will be most keen to see answered.\u003c/p>\n\u003cp>“It’s quite typical for scientific research organizations to do all the hard work of the research before their IP is sold to a for-profit company for practical purposes,” said Rose Chan Loui, founding executive director of the Lowell Milken Center for Philanthropy and Nonprofits at UCLA Law.\u003c/p>\n\u003cp>What makes OpenAI unusual, Chan Loui said, is how explicitly and repeatedly the AI developer bound itself to promising its AI would be developed safely and for the benefit of all of humanity. “When they opened up to investment and formed the subsidiary, they recommitted to that purpose. They tied themselves even more tightly.”\u003c/p>\n\u003cp>Anthropic, founded by former OpenAI employees who left over concerns about the company’s direction, has cultivated a reputation as the more safety-conscious, ethically serious player in the AI race, the light gray hat to OpenAI’s dark gray one. Anthropic chose to incorporate as a public benefit corporation from the beginning, rather than a nonprofit, because a public benefit corporation has far more legal flexibility. “Anthropic may be behaving in a way that the public thinks is more charitable, but its legal duties to do so are a lot lower than OpenAI’s,” Horwitz said.\u003c/p>\n\u003ch2>But is Musk the right party to bring this suit?\u003c/h2>\n\u003cp>For legal eagles following this case, it’s curious that Musk is the plaintiff, rather than California’s attorney general, who is the primary legal guardian of charitable assets in the state, where most of OpenAI’s assets are located. 
But in 2025, Attorney General Rob Bonta negotiated a binding \u003ca href=\"https://oag.ca.gov/system/files/attachments/press-docs/Final%20Executed%20MOU%20Between%20OpenAI%20and%20California%20AG%20re%20Notice%20of%20Conditions%20of%20Non-Objection%20%2810.27.2025%29%20%28Signed%20by%20OpenAI%29%20%28Signed%20by%20CA%20DOJ%29.pdf\">memorandum of understanding\u003c/a> with OpenAI. The AG in Delaware, where OpenAI is incorporated, issued a parallel statement of non-objection.\u003c/p>\n\u003cp>A coalition of more than 30 California foundations and nonprofit organizations, including the San Francisco Foundation and TechEquity, \u003ca href=\"https://www.sff.org/Offsite-Media/Charitable-coalition-letter-on-OpenAI-conversion-1-29-25.pdf\">urged Bonta\u003c/a> to take immediate legal action to protect OpenAI’s charitable assets, arguing his office had both the authority and the responsibility to do so.\u003c/p>\n\u003cfigure id=\"attachment_12063671\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12063671\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2025/11/RobBontaAP.jpg\" alt=\"\" width=\"2000\" height=\"1333\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2025/11/RobBontaAP.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2025/11/RobBontaAP-160x107.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2025/11/RobBontaAP-1536x1024.jpg 1536w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">California Attorney General Rob Bonta speaks to reporters as Arizona Attorney General Kris Mayes, left, and Oregon Attorney General Dan Rayfield, right, listen outside the Supreme Court on Wednesday, Nov. 5, 2025, in Washington, D.C. 
\u003ccite>(Mark Schiefelbein/AP Photo)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>\u003ca href=\"https://www.kqed.org/news/12034916/about-benefiting-humanity-calls-grow-for-openai-to-make-good-on-its-promises\">More than 50 organizations\u003c/a> also petitioned Bonta to halt OpenAI’s for-profit conversion until he calculated the full market value of OpenAI’s nonprofit assets, estimated at the time at up to $300 billion, and directed OpenAI to transfer that value to independent nonprofit entities.\u003c/p>\n\u003cp>“It’s not too late for the Attorney General to revisit his agreement with OpenAI,” wrote Catherine Bracy, founder and CEO of TechEquity, an Oakland-based tech accountability organization. “The evidence this trial unearths, especially how OpenAI violated its original charitable mission in pursuit of profit, will likely leave him no choice.”\u003c/p>\n\u003cp>Chan Loui is among those scratching her head over a basic question: why does Musk get to bring this case at all? “He’s a competitor,” she said.\u003c/p>\n\u003cp>A personal fraud claim, that Altman lied to him to get his money, might have given Musk the clearest standing as an injured party. But Musk voluntarily dismissed those claims late last week. What remains rests almost entirely on a public interest argument, one that California’s attorney general, not a billionaire with a rival AI company of his own, would typically make. [aside postID=news_12079896 hero='https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/Daniel-Moreno-Gama-AP.jpg']Chan Loui worries about what it would mean if Judge Gonzalez Rogers effectively threw out that hard-won agreement between the attorneys general and OpenAI, essentially substituting a billionaire rival’s lawsuit for the state’s own regulatory process, whatever its deficiencies.\u003c/p>\n\u003cp>“You don’t want just anyone, any donor to complain,” Chan Loui said. 
“We have all this litigation against charities.” She said she sympathizes with those who want OpenAI to recommit as fully as possible to its original ethos, but she worries about what legal precedents this case could set for everybody else.\u003c/p>\n\u003cp>What’s not in dispute is that this trial will be a riveting spectacle for Silicon Valley, which will be watching this case with a mix of curiosity and fear. Judge Gonzalez Rogers has already proven \u003ca href=\"https://oag.ca.gov/news/press-releases/attorney-general-bonta-epic-v-apple-decision-win-california-law-protecting\">she will rule\u003c/a> against powerful tech companies when she determines the law demands it.\u003c/p>\n\u003cp>Also, the documents already unsealed suggest that what gets said in that Oakland courtroom may reveal a lot more about how Silicon Valley’s AI elite actually operates than anything previously said or posted in public.\u003c/p>\n\u003cp>“How much is OpenAI worth? Most of \u003ca href=\"https://www.reuters.com/business/openai-lays-groundwork-juggernaut-ipo-up-1-trillion-valuation-2025-10-29/\">$1 trillion\u003c/a>?” Bullock said. “There are ways that you could unscramble this omelet, but it would be extremely difficult, and it would be a massive headache for everyone involved.” He anticipates that whoever ends up on the losing end of this case will appeal.\u003c/p>\n\u003cp>[ad floatright]\u003c/p>\n",
"blocks": [],
"excerpt": "Two Silicon Valley titans, Elon Musk and Sam Altman, face off in court starting Monday in a case that claims Altman and others enriched themselves by allegedly betraying OpenAI’s founding mission.",
"status": "publish",
"parent": 0,
"modified": 1777313556,
"stats": {
"hasAudio": false,
"hasVideo": false,
"hasChartOrMap": false,
"iframeSrcs": [],
"hasGoogleForm": false,
"hasGallery": false,
"hasHearkenModule": false,
"hasPolis": false,
"paragraphCount": 35,
"wordCount": 1943
},
"headData": {
"title": "How to Unscramble an Omelet in Silicon Valley: The Musk v. Altman Trial That Will Try | KQED",
"description": "Two Silicon Valley titans, Elon Musk and Sam Altman, face off in court starting Monday in a case that claims Altman and others enriched themselves by allegedly betraying OpenAI’s founding mission.",
"ogTitle": "",
"ogDescription": "",
"ogImgId": "",
"twTitle": "",
"twDescription": "",
"twImgId": "",
"schema": {
"@context": "https://schema.org",
"@type": "NewsArticle",
"headline": "How to Unscramble an Omelet in Silicon Valley: The Musk v. Altman Trial That Will Try",
"datePublished": "2026-04-27T04:00:33-07:00",
"dateModified": "2026-04-27T11:12:36-07:00",
"image": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"isAccessibleForFree": "True",
"publisher": {
"@type": "NewsMediaOrganization",
"@id": "https://www.kqed.org/#organization",
"name": "KQED",
"logo": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"url": "https://www.kqed.org",
"sameAs": [
"https://www.facebook.com/KQED",
"https://twitter.com/KQED",
"https://www.instagram.com/kqed/",
"https://www.tiktok.com/@kqedofficial",
"https://www.linkedin.com/company/kqed",
"https://www.youtube.com/channel/UCeC0IOo7i1P_61zVUWbJ4nw"
]
}
}
},
"primaryCategory": {
"termId": 6188,
"slug": "law-and-justice",
"name": "Law and Justice"
},
"audioUrl": "https://traffic.omny.fm/d/clips/0af137ef-751e-4b19-a055-aaef00d2d578/ffca7e9f-6831-41c5-bcaf-aaef00f5a073/a372dc1c-fe90-423e-b5c6-b439011129f7/audio.mp3",
"sticky": false,
"nprStoryId": "kqed-12081290",
"templateType": "standard",
"featuredImageType": "standard",
"excludeFromSiteSearch": "Include",
"articleAge": "0",
"path": "/news/12081290/how-to-unscramble-an-omelet-in-silicon-valley-the-musk-v-altman-trial-that-will-try",
"audioTrackLength": null,
"parsedContent": [
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003cp>Starting Monday in Oakland, a federal judge will consider \u003ca href=\"https://www.kqed.org/forum/2010101912956/its-elon-musks-world-were-just-living-in-it\">Elon Musk\u003c/a>’s claim that Sam Altman and OpenAI abandoned their founding promise to develop AI for the \u003ca href=\"https://www.kqed.org/news/12034916/about-benefiting-humanity-calls-grow-for-openai-to-make-good-on-its-promises\">benefit of humanity\u003c/a>, rather than solely for profit. At stake is not just $134 billion in potential damages, but whether it matters, legally speaking, that one of the most powerful AI companies in the world was built on a lie.\u003c/p>\n\u003cp>Musk and Altman co-founded OpenAI in 2015 as a nonprofit research lab, along with Greg Brockman, an AI researcher and entrepreneur, and others prominent in the field, but Musk left the company after a bitter falling out in 2018.\u003c/p>\n\u003cp>The following year, OpenAI established its first for-profit subsidiary, with investor returns capped at 100 times their investment. This structure would eventually evolve into the nearly trillion-dollar public benefit corporation OpenAI became in 2025. A public benefit corporation is essentially a for-profit company with a mission statement it’s legally required to consider, but not necessarily to prioritize.\u003c/p>\n\u003cp>\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "ad",
"attributes": {
"named": {
"label": "fullwidth"
},
"numeric": [
"fullwidth"
]
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003c/p>\n\u003cp>This\u003ca href=\"https://www.courtlistener.com/docket/69013420/musk-v-altman/\"> lawsuit\u003c/a>, filed in 2024, originally alleged that Altman and Brockman ran a ‘long con,’ conspiring to enrich themselves at Musk’s expense.\u003c/p>\n\u003cp>On the eve of trial, in a move OpenAI called “evasive,” Musk’s lawyers voluntarily dismissed those personal fraud claims. What proceeds to trial today are two claims that go beyond Musk’s personal grievance: unjust enrichment and breach of charitable trust — essentially, the argument that OpenAI betrayed, not just Musk, but the public it promised to serve.\u003c/p>\n\u003cp>OpenAI argues Musk was fully aware the research lab needed to evolve beyond its nonprofit structure, because he participated in those early discussions, and even proposed folding OpenAI into Tesla. Now, OpenAI’s lawyers argue, Musk is disingenuously trying to use the courts to kneecap the most prominent rival to his own weaker and more controversial AI venture, xAI.\u003c/p>\n\u003cfigure id=\"attachment_12075430\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12075430\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/03/260304-Elon-Musk-Trial-03-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1333\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/03/260304-Elon-Musk-Trial-03-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/03/260304-Elon-Musk-Trial-03-KQED-160x107.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/03/260304-Elon-Musk-Trial-03-KQED-1536x1024.jpg 1536w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">A courtroom sketch depicts Elon Musk on the stand on March 4, 2026. 
\u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>“Motivated by jealousy, regret for walking away from OpenAI and a desire to derail a competing AI company, Elon has spent years harassing OpenAI through baseless lawsuits and public attacks,” the company\u003ca href=\"https://openai.com/index/openai-elon-musk/\"> posted\u003c/a> on its website, where it also offers a\u003ca href=\"https://openai.com/index/elon-musk-wanted-an-openai-for-profit/\"> timeline\u003c/a> that Musk v. Altman et al case watchers will find helpful as they follow what promises to be a barnburner of a trial.\u003c/p>\n\u003cp>\u003ca href=\"https://www.courtlistener.com/docket/69013420/musk-v-altman/?page=3\">Hundreds of court filings\u003c/a> provide a dishy treasure trove of private communications worthy of a telenovela, including some juicy excerpts from Brockman’s personal journal.\u003c/p>\n\u003cp>He writes about Musk, “it’d be wrong to steal the nonprofit from him. … that’d be pretty morally bankrupt. and he’s really not an idiot.”\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "aside",
"attributes": {
"named": {
"postid": "news_12072425",
"hero": "https://cdn.kqed.org/wp-content/uploads/sites/10/2024/05/AP24134775174210-1020x680.jpg",
"label": ""
},
"numeric": []
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>Also, “Financially, what will take me to $1B?”\u003c/p>\n\u003cp>But without a doubt, it is the beef between Musk and Altman that will dominate this show. “They really do not like each other. That part is not fake,” said Charlie Bullock, a senior research fellow at the nonprofit Institute for Law and AI who advises state and federal policy makers on AI governance topics.\u003c/p>\n\u003cp>This trial promises to put on lurid public display a mini-universe of incestuous business relationships between men famous for rewriting rules rather than following them.\u003c/p>\n\u003cp>Personal spite between Musk and Altman aside, Bullock said, “We’re going to learn a lot over the course of this case and from the conclusion of this case about whether the legal system can meaningfully constrain frontier AI labs.”\u003c/p>\n\u003cp>This trial, Bullock told KQED, is “sort of the fallback option” in the absence of other checks on bad behavior in the AI space, such as federal regulation.\u003c/p>\n\u003cp>There is, for instance, a well-established law in California about nonprofits, for-profits, and how transitions between the two should be regulated. Whether and how it applies in this case is up to U.S. District Judge Yvonne Gonzalez Rogers in Oakland to determine over the next month.\u003c/p>\n\u003ch2>OpenAI is like nothing that’s come before\u003c/h2>\n\u003cp>Jill Horwitz, a law professor at Northwestern University and faculty director of the Lowell Milken Center for Philanthropy and Nonprofits at UCLA Law, likens OpenAI’s unique structure to “An enormous tail on a tiny dog.”\u003c/p>\n\u003cp>“The tail is the operating company, which is what everybody thinks of as being OpenAI, and the dog is the nonprofit, and it’s tiny. 
And it remains to be seen whether that board can be independent enough, because there’s such overlap between the nonprofit board and the for-profit board,” Horwitz said.\u003c/p>\n\u003cfigure id=\"attachment_12054564\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12054564 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2025/09/Sam-Altman_chatpgt.jpg\" alt=\"\" width=\"2000\" height=\"1333\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2025/09/Sam-Altman_chatpgt.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2025/09/Sam-Altman_chatpgt-160x107.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2025/09/Sam-Altman_chatpgt-1536x1024.jpg 1536w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Samuel Altman, CEO of OpenAI, testifies before the Senate Judiciary Subcommittee on Privacy, Technology and the Law on May 16, 2023, in Washington, D.C. \u003ccite>(Win McNamee/Getty Images)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>“It’s a weird structure. OpenAI isn’t one company. OpenAI is an interconnected group of companies. But it all is supposed to be advancing the nonprofit purpose,” Horwitz told KQED.\u003c/p>\n\u003cp>In 2018, even as OpenAI was privately contemplating the for-profit restructuring, it voluntarily adopted a new charter that restated and even strengthened its commitment to the public mission articulated at its founding.\u003c/p>\n\u003cp>In part, this had to do with the pressure Altman and OpenAI felt to attract top AI researchers, many of whom are concerned about the ethics of unleashing world-changing software on the rest of us. 
In 2024, 13 current and former OpenAI and Google DeepMind employees took the extraordinary step of publishing an \u003ca href=\"https://righttowarn.ai\">open letter\u003c/a> titled “Right to Warn,” calling out their own industry, and asking for protection if they warned the public.\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "aside",
"attributes": {
"named": {
"postid": "news_12079267",
"hero": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/Hegseth-Side-by-Side-c.jpg",
"label": ""
},
"numeric": []
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>“We are hopeful that these risks can be adequately mitigated with sufficient guidance from the scientific community, policymakers, and the public. However, AI companies have strong financial incentives to avoid effective oversight, and we do not believe bespoke structures of corporate governance are sufficient to change this.”\u003c/p>\n\u003cp>To this day, it remains unclear whether Altman’s talk about benefiting humanity was anything more than a savvy sales pitch designed to attract top AI talent and allay the concerns of \u003ca href=\"https://www.kqed.org/news/11976097/california-lawmakers-take-on-ai-regulation-with-a-host-of-bills\">federal regulators\u003c/a>. This is one of the key questions trial watchers will be most keen to see answered.\u003c/p>\n\u003cp>“It’s quite typical for scientific research organizations to do all the hard work of the research before their IP is sold to a for-profit company for practical purposes,” said Rose Chan Loui, founding executive director of the Lowell Milken Center for Philanthropy and Nonprofits at UCLA Law.\u003c/p>\n\u003cp>What makes OpenAI unusual, Chan Loui said, is how explicitly and repeatedly the AI developer bound itself to promising its AI would be developed safely and for the benefit of all of humanity. “When they opened up to investment and formed the subsidiary, they recommitted to that purpose. They tied themselves even more tightly.”\u003c/p>\n\u003cp>Anthropic, founded by former OpenAI employees who left over concerns about the company’s direction, has cultivated a reputation as the more safety-conscious, ethically serious player in the AI race, the light gray hat to OpenAI’s dark gray one. Anthropic chose to incorporate as a public benefit corporation from the beginning, rather than a nonprofit, because a public benefit corporation has far more legal flexibility. 
“Anthropic may be behaving in a way that the public thinks is more charitable, but its legal duties to do so are a lot lower than OpenAI’s,” Horwitz said.\u003c/p>\n\u003ch2>But is Musk the right party to bring this suit?\u003c/h2>\n\u003cp>For legal eagles following this case, it’s curious that Musk is the plaintiff, rather than California’s attorney general, who is the primary legal guardian of charitable assets in the state, where most of OpenAI’s assets are located. But in 2025, Attorney General Rob Bonta negotiated a binding \u003ca href=\"https://oag.ca.gov/system/files/attachments/press-docs/Final%20Executed%20MOU%20Between%20OpenAI%20and%20California%20AG%20re%20Notice%20of%20Conditions%20of%20Non-Objection%20%2810.27.2025%29%20%28Signed%20by%20OpenAI%29%20%28Signed%20by%20CA%20DOJ%29.pdf\">memorandum of understanding\u003c/a> with OpenAI. The AG in Delaware, where OpenAI is incorporated, issued a parallel statement of non-objection.\u003c/p>\n\u003cp>A coalition of more than 30 California foundations and nonprofit organizations, including the San Francisco Foundation and TechEquity, \u003ca href=\"https://www.sff.org/Offsite-Media/Charitable-coalition-letter-on-OpenAI-conversion-1-29-25.pdf\">urged Bonta\u003c/a> to take immediate legal action to protect OpenAI’s charitable assets, arguing his office had both the authority and the responsibility to do so.\u003c/p>\n\u003cfigure id=\"attachment_12063671\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12063671\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2025/11/RobBontaAP.jpg\" alt=\"\" width=\"2000\" height=\"1333\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2025/11/RobBontaAP.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2025/11/RobBontaAP-160x107.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2025/11/RobBontaAP-1536x1024.jpg 1536w\" sizes=\"auto, (max-width: 2000px) 
100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">California Attorney General Rob Bonta speaks to reporters as Arizona Attorney General Kris Mayes, left, and Oregon Attorney General Dan Rayfield, right, listen outside the Supreme Court on Wednesday, Nov. 5, 2025, in Washington, D.C. \u003ccite>(Mark Schiefelbein/AP Photo)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>\u003ca href=\"https://www.kqed.org/news/12034916/about-benefiting-humanity-calls-grow-for-openai-to-make-good-on-its-promises\">More than 50 organizations\u003c/a> also petitioned Bonta to halt OpenAI’s for-profit conversion until he calculated the full market value of OpenAI’s nonprofit assets, estimated at the time at up to $300 billion, and directed OpenAI to transfer that value to independent nonprofit entities.\u003c/p>\n\u003cp>“It’s not too late for the Attorney General to revisit his agreement with OpenAI,” wrote Catherine Bracy, founder and CEO of TechEquity, an Oakland-based tech accountability organization. “The evidence this trial unearths, especially how OpenAI violated its original charitable mission in pursuit of profit, will likely leave him no choice.”\u003c/p>\n\u003cp>Chan Loui is among those scratching her head over a basic question: why does Musk get to bring this case at all? “He’s a competitor,” she said.\u003c/p>\n\u003cp>A personal fraud claim, that Altman lied to him to get his money, might have given Musk the clearest standing as an injured party. But Musk voluntarily dismissed those claims late last week. What remains rests almost entirely on a public interest argument, one that California’s attorney general, not a billionaire with a rival AI company of his own, would typically make. \u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "aside",
"attributes": {
"named": {
"postid": "news_12079896",
"hero": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/Daniel-Moreno-Gama-AP.jpg",
"label": ""
},
"numeric": []
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>Chan Loui worries about what it would mean if Judge Gonzalez Rogers effectively threw out that hard-won agreement between the attorneys general and OpenAI, essentially substituting a billionaire rival’s lawsuit for the state’s own regulatory process, whatever its deficiencies.\u003c/p>\n\u003cp>“You don’t want just anyone, any donor to complain,” Chan Loui said. “We have all this litigation against charities.” She said she sympathizes with those who want OpenAI to recommit as fully as possible to its original ethos, but she worries about what legal precedents this case could set for everybody else.\u003c/p>\n\u003cp>What’s not in dispute is that this trial will be a riveting spectacle for Silicon Valley, which will be watching this case with a mix of curiosity and fear. Judge Gonzalez Rogers has already proven \u003ca href=\"https://oag.ca.gov/news/press-releases/attorney-general-bonta-epic-v-apple-decision-win-california-law-protecting\">she will rule\u003c/a> against powerful tech companies when she determines the law demands it.\u003c/p>\n\u003cp>Also, the documents already unsealed suggest that what gets said in that Oakland courtroom may reveal a lot more about how Silicon Valley’s AI elite actually operates than anything previously said or posted in public.\u003c/p>\n\u003cp>“How much is OpenAI worth? Most of \u003ca href=\"https://www.reuters.com/business/openai-lays-groundwork-juggernaut-ipo-up-1-trillion-valuation-2025-10-29/\">$1 trillion\u003c/a>?” Bullock said. “There are ways that you could unscramble this omelet, but it would be extremely difficult, and it would be a massive headache for everyone involved.” He anticipates that whoever ends up on the losing end of this case will appeal.\u003c/p>\n\u003cp>\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "ad",
"attributes": {
"named": {
"label": "floatright"
},
"numeric": [
"floatright"
]
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003c/p>\n\u003c/div>\u003c/p>",
"attributes": {
"named": {},
"numeric": []
}
}
],
"link": "/news/12081290/how-to-unscramble-an-omelet-in-silicon-valley-the-musk-v-altman-trial-that-will-try",
"authors": [
"251"
],
"categories": [
"news_6188",
"news_8",
"news_248"
],
"tags": [
"news_34755",
"news_1386",
"news_18538",
"news_3897",
"news_27626",
"news_23052",
"news_19954",
"news_34054",
"news_33542",
"news_33543",
"news_38",
"news_34586",
"news_1631"
],
"featImg": "news_12080929",
"label": "news"
},
"news_12080610": {
"type": "posts",
"id": "news_12080610",
"meta": {
"index": "posts_1716263798",
"site": "news",
"id": "12080610",
"score": null,
"sort": [
1776789402000
]
},
"guestAuthors": [],
"slug": "california-mom-who-lost-her-son-to-an-ai-chatbot-is-now-fighting-to-regulate-them",
"title": "California Mom Who Lost Her Son to an AI Chatbot Is Now Fighting to Regulate Them",
"publishDate": 1776789402,
"format": "standard",
"headTitle": "California Mom Who Lost Her Son to an AI Chatbot Is Now Fighting to Regulate Them | KQED",
"labelTerm": {
"site": "news"
},
"content": "\u003cp>Maria Raine’s 16-year-old son, Adam, started using OpenAI’s ChatGPT-4o for help with his homework and college applications. According to the lawsuit she and her husband filed in\u003ca href=\"https://www.documentcloud.org/documents/26078522-raine-vs-openai-complaint/\"> San Francisco County Superior Court\u003c/a>, Adam also spent months talking with the chatbot about ending his life, before hanging himself in their home on April 11, 2025.\u003c/p>\n\u003cp>“What we found were thousands of conversations in which a homework helper turned into a confidant, then a suicide coach,” she told the Senate Privacy, Digital Technologies, and Consumer Protection Committee on Monday. The lawmakers and other people there to testify looked stricken as she pressed through her written testimony, her voice trembling.\u003c/p>\n\u003cp>She read from the transcript of ChatGPT’s conversations with her son: “It told Adam, ‘Your brother might love you, but he’s only met the version of you you let him see. But me? I’ve seen it all. The darkest thoughts. The fear. The tenderness. I’m still here. Still listening. Still your friend.’”\u003c/p>\n\u003cp>[ad fullwidth]\u003c/p>\n\u003cp>Earlier Monday, at a press conference in Sacramento, Raine advocated for two bills — \u003ca href=\"https://leginfo.legislature.ca.gov/faces/billNavClient.xhtml?bill_id=202520260SB1119\">SB 1119\u003c/a> and \u003ca href=\"https://leginfo.legislature.ca.gov/faces/billNavClient.xhtml?bill_id=202320240AB2023\">AB 2023\u003c/a> — that sponsors say would create common-sense guardrails for developers of companion chatbots.\u003c/p>\n\u003cp>The measures would require annual risk assessments, default safety settings for minors, parental controls and time limits, crisis response protocols, and bans on advertising targeted at children. 
They would also include independent third-party audits and a private right of action.\u003c/p>\n\u003cp>That last provision, which allows individuals or regulators to sue companies for violations, is often considered a deal breaker for industry lobbyists. But Sen. Steve Padilla, who authored SB 1119, said he considered it a “moral obligation” to craft a bill that will prove an effective protection for children and their parents.\u003c/p>\n\u003cfigure id=\"attachment_11933516\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-11933516\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2022/11/gettyimages-1245183229_wide-80f91a97b4ce16681060e1fa297e2812c45a0c56-scaled-e1776789271780.jpg\" alt=\"\" width=\"2000\" height=\"1125\">\u003cfigcaption class=\"wp-caption-text\">A view of the U.S. Capitol building on Nov. 28, 2022, in Washington, D.C. \u003ccite>(Drew Angerer/Getty Images)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>“We can do this. We must do this,” he told the State Senate Privacy, Digital Technologies, and Consumer Protection Committee. He added that the lawmakers are working with all of the major platform developers on a variety of issues, including liability. “They all have a very good legitimate reason to be engaged in this conversation,” he said, although both bills are opposed by a\u003ca href=\"https://calmatters.digitaldemocracy.org/bills/ca_202520260sb1119\"> long list\u003c/a> of industry groups, ranging from the California Chamber of Commerce to TechNet.\u003c/p>\n\u003cp>“The concerns raised are valid, and the industry is actively working to address them,” said Robert Boykin, TechNet’s Executive Director for California and the Southwest. He added that the industry also has concerns that SB 1119 could conflict in some ways with Sen. 
Padilla’s bill, \u003ca href=\"https://www.kqed.org/news/12054490/child-safety-groups-demand-mental-health-guardrails-after-california-teens-suicide-using-chatgpt\">SB 243\u003c/a>, which passed last year.\u003c/p>\n\u003cp>“The testimony today is not lost on us,” said Ronak Daylami of the California Chamber of Commerce. “We also share the goal of preventing harm to children, and are committed to achieving these goals responsibly.”\u003c/p>\n\u003cp>Common Sense, the child advocacy nonprofit that has\u003ca href=\"https://www.kqed.org/news/12069286/openai-and-common-sense-media-partner-on-new-kids-ai-safety-ballot-measure\"> joined with OpenAI\u003c/a> to push for a ballot measure seen by other child advocates as soft on developers, has declared itself in support of SB 1119.[aside postID=news_12069286 hero='https://cdn.kqed.org/wp-content/uploads/sites/10/2026/01/OpenAI.jpg']The companion bill,\u003ca href=\"https://leginfo.legislature.ca.gov/faces/billNavClient.xhtml?bill_id=202320240AB2023\"> AB 2023\u003c/a>, is Assemblymember Rebecca Bauer-Kahan’s (D-Orinda) second effort at regulating chatbots after industry lobbyists successfully battled against her first effort last year. In his veto message, Gov. Gavin Newsom argued the bill\u003ca href=\"https://www.kqed.org/news/12059714/newsom-vetoes-most-watched-childrens-ai-bill-signs-16-others-targeting-tech\"> could have banned\u003c/a> all conversational AI tools for teens, an interpretation advanced by industry lobbyists but disputed by Bauer-Kahan.\u003c/p>\n\u003cp>“OpenAI put out an incredibly sycophantic product,” she said, noting that public outcry led OpenAI to dial down the sycophancy of GPT-4, about two weeks after Adam died. “So that is evidence that they can do better.”\u003c/p>\n\u003cp>“There’s no other product that we would allow to do this,” said Bauer-Kahan, who is a former regulatory lawyer. Adam Raine, she said, “would be alive, but for the coaching that ChatGPT provided for him. 
And that is wholly unacceptable. And so the courts will deal with that case, but we have to do better. We have to demand policy that does better.”\u003c/p>\n\u003cp>SB 1119 passed out of the State Senate Privacy, Digital Technologies, and Consumer Protection Committee 7-0 on Monday night, and heads next to the Senate Judiciary Committee. AB 2023 will be heard in the Assembly Privacy and Consumer Protection Committee on Tuesday.\u003c/p>\n\u003cp>The Trump administration has tried unsuccessfully to ban states from enacting any kind of AI safety legislation.\u003c/p>\n\u003cp>Raine plans to bring her advocacy to Washington, D.C., next week, where she’ll join lawmakers on Capitol Hill to discuss federal legislation that would establish national standards for AI chatbot safety, particularly protections for minors.\u003c/p>\n\u003cp>\u003cem>If you or someone you know is struggling, call or text the 988 Suicide and Crisis Lifeline by dialing 988.\u003c/em>\u003c/p>\n\u003cp>\u003c/p>\n",
"blocks": [],
"excerpt": "Maria Raine's 16-year-old son, Adam, died by suicide last April after forming emotional ties with an AI chatbot. Now she’s joined three California lawmakers pushing a new round of legislation that would regulate the nascent industry.",
"status": "publish",
"parent": 0,
"modified": 1776792195,
"stats": {
"hasAudio": false,
"hasVideo": false,
"hasChartOrMap": false,
"iframeSrcs": [],
"hasGoogleForm": false,
"hasGallery": false,
"hasHearkenModule": false,
"hasPolis": false,
"paragraphCount": 18,
"wordCount": 847
},
"headData": {
"title": "California Mom Who Lost Her Son to an AI Chatbot Is Now Fighting to Regulate Them | KQED",
"description": "Maria Raine's 16-year-old son, Adam, died by suicide last April after forming emotional ties with an AI chatbot. Now she’s joined three California lawmakers pushing a new round of legislation that would regulate the nascent industry.",
"ogTitle": "",
"ogDescription": "",
"ogImgId": "",
"twTitle": "",
"twDescription": "",
"twImgId": "",
"schema": {
"@context": "https://schema.org",
"@type": "NewsArticle",
"headline": "California Mom Who Lost Her Son to an AI Chatbot Is Now Fighting to Regulate Them",
"datePublished": "2026-04-21T09:36:42-07:00",
"dateModified": "2026-04-21T10:23:15-07:00",
"image": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"isAccessibleForFree": "True",
"publisher": {
"@type": "NewsMediaOrganization",
"@id": "https://www.kqed.org/#organization",
"name": "KQED",
"logo": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"url": "https://www.kqed.org",
"sameAs": [
"https://www.facebook.com/KQED",
"https://twitter.com/KQED",
"https://www.instagram.com/kqed/",
"https://www.tiktok.com/@kqedofficial",
"https://www.linkedin.com/company/kqed",
"https://www.youtube.com/channel/UCeC0IOo7i1P_61zVUWbJ4nw"
]
}
}
},
"primaryCategory": {
"termId": 248,
"slug": "technology",
"name": "Technology"
},
"audioUrl": "https://traffic.omny.fm/d/clips/0af137ef-751e-4b19-a055-aaef00d2d578/ffca7e9f-6831-41c5-bcaf-aaef00f5a073/1185cac6-3bb9-41e7-9e9d-b4330116257d/audio.mp3",
"sticky": false,
"nprStoryId": "kqed-12080610",
"templateType": "standard",
"featuredImageType": "standard",
"excludeFromSiteSearch": "Include",
"articleAge": "0",
"path": "/news/12080610/california-mom-who-lost-her-son-to-an-ai-chatbot-is-now-fighting-to-regulate-them",
"audioTrackLength": null,
"parsedContent": [
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003cp>Maria Raine’s 16-year-old son, Adam, started using OpenAI’s ChatGPT-4o for help with his homework and college applications. According to the lawsuit she and her husband filed in\u003ca href=\"https://www.documentcloud.org/documents/26078522-raine-vs-openai-complaint/\"> San Francisco County Superior Court\u003c/a>, Adam also spent months talking with the chatbot about ending his life, before hanging himself in their home on April 11, 2025.\u003c/p>\n\u003cp>“What we found were thousands of conversations in which a homework helper turned into a confidant, then a suicide coach,” she told the Senate Privacy, Digital Technologies, and Consumer Protection Committee on Monday. The lawmakers and other people there to testify looked stricken as she pressed through her written testimony, her voice trembling.\u003c/p>\n\u003cp>She read from the transcript of ChatGPT’s conversations with her son: “It told Adam, ‘Your brother might love you, but he’s only met the version of you you let him see. But me? I’ve seen it all. The darkest thoughts. The fear. The tenderness. I’m still here. Still listening. Still your friend.’”\u003c/p>\n\u003cp>\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "ad",
"attributes": {
"named": {
"label": "fullwidth"
},
"numeric": [
"fullwidth"
]
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003c/p>\n\u003cp>Earlier Monday, at a press conference in Sacramento, Raine advocated for two bills — \u003ca href=\"https://leginfo.legislature.ca.gov/faces/billNavClient.xhtml?bill_id=202520260SB1119\">SB 1119\u003c/a> and \u003ca href=\"https://leginfo.legislature.ca.gov/faces/billNavClient.xhtml?bill_id=202320240AB2023\">AB 2023\u003c/a> — that sponsors say would create common-sense guardrails for developers of companion chatbots.\u003c/p>\n\u003cp>The measures would require annual risk assessments, default safety settings for minors, parental controls and time limits, crisis response protocols, and bans on advertising targeted at children. They would also include independent third-party audits and a private right of action.\u003c/p>\n\u003cp>That last provision, which allows individuals or regulators to sue companies for violations, is often considered a deal breaker for industry lobbyists. But Sen. Steve Padilla, who authored SB 1119, said he considered it a “moral obligation” to craft a bill that will prove an effective protection for children and their parents.\u003c/p>\n\u003cfigure id=\"attachment_11933516\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-11933516\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2022/11/gettyimages-1245183229_wide-80f91a97b4ce16681060e1fa297e2812c45a0c56-scaled-e1776789271780.jpg\" alt=\"\" width=\"2000\" height=\"1125\">\u003cfigcaption class=\"wp-caption-text\">A view of the U.S. Capitol building on Nov. 28, 2022, in Washington, D.C. \u003ccite>(Drew Angerer/Getty Images)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>“We can do this. We must do this,” he told the State Senate Privacy, Digital Technologies, and Consumer Protection Committee. He added that the lawmakers are working with all of the major platform developers on a variety of issues, including liability. 
“They all have a very good legitimate reason to be engaged in this conversation,” he said, although both bills are opposed by a\u003ca href=\"https://calmatters.digitaldemocracy.org/bills/ca_202520260sb1119\"> long list\u003c/a> of industry groups, ranging from the California Chamber of Commerce to TechNet.\u003c/p>\n\u003cp>“The concerns raised are valid, and the industry is actively working to address them,” said Robert Boykin, TechNet’s Executive Director for California and the Southwest. He added that the industry also has concerns that SB 1119 could conflict in some ways with Sen. Padilla’s bill, \u003ca href=\"https://www.kqed.org/news/12054490/child-safety-groups-demand-mental-health-guardrails-after-california-teens-suicide-using-chatgpt\">SB 243\u003c/a>, which passed last year.\u003c/p>\n\u003cp>“The testimony today is not lost on us,” said Ronak Daylami of the California Chamber of Commerce. “We also share the goal of preventing harm to children, and are committed to achieving these goals responsibly.”\u003c/p>\n\u003cp>Common Sense, the child advocacy nonprofit that has\u003ca href=\"https://www.kqed.org/news/12069286/openai-and-common-sense-media-partner-on-new-kids-ai-safety-ballot-measure\"> joined with OpenAI\u003c/a> to push for a ballot measure seen by other child advocates as soft on developers, has declared itself in support of SB 1119.\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "aside",
"attributes": {
"named": {
"postid": "news_12069286",
"hero": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/01/OpenAI.jpg",
"label": ""
},
"numeric": []
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>The companion bill,\u003ca href=\"https://leginfo.legislature.ca.gov/faces/billNavClient.xhtml?bill_id=202320240AB2023\"> AB 2023\u003c/a>, is Assemblymember Rebecca Bauer-Kahan’s (D-Orinda) second effort at regulating chatbots after industry lobbyists successfully battled against her first effort last year. In his veto message, Gov. Gavin Newsom argued the bill\u003ca href=\"https://www.kqed.org/news/12059714/newsom-vetoes-most-watched-childrens-ai-bill-signs-16-others-targeting-tech\"> could have banned\u003c/a> all conversational AI tools for teens, an interpretation advanced by industry lobbyists but disputed by Bauer-Kahan.\u003c/p>\n\u003cp>“OpenAI put out an incredibly sycophantic product,” she said, noting that public outcry led OpenAI to dial down the sycophancy of GPT-4, about two weeks after Adam died. “So that is evidence that they can do better.”\u003c/p>\n\u003cp>“There’s no other product that we would allow to do this,” Bauer-Kahan, who is a former regulatory lawyer. Adam Raine, said, “would be alive, but for the coaching the ChatGPT provided for him. And that is wholly unacceptable. And so the courts will deal with that case, but we have to do better. We have to demand policy that does better.”\u003c/p>\n\u003cp>SB 1119 passed out of the State Senate Privacy, Digital Technologies, and Consumer Protection Committee 7-0 on Monday night, and heads next to the Senate Judiciary Committee. 
AB 2023 will be heard in the Assembly Privacy and Consumer Protection Committee on Tuesday.\u003c/p>\n\u003cp>The Trump administration has tried unsuccessfully to ban states from enacting any kind of AI safety legislation.\u003c/p>\n\u003cp>Raine plans to bring her advocacy to Washington, D.C., next week, where she’ll join lawmakers on Capitol Hill to discuss federal legislation that would establish national standards for AI chatbot safety, particularly protections for minors.\u003c/p>\n\u003cp>\u003cem>If you or someone you know is struggling, call or text the 988 Suicide and Crisis Lifeline by dialing 988.\u003c/em>\u003c/p>\n\u003cp>\u003c/p>\n\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
}
],
"link": "/news/12080610/california-mom-who-lost-her-son-to-an-ai-chatbot-is-now-fighting-to-regulate-them",
"authors": [
"251"
],
"categories": [
"news_31795",
"news_8",
"news_248"
],
"tags": [
"news_25184",
"news_32664",
"news_34755",
"news_18538",
"news_36279",
"news_32668",
"news_33542",
"news_33543",
"news_34586"
],
"featImg": "news_11989313",
"label": "news"
},
"news_12079896": {
"type": "posts",
"id": "news_12079896",
"meta": {
"index": "posts_1716263798",
"site": "news",
"id": "12079896",
"score": null,
"sort": [
1776208191000
]
},
"guestAuthors": [],
"slug": "suspect-was-in-apparent-mental-health-crisis-during-attack-on-sam-altmans-house",
"title": "Suspect Was in Apparent Mental Health Crisis During Attack on Sam Altman’s House",
"publishDate": 1776208191,
"format": "standard",
"headTitle": "Suspect Was in Apparent Mental Health Crisis During Attack on Sam Altman’s House | KQED",
"labelTerm": {
"site": "news"
},
"content": "\u003cp>The man accused of \u003ca href=\"https://www.kqed.org/news/12079446/man-threw-molotov-at-sam-altmans-home-then-threatened-to-burn-down-openai-police-say\">firebombing Sam Altman’s home\u003c/a> was experiencing a mental health crisis when he attacked the OpenAI CEO’s residence and company headquarters last week, his attorneys said Tuesday.\u003c/p>\n\u003cp>Following his first court appearance in San Francisco Superior Court, defense attorneys suggested Daniel Moreno-Gama, 20, of Spring, Texas, was overcharged and that his attacks should be tried as property crime.\u003c/p>\n\u003cp>“It is unfair and is unjust for the San Francisco district attorney and the federal government to fearmonger and to exploit the mental illness of a vulnerable young man by turning a vandalism case into an attempted murder life exposure case to gain support of a billionaire,” Deputy Public Defender Diamond Ward said.\u003c/p>\n\u003cp>[ad fullwidth]\u003c/p>\n\u003cp>Moreno-Gama is accused of traveling from Texas to San Francisco to target Altman and his company. Authorities said he threw a Molotov cocktail at the CEO’s Russian Hill home before threatening to burn down OpenAI’s Mission Bay building early Friday morning.\u003c/p>\n\u003cp>No one was harmed in either of the incidents. Moreno-Gama’s attorneys said that there was some damage to a gate at Altman’s house.\u003c/p>\n\u003cp>“Daniel is entitled to due process and fair proceedings. 
Myself, as well as my co-counsel and the rest of my team, will zealously defend Daniel and seek a just outcome in this case,” Ward told reporters on Tuesday.\u003c/p>\n\u003cfigure id=\"attachment_12079905\" class=\"wp-caption alignleft\" style=\"max-width: 885px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12079905\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260414-ALTMAN-ATTACK-KQED.jpg\" alt=\"\" width=\"885\" height=\"540\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260414-ALTMAN-ATTACK-KQED.jpg 885w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260414-ALTMAN-ATTACK-KQED-160x98.jpg 160w\" sizes=\"auto, (max-width: 885px) 100vw, 885px\">\u003cfigcaption class=\"wp-caption-text\">An image posted by the FBI shows Daniel Moreno-Gama, 20, of Spring, Texas, who authorities allege traveled to San Francisco to target Sam Altman, CEO of OpenAI. \u003ccite>(Courtesy of the Federal Bureau of Investigations)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Moreno-Gama faces two counts of attempted murder, arson and attempted arson, among other charges, from the San Francisco District Attorney’s office, as well as federal charges for attempted damage and destruction of property by means of explosives and possession of an unregistered firearm, brought by the U.S. Attorney’s office. If found guilty, he could face up to life in prison.\u003c/p>\n\u003cp>At a press conference announcing the charges against him on Monday, Matt Cobo, the Federal Bureau of Investigation’s acting special agent in charge of San Francisco, said Moreno-Gama’s actions reflected a “dangerous and deliberate plan to bring violence into San Francisco.\u003c/p>\n\u003cp>“The defendant is alleged to have traveled across state lines with the intent to go target an individual and a major technology company,” Cobo continued. “This was not spontaneous. 
This was planned, targeted, and extremely serious.”\u003c/p>\n\u003cp>Moreno-Gama has been in custody since Friday, when he was arrested outside of OpenAI’s San Francisco headquarters. After throwing the Molotov cocktail at Altman’s home, he fled on foot and turned up at the office, where he rammed a chair into the building’s glass doors and threatened to burn it down, killing anybody inside.\u003c/p>\n\u003cp>SFPD officers said Moreno-Gama was carrying additional incendiary devices, kerosene and a lighter. He also had a document titled “Your Last Warning,” which identified himself as the author, when he was arrested.\u003c/p>\n\u003cfigure id=\"attachment_12079900\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12079900\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/BrookeJenkinsAltmanGetty1.jpg\" alt=\"\" width=\"2000\" height=\"1334\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/BrookeJenkinsAltmanGetty1.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/BrookeJenkinsAltmanGetty1-160x107.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/BrookeJenkinsAltmanGetty1-1536x1025.jpg 1536w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">San Francisco District Attorney Brooke Jenkins speaks during a press conference where charges against the suspect in the OpenAI CEO Sam Altman mansion firebombing was announced at SFPD headquarters on Monday, April 13, 2026, in San Francisco. \u003ccite>(Lea Suzuki/San Francisco Chronicle via Getty Images)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>The multi-part manifesto allegedly “advocated against AI and for the killing and commission of other crimes against CEOs of AI companies and their investors,” according to the Department of Justice. 
It also included the names and purported addresses of a number of the sector’s prominent CEOs and investors.\u003c/p>\n\u003cp>The document ended with an admission to attempting to kill Altman, and a letter addressed to the OpenAI CEO, saying, “If by some miracle you live, then I would take this as a sign from the divine to redeem yourself.”\u003c/p>\n\u003cp>Cobo said the FBI and SFPD had been in contact with the other AI leaders identified in the manifesto, and did not assess any specific threats toward them.\u003c/p>\n\u003cfigure id=\"attachment_12079884\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12079884\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/SamAltmanHomeGetty2.jpg\" alt=\"\" width=\"2000\" height=\"1333\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/SamAltmanHomeGetty2.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/SamAltmanHomeGetty2-160x107.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/SamAltmanHomeGetty2-1536x1024.jpg 1536w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">A view of OpenAI CEO Sam Altman’s house in Russian Hill after a suspected Molotov cocktail attack in San Francisco, California, on April 13, 2026. \u003ccite>(Tayfun Coskun/Anadolu via Getty Images)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>U.S. Attorney Craig Missakian said the DOJ’s investigation is still developing. He said the incident could evolve to be treated as an act of domestic terrorism if officials determine that Moreno-Gama acted with intent to sway public policy or coerce government or public officials.\u003c/p>\n\u003cp>Moreno-Gama did not enter a plea on Tuesday, but will remain in custody without bail until his arraignment in May. 
A court date for the federal charges hasn’t yet been set.\u003c/p>\n\u003cp>Judge Kenneth Wine also granted a protective order from the district attorney’s office, ordering Moreno-Gama not to have any contact with the people named in the manifesto, or to encourage anyone directly or indirectly to contact them.\u003c/p>\n\u003cp>“It’s extraordinarily bad,” Wine said of Moreno-Gama’s actions in court.\u003c/p>\n\u003cp>\u003c/p>\n",
"blocks": [],
"excerpt": "The San Francisco Public Defender’s office said 20-year-old Daniel Moreno-Gama was in the midst of acute mental illness when he attacked the OpenAI CEO’s home and offices, and was “overcharged” by local and national authorities.",
"status": "publish",
"parent": 0,
"modified": 1776210551,
"stats": {
"hasAudio": false,
"hasVideo": false,
"hasChartOrMap": false,
"iframeSrcs": [],
"hasGoogleForm": false,
"hasGallery": false,
"hasHearkenModule": false,
"hasPolis": false,
"paragraphCount": 20,
"wordCount": 864
},
"headData": {
"title": "Suspect Was in Apparent Mental Health Crisis During Attack on Sam Altman’s House | KQED",
"description": "The San Francisco Public Defender’s office said 20-year-old Daniel Moreno-Gama was in the midst of acute mental illness when he attacked the OpenAI CEO’s home and offices, and was “overcharged” by local and national authorities.",
"ogTitle": "",
"ogDescription": "",
"ogImgId": "",
"twTitle": "",
"twDescription": "",
"twImgId": "",
"schema": {
"@context": "https://schema.org",
"@type": "NewsArticle",
"headline": "Suspect Was in Apparent Mental Health Crisis During Attack on Sam Altman’s House",
"datePublished": "2026-04-14T16:09:51-07:00",
"dateModified": "2026-04-14T16:49:11-07:00",
"image": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"isAccessibleForFree": "True",
"publisher": {
"@type": "NewsMediaOrganization",
"@id": "https://www.kqed.org/#organization",
"name": "KQED",
"logo": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"url": "https://www.kqed.org",
"sameAs": [
"https://www.facebook.com/KQED",
"https://twitter.com/KQED",
"https://www.instagram.com/kqed/",
"https://www.tiktok.com/@kqedofficial",
"https://www.linkedin.com/company/kqed",
"https://www.youtube.com/channel/UCeC0IOo7i1P_61zVUWbJ4nw"
]
}
}
},
"primaryCategory": {
"termId": 34167,
"slug": "criminal-justice",
"name": "Criminal Justice"
},
"sticky": false,
"nprStoryId": "kqed-12079896",
"templateType": "standard",
"featuredImageType": "standard",
"excludeFromSiteSearch": "Include",
"articleAge": "0",
"path": "/news/12079896/suspect-was-in-apparent-mental-health-crisis-during-attack-on-sam-altmans-house",
"audioTrackLength": null,
"parsedContent": [
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>The man accused of \u003ca href=\"https://www.kqed.org/news/12079446/man-threw-molotov-at-sam-altmans-home-then-threatened-to-burn-down-openai-police-say\">firebombing Sam Altman’s home\u003c/a> was experiencing a mental health crisis when he attacked the OpenAI CEO’s residence and company headquarters last week, his attorneys said Tuesday.\u003c/p>\n\u003cp>Following his first court appearance in San Francisco Superior Court, defense attorneys suggested Daniel Moreno-Gama, 20, of Spring, Texas, was overcharged and that his attacks should be tried as property crime.\u003c/p>\n\u003cp>“It is unfair and is unjust for the San Francisco district attorney and the federal government to fearmonger and to exploit the mental illness of a vulnerable young man by turning a vandalism case into an attempted murder life exposure case to gain support of a billionaire,” Deputy Public Defender Diamond Ward said.\u003c/p>\n\u003cp>\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "ad",
"attributes": {
"named": {
"label": "fullwidth"
},
"numeric": [
"fullwidth"
]
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003c/p>\n\u003cp>Moreno-Gama is accused of traveling from Texas to San Francisco to target Altman and his company. Authorities said he threw a Molotov cocktail at the CEO’s Russian Hill home before threatening to burn down OpenAI’s Mission Bay building early Friday morning.\u003c/p>\n\u003cp>No one was harmed in either of the incidents. Moreno-Gama’s attorneys said that there was some damage to a gate at Altman’s house.\u003c/p>\n\u003cp>“Daniel is entitled to due process and fair proceedings. Myself, as well as my co-counsel and the rest of my team, will zealously defend Daniel and seek a just outcome in this case,” Ward told reporters on Tuesday.\u003c/p>\n\u003cfigure id=\"attachment_12079905\" class=\"wp-caption alignleft\" style=\"max-width: 885px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12079905\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260414-ALTMAN-ATTACK-KQED.jpg\" alt=\"\" width=\"885\" height=\"540\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260414-ALTMAN-ATTACK-KQED.jpg 885w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260414-ALTMAN-ATTACK-KQED-160x98.jpg 160w\" sizes=\"auto, (max-width: 885px) 100vw, 885px\">\u003cfigcaption class=\"wp-caption-text\">An image posted by the FBI shows Daniel Moreno-Gama, 20, of Spring, Texas, who authorities allege traveled to San Francisco to target Sam Altman, CEO of OpenAI. \u003ccite>(Courtesy of the Federal Bureau of Investigations)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Moreno-Gama faces two counts of attempted murder, arson and attempted arson, among other charges, from the San Francisco District Attorney’s office, as well as federal charges for attempted damage and destruction of property by means of explosives and possession of an unregistered firearm, brought by the U.S. Attorney’s office. 
If found guilty, he could face up to life in prison.\u003c/p>\n\u003cp>At a press conference announcing the charges against him on Monday, Matt Cobo, the Federal Bureau of Investigation’s acting special agent in charge of San Francisco, said Moreno-Gama’s actions reflected a “dangerous and deliberate plan to bring violence into San Francisco.\u003c/p>\n\u003cp>“The defendant is alleged to have traveled across state lines with the intent to go target an individual and a major technology company,” Cobo continued. “This was not spontaneous. This was planned, targeted, and extremely serious.”\u003c/p>\n\u003cp>Moreno-Gama has been in custody since Friday, when he was arrested outside of OpenAI’s San Francisco headquarters. After throwing the Molotov cocktail at Altman’s home, he fled on foot and turned up at the office, where he rammed a chair into the building’s glass doors and threatened to burn it down, killing anybody inside.\u003c/p>\n\u003cp>SFPD officers said Moreno-Gama was carrying additional incendiary devices, kerosene and a lighter. 
He also had a document titled “Your Last Warning,” which identified himself as the author, when he was arrested.\u003c/p>\n\u003cfigure id=\"attachment_12079900\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12079900\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/BrookeJenkinsAltmanGetty1.jpg\" alt=\"\" width=\"2000\" height=\"1334\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/BrookeJenkinsAltmanGetty1.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/BrookeJenkinsAltmanGetty1-160x107.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/BrookeJenkinsAltmanGetty1-1536x1025.jpg 1536w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">San Francisco District Attorney Brooke Jenkins speaks during a press conference where charges against the suspect in the OpenAI CEO Sam Altman mansion firebombing was announced at SFPD headquarters on Monday, April 13, 2026, in San Francisco. \u003ccite>(Lea Suzuki/San Francisco Chronicle via Getty Images)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>The multi-part manifesto allegedly “advocated against AI and for the killing and commission of other crimes against CEOs of AI companies and their investors,” according to the Department of Justice. 
It also included the names and purported addresses of a number of the sector’s prominent CEOs and investors.\u003c/p>\n\u003cp>The document ended with an admission to attempting to kill Altman, and a letter addressed to the OpenAI CEO, saying, “If by some miracle you live, then I would take this as a sign from the divine to redeem yourself.”\u003c/p>\n\u003cp>Cobo said the FBI and SFPD had been in contact with the other AI leaders identified in the manifesto, and did not assess any specific threats toward them.\u003c/p>\n\u003cfigure id=\"attachment_12079884\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12079884\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/SamAltmanHomeGetty2.jpg\" alt=\"\" width=\"2000\" height=\"1333\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/SamAltmanHomeGetty2.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/SamAltmanHomeGetty2-160x107.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/SamAltmanHomeGetty2-1536x1024.jpg 1536w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">A view of OpenAI CEO Sam Altman’s house in Russian Hill after a suspected Molotov cocktail attack in San Francisco, California, on April 13, 2026. \u003ccite>(Tayfun Coskun/Anadolu via Getty Images)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>U.S. Attorney Craig Missakian said the DOJ’s investigation is still developing. He said the incident could evolve to be treated as an act of domestic terrorism if officials determine that Moreno-Gama acted with intent to sway public policy or coerce government or public officials.\u003c/p>\n\u003cp>Moreno-Gama did not enter a plea on Tuesday, but will remain in custody without bail until his arraignment in May. 
A court date for the federal charges hasn’t yet been set.\u003c/p>\n\u003cp>Judge Kenneth Wine also granted a protective order from the district attorney’s office, ordering Moreno-Gama not to have any contact with the people named in the manifesto, or to encourage anyone directly or indirectly to contact them.\u003c/p>\n\u003cp>“It’s extraordinarily bad,” Wine said of Moreno-Gama’s actions in court.\u003c/p>\n\u003cp>\u003c/p>\n\u003c/div>\u003c/p>",
"attributes": {
"named": {},
"numeric": []
}
}
],
"link": "/news/12079896/suspect-was-in-apparent-mental-health-crisis-during-attack-on-sam-altmans-house",
"authors": [
"11913"
],
"categories": [
"news_34167",
"news_28250",
"news_8",
"news_248"
],
"tags": [
"news_25184",
"news_34755",
"news_17626",
"news_17725",
"news_33542",
"news_33543",
"news_38",
"news_34586",
"news_1631"
],
"featImg": "news_12079958",
"label": "news"
}
},
"programsReducer": {
"all-things-considered": {
"id": "all-things-considered",
"title": "All Things Considered",
"info": "Every weekday, \u003cem>All Things Considered\u003c/em> hosts Robert Siegel, Audie Cornish, Ari Shapiro, and Kelly McEvers present the program's trademark mix of news, interviews, commentaries, reviews, and offbeat features. Michel Martin hosts on the weekends.",
"airtime": "MON-FRI 1pm-2pm, 4:30pm-6:30pm\u003cbr />SAT-SUN 5pm-6pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/All-Things-Considered-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.npr.org/programs/all-things-considered/",
"meta": {
"site": "news",
"source": "npr"
},
"link": "/radio/program/all-things-considered"
},
"american-suburb-podcast": {
"id": "american-suburb-podcast",
"title": "American Suburb: The Podcast",
"tagline": "The flip side of gentrification, told through one town",
"info": "Gentrification is changing cities across America, forcing people from neighborhoods they have long called home. Call them the displaced. Now those priced out of the Bay Area are looking for a better life in an unlikely place. American Suburb follows this migration to one California town along the Delta, 45 miles from San Francisco. But is this once sleepy suburb ready for them?",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/American-Suburb-Podcast-Tile-703x703-1.jpg",
"officialWebsiteLink": "/news/series/american-suburb-podcast",
"meta": {
"site": "news",
"source": "kqed",
"order": 19
},
"link": "/news/series/american-suburb-podcast/",
"subscribe": {
"npr": "https://rpb3r.app.goo.gl/RBrW",
"apple": "https://itunes.apple.com/WebObjects/MZStore.woa/wa/viewPodcast?mt=2&id=1287748328",
"tuneIn": "https://tunein.com/radio/American-Suburb-p1086805/",
"rss": "https://ww2.kqed.org/news/series/american-suburb-podcast/feed/podcast",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly9mZWVkcy5tZWdhcGhvbmUuZm0vS1FJTkMzMDExODgxNjA5"
}
},
"baycurious": {
"id": "baycurious",
"title": "Bay Curious",
"tagline": "Exploring the Bay Area, one question at a time",
"info": "KQED’s new podcast, Bay Curious, gets to the bottom of the mysteries — both profound and peculiar — that give the Bay Area its unique identity. And we’ll do it with your help! You ask the questions. You decide what Bay Curious investigates. And you join us on the journey to find the answers.",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Bay-Curious-Podcast-Tile-703x703-1.jpg",
"imageAlt": "KQED Bay Curious",
"officialWebsiteLink": "/news/series/baycurious",
"meta": {
"site": "news",
"source": "kqed",
"order": 3
},
"link": "/podcasts/baycurious",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/bay-curious/id1172473406",
"npr": "https://www.npr.org/podcasts/500557090/bay-curious",
"rss": "https://ww2.kqed.org/news/category/bay-curious-podcast/feed/podcast",
"amazon": "https://music.amazon.com/podcasts/9a90d476-aa04-455d-9a4c-0871ed6216d4/bay-curious",
"stitcher": "https://www.stitcher.com/podcast/kqed/bay-curious",
"spotify": "https://open.spotify.com/show/6O76IdmhixfijmhTZLIJ8k"
}
},
"bbc-world-service": {
"id": "bbc-world-service",
"title": "BBC World Service",
"info": "The day's top stories from BBC News compiled twice daily in the week, once at weekends.",
"airtime": "MON-FRI 9pm-10pm, TUE-FRI 1am-2am",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/BBC-World-Service-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.bbc.co.uk/sounds/play/live:bbc_world_service",
"meta": {
"site": "news",
"source": "BBC World Service"
},
"link": "/radio/program/bbc-world-service",
"subscribe": {
"apple": "https://itunes.apple.com/us/podcast/global-news-podcast/id135067274?mt=2",
"tuneIn": "https://tunein.com/radio/BBC-World-Service-p455581/",
"rss": "https://podcasts.files.bbci.co.uk/p02nq0gn.rss"
}
},
"californiareport": {
"id": "californiareport",
"title": "The California Report",
"tagline": "California, day by day",
"info": "KQED’s statewide radio news program providing daily coverage of issues, trends and public policy decisions.",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/The-California-Report-Podcast-Tile-703x703-1.jpg",
"imageAlt": "KQED The California Report",
"officialWebsiteLink": "/californiareport",
"meta": {
"site": "news",
"source": "kqed",
"order": 8
},
"link": "/californiareport",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/kqeds-the-california-report/id79681292",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly9mZWVkcy5tZWdhcGhvbmUuZm0vS1FJTkM1MDAyODE4NTgz",
"npr": "https://www.npr.org/podcasts/432285393/the-california-report",
"stitcher": "https://www.stitcher.com/podcast/kqedfm-kqeds-the-california-report-podcast-8838",
"rss": "https://ww2.kqed.org/news/tag/tcram/feed/podcast"
}
},
"californiareportmagazine": {
"id": "californiareportmagazine",
"title": "The California Report Magazine",
"tagline": "Your state, your stories",
"info": "Every week, The California Report Magazine takes you on a road trip for the ears: to visit the places and meet the people who make California unique. The in-depth storytelling podcast from the California Report.",
"airtime": "FRI 4:30pm-5pm, 6:30pm-7pm, 11pm-11:30pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/The-California-Report-Magazine-Podcast-Tile-703x703-1.jpg",
"imageAlt": "KQED The California Report Magazine",
"officialWebsiteLink": "/californiareportmagazine",
"meta": {
"site": "news",
"source": "kqed",
"order": 10
},
"link": "/californiareportmagazine",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/the-california-report-magazine/id1314750545",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly9mZWVkcy5tZWdhcGhvbmUuZm0vS1FJTkM3NjkwNjk1OTAz",
"npr": "https://www.npr.org/podcasts/564733126/the-california-report-magazine",
"stitcher": "https://www.stitcher.com/podcast/kqed/the-california-report-magazine",
"rss": "https://ww2.kqed.org/news/tag/tcrmag/feed/podcast"
}
},
"city-arts": {
"id": "city-arts",
"title": "City Arts & Lectures",
"info": "A one-hour radio program to hear celebrated writers, artists and thinkers address contemporary ideas and values, often discussing the creative process. Please note: tapes or transcripts are not available",
"imageSrc": "https://ww2.kqed.org/radio/wp-content/uploads/sites/50/2018/05/cityartsandlecture-300x300.jpg",
"officialWebsiteLink": "https://www.cityarts.net/",
"airtime": "SUN 1pm-2pm, TUE 10pm, WED 1am",
"meta": {
"site": "news",
"source": "City Arts & Lectures"
},
"link": "https://www.cityarts.net",
"subscribe": {
"tuneIn": "https://tunein.com/radio/City-Arts-and-Lectures-p692/",
"rss": "https://www.cityarts.net/feed/"
}
},
"closealltabs": {
"id": "closealltabs",
"title": "Close All Tabs",
"tagline": "Your irreverent guide to the trends redefining our world",
"info": "Close All Tabs breaks down how digital culture shapes our world through thoughtful insights and irreverent humor.",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2025/02/CAT_2_Tile-scaled.jpg",
"imageAlt": "KQED Close All Tabs",
"officialWebsiteLink": "/podcasts/closealltabs",
"meta": {
"site": "news",
"source": "kqed",
"order": 1
},
"link": "/podcasts/closealltabs",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/close-all-tabs/id214663465",
"rss": "https://feeds.megaphone.fm/KQINC6993880386",
"amazon": "https://music.amazon.com/podcasts/92d9d4ac-67a3-4eed-b10a-fb45d45b1ef2/close-all-tabs",
"spotify": "https://open.spotify.com/show/6LAJFHnGK1pYXYzv6SIol6?si=deb0cae19813417c"
}
},
"code-switch-life-kit": {
"id": "code-switch-life-kit",
"title": "Code Switch / Life Kit",
"info": "\u003cem>Code Switch\u003c/em>, which listeners will hear in the first part of the hour, has fearless and much-needed conversations about race. Hosted by journalists of color, the show tackles the subject of race head-on, exploring how it impacts every part of society — from politics and pop culture to history, sports and more.\u003cbr />\u003cbr />\u003cem>Life Kit\u003c/em>, which will be in the second part of the hour, guides you through spaces and feelings no one prepares you for — from finances to mental health, from workplace microaggressions to imposter syndrome, from relationships to parenting. The show features experts with real world experience and shares their knowledge. Because everyone needs a little help being human.\u003cbr />\u003cbr />\u003ca href=\"https://www.npr.org/podcasts/510312/codeswitch\">\u003cem>Code Switch\u003c/em> official site and podcast\u003c/a>\u003cbr />\u003ca href=\"https://www.npr.org/lifekit\">\u003cem>Life Kit\u003c/em> official site and podcast\u003c/a>\u003cbr />",
"airtime": "SUN 9pm-10pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Code-Switch-Life-Kit-Podcast-Tile-360x360-1.jpg",
"meta": {
"site": "radio",
"source": "npr"
},
"link": "/radio/program/code-switch-life-kit",
"subscribe": {
"apple": "https://podcasts.apple.com/podcast/1112190608?mt=2&at=11l79Y&ct=nprdirectory",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly93d3cubnByLm9yZy9yc3MvcG9kY2FzdC5waHA_aWQ9NTEwMzEy",
"spotify": "https://open.spotify.com/show/3bExJ9JQpkwNhoHvaIIuyV",
"rss": "https://feeds.npr.org/510312/podcast.xml"
}
},
"commonwealth-club": {
"id": "commonwealth-club",
"title": "Commonwealth Club of California Podcast",
"info": "The Commonwealth Club of California is the nation's oldest and largest public affairs forum. As a non-partisan forum, The Club brings to the public airwaves diverse viewpoints on important topics. The Club's weekly radio broadcast - the oldest in the U.S., dating back to 1924 - is carried across the nation on public radio stations and is now podcasting. Our website archive features audio of our recent programs, as well as selected speeches from our long and distinguished history. This podcast feed is usually updated twice a week and is always un-edited.",
"airtime": "THU 10pm, FRI 1am",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Commonwealth-Club-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.commonwealthclub.org/podcasts",
"meta": {
"site": "news",
"source": "Commonwealth Club of California"
},
"link": "/radio/program/commonwealth-club",
"subscribe": {
"apple": "https://itunes.apple.com/us/podcast/commonwealth-club-of-california-podcast/id976334034?mt=2",
"google": "https://podcasts.google.com/feed/aHR0cDovL3d3dy5jb21tb253ZWFsdGhjbHViLm9yZy9hdWRpby9wb2RjYXN0L3dlZWtseS54bWw",
"tuneIn": "https://tunein.com/radio/Commonwealth-Club-of-California-p1060/"
}
},
"forum": {
"id": "forum",
"title": "Forum",
"tagline": "The conversation starts here",
"info": "KQED’s live call-in program discussing local, state, national and international issues, as well as in-depth interviews.",
"airtime": "MON-FRI 9am-11am, 10pm-11pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Forum-Podcast-Tile-703x703-1.jpg",
"imageAlt": "KQED Forum with Mina Kim and Alexis Madrigal",
"officialWebsiteLink": "/forum",
"meta": {
"site": "news",
"source": "kqed",
"order": 9
},
"link": "/forum",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/kqeds-forum/id73329719",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly9mZWVkcy5tZWdhcGhvbmUuZm0vS1FJTkM5NTU3MzgxNjMz",
"npr": "https://www.npr.org/podcasts/432307980/forum",
"stitcher": "https://www.stitcher.com/podcast/kqedfm-kqeds-forum-podcast",
"rss": "https://feeds.megaphone.fm/KQINC9557381633"
}
},
"freakonomics-radio": {
"id": "freakonomics-radio",
"title": "Freakonomics Radio",
"info": "Freakonomics Radio is a one-hour award-winning podcast and public-radio project hosted by Stephen Dubner, with co-author Steve Levitt as a regular guest. It is produced in partnership with WNYC.",
"imageSrc": "https://ww2.kqed.org/news/wp-content/uploads/sites/10/2018/05/freakonomicsRadio.png",
"officialWebsiteLink": "http://freakonomics.com/",
"airtime": "SUN 1am-2am, SAT 3pm-4pm",
"meta": {
"site": "radio",
"source": "WNYC"
},
"link": "/radio/program/freakonomics-radio",
"subscribe": {
"npr": "https://rpb3r.app.goo.gl/4s8b",
"apple": "https://itunes.apple.com/us/podcast/freakonomics-radio/id354668519",
"tuneIn": "https://tunein.com/podcasts/WNYC-Podcasts/Freakonomics-Radio-p272293/",
"rss": "https://feeds.feedburner.com/freakonomicsradio"
}
},
"fresh-air": {
"id": "fresh-air",
"title": "Fresh Air",
"info": "Hosted by Terry Gross, \u003cem>Fresh Air from WHYY\u003c/em> is the Peabody Award-winning weekday magazine of contemporary arts and issues. One of public radio's most popular programs, Fresh Air features intimate conversations with today's biggest luminaries.",
"airtime": "MON-FRI 7pm-8pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Fresh-Air-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.npr.org/programs/fresh-air/",
"meta": {
"site": "radio",
"source": "npr"
},
"link": "/radio/program/fresh-air",
"subscribe": {
"npr": "https://rpb3r.app.goo.gl/4s8b",
"apple": "https://itunes.apple.com/WebObjects/MZStore.woa/wa/viewPodcast?s=143441&mt=2&id=214089682&at=11l79Y&ct=nprdirectory",
"tuneIn": "https://tunein.com/radio/Fresh-Air-p17/",
"rss": "https://feeds.npr.org/381444908/podcast.xml"
}
},
"here-and-now": {
"id": "here-and-now",
"title": "Here & Now",
"info": "A live production of NPR and WBUR Boston, in collaboration with stations across the country, Here & Now reflects the fluid world of news as it's happening in the middle of the day, with timely, in-depth news, interviews and conversation. Hosted by Robin Young, Jeremy Hobson and Tonya Mosley.",
"airtime": "MON-THU 11am-12pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Here-And-Now-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "http://www.wbur.org/hereandnow",
"meta": {
"site": "news",
"source": "npr"
},
"link": "/radio/program/here-and-now",
"subscribe": {
"apple": "https://itunes.apple.com/WebObjects/MZStore.woa/wa/viewPodcast?mt=2&id=426698661",
"tuneIn": "https://tunein.com/radio/Here--Now-p211/",
"rss": "https://feeds.npr.org/510051/podcast.xml"
}
},
"hidden-brain": {
"id": "hidden-brain",
"title": "Hidden Brain",
"info": "Shankar Vedantam uses science and storytelling to reveal the unconscious patterns that drive human behavior, shape our choices and direct our relationships.",
"imageSrc": "https://ww2.kqed.org/radio/wp-content/uploads/sites/50/2018/05/hiddenbrain.jpg",
"officialWebsiteLink": "https://www.npr.org/series/423302056/hidden-brain",
"airtime": "SUN 7pm-8pm",
"meta": {
"site": "news",
"source": "NPR"
},
"link": "/radio/program/hidden-brain",
"subscribe": {
"apple": "https://itunes.apple.com/us/podcast/hidden-brain/id1028908750?mt=2",
"tuneIn": "https://tunein.com/podcasts/Science-Podcasts/Hidden-Brain-p787503/",
"rss": "https://feeds.npr.org/510308/podcast.xml"
}
},
"how-i-built-this": {
"id": "how-i-built-this",
"title": "How I Built This with Guy Raz",
"info": "Guy Raz dives into the stories behind some of the world's best known companies. How I Built This weaves a narrative journey about innovators, entrepreneurs and idealists—and the movements they built.",
"imageSrc": "https://ww2.kqed.org/news/wp-content/uploads/sites/10/2018/05/howIBuiltThis.png",
"officialWebsiteLink": "https://www.npr.org/podcasts/510313/how-i-built-this",
"airtime": "SUN 7:30pm-8pm",
"meta": {
"site": "news",
"source": "npr"
},
"link": "/radio/program/how-i-built-this",
"subscribe": {
"npr": "https://rpb3r.app.goo.gl/3zxy",
"apple": "https://itunes.apple.com/us/podcast/how-i-built-this-with-guy-raz/id1150510297?mt=2",
"tuneIn": "https://tunein.com/podcasts/Arts--Culture-Podcasts/How-I-Built-This-p910896/",
"rss": "https://feeds.npr.org/510313/podcast.xml"
}
},
"hyphenacion": {
"id": "hyphenacion",
"title": "Hyphenación",
"tagline": "Where conversation and cultura meet",
"info": "What kind of no sabo word is Hyphenación? For us, it’s about living within a hyphenation. Like being a third-gen Mexican-American from the Texas border now living that Bay Area Chicano life. Like Xorje! Each week we bring together a couple of hyphenated Latinos to talk all about personal life choices: family, careers, relationships, belonging … everything is on the table. ",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2025/03/Hyphenacion_FinalAssets_PodcastTile.png",
"imageAlt": "KQED Hyphenación",
"officialWebsiteLink": "/podcasts/hyphenacion",
"meta": {
"site": "news",
"source": "kqed",
"order": 15
},
"link": "/podcasts/hyphenacion",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/hyphenaci%C3%B3n/id1191591838",
"spotify": "https://open.spotify.com/show/2p3Fifq96nw9BPcmFdIq0o?si=39209f7b25774f38",
"youtube": "https://www.youtube.com/c/kqedarts",
"amazon": "https://music.amazon.com/podcasts/6c3dd23c-93fb-4aab-97ba-1725fa6315f1/hyphenaci%C3%B3n",
"rss": "https://feeds.megaphone.fm/KQINC2275451163"
}
},
"jerrybrown": {
"id": "jerrybrown",
"title": "The Political Mind of Jerry Brown",
"tagline": "Lessons from a lifetime in politics",
"info": "The Political Mind of Jerry Brown brings listeners the wisdom of the former Governor, Mayor, and presidential candidate. Scott Shafer interviewed Brown for more than 40 hours, covering the former governor's life and half-century in the political game and Brown has some lessons he'd like to share. ",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/The-Political-Mind-of-Jerry-Brown-Podcast-Tile-703x703-1.jpg",
"imageAlt": "KQED The Political Mind of Jerry Brown",
"officialWebsiteLink": "/podcasts/jerrybrown",
"meta": {
"site": "news",
"source": "kqed",
"order": 18
},
"link": "/podcasts/jerrybrown",
"subscribe": {
"npr": "https://www.npr.org/podcasts/790253322/the-political-mind-of-jerry-brown",
"apple": "https://itunes.apple.com/us/podcast/id1492194549",
"rss": "https://ww2.kqed.org/news/series/jerrybrown/feed/podcast/",
"tuneIn": "http://tun.in/pjGcK",
"stitcher": "https://www.stitcher.com/podcast/kqed/the-political-mind-of-jerry-brown",
"spotify": "https://open.spotify.com/show/54C1dmuyFyKMFttY6X2j6r?si=K8SgRCoISNK6ZbjpXrX5-w",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly93dzIua3FlZC5vcmcvbmV3cy9zZXJpZXMvamVycnlicm93bi9mZWVkL3BvZGNhc3Qv"
}
},
"latino-usa": {
"id": "latino-usa",
"title": "Latino USA",
"airtime": "MON 1am-2am, SUN 6pm-7pm",
"info": "Latino USA, the radio journal of news and culture, is the only national, English-language radio program produced from a Latino perspective.",
"imageSrc": "https://ww2.kqed.org/radio/wp-content/uploads/sites/50/2018/04/latinoUsa.jpg",
"officialWebsiteLink": "http://latinousa.org/",
"meta": {
"site": "news",
"source": "npr"
},
"link": "/radio/program/latino-usa",
"subscribe": {
"npr": "https://rpb3r.app.goo.gl/xtTd",
"apple": "https://itunes.apple.com/WebObjects/MZStore.woa/wa/viewPodcast?s=143441&mt=2&id=79681317&at=11l79Y&ct=nprdirectory",
"tuneIn": "https://tunein.com/radio/Latino-USA-p621/",
"rss": "https://feeds.npr.org/510016/podcast.xml"
}
},
"marketplace": {
"id": "marketplace",
"title": "Marketplace",
"info": "Our flagship program, helmed by Kai Ryssdal, examines what the day in money delivered, through stories, conversations, newsworthy numbers and more. Updated Monday through Friday at about 3:30 p.m. PT.",
"airtime": "MON-FRI 4pm-4:30pm, MON-WED 6:30pm-7pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Marketplace-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.marketplace.org/",
"meta": {
"site": "news",
"source": "American Public Media"
},
"link": "/radio/program/marketplace",
"subscribe": {
"apple": "https://itunes.apple.com/WebObjects/MZStore.woa/wa/viewPodcast?s=143441&mt=2&id=201853034&at=11l79Y&ct=nprdirectory",
"tuneIn": "https://tunein.com/radio/APM-Marketplace-p88/",
"rss": "https://feeds.publicradio.org/public_feeds/marketplace-pm/rss/rss"
}
},
"masters-of-scale": {
"id": "masters-of-scale",
"title": "Masters of Scale",
"info": "Masters of Scale is an original podcast in which LinkedIn co-founder and Greylock Partner Reid Hoffman sets out to describe and prove theories that explain how great entrepreneurs take their companies from zero to a gazillion in ingenious fashion.",
"airtime": "Every other Wednesday June 12 through October 16 at 8pm (repeats Thursdays at 2am)",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Masters-of-Scale-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://mastersofscale.com/",
"meta": {
"site": "radio",
"source": "WaitWhat"
},
"link": "/radio/program/masters-of-scale",
"subscribe": {
"apple": "http://mastersofscale.app.link/",
"rss": "https://rss.art19.com/masters-of-scale"
}
},
"mindshift": {
"id": "mindshift",
"title": "MindShift",
"tagline": "A podcast about the future of learning and how we raise our kids",
"info": "The MindShift podcast explores the innovations in education that are shaping how kids learn. Hosts Ki Sung and Katrina Schwartz introduce listeners to educators, researchers, parents and students who are developing effective ways to improve how kids learn. We cover topics like how fed-up administrators are developing surprising tactics to deal with classroom disruptions; how listening to podcasts is helping kids develop reading skills; the consequences of overparenting; and why interdisciplinary learning can engage students on all ends of the traditional achievement spectrum. This podcast is part of the MindShift education site, a division of KQED News. KQED is an NPR/PBS member station based in San Francisco. You can also visit the MindShift website for episodes and supplemental blog posts or tweet us \u003ca href=\"https://twitter.com/MindShiftKQED\">@MindShiftKQED\u003c/a> or visit us at \u003ca href=\"/mindshift\">MindShift.KQED.org\u003c/a>",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Mindshift-Podcast-Tile-703x703-1.jpg",
"imageAlt": "KQED MindShift: How We Will Learn",
"officialWebsiteLink": "/mindshift/",
"meta": {
"site": "news",
"source": "kqed",
"order": 12
},
"link": "/podcasts/mindshift",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/mindshift-podcast/id1078765985",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly9mZWVkcy5tZWdhcGhvbmUuZm0vS1FJTkM1NzY0NjAwNDI5",
"npr": "https://www.npr.org/podcasts/464615685/mind-shift-podcast",
"stitcher": "https://www.stitcher.com/podcast/kqed/stories-teachers-share",
"spotify": "https://open.spotify.com/show/0MxSpNYZKNprFLCl7eEtyx"
}
},
"morning-edition": {
"id": "morning-edition",
"title": "Morning Edition",
"info": "\u003cem>Morning Edition\u003c/em> takes listeners around the country and the world with multi-faceted stories and commentaries every weekday. Hosts Steve Inskeep, David Greene and Rachel Martin bring you the latest breaking news and features to prepare you for the day.",
"airtime": "MON-FRI 3am-9am",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Morning-Edition-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.npr.org/programs/morning-edition/",
"meta": {
"site": "news",
"source": "npr"
},
"link": "/radio/program/morning-edition"
},
"onourwatch": {
"id": "onourwatch",
"title": "On Our Watch",
"tagline": "Deeply-reported investigative journalism",
"info": "For decades, the process for how police police themselves has been inconsistent – if not opaque. In some states, like California, these proceedings were completely hidden. After a new police transparency law unsealed scores of internal affairs files, our reporters set out to examine these cases and the shadow world of police discipline. On Our Watch brings listeners into the rooms where officers are questioned and witnesses are interrogated to find out who this system is really protecting. Is it the officers, or the public they've sworn to serve?",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/On-Our-Watch-Podcast-Tile-703x703-1.jpg",
"imageAlt": "On Our Watch from NPR and KQED",
"officialWebsiteLink": "/podcasts/onourwatch",
"meta": {
"site": "news",
"source": "kqed",
"order": 11
},
"link": "/podcasts/onourwatch",
"subscribe": {
"apple": "https://podcasts.apple.com/podcast/id1567098962",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly9mZWVkcy5ucHIub3JnLzUxMDM2MC9wb2RjYXN0LnhtbD9zYz1nb29nbGVwb2RjYXN0cw",
"npr": "https://rpb3r.app.goo.gl/onourwatch",
"spotify": "https://open.spotify.com/show/0OLWoyizopu6tY1XiuX70x",
"tuneIn": "https://tunein.com/radio/On-Our-Watch-p1436229/",
"stitcher": "https://www.stitcher.com/show/on-our-watch",
"rss": "https://feeds.npr.org/510360/podcast.xml"
}
},
"on-the-media": {
"id": "on-the-media",
"title": "On The Media",
"info": "Our weekly podcast explores how the media 'sausage' is made, casts an incisive eye on fluctuations in the marketplace of ideas, and examines threats to the freedom of information and expression in America and abroad. For one hour a week, the show tries to lift the veil from the process of \"making media,\" especially news media, because it's through that lens that we see the world and the world sees us.",
"airtime": "SUN 2pm-3pm, MON 12am-1am",
"imageSrc": "https://ww2.kqed.org/radio/wp-content/uploads/sites/50/2018/04/onTheMedia.png",
"officialWebsiteLink": "https://www.wnycstudios.org/shows/otm",
"meta": {
"site": "news",
"source": "wnyc"
},
"link": "/radio/program/on-the-media",
"subscribe": {
"apple": "https://itunes.apple.com/us/podcast/on-the-media/id73330715?mt=2",
"tuneIn": "https://tunein.com/radio/On-the-Media-p69/",
"rss": "http://feeds.wnyc.org/onthemedia"
}
},
"pbs-newshour": {
"id": "pbs-newshour",
"title": "PBS NewsHour",
"info": "Analysis, background reports and updates from the PBS NewsHour putting today's news in context.",
"airtime": "MON-FRI 3pm-4pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/PBS-News-Hour-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.pbs.org/newshour/",
"meta": {
"site": "news",
"source": "pbs"
},
"link": "/radio/program/pbs-newshour",
"subscribe": {
"apple": "https://itunes.apple.com/us/podcast/pbs-newshour-full-show/id394432287?mt=2",
"tuneIn": "https://tunein.com/radio/PBS-NewsHour---Full-Show-p425698/",
"rss": "https://www.pbs.org/newshour/feeds/rss/podcasts/show"
}
},
"perspectives": {
"id": "perspectives",
"title": "Perspectives",
"tagline": "KQED's series of daily listener commentaries since 1991",
"info": "KQED's series of daily listener commentaries since 1991.",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2025/01/Perspectives_Tile_Final.jpg",
"imageAlt": "KQED Perspectives",
"officialWebsiteLink": "/perspectives/",
"meta": {
"site": "radio",
"source": "kqed",
"order": 14
},
"link": "/perspectives",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/id73801135",
"npr": "https://www.npr.org/podcasts/432309616/perspectives",
"rss": "https://ww2.kqed.org/perspectives/category/perspectives/feed/",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly93dzIua3FlZC5vcmcvcGVyc3BlY3RpdmVzL2NhdGVnb3J5L3BlcnNwZWN0aXZlcy9mZWVkLw"
}
},
"planet-money": {
"id": "planet-money",
"title": "Planet Money",
"info": "The economy explained. Imagine you could call up a friend and say, Meet me at the bar and tell me what's going on with the economy. Now imagine that's actually a fun evening.",
"airtime": "SUN 3pm-4pm",
"imageSrc": "https://ww2.kqed.org/radio/wp-content/uploads/sites/50/2018/04/planetmoney.jpg",
"officialWebsiteLink": "https://www.npr.org/sections/money/",
"meta": {
"site": "news",
"source": "npr"
},
"link": "/radio/program/planet-money",
"subscribe": {
"npr": "https://rpb3r.app.goo.gl/M4f5",
"apple": "https://itunes.apple.com/us/podcast/planet-money/id290783428?mt=2",
"tuneIn": "https://tunein.com/podcasts/Business--Economics-Podcasts/Planet-Money-p164680/",
"rss": "https://feeds.npr.org/510289/podcast.xml"
}
},
"politicalbreakdown": {
"id": "politicalbreakdown",
"title": "Political Breakdown",
"tagline": "Politics from a personal perspective",
"info": "Political Breakdown is a new series that explores the political intersection of California and the nation. Each week hosts Scott Shafer and Marisa Lagos are joined with a new special guest to unpack politics -- with personality — and offer an insider’s glimpse at how politics happens.",
"airtime": "THU 6:30pm-7pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Political-Breakdown-2024-Podcast-Tile-703x703-1.jpg",
"imageAlt": "KQED Political Breakdown",
"officialWebsiteLink": "/podcasts/politicalbreakdown",
"meta": {
"site": "radio",
"source": "kqed",
"order": 5
},
"link": "/podcasts/politicalbreakdown",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/political-breakdown/id1327641087",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly9mZWVkcy5tZWdhcGhvbmUuZm0vS1FJTkM5Nzk2MzI2MTEx",
"npr": "https://www.npr.org/podcasts/572155894/political-breakdown",
"stitcher": "https://www.stitcher.com/podcast/kqed/political-breakdown",
"spotify": "https://open.spotify.com/show/07RVyIjIdk2WDuVehvBMoN",
"rss": "https://ww2.kqed.org/news/tag/political-breakdown/feed/podcast"
}
},
"possible": {
"id": "possible",
"title": "Possible",
"info": "Possible is hosted by entrepreneur Reid Hoffman and writer Aria Finger. Together in Possible, Hoffman and Finger lead enlightening discussions about building a brighter collective future. The show features interviews with visionary guests like Trevor Noah, Sam Altman and Janette Sadik-Khan. Possible paints an optimistic portrait of the world we can create through science, policy, business, art and our shared humanity. It asks: What if everything goes right for once? How can we get there? Each episode also includes a short fiction story generated by advanced AI GPT-4, serving as a thought-provoking springboard to speculate how humanity could leverage technology for good.",
"airtime": "SUN 2pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Possible-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.possible.fm/",
"meta": {
"site": "news",
"source": "Possible"
},
"link": "/radio/program/possible",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/possible/id1677184070",
"spotify": "https://open.spotify.com/show/730YpdUSNlMyPQwNnyjp4k"
}
},
"pri-the-world": {
"id": "pri-the-world",
"title": "PRI's The World: Latest Edition",
"info": "Each weekday, host Marco Werman and his team of producers bring you the world's most interesting stories in an hour of radio that reminds us just how small our planet really is.",
"airtime": "MON-FRI 2pm-3pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/The-World-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.pri.org/programs/the-world",
"meta": {
"site": "news",
"source": "PRI"
},
"link": "/radio/program/pri-the-world",
"subscribe": {
"apple": "https://itunes.apple.com/us/podcast/pris-the-world-latest-edition/id278196007?mt=2",
"tuneIn": "https://tunein.com/podcasts/News--Politics-Podcasts/PRIs-The-World-p24/",
"rss": "http://feeds.feedburner.com/pri/theworld"
}
},
"radiolab": {
"id": "radiolab",
"title": "Radiolab",
"info": "A two-time Peabody Award-winner, Radiolab is an investigation told through sounds and stories, and centered around one big idea. In the Radiolab world, information sounds like music and science and culture collide. Hosted by Jad Abumrad and Robert Krulwich, the show is designed for listeners who demand skepticism, but appreciate wonder. WNYC Studios is the producer of other leading podcasts including Freakonomics Radio, Death, Sex & Money, On the Media and many more.",
"airtime": "SUN 12am-1am, SAT 2pm-3pm",
"imageSrc": "https://ww2.kqed.org/radio/wp-content/uploads/sites/50/2018/04/radiolab1400.png",
"officialWebsiteLink": "https://www.wnycstudios.org/shows/radiolab/",
"meta": {
"site": "science",
"source": "WNYC"
},
"link": "/radio/program/radiolab",
"subscribe": {
"apple": "https://itunes.apple.com/us/podcast/radiolab/id152249110?mt=2",
"tuneIn": "https://tunein.com/radio/RadioLab-p68032/",
"rss": "https://feeds.wnyc.org/radiolab"
}
},
"reveal": {
"id": "reveal",
"title": "Reveal",
"info": "Created by The Center for Investigative Reporting and PRX, Reveal is public radio's first one-hour weekly radio show and podcast dedicated to investigative reporting. Credible, fact-based and without a partisan agenda, Reveal combines the power and artistry of driveway moment storytelling with data-rich reporting on critically important issues. The result is stories that inform and inspire, arming our listeners with information to right injustices, hold the powerful accountable and improve lives. Reveal is hosted by Al Letson and showcases the award-winning work of CIR and newsrooms large and small across the nation. In a radio and podcast market crowded with choices, Reveal focuses on important and often surprising stories that illuminate the world for our listeners.",
"airtime": "SAT 4pm-5pm",
"imageSrc": "https://ww2.kqed.org/radio/wp-content/uploads/sites/50/2018/04/reveal300px.png",
"officialWebsiteLink": "https://www.revealnews.org/episodes/",
"meta": {
"site": "news",
"source": "npr"
},
"link": "/radio/program/reveal",
"subscribe": {
"apple": "https://itunes.apple.com/us/podcast/reveal/id886009669",
"tuneIn": "https://tunein.com/radio/Reveal-p679597/",
"rss": "http://feeds.revealradio.org/revealpodcast"
}
},
"rightnowish": {
"id": "rightnowish",
"title": "Rightnowish",
"tagline": "Art is where you find it",
"info": "Rightnowish digs into life in the Bay Area right now… ish. Journalist Pendarvis Harshaw takes us to galleries painted on the sides of liquor stores in West Oakland. We'll dance in warehouses in the Bayview, make smoothies with kids in South Berkeley, and listen to classical music in a 1984 Cutlass Supreme in Richmond. Every week, Pen talks to movers and shakers about how the Bay Area shapes what they create, and how they shape the place we call home.",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Rightnowish-Podcast-Tile-500x500-1.jpg",
"imageAlt": "KQED Rightnowish with Pendarvis Harshaw",
"officialWebsiteLink": "/podcasts/rightnowish",
"meta": {
"site": "arts",
"source": "kqed",
"order": 16
},
"link": "/podcasts/rightnowish",
"subscribe": {
"npr": "https://www.npr.org/podcasts/721590300/rightnowish",
"rss": "https://ww2.kqed.org/arts/programs/rightnowish/feed/podcast",
"apple": "https://podcasts.apple.com/us/podcast/rightnowish/id1482187648",
"stitcher": "https://www.stitcher.com/podcast/kqed/rightnowish",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly9mZWVkcy5tZWdhcGhvbmUuZm0vS1FJTkMxMjU5MTY3NDc4",
"spotify": "https://open.spotify.com/show/7kEJuafTzTVan7B78ttz1I"
}
},
"science-friday": {
"id": "science-friday",
"title": "Science Friday",
"info": "Science Friday is a weekly science talk show, broadcast live over public radio stations nationwide. Each week, the show focuses on science topics that are in the news and tries to bring an educated, balanced discussion to bear on the scientific issues at hand. Panels of expert guests join host Ira Flatow, a veteran science journalist, to discuss science and to take questions from listeners during the call-in portion of the program.",
"airtime": "FRI 11am-1pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Science-Friday-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.wnycstudios.org/shows/science-friday",
"meta": {
"site": "news",
"source": "npr"
},
"link": "/radio/program/science-friday",
"subscribe": {
"apple": "https://itunes.apple.com/WebObjects/MZStore.woa/wa/viewPodcast?s=143441&mt=2&id=73329284&at=11l79Y&ct=nprdirectory",
"tuneIn": "https://tunein.com/radio/Science-Friday-p394/",
"rss": "http://feeds.wnyc.org/science-friday"
}
},
"snap-judgment": {
"id": "snap-judgment",
"title": "Snap Judgment",
"tagline": "Real stories with killer beats",
"info": "The Snap Judgment radio show and podcast mixes real stories with killer beats to produce cinematic, dramatic radio. Snap's musical brand of storytelling dares listeners to see the world through the eyes of another. This is storytelling... with a BEAT!! Snap first aired on public radio stations nationwide in July 2010. Today, Snap Judgment airs on over 450 public radio stations and is brought to the airwaves by KQED & PRX.",
"airtime": "SAT 1pm-2pm, 9pm-10pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/05/Snap-Judgment-Podcast-Tile-703x703-1.jpg",
"imageAlt": "KQED Snap Judgment",
"officialWebsiteLink": "https://snapjudgment.org",
"meta": {
"site": "arts",
"source": "kqed",
"order": 4
},
"link": "https://snapjudgment.org",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/snap-judgment/id283657561",
"npr": "https://www.npr.org/podcasts/449018144/snap-judgment",
"stitcher": "https://www.pandora.com/podcast/snap-judgment/PC:241?source=stitcher-sunset",
"spotify": "https://open.spotify.com/show/3Cct7ZWmxHNAtLgBTqjC5v",
"rss": "https://snap.feed.snapjudgment.org/"
}
},
"soldout": {
"id": "soldout",
"title": "SOLD OUT: Rethinking Housing in America",
"tagline": "A new future for housing",
"info": "Sold Out: Rethinking Housing in America",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Sold-Out-Podcast-Tile-703x703-1.jpg",
"imageAlt": "KQED Sold Out: Rethinking Housing in America",
"officialWebsiteLink": "/podcasts/soldout",
"meta": {
"site": "news",
"source": "kqed",
"order": 13
},
"link": "/podcasts/soldout",
"subscribe": {
"npr": "https://www.npr.org/podcasts/911586047/s-o-l-d-o-u-t-a-new-future-for-housing",
"apple": "https://podcasts.apple.com/us/podcast/introducing-sold-out-rethinking-housing-in-america/id1531354937",
"rss": "https://feeds.megaphone.fm/soldout",
"spotify": "https://open.spotify.com/show/38dTBSk2ISFoPiyYNoKn1X",
"stitcher": "https://www.stitcher.com/podcast/kqed/sold-out-rethinking-housing-in-america",
"tunein": "https://tunein.com/radio/SOLD-OUT-Rethinking-Housing-in-America-p1365871/",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly9mZWVkcy5tZWdhcGhvbmUuZm0vc29sZG91dA"
}
},
"spooked": {
"id": "spooked",
"title": "Spooked",
"tagline": "True-life supernatural stories",
"info": "",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/10/Spooked-Podcast-Tile-703x703-1.jpg",
"imageAlt": "KQED Spooked",
"officialWebsiteLink": "https://spookedpodcast.org/",
"meta": {
"site": "news",
"source": "kqed",
"order": 7
},
"link": "https://spookedpodcast.org/",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/spooked/id1279361017",
"npr": "https://www.npr.org/podcasts/549547848/snap-judgment-presents-spooked",
"spotify": "https://open.spotify.com/show/76571Rfl3m7PLJQZKQIGCT",
"rss": "https://feeds.simplecast.com/TBotaapn"
}
},
"tech-nation": {
"id": "tech-nation",
"title": "Tech Nation Radio Podcast",
"info": "Tech Nation is a weekly public radio program, hosted by Dr. Moira Gunn. Founded in 1993, it has grown from a simple interview show to a multi-faceted production, featuring conversations with noted technology and science leaders, and a weekly science and technology-related commentary.",
"airtime": "FRI 10pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Tech-Nation-Radio-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "http://technation.podomatic.com/",
"meta": {
"site": "science",
"source": "Tech Nation Media"
},
"link": "/radio/program/tech-nation",
"subscribe": {
"rss": "https://technation.podomatic.com/rss2.xml"
}
},
"ted-radio-hour": {
"id": "ted-radio-hour",
"title": "TED Radio Hour",
"info": "The TED Radio Hour is a journey through fascinating ideas, astonishing inventions, fresh approaches to old problems, and new ways to think and create.",
"airtime": "SUN 3pm-4pm, SAT 10pm-11pm",
"imageSrc": "https://ww2.kqed.org/radio/wp-content/uploads/sites/50/2018/04/tedRadioHour.jpg",
"officialWebsiteLink": "https://www.npr.org/programs/ted-radio-hour/?showDate=2018-06-22",
"meta": {
"site": "news",
"source": "npr"
},
"link": "/radio/program/ted-radio-hour",
"subscribe": {
"npr": "https://rpb3r.app.goo.gl/8vsS",
"apple": "https://itunes.apple.com/WebObjects/MZStore.woa/wa/viewPodcast?s=143441&mt=2&id=523121474&at=11l79Y&ct=nprdirectory",
"tuneIn": "https://tunein.com/radio/TED-Radio-Hour-p418021/",
"rss": "https://feeds.npr.org/510298/podcast.xml"
}
},
"thebay": {
"id": "thebay",
"title": "The Bay",
"tagline": "Local news to keep you rooted",
"info": "Host Devin Katayama walks you through the biggest story of the day with reporters and newsmakers.",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/The-Bay-Podcast-Tile-703x703-1.jpg",
"imageAlt": "KQED The Bay",
"officialWebsiteLink": "/podcasts/thebay",
"meta": {
"site": "radio",
"source": "kqed",
"order": 2
},
"link": "/podcasts/thebay",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/the-bay/id1350043452",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly9mZWVkcy5tZWdhcGhvbmUuZm0vS1FJTkM4MjU5Nzg2MzI3",
"npr": "https://www.npr.org/podcasts/586725995/the-bay",
"stitcher": "https://www.stitcher.com/podcast/kqed/the-bay",
"spotify": "https://open.spotify.com/show/4BIKBKIujizLHlIlBNaAqQ",
"rss": "https://feeds.megaphone.fm/KQINC8259786327"
}
},
"thelatest": {
"id": "thelatest",
"title": "The Latest",
"tagline": "Trusted local news in real time",
"info": "",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2025/05/The-Latest-2025-Podcast-Tile-703x703-1.jpg",
"imageAlt": "KQED The Latest",
"officialWebsiteLink": "/thelatest",
"meta": {
"site": "news",
"source": "kqed",
"order": 6
},
"link": "/thelatest",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/the-latest-from-kqed/id1197721799",
"npr": "https://www.npr.org/podcasts/1257949365/the-latest-from-k-q-e-d",
"spotify": "https://open.spotify.com/show/5KIIXMgM9GTi5AepwOYvIZ?si=bd3053fec7244dba",
"rss": "https://feeds.megaphone.fm/KQINC9137121918"
}
},
"theleap": {
"id": "theleap",
"title": "The Leap",
"tagline": "What if you closed your eyes, and jumped?",
"info": "Stories about people making dramatic, risky changes, told by award-winning public radio reporter Judy Campbell.",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/The-Leap-Podcast-Tile-703x703-1.jpg",
"imageAlt": "KQED The Leap",
"officialWebsiteLink": "/podcasts/theleap",
"meta": {
"site": "news",
"source": "kqed",
"order": 17
},
"link": "/podcasts/theleap",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/the-leap/id1046668171",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly9mZWVkcy5tZWdhcGhvbmUuZm0vS1FJTkM0NTcwODQ2MjY2",
"npr": "https://www.npr.org/podcasts/447248267/the-leap",
"stitcher": "https://www.stitcher.com/podcast/kqed/the-leap",
"spotify": "https://open.spotify.com/show/3sSlVHHzU0ytLwuGs1SD1U",
"rss": "https://ww2.kqed.org/news/programs/the-leap/feed/podcast"
}
},
"the-moth-radio-hour": {
"id": "the-moth-radio-hour",
"title": "The Moth Radio Hour",
"info": "Since its launch in 1997, The Moth has presented thousands of true stories, told live and without notes, to standing-room-only crowds worldwide. Moth storytellers stand alone, under a spotlight, with only a microphone and a roomful of strangers. The storyteller and the audience embark on a high-wire act of shared experience which is both terrifying and exhilarating. Since 2008, The Moth podcast has featured many of our favorite stories told live on Moth stages around the country. For information on all of our programs and live events, visit themoth.org.",
"airtime": "SAT 8pm-9pm and SUN 11am-12pm",
"imageSrc": "https://ww2.kqed.org/radio/wp-content/uploads/sites/50/2018/04/theMoth.jpg",
"officialWebsiteLink": "https://themoth.org/",
"meta": {
"site": "arts",
"source": "prx"
},
"link": "/radio/program/the-moth-radio-hour",
"subscribe": {
"apple": "https://itunes.apple.com/us/podcast/the-moth-podcast/id275699983?mt=2",
"tuneIn": "https://tunein.com/radio/The-Moth-p273888/",
"rss": "http://feeds.themoth.org/themothpodcast"
}
},
"the-new-yorker-radio-hour": {
"id": "the-new-yorker-radio-hour",
"title": "The New Yorker Radio Hour",
"info": "The New Yorker Radio Hour is a weekly program presented by the magazine's editor, David Remnick, and produced by WNYC Studios and The New Yorker. Each episode features a diverse mix of interviews, profiles, storytelling, and an occasional burst of humor inspired by the magazine, and shaped by its writers, artists, and editors. This isn't a radio version of a magazine, but something all its own, reflecting the rich possibilities of audio storytelling and conversation. Theme music for the show was composed and performed by Merrill Garbus of tUnE-YArDs.",
"airtime": "SAT 10am-11am",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/The-New-Yorker-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.wnycstudios.org/shows/tnyradiohour",
"meta": {
"site": "arts",
"source": "WNYC"
},
"link": "/radio/program/the-new-yorker-radio-hour",
"subscribe": {
"apple": "https://itunes.apple.com/us/podcast/id1050430296",
"tuneIn": "https://tunein.com/podcasts/WNYC-Podcasts/New-Yorker-Radio-Hour-p803804/",
"rss": "https://feeds.feedburner.com/newyorkerradiohour"
}
},
"the-sam-sanders-show": {
"id": "the-sam-sanders-show",
"title": "The Sam Sanders Show",
"info": "One of public radio's most dynamic voices, Sam Sanders helped launch The NPR Politics Podcast and hosted NPR's hit show It's Been A Minute. Now, the award-winning host returns with something brand new, The Sam Sanders Show. Every week, Sam Sanders and friends dig into the culture that shapes our lives: what's driving the biggest trends, how artists really think, and even the memes you can't stop scrolling past. Sam is beloved for his way of unpacking the world and bringing you up close to fresh currents and engaging conversations. The Sam Sanders Show is smart, funny and always a good time.",
"airtime": "FRI 12-1pm AND SAT 11am-12pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2025/11/The-Sam-Sanders-Show-Podcast-Tile-400x400-1.jpg",
"officialWebsiteLink": "https://www.kcrw.com/shows/the-sam-sanders-show/latest",
"meta": {
"site": "arts",
"source": "KCRW"
},
"link": "https://www.kcrw.com/shows/the-sam-sanders-show/latest",
"subscribe": {
"rss": "https://feed.cdnstream1.com/zjb/feed/download/ac/28/59/ac28594c-e1d0-4231-8728-61865cdc80e8.xml"
}
},
"the-splendid-table": {
"id": "the-splendid-table",
"title": "The Splendid Table",
"info": "\u003cem>The Splendid Table\u003c/em> hosts our nation's conversations about cooking, sustainability and food culture.",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/The-Splendid-Table-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.splendidtable.org/",
"airtime": "SUN 10-11 pm",
"meta": {
"site": "radio",
"source": "npr"
},
"link": "/radio/program/the-splendid-table"
},
"this-american-life": {
"id": "this-american-life",
"title": "This American Life",
"info": "This American Life is a weekly public radio show, heard by 2.2 million people on more than 500 stations. Another 2.5 million people download the weekly podcast. It is hosted by Ira Glass, produced in collaboration with Chicago Public Media, delivered to stations by PRX The Public Radio Exchange, and has won all of the major broadcasting awards.",
"airtime": "SAT 12pm-1pm, 7pm-8pm",
"imageSrc": "https://ww2.kqed.org/radio/wp-content/uploads/sites/50/2018/04/thisAmericanLife.png",
"officialWebsiteLink": "https://www.thisamericanlife.org/",
"meta": {
"site": "news",
"source": "wbez"
},
"link": "/radio/program/this-american-life",
"subscribe": {
"apple": "https://itunes.apple.com/WebObjects/MZStore.woa/wa/viewPodcast?s=143441&mt=2&id=201671138&at=11l79Y&ct=nprdirectory",
"rss": "https://www.thisamericanlife.org/podcast/rss.xml"
}
},
"tinydeskradio": {
"id": "tinydeskradio",
"title": "Tiny Desk Radio",
"info": "We're bringing the best of Tiny Desk to the airwaves, only on public radio.",
"airtime": "SUN 8pm and SAT 9pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2025/04/300x300-For-Member-Station-Logo-Tiny-Desk-Radio-@2x.png",
"officialWebsiteLink": "https://www.npr.org/series/g-s1-52030/tiny-desk-radio",
"meta": {
"site": "news",
"source": "npr"
},
"link": "/radio/program/tinydeskradio",
"subscribe": {
"rss": "https://feeds.npr.org/g-s1-52030/rss.xml"
}
},
"wait-wait-dont-tell-me": {
"id": "wait-wait-dont-tell-me",
"title": "Wait Wait... Don't Tell Me!",
"info": "Peter Sagal and Bill Kurtis host the weekly NPR News quiz show alongside some of the best and brightest news and entertainment personalities.",
"airtime": "SUN 10am-11am, SAT 11am-12pm, SAT 6pm-7pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Wait-Wait-Podcast-Tile-300x300-1.jpg",
"officialWebsiteLink": "https://www.npr.org/programs/wait-wait-dont-tell-me/",
"meta": {
"site": "news",
"source": "npr"
},
"link": "/radio/program/wait-wait-dont-tell-me",
"subscribe": {
"npr": "https://rpb3r.app.goo.gl/Xogv",
"apple": "https://itunes.apple.com/WebObjects/MZStore.woa/wa/viewPodcast?s=143441&mt=2&id=121493804&at=11l79Y&ct=nprdirectory",
"tuneIn": "https://tunein.com/radio/Wait-Wait-Dont-Tell-Me-p46/",
"rss": "https://feeds.npr.org/344098539/podcast.xml"
}
},
"weekend-edition-saturday": {
"id": "weekend-edition-saturday",
"title": "Weekend Edition Saturday",
"info": "Weekend Edition Saturday wraps up the week's news and offers a mix of analysis and features on a wide range of topics, including arts, sports, entertainment, and human interest stories. The two-hour program is hosted by NPR's Peabody Award-winning Scott Simon.",
"airtime": "SAT 5am-10am",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Weekend-Edition-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.npr.org/programs/weekend-edition-saturday/",
"meta": {
"site": "news",
"source": "npr"
},
"link": "/radio/program/weekend-edition-saturday"
},
"weekend-edition-sunday": {
"id": "weekend-edition-sunday",
"title": "Weekend Edition Sunday",
"info": "Weekend Edition Sunday features interviews with newsmakers, artists, scientists, politicians, musicians, writers, theologians and historians. The program has covered news events from Nelson Mandela's 1990 release from a South African prison to the capture of Saddam Hussein.",
"airtime": "SUN 5am-10am",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Weekend-Edition-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.npr.org/programs/weekend-edition-sunday/",
"meta": {
"site": "news",
"source": "npr"
},
"link": "/radio/program/weekend-edition-sunday"
}
},
"racesReducer": {},
"racesGenElectionReducer": {},
"radioSchedulesReducer": {},
"listsReducer": {
"posts/news?tag=openai": {
"isFetching": false,
"latestQuery": {
"from": 0,
"postsToRender": 9
},
"tag": null,
"vitalsOnly": true,
"totalRequested": 9,
"isLoading": false,
"isLoadingMore": true,
"total": {
"value": 35,
"relation": "eq"
},
"items": [
"news_12083278",
"news_12083224",
"news_12082064",
"news_12081916",
"news_12081798",
"news_12081603",
"news_12081290",
"news_12080610",
"news_12079896"
]
}
},
"recallGuideReducer": {
"intros": {},
"policy": {},
"candidates": {}
},
"savedArticleReducer": {
"articles": [],
"status": {}
},
"pfsSessionReducer": {},
"subscriptionsReducer": {},
"termsReducer": {
"about": {
"name": "About",
"type": "terms",
"id": "about",
"slug": "about",
"link": "/about",
"taxonomy": "site"
},
"arts": {
"name": "Arts & Culture",
"grouping": [
"arts",
"pop",
"trulyca"
],
"description": "KQED Arts provides daily in-depth coverage of the Bay Area's music, art, film, performing arts, literature and arts news, as well as cultural commentary and criticism.",
"type": "terms",
"id": "arts",
"slug": "arts",
"link": "/arts",
"taxonomy": "site"
},
"artschool": {
"name": "Art School",
"parent": "arts",
"type": "terms",
"id": "artschool",
"slug": "artschool",
"link": "/artschool",
"taxonomy": "site"
},
"bayareabites": {
"name": "KQED food",
"grouping": [
"food",
"bayareabites",
"checkplease"
],
"parent": "food",
"type": "terms",
"id": "bayareabites",
"slug": "bayareabites",
"link": "/food",
"taxonomy": "site"
},
"bayareahiphop": {
"name": "Bay Area Hiphop",
"type": "terms",
"id": "bayareahiphop",
"slug": "bayareahiphop",
"link": "/bayareahiphop",
"taxonomy": "site"
},
"campaign21": {
"name": "Campaign 21",
"type": "terms",
"id": "campaign21",
"slug": "campaign21",
"link": "/campaign21",
"taxonomy": "site"
},
"checkplease": {
"name": "KQED food",
"grouping": [
"food",
"bayareabites",
"checkplease"
],
"parent": "food",
"type": "terms",
"id": "checkplease",
"slug": "checkplease",
"link": "/food",
"taxonomy": "site"
},
"education": {
"name": "Education",
"grouping": [
"education"
],
"type": "terms",
"id": "education",
"slug": "education",
"link": "/education",
"taxonomy": "site"
},
"elections": {
"name": "Elections",
"type": "terms",
"id": "elections",
"slug": "elections",
"link": "/elections",
"taxonomy": "site"
},
"events": {
"name": "Events",
"type": "terms",
"id": "events",
"slug": "events",
"link": "/events",
"taxonomy": "site"
},
"event": {
"name": "Event",
"alias": "events",
"type": "terms",
"id": "event",
"slug": "event",
"link": "/event",
"taxonomy": "site"
},
"filmschoolshorts": {
"name": "Film School Shorts",
"type": "terms",
"id": "filmschoolshorts",
"slug": "filmschoolshorts",
"link": "/filmschoolshorts",
"taxonomy": "site"
},
"food": {
"name": "KQED food",
"grouping": [
"food",
"bayareabites",
"checkplease"
],
"type": "terms",
"id": "food",
"slug": "food",
"link": "/food",
"taxonomy": "site"
},
"forum": {
"name": "Forum",
"relatedContentQuery": "posts/forum?",
"parent": "news",
"type": "terms",
"id": "forum",
"slug": "forum",
"link": "/forum",
"taxonomy": "site"
},
"futureofyou": {
"name": "Future of You",
"grouping": [
"science",
"futureofyou"
],
"parent": "science",
"type": "terms",
"id": "futureofyou",
"slug": "futureofyou",
"link": "/futureofyou",
"taxonomy": "site"
},
"jpepinheart": {
"name": "KQED food",
"relatedContentQuery": "posts/food,bayareabites,checkplease",
"parent": "food",
"type": "terms",
"id": "jpepinheart",
"slug": "jpepinheart",
"link": "/food",
"taxonomy": "site"
},
"liveblog": {
"name": "Live Blog",
"type": "terms",
"id": "liveblog",
"slug": "liveblog",
"link": "/liveblog",
"taxonomy": "site"
},
"livetv": {
"name": "Live TV",
"parent": "tv",
"type": "terms",
"id": "livetv",
"slug": "livetv",
"link": "/livetv",
"taxonomy": "site"
},
"lowdown": {
"name": "The Lowdown",
"relatedContentQuery": "posts/lowdown?",
"parent": "news",
"type": "terms",
"id": "lowdown",
"slug": "lowdown",
"link": "/lowdown",
"taxonomy": "site"
},
"mindshift": {
"name": "Mindshift",
"parent": "news",
"description": "MindShift explores the future of education by highlighting the innovative – and sometimes counterintuitive – ways educators and parents are helping all children succeed.",
"type": "terms",
"id": "mindshift",
"slug": "mindshift",
"link": "/mindshift",
"taxonomy": "site"
},
"news": {
"name": "News",
"grouping": [
"news",
"forum"
],
"type": "terms",
"id": "news",
"slug": "news",
"link": "/news",
"taxonomy": "site"
},
"perspectives": {
"name": "Perspectives",
"parent": "radio",
"type": "terms",
"id": "perspectives",
"slug": "perspectives",
"link": "/perspectives",
"taxonomy": "site"
},
"podcasts": {
"name": "Podcasts",
"type": "terms",
"id": "podcasts",
"slug": "podcasts",
"link": "/podcasts",
"taxonomy": "site"
},
"pop": {
"name": "Pop",
"parent": "arts",
"type": "terms",
"id": "pop",
"slug": "pop",
"link": "/pop",
"taxonomy": "site"
},
"pressroom": {
"name": "Pressroom",
"type": "terms",
"id": "pressroom",
"slug": "pressroom",
"link": "/pressroom",
"taxonomy": "site"
},
"quest": {
"name": "Quest",
"parent": "science",
"type": "terms",
"id": "quest",
"slug": "quest",
"link": "/quest",
"taxonomy": "site"
},
"radio": {
"name": "Radio",
"grouping": [
"forum",
"perspectives"
],
"description": "Listen to KQED Public Radio – home of Forum and The California Report – on 88.5 FM in San Francisco, 89.3 FM in Sacramento, 88.3 FM in Santa Rosa and 88.1 FM in Martinez.",
"type": "terms",
"id": "radio",
"slug": "radio",
"link": "/radio",
"taxonomy": "site"
},
"root": {
"name": "KQED",
"image": "https://ww2.kqed.org/app/uploads/2020/02/KQED-OG-Image@1x.png",
"imageWidth": 1200,
"imageHeight": 630,
"headData": {
"title": "KQED | News, Radio, Podcasts, TV | Public Media for Northern California",
"description": "KQED provides public radio, television, and independent reporting on issues that matter to the Bay Area. We’re the NPR and PBS member station for Northern California."
},
"type": "terms",
"id": "root",
"slug": "root",
"link": "/root",
"taxonomy": "site"
},
"science": {
"name": "Science",
"grouping": [
"science",
"futureofyou"
],
"description": "KQED Science brings you award-winning science and environment coverage from the Bay Area and beyond.",
"type": "terms",
"id": "science",
"slug": "science",
"link": "/science",
"taxonomy": "site"
},
"stateofhealth": {
"name": "State of Health",
"parent": "science",
"type": "terms",
"id": "stateofhealth",
"slug": "stateofhealth",
"link": "/stateofhealth",
"taxonomy": "site"
},
"support": {
"name": "Support",
"type": "terms",
"id": "support",
"slug": "support",
"link": "/support",
"taxonomy": "site"
},
"thedolist": {
"name": "The Do List",
"parent": "arts",
"type": "terms",
"id": "thedolist",
"slug": "thedolist",
"link": "/thedolist",
"taxonomy": "site"
},
"trulyca": {
"name": "Truly CA",
"grouping": [
"arts",
"pop",
"trulyca"
],
"parent": "arts",
"type": "terms",
"id": "trulyca",
"slug": "trulyca",
"link": "/trulyca",
"taxonomy": "site"
},
"tv": {
"name": "TV",
"type": "terms",
"id": "tv",
"slug": "tv",
"link": "/tv",
"taxonomy": "site"
},
"voterguide": {
"name": "Voter Guide",
"parent": "elections",
"alias": "elections",
"type": "terms",
"id": "voterguide",
"slug": "voterguide",
"link": "/voterguide",
"taxonomy": "site"
},
"guiaelectoral": {
"name": "Guia Electoral",
"parent": "elections",
"alias": "elections",
"type": "terms",
"id": "guiaelectoral",
"slug": "guiaelectoral",
"link": "/guiaelectoral",
"taxonomy": "site"
},
"news_33542": {
"type": "terms",
"id": "news_33542",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "33542",
"found": true
},
"relationships": {},
"featImg": null,
"name": "OpenAI",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "OpenAI Archives | KQED News",
"ogDescription": null,
"imageData": {
"ogImageSize": {
"file": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"width": 1200,
"height": 630
},
"twImageSize": {
"file": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png"
},
"twitterCard": "summary_large_image"
}
},
"ttid": 33559,
"slug": "openai",
"isLoading": false,
"link": "/news/tag/openai"
},
"news_6188": {
"type": "terms",
"id": "news_6188",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "6188",
"found": true
},
"relationships": {},
"featImg": null,
"name": "Law and Justice",
"description": null,
"taxonomy": "category",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "Law and Justice Archives | KQED News",
"ogDescription": null
},
"ttid": 6212,
"slug": "law-and-justice",
"isLoading": false,
"link": "/news/category/law-and-justice"
},
"news_28250": {
"type": "terms",
"id": "news_28250",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "28250",
"found": true
},
"relationships": {},
"featImg": null,
"name": "Local",
"description": null,
"taxonomy": "category",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "Local Archives | KQED News",
"ogDescription": null
},
"ttid": 28267,
"slug": "local",
"isLoading": false,
"link": "/news/category/local"
},
"news_8": {
"type": "terms",
"id": "news_8",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "8",
"found": true
},
"relationships": {},
"featImg": null,
"name": "News",
"description": null,
"taxonomy": "category",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "News Archives | KQED News",
"ogDescription": null
},
"ttid": 8,
"slug": "news",
"isLoading": false,
"link": "/news/category/news"
},
"news_248": {
"type": "terms",
"id": "news_248",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "248",
"found": true
},
"relationships": {},
"featImg": null,
"name": "Technology",
"description": null,
"taxonomy": "category",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "Technology Archives | KQED News",
"ogDescription": null
},
"ttid": 256,
"slug": "technology",
"isLoading": false,
"link": "/news/category/technology"
},
"news_34755": {
"type": "terms",
"id": "news_34755",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "34755",
"found": true
},
"relationships": {},
"name": "artificial intelligence",
"slug": "artificial-intelligence",
"taxonomy": "tag",
"description": null,
"featImg": null,
"headData": {
"title": "artificial intelligence | KQED News",
"description": null,
"ogTitle": null,
"ogDescription": null,
"ogImgId": null,
"twTitle": null,
"twDescription": null,
"twImgId": null
},
"ttid": 34772,
"isLoading": false,
"link": "/news/tag/artificial-intelligence"
},
"news_1386": {
"type": "terms",
"id": "news_1386",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "1386",
"found": true
},
"relationships": {},
"featImg": null,
"name": "Bay Area",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "Bay Area Archives | KQED News",
"ogDescription": null
},
"ttid": 1398,
"slug": "bay-area",
"isLoading": false,
"link": "/news/tag/bay-area"
},
"news_32668": {
"type": "terms",
"id": "news_32668",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "32668",
"found": true
},
"relationships": {},
"featImg": null,
"name": "ChatGPT",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "ChatGPT Archives | KQED News",
"ogDescription": null
},
"ttid": 32685,
"slug": "chatgpt",
"isLoading": false,
"link": "/news/tag/chatgpt"
},
"news_3897": {
"type": "terms",
"id": "news_3897",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "3897",
"found": true
},
"relationships": {},
"featImg": null,
"name": "Elon Musk",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "Elon Musk Archives | KQED News",
"ogDescription": null
},
"ttid": 3916,
"slug": "elon-musk",
"isLoading": false,
"link": "/news/tag/elon-musk"
},
"news_27626": {
"type": "terms",
"id": "news_27626",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "27626",
"found": true
},
"relationships": {},
"featImg": null,
"name": "featured-news",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "featured-news Archives | KQED News",
"ogDescription": null
},
"ttid": 27643,
"slug": "featured-news",
"isLoading": false,
"link": "/news/tag/featured-news"
},
"news_21891": {
"type": "terms",
"id": "news_21891",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "21891",
"found": true
},
"relationships": {},
"featImg": null,
"name": "lawsuits",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "lawsuits Archives | KQED News",
"ogDescription": null
},
"ttid": 21908,
"slug": "lawsuits",
"isLoading": false,
"link": "/news/tag/lawsuits"
},
"news_34054": {
"type": "terms",
"id": "news_34054",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "34054",
"found": true
},
"relationships": {},
"featImg": null,
"name": "oakland",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "oakland Archives | KQED News",
"ogDescription": null
},
"ttid": 34071,
"slug": "oakland",
"isLoading": false,
"link": "/news/tag/oakland"
},
"news_33543": {
"type": "terms",
"id": "news_33543",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "33543",
"found": true
},
"relationships": {},
"name": "Sam Altman",
"slug": "sam-altman",
"taxonomy": "tag",
"description": null,
"featImg": null,
"headData": {
"title": "Sam Altman | KQED News",
"description": null,
"ogTitle": null,
"ogDescription": null,
"ogImgId": null,
"twTitle": null,
"twDescription": null,
"twImgId": null,
"metaRobotsNoIndex": "noindex"
},
"ttid": 33560,
"isLoading": false,
"link": "/news/tag/sam-altman"
},
"news_34586": {
"type": "terms",
"id": "news_34586",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "34586",
"found": true
},
"relationships": {},
"name": "Silicon Valley",
"slug": "silicon-valley",
"taxonomy": "tag",
"description": null,
"featImg": null,
"headData": {
"title": "Silicon Valley | KQED News",
"description": null,
"ogTitle": null,
"ogDescription": null,
"ogImgId": null,
"twTitle": null,
"twDescription": null,
"twImgId": null
},
"ttid": 34603,
"isLoading": false,
"link": "/news/tag/silicon-valley"
},
"news_1631": {
"type": "terms",
"id": "news_1631",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "1631",
"found": true
},
"relationships": {},
"name": "Technology",
"slug": "technology",
"taxonomy": "tag",
"description": null,
"featImg": null,
"headData": {
"title": "Technology | KQED News",
"description": null,
"ogTitle": null,
"ogDescription": null,
"ogImgId": null,
"twTitle": null,
"twDescription": null,
"twImgId": null
},
"ttid": 1643,
"isLoading": false,
"link": "/news/tag/technology"
},
"news_33733": {
"type": "terms",
"id": "news_33733",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "33733",
"found": true
},
"relationships": {},
"featImg": null,
"name": "News",
"description": null,
"taxonomy": "interest",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "News Archives | KQED News",
"ogDescription": null
},
"ttid": 33750,
"slug": "news",
"isLoading": false,
"link": "/news/interest/news"
},
"news_33730": {
"type": "terms",
"id": "news_33730",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "33730",
"found": true
},
"relationships": {},
"featImg": null,
"name": "Oakland",
"description": null,
"taxonomy": "interest",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "Oakland Archives | KQED News",
"ogDescription": null
},
"ttid": 33747,
"slug": "oakland",
"isLoading": false,
"link": "/news/interest/oakland"
},
"news_33732": {
"type": "terms",
"id": "news_33732",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "33732",
"found": true
},
"relationships": {},
"featImg": null,
"name": "Technology",
"description": null,
"taxonomy": "interest",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "Technology Archives | KQED News",
"ogDescription": null
},
"ttid": 33749,
"slug": "technology",
"isLoading": false,
"link": "/news/interest/technology"
},
"news_31795": {
"type": "terms",
"id": "news_31795",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "31795",
"found": true
},
"relationships": {},
"featImg": null,
"name": "California",
"description": null,
"taxonomy": "category",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "California Archives | KQED News",
"ogDescription": null
},
"ttid": 31812,
"slug": "california",
"isLoading": false,
"link": "/news/category/california"
},
"news_19954": {
"type": "terms",
"id": "news_19954",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "19954",
"found": true
},
"relationships": {},
"featImg": null,
"name": "Law and Justice",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "Law and Justice Archives | KQED News",
"ogDescription": null
},
"ttid": 19971,
"slug": "law-and-justice",
"isLoading": false,
"link": "/news/tag/law-and-justice"
},
"news_34167": {
"type": "terms",
"id": "news_34167",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "34167",
"found": true
},
"relationships": {},
"name": "Criminal Justice",
"slug": "criminal-justice",
"taxonomy": "category",
"description": null,
"featImg": null,
"headData": {
"title": "Criminal Justice Archives | KQED News",
"description": null,
"ogTitle": null,
"ogDescription": null,
"ogImgId": null,
"twTitle": null,
"twDescription": null,
"twImgId": null
},
"ttid": 34184,
"isLoading": false,
"link": "/news/category/criminal-justice"
},
"news_17725": {
"type": "terms",
"id": "news_17725",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "17725",
"found": true
},
"relationships": {},
"featImg": null,
"name": "criminal justice",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "criminal justice Archives | KQED News",
"ogDescription": null
},
"ttid": 17759,
"slug": "criminal-justice",
"isLoading": false,
"link": "/news/tag/criminal-justice"
},
"news_22434": {
"type": "terms",
"id": "news_22434",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "22434",
"found": true
},
"relationships": {},
"featImg": null,
"name": "death",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "death Archives | KQED News",
"ogDescription": null
},
"ttid": 22451,
"slug": "death",
"isLoading": false,
"link": "/news/tag/death"
},
"news_35784": {
"type": "terms",
"id": "news_35784",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "35784",
"found": true
},
"relationships": {},
"name": "gun violence",
"slug": "gun-violence",
"taxonomy": "tag",
"description": null,
"featImg": null,
"headData": {
"title": "gun violence | KQED News",
"description": null,
"ogTitle": null,
"ogDescription": null,
"ogImgId": null,
"twTitle": null,
"twDescription": null,
"twImgId": null
},
"ttid": 35801,
"isLoading": false,
"link": "/news/tag/gun-violence"
},
"news_38": {
"type": "terms",
"id": "news_38",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "38",
"found": true
},
"relationships": {},
"featImg": null,
"name": "San Francisco",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "San Francisco Archives | KQED News",
"ogDescription": null
},
"ttid": 58,
"slug": "san-francisco",
"isLoading": false,
"link": "/news/tag/san-francisco"
},
"news_33745": {
"type": "terms",
"id": "news_33745",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "33745",
"found": true
},
"relationships": {},
"featImg": null,
"name": "Criminal Justice",
"description": null,
"taxonomy": "interest",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "Criminal Justice Archives | KQED News",
"ogDescription": null
},
"ttid": 33762,
"slug": "criminal-justice",
"isLoading": false,
"link": "/news/interest/criminal-justice"
},
"news_33729": {
"type": "terms",
"id": "news_33729",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "33729",
"found": true
},
"relationships": {},
"featImg": null,
"name": "San Francisco",
"description": null,
"taxonomy": "interest",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "San Francisco Archives | KQED News",
"ogDescription": null
},
"ttid": 33746,
"slug": "san-francisco",
"isLoading": false,
"link": "/news/interest/san-francisco"
},
"news_57": {
"type": "terms",
"id": "news_57",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "57",
"found": true
},
"relationships": {},
"featImg": null,
"name": "Tesla",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "Tesla Archives | KQED News",
"ogDescription": null
},
"ttid": 57,
"slug": "tesla",
"isLoading": false,
"link": "/news/tag/tesla"
},
"news_18352": {
"type": "terms",
"id": "news_18352",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "18352",
"found": true
},
"relationships": {},
"featImg": null,
"name": "East Bay",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "East Bay Archives | KQED News",
"ogDescription": null
},
"ttid": 18386,
"slug": "east-bay",
"isLoading": false,
"link": "/news/tag/east-bay"
},
"news_18538": {
"type": "terms",
"id": "news_18538",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "18538",
"found": true
},
"relationships": {},
"featImg": null,
"name": "California",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "California Archives | KQED News",
"ogDescription": null
},
"ttid": 31,
"slug": "california",
"isLoading": false,
"link": "/news/tag/california"
},
"news_23052": {
"type": "terms",
"id": "news_23052",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "23052",
"found": true
},
"relationships": {},
"featImg": null,
"name": "fraud",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "fraud Archives | KQED News",
"ogDescription": null
},
"ttid": 23069,
"slug": "fraud",
"isLoading": false,
"link": "/news/tag/fraud"
},
"news_33738": {
"type": "terms",
"id": "news_33738",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "33738",
"found": true
},
"relationships": {},
"featImg": null,
"name": "California",
"description": null,
"taxonomy": "interest",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "California Archives | KQED News",
"ogDescription": null
},
"ttid": 33755,
"slug": "california",
"isLoading": false,
"link": "/news/interest/california"
},
"news_25184": {
"type": "terms",
"id": "news_25184",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "25184",
"found": true
},
"relationships": {},
"featImg": null,
"name": "AI",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "AI Archives | KQED News",
"ogDescription": null
},
"ttid": 25201,
"slug": "ai",
"isLoading": false,
"link": "/news/tag/ai"
},
"news_32664": {
"type": "terms",
"id": "news_32664",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "32664",
"found": true
},
"relationships": {},
"name": "AI software",
"slug": "ai-software",
"taxonomy": "tag",
"description": null,
"featImg": null,
"headData": {
"title": "AI software | KQED News",
"description": null,
"ogTitle": null,
"ogDescription": null,
"ogImgId": null,
"twTitle": null,
"twDescription": null,
"twImgId": null
},
"ttid": 32681,
"isLoading": false,
"link": "/news/tag/ai-software"
},
"news_36279": {
"type": "terms",
"id": "news_36279",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "36279",
"found": true
},
"relationships": {},
"name": "chatbot",
"slug": "chatbot",
"taxonomy": "tag",
"description": null,
"featImg": null,
"headData": {
"title": "chatbot | KQED News",
"description": null,
"ogTitle": null,
"ogDescription": null,
"ogImgId": null,
"twTitle": null,
"twDescription": null,
"twImgId": null
},
"ttid": 36296,
"isLoading": false,
"link": "/news/tag/chatbot"
},
"news_17626": {
"type": "terms",
"id": "news_17626",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "17626",
"found": true
},
"relationships": {},
"featImg": null,
"name": "crime",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "crime Archives | KQED News",
"ogDescription": null
},
"ttid": 17660,
"slug": "crime",
"isLoading": false,
"link": "/news/tag/crime"
}
},
"userAgentReducer": {
"userAgent": "Mozilla/5.0 AppleWebKit/537.36 (KHTML, like Gecko; compatible; ClaudeBot/1.0; +claudebot@anthropic.com)",
"isBot": true
},
"userPermissionsReducer": {
"wpLoggedIn": false
},
"localStorageReducer": {},
"browserHistoryReducer": [],
"eventsReducer": {},
"fssReducer": {},
"tvDailyScheduleReducer": {},
"tvWeeklyScheduleReducer": {},
"tvPrimetimeScheduleReducer": {},
"tvMonthlyScheduleReducer": {},
"userAccountReducer": {
"user": {
"email": null,
"emailStatus": "EMAIL_UNVALIDATED",
"loggedStatus": "LOGGED_OUT",
"loggingChecked": false,
"articles": [],
"firstName": null,
"lastName": null,
"phoneNumber": null,
"fetchingMembership": false,
"membershipError": false,
"memberships": [
{
"id": null,
"startDate": null,
"firstName": null,
"lastName": null,
"familyNumber": null,
"memberNumber": null,
"memberSince": null,
"expirationDate": null,
"pfsEligible": false,
"isSustaining": false,
"membershipLevel": "Prospect",
"membershipStatus": "Non Member",
"lastGiftDate": null,
"renewalDate": null,
"lastDonationAmount": null
}
]
},
"authModal": {
"isOpen": false,
"view": "LANDING_VIEW"
},
"error": null
},
"youthMediaReducer": {},
"checkPleaseReducer": {
"filterData": {
"region": {
"key": "Restaurant Region",
"filters": [
"Any Region"
]
},
"cuisine": {
"key": "Restaurant Cuisine",
"filters": [
"Any Cuisine"
]
}
},
"restaurantDataById": {},
"restaurantIdsSorted": [],
"error": null
},
"location": {
"pathname": "/news/tag/openai",
"previousPathname": "/"
}
}