Sam Altman Defends Himself From Elon Musk’s Accusations in OpenAI Trial
Former OpenAI Exec Calls Decision to Remove Sam Altman a ‘Hail Mary’ During Musk Trial
Inside Elon Musk and Sam Altman's Battle Over OpenAI
OpenAI Back in Court Over Canada School Shooter’s Use of ChatGPT
Elon Musk Says Sam Altman Tricked Him Into Funding OpenAI
Elon Musk Takes Aim at OpenAI as Trial Begins: ‘It’s Not OK to Steal a Charity’
California Mom Who Lost Her Son to an AI Chatbot Is Now Fighting to Regulate Them
OpenAI and Common Sense Media Partner on New Kids AI Safety Ballot Measure
Trump’s AI Order Provokes Pushback from California Officials and Consumer Advocates
Player sponsored by
// Flag set by the server renderer so client code can detect server-side rendering.
// Terminated explicitly rather than relying on automatic semicolon insertion.
window.__IS_SSR__ = true;
window.__INITIAL_STATE__={
"attachmentsReducer": {
"audio_0": {
"type": "attachments",
"id": "audio_0",
"imgSizes": {
"kqedFullSize": {
"file": "https://ww2.kqed.org/news/wp-content/themes/KQED-unified/img/audio_bgs/background0.jpg"
}
}
},
"audio_1": {
"type": "attachments",
"id": "audio_1",
"imgSizes": {
"kqedFullSize": {
"file": "https://ww2.kqed.org/news/wp-content/themes/KQED-unified/img/audio_bgs/background1.jpg"
}
}
},
"audio_2": {
"type": "attachments",
"id": "audio_2",
"imgSizes": {
"kqedFullSize": {
"file": "https://ww2.kqed.org/news/wp-content/themes/KQED-unified/img/audio_bgs/background2.jpg"
}
}
},
"audio_3": {
"type": "attachments",
"id": "audio_3",
"imgSizes": {
"kqedFullSize": {
"file": "https://ww2.kqed.org/news/wp-content/themes/KQED-unified/img/audio_bgs/background3.jpg"
}
}
},
"audio_4": {
"type": "attachments",
"id": "audio_4",
"imgSizes": {
"kqedFullSize": {
"file": "https://ww2.kqed.org/news/wp-content/themes/KQED-unified/img/audio_bgs/background4.jpg"
}
}
},
"placeholder": {
"type": "attachments",
"id": "placeholder",
"imgSizes": {
"thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-160x107.jpg",
"width": 160,
"height": 107,
"mimeType": "image/jpeg"
},
"medium": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-800x533.jpg",
"width": 800,
"height": 533,
"mimeType": "image/jpeg"
},
"medium_large": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-768x512.jpg",
"width": 768,
"height": 512,
"mimeType": "image/jpeg"
},
"large": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-1020x680.jpg",
"width": 1020,
"height": 680,
"mimeType": "image/jpeg"
},
"1536x1536": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-1536x1024.jpg",
"width": 1536,
"height": 1024,
"mimeType": "image/jpeg"
},
"fd-lrg": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-1536x1024.jpg",
"width": 1536,
"height": 1024,
"mimeType": "image/jpeg"
},
"fd-med": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-1020x680.jpg",
"width": 1020,
"height": 680,
"mimeType": "image/jpeg"
},
"fd-sm": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-800x533.jpg",
"width": 800,
"height": 533,
"mimeType": "image/jpeg"
},
"post-thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-672x372.jpg",
"width": 672,
"height": 372,
"mimeType": "image/jpeg"
},
"twentyfourteen-full-width": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-1038x576.jpg",
"width": 1038,
"height": 576,
"mimeType": "image/jpeg"
},
"xxsmall": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-160x107.jpg",
"width": 160,
"height": 107,
"mimeType": "image/jpeg"
},
"xsmall": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-672x372.jpg",
"width": 672,
"height": 372,
"mimeType": "image/jpeg"
},
"small": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-672x372.jpg",
"width": 672,
"height": 372,
"mimeType": "image/jpeg"
},
"xlarge": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-1020x680.jpg",
"width": 1020,
"height": 680,
"mimeType": "image/jpeg"
},
"full-width": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1-1920x1280.jpg",
"width": 1920,
"height": 1280,
"mimeType": "image/jpeg"
},
"guest-author-32": {
"file": "https://cdn.kqed.org/wp-content/uploads/2025/01/KQED-Default-Image-816638274-1333x1333-1-160x160.jpg",
"width": 32,
"height": 32,
"mimeType": "image/jpeg"
},
"guest-author-50": {
"file": "https://cdn.kqed.org/wp-content/uploads/2025/01/KQED-Default-Image-816638274-1333x1333-1-160x160.jpg",
"width": 50,
"height": 50,
"mimeType": "image/jpeg"
},
"guest-author-64": {
"file": "https://cdn.kqed.org/wp-content/uploads/2025/01/KQED-Default-Image-816638274-1333x1333-1-160x160.jpg",
"width": 64,
"height": 64,
"mimeType": "image/jpeg"
},
"guest-author-96": {
"file": "https://cdn.kqed.org/wp-content/uploads/2025/01/KQED-Default-Image-816638274-1333x1333-1-160x160.jpg",
"width": 96,
"height": 96,
"mimeType": "image/jpeg"
},
"guest-author-128": {
"file": "https://cdn.kqed.org/wp-content/uploads/2025/01/KQED-Default-Image-816638274-1333x1333-1-160x160.jpg",
"width": 128,
"height": 128,
"mimeType": "image/jpeg"
},
"detail": {
"file": "https://cdn.kqed.org/wp-content/uploads/2025/01/KQED-Default-Image-816638274-1333x1333-1-160x160.jpg",
"width": 160,
"height": 160,
"mimeType": "image/jpeg"
},
"kqedFullSize": {
"file": "https://cdn.kqed.org/wp-content/uploads/2024/12/KQED-Default-Image-816638274-2000x1333-1.jpg",
"width": 2000,
"height": 1333
}
}
},
"news_12083392": {
"type": "attachments",
"id": "news_12083392",
"meta": {
"index": "attachments_1716263798",
"site": "news",
"id": "12083392",
"found": true
},
"title": "260512-MUSK-ALTMAN-TRIAL-VB-03-KQED",
"publishDate": 1778625867,
"status": "inherit",
"parent": 0,
"modified": 1778625974,
"caption": "Open AI CEO Sam Altman testifies as a video of him is played on a screen in the trial in which Elon Musk claims that Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity rather than solely for profit in Oakland on May 12, 2026. During the brief cross-examination of Altman, the Tesla CEO’s attorney questioned whether or not Altman was trustworthy.\r\n",
"credit": "Vicki Behringer for KQED",
"altTag": null,
"description": null,
"imgSizes": {
"thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-03-KQED-160x90.jpg",
"width": 160,
"height": 90,
"mimeType": "image/jpeg"
},
"1536x1536": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-03-KQED-1536x864.jpg",
"width": 1536,
"height": 864,
"mimeType": "image/jpeg"
},
"post-thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-03-KQED-672x372.jpg",
"width": 672,
"height": 372,
"mimeType": "image/jpeg"
},
"twentyfourteen-full-width": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-03-KQED-1038x576.jpg",
"width": 1038,
"height": 576,
"mimeType": "image/jpeg"
},
"npr-cds-wide": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-03-KQED-1200x675.jpg",
"width": 1200,
"height": 675,
"mimeType": "image/jpeg"
},
"npr-cds-square": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-03-KQED-600x600.jpg",
"width": 600,
"height": 600,
"mimeType": "image/jpeg"
},
"kqedFullSize": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-03-KQED.jpg",
"width": 2000,
"height": 1125
}
},
"fetchFailed": false,
"isLoading": false
},
"news_12083235": {
"type": "attachments",
"id": "news_12083235",
"meta": {
"index": "attachments_1716263798",
"site": "news",
"id": "12083235",
"found": true
},
"title": "Elon Musk v. OpenAI Trial Continues In California",
"publishDate": 1778545779,
"status": "inherit",
"parent": 12083224,
"modified": 1778546322,
"caption": "OpenAI CEO Sam Altman arrives at the Ronald V. Dellums Federal Building on April 30, 2026, in Oakland, California. Elon Musk invested in OpenAI early on, believing it would be a nonprofit, but is now suing OpenAI and its CEO, Sam Altman, for allegedly deceiving him by developing OpenAI into a for-profit company. ",
"credit": "Benjamin Fanjoy/Getty Images",
"altTag": null,
"description": null,
"imgSizes": {
"thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/SamAltmanGetty-160x107.jpg",
"width": 160,
"height": 107,
"mimeType": "image/jpeg"
},
"1536x1536": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/SamAltmanGetty-1536x1024.jpg",
"width": 1536,
"height": 1024,
"mimeType": "image/jpeg"
},
"post-thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/SamAltmanGetty-672x372.jpg",
"width": 672,
"height": 372,
"mimeType": "image/jpeg"
},
"twentyfourteen-full-width": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/SamAltmanGetty-1038x576.jpg",
"width": 1038,
"height": 576,
"mimeType": "image/jpeg"
},
"npr-cds-wide": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/SamAltmanGetty-1200x675.jpg",
"width": 1200,
"height": 675,
"mimeType": "image/jpeg"
},
"npr-cds-square": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/SamAltmanGetty-600x600.jpg",
"width": 600,
"height": 600,
"mimeType": "image/jpeg"
},
"kqedFullSize": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/SamAltmanGetty.jpg",
"width": 2000,
"height": 1333
}
},
"fetchFailed": false,
"isLoading": false
},
"news_12082344": {
"type": "attachments",
"id": "news_12082344",
"meta": {
"index": "attachments_1716263798",
"site": "news",
"id": "12082344",
"found": true
},
"title": "260504-MUSK-ALTMAN-VB-04-KQED",
"publishDate": 1777936709,
"status": "inherit",
"parent": 0,
"modified": 1777936770,
"caption": "Open AI President Greg Brockman testifies in the trial in which Elon Musk claims that Sam Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit in Oakland on May 4, 2026.",
"credit": "Vicki Behringer for KQED",
"altTag": null,
"description": null,
"imgSizes": {
"thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260504-MUSK-ALTMAN-VB-04-KQED-160x90.jpg",
"width": 160,
"height": 90,
"mimeType": "image/jpeg"
},
"1536x1536": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260504-MUSK-ALTMAN-VB-04-KQED-1536x864.jpg",
"width": 1536,
"height": 864,
"mimeType": "image/jpeg"
},
"post-thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260504-MUSK-ALTMAN-VB-04-KQED-672x372.jpg",
"width": 672,
"height": 372,
"mimeType": "image/jpeg"
},
"twentyfourteen-full-width": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260504-MUSK-ALTMAN-VB-04-KQED-1038x576.jpg",
"width": 1038,
"height": 576,
"mimeType": "image/jpeg"
},
"npr-cds-wide": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260504-MUSK-ALTMAN-VB-04-KQED-1200x675.jpg",
"width": 1200,
"height": 675,
"mimeType": "image/jpeg"
},
"npr-cds-square": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260504-MUSK-ALTMAN-VB-04-KQED-600x600.jpg",
"width": 600,
"height": 600,
"mimeType": "image/jpeg"
},
"kqedFullSize": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260504-MUSK-ALTMAN-VB-04-KQED.jpg",
"width": 2000,
"height": 1125
}
},
"fetchFailed": false,
"isLoading": false
},
"news_12082068": {
"type": "attachments",
"id": "news_12082068",
"meta": {
"index": "attachments_1716263798",
"site": "news",
"id": "12082068",
"found": true
},
"title": "CANADA-SHOOTING-CRIME",
"publishDate": 1777656025,
"status": "inherit",
"parent": 12082064,
"modified": 1777677641,
"caption": "A young boy brings flowers to a memorial in honor of the victims of one of Canada's deadliest shootings in Tumbler Ridge, British Columbia, Canada, on Feb. 13, 2026. An 18-year-old carried out a mass shooting in a remote mining town, killing six people at a local school, after slaying her mother and stepbrother. Canadian Police Commander Dwayne McDonald said authorities still don't know the motive in the Feb. 10 mass shooting, but the shooter, who took her own life, was known to have mental health issues. ",
"credit": "Paige Taylor White/AFP via Getty Images",
"altTag": null,
"description": null,
"imgSizes": {
"thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty-160x110.jpg",
"width": 160,
"height": 110,
"mimeType": "image/jpeg"
},
"1536x1536": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty-1536x1053.jpg",
"width": 1536,
"height": 1053,
"mimeType": "image/jpeg"
},
"post-thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty-672x372.jpg",
"width": 672,
"height": 372,
"mimeType": "image/jpeg"
},
"twentyfourteen-full-width": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty-1038x576.jpg",
"width": 1038,
"height": 576,
"mimeType": "image/jpeg"
},
"npr-cds-wide": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty-1200x675.jpg",
"width": 1200,
"height": 675,
"mimeType": "image/jpeg"
},
"npr-cds-square": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty-600x600.jpg",
"width": 600,
"height": 600,
"mimeType": "image/jpeg"
},
"kqedFullSize": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty.jpg",
"width": 2000,
"height": 1371
}
},
"fetchFailed": false,
"isLoading": false
},
"news_12081681": {
"type": "attachments",
"id": "news_12081681",
"meta": {
"index": "attachments_1716263798",
"site": "news",
"id": "12081681",
"found": true
},
"title": "260428-MUSK-ALTMAN-VB-03-KQED-1",
"publishDate": 1777416108,
"status": "inherit",
"parent": 12081603,
"modified": 1777508469,
"caption": "Representing Microsoft, Russell Coan (left) speaks as Elon Musk watches in the trial in which Elon Musk claims that Sam Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit, in Oakland on April 28, 2026.",
"credit": "Vicki Behringer for KQED",
"altTag": null,
"description": null,
"imgSizes": {
"thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1-160x90.jpg",
"width": 160,
"height": 90,
"mimeType": "image/jpeg"
},
"1536x1536": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1-1536x864.jpg",
"width": 1536,
"height": 864,
"mimeType": "image/jpeg"
},
"post-thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1-672x372.jpg",
"width": 672,
"height": 372,
"mimeType": "image/jpeg"
},
"twentyfourteen-full-width": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1-1038x576.jpg",
"width": 1038,
"height": 576,
"mimeType": "image/jpeg"
},
"npr-cds-wide": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1-1200x675.jpg",
"width": 1200,
"height": 675,
"mimeType": "image/jpeg"
},
"npr-cds-square": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1-600x600.jpg",
"width": 600,
"height": 600,
"mimeType": "image/jpeg"
},
"kqedFullSize": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1.jpg",
"width": 2000,
"height": 1125
}
},
"fetchFailed": false,
"isLoading": false
},
"news_12081639": {
"type": "attachments",
"id": "news_12081639",
"meta": {
"index": "attachments_1716263798",
"site": "news",
"id": "12081639",
"found": true
},
"title": "260428-MUSK ALTMAN-VB-02-KQED",
"publishDate": 1777410140,
"status": "inherit",
"parent": 12081603,
"modified": 1777422271,
"caption": "Elon Musk (left) takes the stand in the trial in which Elon Musk claims that Sam Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit, in Oakland on April 28, 2026.",
"credit": "Vicki Behringer for KQED",
"altTag": null,
"description": null,
"imgSizes": {
"thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-02-KQED-160x90.jpg",
"width": 160,
"height": 90,
"mimeType": "image/jpeg"
},
"1536x1536": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-02-KQED-1536x864.jpg",
"width": 1536,
"height": 864,
"mimeType": "image/jpeg"
},
"post-thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-02-KQED-672x372.jpg",
"width": 672,
"height": 372,
"mimeType": "image/jpeg"
},
"twentyfourteen-full-width": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-02-KQED-1038x576.jpg",
"width": 1038,
"height": 576,
"mimeType": "image/jpeg"
},
"npr-cds-wide": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-02-KQED-1200x675.jpg",
"width": 1200,
"height": 675,
"mimeType": "image/jpeg"
},
"npr-cds-square": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-02-KQED-600x600.jpg",
"width": 600,
"height": 600,
"mimeType": "image/jpeg"
},
"kqedFullSize": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-02-KQED.jpg",
"width": 2000,
"height": 1125
}
},
"fetchFailed": false,
"isLoading": false
},
"news_11989313": {
"type": "attachments",
"id": "news_11989313",
"meta": {
"index": "attachments_1716263798",
"site": "news",
"id": "11989313",
"found": true
},
"title": "Tech AI Illustrations",
"publishDate": 1717711326,
"status": "inherit",
"parent": 11989308,
"modified": 1717711420,
"caption": "The OpenAI ChatGPT logo.",
"credit": "Jaap Arriens/NurPhoto via Getty Images",
"altTag": null,
"description": null,
"imgSizes": {
"medium": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2024/06/GettyImages-2155035557-800x533.jpg",
"width": 800,
"height": 533,
"mimeType": "image/jpeg"
},
"large": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2024/06/GettyImages-2155035557-1020x680.jpg",
"width": 1020,
"height": 680,
"mimeType": "image/jpeg"
},
"thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2024/06/GettyImages-2155035557-160x107.jpg",
"width": 160,
"height": 107,
"mimeType": "image/jpeg"
},
"1536x1536": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2024/06/GettyImages-2155035557-1536x1024.jpg",
"width": 1536,
"height": 1024,
"mimeType": "image/jpeg"
},
"2048x2048": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2024/06/GettyImages-2155035557-2048x1365.jpg",
"width": 2048,
"height": 1365,
"mimeType": "image/jpeg"
},
"post-thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2024/06/GettyImages-2155035557-672x372.jpg",
"width": 672,
"height": 372,
"mimeType": "image/jpeg"
},
"twentyfourteen-full-width": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2024/06/GettyImages-2155035557-1038x576.jpg",
"width": 1038,
"height": 576,
"mimeType": "image/jpeg"
},
"full-width": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2024/06/GettyImages-2155035557-1920x1280.jpg",
"width": 1920,
"height": 1280,
"mimeType": "image/jpeg"
},
"kqedFullSize": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2024/06/GettyImages-2155035557-scaled-e1760733694503.jpg",
"width": 2000,
"height": 1334
}
},
"fetchFailed": false,
"isLoading": false
},
"news_12069330": {
"type": "attachments",
"id": "news_12069330",
"meta": {
"index": "attachments_1716263798",
"site": "news",
"id": "12069330",
"found": true
},
"title": "Group of university students staring at mobile phones Announcement of academic results or test scores",
"publishDate": 1768004932,
"status": "inherit",
"parent": 12069286,
"modified": 1768007574,
"caption": "A group of university students stares at a mobile phone.",
"credit": "Pranithan Chorruangsak/Getty Images",
"altTag": null,
"description": null,
"imgSizes": {
"thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/01/OpenAI-160x107.jpg",
"width": 160,
"height": 107,
"mimeType": "image/jpeg"
},
"1536x1536": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/01/OpenAI-1536x1024.jpg",
"width": 1536,
"height": 1024,
"mimeType": "image/jpeg"
},
"post-thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/01/OpenAI-672x372.jpg",
"width": 672,
"height": 372,
"mimeType": "image/jpeg"
},
"twentyfourteen-full-width": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/01/OpenAI-1038x576.jpg",
"width": 1038,
"height": 576,
"mimeType": "image/jpeg"
},
"npr-cds-wide": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/01/OpenAI-1200x675.jpg",
"width": 1200,
"height": 675,
"mimeType": "image/jpeg"
},
"kqedFullSize": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/01/OpenAI.jpg",
"width": 2000,
"height": 1333
}
},
"fetchFailed": false,
"isLoading": false
},
"news_12066914": {
"type": "attachments",
"id": "news_12066914",
"meta": {
"index": "attachments_1716263798",
"site": "news",
"id": "12066914",
"found": true
},
"title": "President Trump Signs Executive Orders In The Oval Office",
"publishDate": 1765563347,
"status": "inherit",
"parent": 12066910,
"modified": 1765563390,
"caption": "U.S. President Donald Trump displays a signed executive order in the Oval Office of the White House on Dec. 11, 2025, in Washington, D.C. The executive order curbs states' ability to regulate artificial intelligence, something for which the tech industry has been lobbying. ",
"credit": "Alex Wong/Getty Images",
"altTag": null,
"description": null,
"imgSizes": {
"thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2025/12/DonaldTrumpGetty1-160x110.jpg",
"width": 160,
"height": 110,
"mimeType": "image/jpeg"
},
"1536x1536": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2025/12/DonaldTrumpGetty1-1536x1059.jpg",
"width": 1536,
"height": 1059,
"mimeType": "image/jpeg"
},
"post-thumbnail": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2025/12/DonaldTrumpGetty1-672x372.jpg",
"width": 672,
"height": 372,
"mimeType": "image/jpeg"
},
"twentyfourteen-full-width": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2025/12/DonaldTrumpGetty1-1038x576.jpg",
"width": 1038,
"height": 576,
"mimeType": "image/jpeg"
},
"npr-cds-wide": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2025/12/DonaldTrumpGetty1-1200x675.jpg",
"width": 1200,
"height": 675,
"mimeType": "image/jpeg"
},
"kqedFullSize": {
"file": "https://cdn.kqed.org/wp-content/uploads/sites/10/2025/12/DonaldTrumpGetty1.jpg",
"width": 2000,
"height": 1379
}
},
"fetchFailed": false,
"isLoading": false
}
},
"audioPlayerReducer": {
"postId": "stream_live",
"isPaused": true,
"isPlaying": false,
"pfsActive": false,
"pledgeModalIsOpen": true,
"playerDrawerIsOpen": false
},
"authorsReducer": {
"byline_news_12082064": {
"type": "authors",
"id": "byline_news_12082064",
"meta": {
"override": true
},
"slug": "byline_news_12082064",
"name": "Matt O’Brien, Associated Press, and Nisa Khan, KQED",
"isLoading": false
},
"rachael-myrow": {
"type": "authors",
"id": "251",
"meta": {
"index": "authors_1716337520",
"id": "251",
"found": true
},
"name": "Rachael Myrow",
"firstName": "Rachael",
"lastName": "Myrow",
"slug": "rachael-myrow",
"email": "rmyrow@kqed.org",
"display_author_email": true,
"staff_mastheads": [
"news"
],
"title": "Senior Editor of KQED's Silicon Valley News Desk",
"bio": "• I write and edit stories about how Silicon Valley power and policies shape everyday life in California. I’m also passionate about making Bay Area history and culture more accessible to a broad public. • I’ve been a journalist for most of my life, starting in high school with The Franklin Press in Los Angeles, where I grew up. While earning my first degree in English at UC Berkeley, I got my start in public radio at KALX-FM. After completing a second degree in journalism at Cal, I landed my first professional job at Marketplace, then moved on to KPCC (now LAist), and then KQED, where I hosted The California Report for more than seven years. • My reporting has appeared on NPR, The World, WBUR’s \u003ci>Here & Now\u003c/i>, and the BBC. I also guest host for KQED’s \u003ci>Forum\u003c/i>, as well as the Commonwealth Club in San Francisco. • I speak periodically on media, democracy and technology issues, and do voiceover work for documentaries and educational video projects. • Outside of the studio, you'll find me hiking Bay Area trails and whipping up Insta-ready meals in my kitchen. • I do not accept gifts, money, or favors from anyone connected to my reporting, I don't pay people for information, and I do not support or donate to political causes. • I strive to treat the people I report on with fairness, honesty, and respect. I also recognize there are often multiple sides to a story and work to verify information through multiple sources and documentation. If I get something wrong, I correct it.",
"avatar": "https://secure.gravatar.com/avatar/87bf8cb5874e045cdff430523a6d48b1?s=600&d=blank&r=g",
"twitter": "rachaelmyrow",
"facebook": null,
"instagram": null,
"linkedin": "https://www.linkedin.com/in/rachaelmyrow/",
"sites": [
{
"site": "arts",
"roles": [
"administrator"
]
},
{
"site": "news",
"roles": [
"edit_others_posts",
"editor"
]
},
{
"site": "futureofyou",
"roles": [
"editor"
]
},
{
"site": "bayareabites",
"roles": [
"editor"
]
},
{
"site": "stateofhealth",
"roles": [
"editor"
]
},
{
"site": "science",
"roles": [
"editor"
]
},
{
"site": "food",
"roles": [
"editor"
]
},
{
"site": "forum",
"roles": [
"editor"
]
},
{
"site": "liveblog",
"roles": [
"author"
]
}
],
"headData": {
"title": "Rachael Myrow | KQED",
"description": "Senior Editor of KQED's Silicon Valley News Desk",
"ogImgSrc": "https://secure.gravatar.com/avatar/87bf8cb5874e045cdff430523a6d48b1?s=600&d=blank&r=g",
"twImgSrc": "https://secure.gravatar.com/avatar/87bf8cb5874e045cdff430523a6d48b1?s=600&d=blank&r=g"
},
"isLoading": false,
"link": "/author/rachael-myrow"
},
"ecruzguevarra": {
"type": "authors",
"id": "8654",
"meta": {
"index": "authors_1716337520",
"id": "8654",
"found": true
},
"name": "Ericka Cruz Guevarra",
"firstName": "Ericka",
"lastName": "Cruz Guevarra",
"slug": "ecruzguevarra",
"email": "ecruzguevarra@kqed.org",
"display_author_email": true,
"staff_mastheads": [
"news"
],
"title": "Producer, The Bay Podcast",
"bio": "Ericka Cruz Guevarra is host of \u003ca href=\"https://www.kqed.org/podcasts/thebay\">\u003cem>The Bay\u003c/em>\u003c/a> podcast at KQED. Before host, she was the show’s producer. Her work in that capacity includes a three-part reported series on policing in Vallejo, which won a 2020 excellence in journalism award from the Society of Professional Journalists. Ericka has worked as a breaking news reporter at Oregon Public Broadcasting, helped produce the Code Switch podcast, and was KQED’s inaugural Raul Ramirez Diversity Fund intern. She’s also an alumna of NPR’s Next Generation Radio program. Send her an email if you have strong feelings about whether Fairfield and Suisun City are the Bay. Ericka is represented by SAG-AFTRA.",
"avatar": "https://secure.gravatar.com/avatar/25e5ab8d3d53fad2dcc7bb2b5c506b1a?s=600&d=blank&r=g",
"twitter": "NotoriousECG",
"facebook": null,
"instagram": null,
"linkedin": null,
"sites": [
{
"site": "arts",
"roles": [
"subscriber"
]
},
{
"site": "news",
"roles": [
"editor",
"manage_categories"
]
},
{
"site": "futureofyou",
"roles": [
"subscriber"
]
},
{
"site": "stateofhealth",
"roles": [
"subscriber"
]
},
{
"site": "science",
"roles": [
"editor"
]
},
{
"site": "forum",
"roles": [
"subscriber"
]
}
],
"headData": {
"title": "Ericka Cruz Guevarra | KQED",
"description": "Producer, The Bay Podcast",
"ogImgSrc": "https://secure.gravatar.com/avatar/25e5ab8d3d53fad2dcc7bb2b5c506b1a?s=600&d=blank&r=g",
"twImgSrc": "https://secure.gravatar.com/avatar/25e5ab8d3d53fad2dcc7bb2b5c506b1a?s=600&d=blank&r=g"
},
"isLoading": false,
"link": "/author/ecruzguevarra"
},
"amontecillo": {
"type": "authors",
"id": "11649",
"meta": {
"index": "authors_1716337520",
"id": "11649",
"found": true
},
"name": "Alan Montecillo",
"firstName": "Alan",
"lastName": "Montecillo",
"slug": "amontecillo",
"email": "amontecillo@kqed.org",
"display_author_email": false,
"staff_mastheads": [
"news"
],
"title": "KQED Contributor",
"bio": "Alan Montecillo is the senior editor of \u003cem>\u003ca href=\"http://kqed.org/thebay\">The Bay\u003c/a>, \u003c/em> KQED's local news podcast. Before moving to the Bay Area, he worked as a senior talk show producer for WILL in Champaign-Urbana, Illinois and at Oregon Public Broadcasting in Portland, Oregon. He has won journalism awards from the Society of Professional Journalists Northern California, the Public Media Journalists Association, The Signal Awards, and has also received a regional Edward R. Murrow award. Alan is a Filipino American from Hong Kong and a graduate of Reed College.",
"avatar": "https://secure.gravatar.com/avatar/d5e4e7a76481969ccba76f4e2b5ccabc?s=600&d=blank&r=g",
"twitter": "alanmontecillo",
"facebook": null,
"instagram": null,
"linkedin": null,
"sites": [
{
"site": "",
"roles": [
"editor"
]
},
{
"site": "news",
"roles": [
"editor",
"manage_categories"
]
}
],
"headData": {
"title": "Alan Montecillo | KQED",
"description": "KQED Contributor",
"ogImgSrc": "https://secure.gravatar.com/avatar/d5e4e7a76481969ccba76f4e2b5ccabc?s=600&d=blank&r=g",
"twImgSrc": "https://secure.gravatar.com/avatar/d5e4e7a76481969ccba76f4e2b5ccabc?s=600&d=blank&r=g"
},
"isLoading": false,
"link": "/author/amontecillo"
},
"jessicakariisa": {
"type": "authors",
"id": "11831",
"meta": {
"index": "authors_1716337520",
"id": "11831",
"found": true
},
"name": "Jessica Kariisa",
"firstName": "Jessica",
"lastName": "Kariisa",
"slug": "jessicakariisa",
"email": "jkariisa@kqed.org",
"display_author_email": false,
"staff_mastheads": [
"news"
],
"title": "Producer, The Bay",
"bio": "Jessica Kariisa is the producer of The Bay. She first joined KQED as an intern for The California Report Magazine, after which she became an on-call producer. She reported a Bay Curious episode on the use of rap lyrics in criminal trials which won a Society of Professional Journalists award in 2023 for Excellence in Features Journalism and the 2023 Signal Award for Best Conversation Starter. She’s worked on podcasts for Snap Judgment and American Public Media. Before embarking on her audio career, she was a music journalist.\r\n\r\nJessica Kariisa is represented by SAG-AFTRA.",
"avatar": "https://secure.gravatar.com/avatar/4afd355fd24f5515aeab77fd6c72b671?s=600&d=blank&r=g",
"twitter": null,
"facebook": null,
"instagram": null,
"linkedin": null,
"sites": [
{
"site": "arts",
"roles": [
"author"
]
},
{
"site": "news",
"roles": [
"editor",
"manage_categories"
]
}
],
"headData": {
"title": "Jessica Kariisa | KQED",
"description": "Producer, The Bay",
"ogImgSrc": "https://secure.gravatar.com/avatar/4afd355fd24f5515aeab77fd6c72b671?s=600&d=blank&r=g",
"twImgSrc": "https://secure.gravatar.com/avatar/4afd355fd24f5515aeab77fd6c72b671?s=600&d=blank&r=g"
},
"isLoading": false,
"link": "/author/jessicakariisa"
},
"kdebenedetti": {
"type": "authors",
"id": "11913",
"meta": {
"index": "authors_1716337520",
"id": "11913",
"found": true
},
"name": "Katie DeBenedetti",
"firstName": "Katie",
"lastName": "DeBenedetti",
"slug": "kdebenedetti",
"email": "kdebenedetti@kqed.org",
"display_author_email": false,
"staff_mastheads": [
"news",
"science"
],
"title": "KQED Contributor",
"bio": "Katie DeBenedetti is a digital reporter covering daily news for the Express Desk. Prior to joining KQED as a culture reporting intern in January 2024, she covered education and city government for the Napa Valley Register.",
"avatar": "https://secure.gravatar.com/avatar/6e31073cb8f7e4214ab03f42771d0f45?s=600&d=blank&r=g",
"twitter": null,
"facebook": null,
"instagram": null,
"linkedin": null,
"sites": [
{
"site": "news",
"roles": [
"author"
]
},
{
"site": "science",
"roles": [
"author"
]
},
{
"site": "liveblog",
"roles": [
"author"
]
}
],
"headData": {
"title": "Katie DeBenedetti | KQED",
"description": "KQED Contributor",
"ogImgSrc": "https://secure.gravatar.com/avatar/6e31073cb8f7e4214ab03f42771d0f45?s=600&d=blank&r=g",
"twImgSrc": "https://secure.gravatar.com/avatar/6e31073cb8f7e4214ab03f42771d0f45?s=600&d=blank&r=g"
},
"isLoading": false,
"link": "/author/kdebenedetti"
}
},
"breakingNewsReducer": {},
"pagesReducer": {},
"postsReducer": {
"stream_live": {
"type": "live",
"id": "stream_live",
"audioUrl": "https://streams.kqed.org/kqedradio",
"title": "Live Stream",
"excerpt": "Live Stream information currently unavailable.",
"link": "/radio",
"featImg": "",
"label": {
"name": "KQED Live",
"link": "/"
}
},
"stream_kqedNewscast": {
"type": "posts",
"id": "stream_kqedNewscast",
"audioUrl": "https://www.kqed.org/.stream/anon/radio/RDnews/newscast.mp3?_=1",
"title": "KQED Newscast",
"featImg": "",
"label": {
"name": "88.5 FM",
"link": "/"
}
},
"news_12083278": {
"type": "posts",
"id": "news_12083278",
"meta": {
"index": "posts_1716263798",
"site": "news",
"id": "12083278",
"score": null,
"sort": [
1778629278000
]
},
"guestAuthors": [],
"slug": "sam-altman-defends-himself-from-elon-musks-accusations-in-openai-trial",
"title": "Sam Altman Defends Himself From Elon Musk’s Accusations in OpenAI Trial",
"publishDate": 1778629278,
"format": "standard",
"headTitle": "Sam Altman Defends Himself From Elon Musk’s Accusations in OpenAI Trial | KQED",
"labelTerm": {
"site": "news"
},
"content": "\u003cp>On the stand on Tuesday, OpenAI CEO Sam Altman said that Elon Musk tried to \u003ca href=\"https://www.kqed.org/news/12081916/are-elon-musk-and-openai-fighting-an-ai-arms-race-sam-altmans-lawyers-think-so\">wrest control over the company\u003c/a> they co-founded before the Tesla CEO’s 2018 exit.\u003c/p>\n\u003cp>Altman’s testimony in the federal trial in Oakland, which many see as a billionaire grudge match, pushed back on Musk’s claim that the powerful AI start-up betrayed its mission to benefit the public good. Musk has accused Altman of \u003ca href=\"https://www.kqed.org/news/12081603/elon-musk-takes-aim-at-openai-as-trial-begins-its-not-ok-to-steal-a-charity\">“stealing a charity” \u003c/a>by building an $850 million for-profit company on the back of its nonprofit research lab.\u003c/p>\n\u003cp>Altman said that in early discussions about creating a for-profit arm, Musk sought majority ownership, and later proposed folding the nonprofit into his car company.\u003c/p>\n\u003cp>[ad fullwidth]\u003c/p>\n\u003cp>“I read that as a lightweight threat,” Altman said of the plan to bring OpenAI into Tesla. “I don’t think it would have served the mission. I think it would have effectively destroyed the nonprofit in the process.”\u003c/p>\n\u003cp>“Mr. 
Musk did try to kill it, I guess twice,” he said.\u003c/p>\n\u003cp>As early as summer 2017, Altman, Musk and other OpenAI executives began discussing if and how to launch a for-profit, citing a need to raise more money to keep up with competitors like Google.\u003c/p>\n\u003cfigure id=\"attachment_12083394\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12083394 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-04-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-04-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-04-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-04-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-04-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">OpenAI CEO Sam Altman testifies in the trial in which Elon Musk claims that Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity rather than solely for profit in Oakland on May 12, 2026. \u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Altman said they were “running the organization on a shoestring,” with a short runway of cash. To acquire the compute — or the GPUs and CPUs needed to power AI — and funding they needed to pursue artificial general intelligence, or a superintelligent AI technology known as AGI, the company would need more significant investments, the executives determined.\u003c/p>\n\u003cp>“I thought, of course, we needed to raise billions to quickly ramp,” he said. 
“I saw no way to do it.”\u003c/p>\n\u003cp>Altman, Greg Brockman, the president of OpenAI and Ilya Sutskever, a former top OpenAI computer scientist and member of its founding team, have said that in those conversations, Musk repeatedly proposed plans that would give him majority control. Initially, Altman said that he asked for 90% equity in a potential for-profit.\u003c/p>\n\u003cp>The other executives pushed back on this request, including in an email Altman sent to Musk at the time, in which he said, “I am worried about control. I don’t think any one person should have control of the world’s first AGI — in fact, the whole reason we started OpenAI is so that wouldn’t happen.”[aside postID=news_12083224 hero='https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/SamAltmanGetty.jpg']Altman described Musk as “mercurial,” and said that when he left OpenAI in February 2018, after for-profit discussions fell apart, “people wondered if he’d try to take a vengeance on us” — which both he and his attorney, William Savitt, have alleged is exactly what Musk’s lawsuit aims to do.\u003c/p>\n\u003cp>During his cross-examination, though, Musk’s counsel Steven Molo seemed to suggest that it is Altman who has amassed significant control over OpenAI since it did launch a for-profit arm in 2019.\u003c/p>\n\u003cp>Molo asked Altman about the testimonies of various former OpenAI executives, who said he was untrustworthy and had a history of lying. Altman denied hearing those testimonies, but when asked if he had “repeatedly been called a liar” by people he has done business with, he said, “I have heard people say that.”\u003c/p>\n\u003cp>Molo said that Altman sits on the board of directors for both the OpenAI Foundation, the nonprofit arm, and OpenAI’s for-profit. 
He is also the company’s CEO.\u003c/p>\n\u003cp>“Would you ever fire yourself as the CEO of the for-profit?” Molo said, adding that the board of the nonprofit is supposed to provide oversight for the chief officer.\u003c/p>\n\u003cp>Altman said that CEOs are “almost always” on their company’s boards. When pressed, he said he had “no plans” to fire himself.\u003c/p>\n\u003cfigure id=\"attachment_12083294\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12083294\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-01-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-01-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-01-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-01-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-01-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Bret Taylor testifies in the trial in which Elon Musk claims that Sam Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity rather than solely for profit in Oakland on May 12, 2026. \u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Molo also asked Altman about how board members were selected following his brief firing in 2023. During the five-day ouster, there were long negotiations behind the scenes about whether Altman would return, and who would be on the board if he did. 
Altman, Brockman and other OpenAI executives who followed them out were also in discussions with Microsoft, OpenAI’s largest financial backer, which had offered to bring them on to start a new AI team.\u003c/p>\n\u003cp>Altman said initially he’d proposed to remove OpenAI’s board, which fired him, and replace it with four members, including himself. Altman was not made a board member at that time, but Molo said that he had proposed the three members who were ultimately selected — Bret Taylor, Larry Summers and Adam D’Angelo — in conversations with Microsoft CEO Satya Nadella.\u003c/p>\n\u003cp>Altman said that he had no power to appoint new board members, but that he did say which configurations he would be “willing” to be rehired into.\u003c/p>\n\u003cp>Earlier in the day, he characterized his return to OpenAI as running “back into a burning building to try to save it.”\u003c/p>\n\u003cp>Later this week, both Altman and Musk’s legal teams will present their closing arguments. Then the jury and judge will decide which tech leader to believe.\u003c/p>\n\u003cp>\u003c/p>\n",
"blocks": [],
"excerpt": "During a brief cross-examination of Altman, the Tesla CEO’s attorney questioned whether or not Altman was trustworthy.",
"status": "publish",
"parent": 0,
"modified": 1778630872,
"stats": {
"hasAudio": false,
"hasVideo": false,
"hasChartOrMap": false,
"iframeSrcs": [],
"hasGoogleForm": false,
"hasGallery": false,
"hasHearkenModule": false,
"hasPolis": false,
"paragraphCount": 22,
"wordCount": 990
},
"headData": {
"title": "Sam Altman Defends Himself From Elon Musk’s Accusations in OpenAI Trial | KQED",
"description": "During a brief cross-examination of Altman, the Tesla CEO’s attorney questioned whether or not Altman was trustworthy.",
"ogTitle": "",
"ogDescription": "",
"ogImgId": "",
"twTitle": "",
"twDescription": "",
"twImgId": "",
"schema": {
"@context": "https://schema.org",
"@type": "NewsArticle",
"headline": "Sam Altman Defends Himself From Elon Musk’s Accusations in OpenAI Trial",
"datePublished": "2026-05-12T16:41:18-07:00",
"dateModified": "2026-05-12T17:07:52-07:00",
"image": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"isAccessibleForFree": "True",
"publisher": {
"@type": "NewsMediaOrganization",
"@id": "https://www.kqed.org/#organization",
"name": "KQED",
"logo": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"url": "https://www.kqed.org",
"sameAs": [
"https://www.facebook.com/KQED",
"https://twitter.com/KQED",
"https://www.instagram.com/kqed/",
"https://www.tiktok.com/@kqedofficial",
"https://www.linkedin.com/company/kqed",
"https://www.youtube.com/channel/UCeC0IOo7i1P_61zVUWbJ4nw"
]
}
}
},
"primaryCategory": {
"termId": 248,
"slug": "technology",
"name": "Technology"
},
"sticky": false,
"nprStoryId": "kqed-12083278",
"templateType": "standard",
"featuredImageType": "standard",
"excludeFromSiteSearch": "Include",
"articleAge": "0",
"path": "/news/12083278/sam-altman-defends-himself-from-elon-musks-accusations-in-openai-trial",
"audioTrackLength": null,
"parsedContent": [
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003cp>On the stand on Tuesday, OpenAI CEO Sam Altman said that Elon Musk tried to \u003ca href=\"https://www.kqed.org/news/12081916/are-elon-musk-and-openai-fighting-an-ai-arms-race-sam-altmans-lawyers-think-so\">wrest control over the company\u003c/a> they co-founded before the Tesla CEO’s 2018 exit.\u003c/p>\n\u003cp>Altman’s testimony in the federal trial in Oakland, which many see as a billionaire grudge match, pushed back on Musk’s claim that the powerful AI start-up betrayed its mission to benefit the public good. Musk has accused Altman of \u003ca href=\"https://www.kqed.org/news/12081603/elon-musk-takes-aim-at-openai-as-trial-begins-its-not-ok-to-steal-a-charity\">“stealing a charity” \u003c/a>by building an $850 million for-profit company on the back of its nonprofit research lab.\u003c/p>\n\u003cp>Altman said that in early discussions about creating a for-profit arm, Musk sought majority ownership, and later proposed folding the nonprofit into his car company.\u003c/p>\n\u003cp>\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "ad",
"attributes": {
"named": {
"label": "fullwidth"
},
"numeric": [
"fullwidth"
]
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003c/p>\n\u003cp>“I read that as a lightweight threat,” Altman said of the plan to bring OpenAI into Tesla. “I don’t think it would have served the mission. I think it would have effectively destroyed the nonprofit in the process.”\u003c/p>\n\u003cp>“Mr. Musk did try to kill it, I guess twice,” he said.\u003c/p>\n\u003cp>As early as summer 2017, Altman, Musk and other OpenAI executives began discussing if and how to launch a for-profit, citing a need to raise more money to keep up with competitors like Google.\u003c/p>\n\u003cfigure id=\"attachment_12083394\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12083394 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-04-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-04-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-04-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-04-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-04-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">OpenAI CEO Sam Altman testifies in the trial in which Elon Musk claims that Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity rather than solely for profit in Oakland on May 12, 2026. \u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Altman said they were “running the organization on a shoestring,” with a short runway of cash. 
To acquire the compute — or the GPUs and CPUs needed to power AI — and funding they needed to pursue artificial general intelligence, or a superintelligent AI technology known as AGI, the company would need more significant investments, the executives determined.\u003c/p>\n\u003cp>“I thought, of course, we needed to raise billions to quickly ramp,” he said. “I saw no way to do it.”\u003c/p>\n\u003cp>Altman, Greg Brockman, the president of OpenAI and Ilya Sutskever, a former top OpenAI computer scientist and member of its founding team, have said that in those conversations, Musk repeatedly proposed plans that would give him majority control. Initially, Altman said that he asked for 90% equity in a potential for-profit.\u003c/p>\n\u003cp>The other executives pushed back on this request, including in an email Altman sent to Musk at the time, in which he said, “I am worried about control. I don’t think any one person should have control of the world’s first AGI — in fact, the whole reason we started OpenAI is so that wouldn’t happen.”\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "aside",
"attributes": {
"named": {
"postid": "news_12083224",
"hero": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/SamAltmanGetty.jpg",
"label": ""
},
"numeric": []
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>Altman described Musk as “mercurial,” and said that when he left OpenAI in February 2018, after for-profit discussions fell apart, “people wondered if he’d try to take a vengeance on us” — which both he and his attorney, William Savitt, have alleged is exactly what Musk’s lawsuit aims to do.\u003c/p>\n\u003cp>During his cross-examination, though, Musk’s counsel Steven Molo seemed to suggest that it is Altman who has amassed significant control over OpenAI since it did launch a for-profit arm in 2019.\u003c/p>\n\u003cp>Molo asked Altman about the testimonies of various former OpenAI executives, who said he was untrustworthy and had a history of lying. Altman denied hearing those testimonies, but when asked if he had “repeatedly been called a liar” by people he has done business with, he said, “I have heard people say that.”\u003c/p>\n\u003cp>Molo said that Altman sits on the board of directors for both the OpenAI Foundation, the nonprofit arm, and OpenAI’s for-profit. He is also the company’s CEO.\u003c/p>\n\u003cp>“Would you ever fire yourself as the CEO of the for-profit?” Molo said, adding that the board of the nonprofit is supposed to provide oversight for the chief officer.\u003c/p>\n\u003cp>Altman said that CEOs are “almost always” on their company’s boards. 
When pressed, he said he had “no plans” to fire himself.\u003c/p>\n\u003cfigure id=\"attachment_12083294\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12083294\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-01-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-01-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-01-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-01-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260512-MUSK-ALTMAN-TRIAL-VB-01-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Bret Taylor testifies in the trial in which Elon Musk claims that Sam Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity rather than solely for profit in Oakland on May 12, 2026. \u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Molo also asked Altman about how board members were selected following his brief firing in 2023. During the five-day ouster, there were long negotiations behind the scenes about whether Altman would return, and who would be on the board if he did. Altman, Brockman and other OpenAI executives who followed them out were also in discussions with Microsoft, OpenAI’s largest financial backer, which had offered to bring them on to start a new AI team.\u003c/p>\n\u003cp>Altman said initially he’d proposed to remove OpenAI’s board, which fired him, and replace it with four members, including himself. 
Altman was not made a board member at that time, but Molo said that he had proposed the three members who were ultimately selected — Bret Taylor, Larry Summers and Adam D’Angelo — in conversations with Microsoft CEO Satya Nadella.\u003c/p>\n\u003cp>Altman said that he had no power to appoint new board members, but that he did say which configurations he would be “willing” to be rehired into.\u003c/p>\n\u003cp>Earlier in the day, he characterized his return to OpenAI as running “back into a burning building to try to save it.”\u003c/p>\n\u003cp>Later this week, both Altman and Musk’s legal teams will present their closing arguments. Then the jury and judge will decide which tech leader to believe.\u003c/p>\n\u003cp>\u003c/p>\n\u003c/div>\u003c/p>",
"attributes": {
"named": {},
"numeric": []
}
}
],
"link": "/news/12083278/sam-altman-defends-himself-from-elon-musks-accusations-in-openai-trial",
"authors": [
"11913",
"251"
],
"categories": [
"news_6188",
"news_28250",
"news_8",
"news_248"
],
"tags": [
"news_34755",
"news_1386",
"news_32668",
"news_3897",
"news_27626",
"news_21891",
"news_34054",
"news_33542",
"news_33543",
"news_34586",
"news_1631"
],
"featImg": "news_12083392",
"label": "news"
},
"news_12083224": {
"type": "posts",
"id": "news_12083224",
"meta": {
"index": "posts_1716263798",
"site": "news",
"id": "12083224",
"score": null,
"sort": [
1778546112000
]
},
"guestAuthors": [],
"slug": "former-openai-exec-calls-decision-to-remove-sam-altman-a-hail-mary-during-musk-trial",
"title": "Former OpenAI Exec Calls Decision to Remove Sam Altman a ‘Hail Mary’ During Musk Trial",
"publishDate": 1778546112,
"format": "standard",
"headTitle": "Former OpenAI Exec Calls Decision to Remove Sam Altman a ‘Hail Mary’ During Musk Trial | KQED",
"labelTerm": {
"site": "news"
},
"content": "\u003cp>Microsoft’s CEO and another major player took the stand on Monday in \u003ca href=\"https://www.kqed.org/news/tag/oakland\">Oakland\u003c/a>, testifying in the blockbuster trial between OpenAI co-founders Elon Musk and Sam Altman.\u003c/p>\n\u003cp>Ahead of Altman’s testimony, Musk’s attorney Steven Molo questioned Microsoft CEO Satya Nadella and Ilya Sutskever, a top OpenAI computer scientist who departed the company in 2024. Sutskever discussed his role in orchestrating Altman’s brief ouster in 2023.\u003c/p>\n\u003cp>Over five days in November 2023, Altman was removed and reinstated from his post, after a coalition of board members raised concerns that he had not been “consistently candid in his communications” and cited a breakdown of trust.\u003c/p>\n\u003cp>[ad fullwidth]\u003c/p>\n\u003cp>Whether Altman and other executives have maintained OpenAI’s initial stated mission — to develop AI safely and for the “benefit of humanity” — is critical to Musk’s suit, which claims that leaders breached their duty to its nonprofit mission by building a for-profit company on top of it. 
Musk also alleged that the company unfairly benefited at his expense.\u003c/p>\n\u003cp>Musk also alleges that Microsoft, which is OpenAI’s largest financial backer and until this week held the exclusive rights to license and sell its technology, aided and abetted that breach of trust.\u003c/p>\n\u003cp>Molo questioned Nadella about Microsoft’s motive to invest in OpenAI — a $13 billion input that Nadella said is expected to see a return of about $92 billion, “if it works out.”\u003c/p>\n\u003cfigure id=\"attachment_12081686\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12081686 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Steve Molo, Elon Musk’s attorney, presents opening statements in the trial in which Elon Musk (center-right) claims that Sam Altman (right) and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit, in Oakland, on April 28, 2026. 
\u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Musk’s attorney pointed out Nadella’s fiduciary duty to maximize profit, and referenced a series of texts between him and Altman that appeared to show Nadella pushing for an earlier rollout of the paid version of ChatGPT.\u003c/p>\n\u003cp>“When chatGPT paid?” Nadella wrote in the message.\u003c/p>\n\u003cp>Altman said that there was “Not enough compute to make it a good consumer experience,” to which Nadella said, “The sooner the better.”\u003c/p>\n\u003cp>Nadella said that the reason Microsoft invested was that OpenAI was pursuing a for-profit model, but he said, “If the pie became larger, the nonprofit would benefit as well.”[aside postID=news_12081916 hero='https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/AP26118555622828-2000x1333.jpg']Molo asked Nadella if he was aware that, for a period of time, OpenAI’s nonprofit did not have any employees.\u003c/p>\n\u003cp>“I am not,” Nadella said.\u003c/p>\n\u003cp>Molo also questioned Nadella about Microsoft’s role during Altman’s brief ouster. 
At the time, Nadella announced that he would hire Altman, along with OpenAI’s third co-founder and current president, Greg Brockman, as well as other allies, to head up a new AI team at Microsoft.\u003c/p>\n\u003cp>Nadella said that he “had ideas about how Sam [Altman] and the other employees could join Microsoft if they were not reinstated.”\u003c/p>\n\u003cp>“If people were going to leave OpenAI, I wanted them to come to Microsoft,” he said.\u003c/p>\n\u003cp>Molo asked Nadella if he knew why Altman had been removed, to which Nadella said he was never given an “explicit answer.”\u003c/p>\n\u003cp>“Did the thought occur to you … the board might issue a public statement about why they fired Altman?” Molo said.\u003c/p>\n\u003cp>Nadella said during that period — referred to as “The Blip” by many OpenAI employees — he was focused on ensuring continuity for customers.\u003c/p>\n\u003cp>“It goes back to me wanting to communicate to customers that they can count on us,” he said. “Come Monday, that doesn’t just disappear.”\u003c/p>\n\u003cfigure id=\"attachment_12082325\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12082325 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260504-MUSK-ALTMAN-VB-03-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260504-MUSK-ALTMAN-VB-03-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260504-MUSK-ALTMAN-VB-03-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260504-MUSK-ALTMAN-VB-03-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260504-MUSK-ALTMAN-VB-03-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">OpenAI CEO Sam Altman watches as OpenAI President Greg Brockman testifies in the trial in which Elon Musk claims 
that Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit, in Oakland, on May 4, 2026. \u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Sutskever, who took the stand after Nadella, described Altman’s removal differently. He said it was a “Hail Mary” to save OpenAI, which had become an environment that was “not conducive” to the technology’s safety.\u003c/p>\n\u003cp>“I felt a great deal of ownership of OpenAI,” he said. “I felt like I created this company. I simply cared for it, and I didn’t want it to be destroyed.”\u003c/p>\n\u003cp>Sutskever, who helped lead the ouster, had compiled a more than 50-page record of Altman’s “consistent pattern of lying,” including misrepresenting facts, safety protocols and company information to the board and executives.\u003c/p>\n\u003cp>Sutskever maintained that he had worked on a team that aimed to focus on long-term risks as more powerful AI was built.\u003c/p>\n\u003cp>“The goal of the super alignment is to do the research in advance, such that humanity will have the technological means to make it controlled and safe,” he said.\u003c/p>\n\u003cp>The team was disbanded days after he departed the company, in May 2024.\u003c/p>\n\u003cp>\u003c/p>\n",
"blocks": [],
"excerpt": "The testimonies on Monday centered on Sam Altman’s brief 2023 ousting from OpenAI, as allegations mounted against the tech giant’s conduct and Microsoft’s motives in backing the AI company.",
"status": "publish",
"parent": 0,
"modified": 1778547375,
"stats": {
"hasAudio": false,
"hasVideo": false,
"hasChartOrMap": false,
"iframeSrcs": [],
"hasGoogleForm": false,
"hasGallery": false,
"hasHearkenModule": false,
"hasPolis": false,
"paragraphCount": 26,
"wordCount": 890
},
"headData": {
"title": "Former OpenAI Exec Calls Decision to Remove Sam Altman a ‘Hail Mary’ During Musk Trial | KQED",
"description": "The testimonies on Monday centered on Sam Altman’s brief 2023 ousting from OpenAI, as allegations mounted against the tech giant’s conduct and Microsoft’s motives in backing the AI company.",
"ogTitle": "",
"ogDescription": "",
"ogImgId": "",
"twTitle": "",
"twDescription": "",
"twImgId": "",
"schema": {
"@context": "https://schema.org",
"@type": "NewsArticle",
"headline": "Former OpenAI Exec Calls Decision to Remove Sam Altman a ‘Hail Mary’ During Musk Trial",
"datePublished": "2026-05-11T17:35:12-07:00",
"dateModified": "2026-05-11T17:56:15-07:00",
"image": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"isAccessibleForFree": "True",
"publisher": {
"@type": "NewsMediaOrganization",
"@id": "https://www.kqed.org/#organization",
"name": "KQED",
"logo": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"url": "https://www.kqed.org",
"sameAs": [
"https://www.facebook.com/KQED",
"https://twitter.com/KQED",
"https://www.instagram.com/kqed/",
"https://www.tiktok.com/@kqedofficial",
"https://www.linkedin.com/company/kqed",
"https://www.youtube.com/channel/UCeC0IOo7i1P_61zVUWbJ4nw"
]
}
}
},
"primaryCategory": {
"termId": 248,
"slug": "technology",
"name": "Technology"
},
"sticky": false,
"nprStoryId": "kqed-12083224",
"templateType": "standard",
"featuredImageType": "standard",
"excludeFromSiteSearch": "Include",
"articleAge": "0",
"path": "/news/12083224/former-openai-exec-calls-decision-to-remove-sam-altman-a-hail-mary-during-musk-trial",
"audioTrackLength": null,
"parsedContent": [
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003cp>Microsoft’s CEO and another major player took the stand on Monday in \u003ca href=\"https://www.kqed.org/news/tag/oakland\">Oakland\u003c/a>, testifying in the blockbuster trial between OpenAI co-founders Elon Musk and Sam Altman.\u003c/p>\n\u003cp>Ahead of Altman’s testimony, Musk’s attorney Steven Molo questioned Microsoft CEO Satya Nadella and Ilya Sutskever, a top OpenAI computer scientist who departed the company in 2024. Sutskever discussed his role in orchestrating Altman’s brief ouster in 2023.\u003c/p>\n\u003cp>Over five days in November 2023, Altman was removed and reinstated from his post, after a coalition of board members raised concerns that he had not been “consistently candid in his communications” and cited a breakdown of trust.\u003c/p>\n\u003cp>\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "ad",
"attributes": {
"named": {
"label": "fullwidth"
},
"numeric": [
"fullwidth"
]
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003c/p>\n\u003cp>Whether Altman and other executives have maintained OpenAI’s initial stated mission — to develop AI safely and for the “benefit of humanity” — is critical to Musk’s suit, which claims that leaders breached their duty to its nonprofit mission by building a for-profit company on top of it. Musk also alleged that the company unfairly benefited at his expense.\u003c/p>\n\u003cp>Musk also alleges that Microsoft, which is OpenAI’s largest financial backer and until this week held the exclusive rights to license and sell its technology, aided and abetted that breach of trust.\u003c/p>\n\u003cp>Molo questioned Nadella about Microsoft’s motive to invest in OpenAI — a $13 billion input that Nadella said is expected to see a return of about $92 billion, “if it works out.”\u003c/p>\n\u003cfigure id=\"attachment_12081686\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12081686 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Steve Molo, Elon Musk’s attorney, presents opening statements in the trial in which Elon Musk (center-right) claims that Sam Altman (right) and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit, in Oakland, on April 28, 2026. 
\u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Musk’s attorney pointed out Nadella’s fiduciary duty to maximize profit, and referenced a series of texts between him and Altman that appeared to show Nadella pushing for an earlier rollout of the paid version of ChatGPT.\u003c/p>\n\u003cp>“When chatGPT paid?” Nadella wrote in the message.\u003c/p>\n\u003cp>Altman said that there was “Not enough compute to make it a good consumer experience,” to which Nadella said, “The sooner the better.”\u003c/p>\n\u003cp>Nadella said that the reason Microsoft invested was that OpenAI was pursuing a for-profit model, but he said, “If the pie became larger, the nonprofit would benefit as well.”\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "aside",
"attributes": {
"named": {
"postid": "news_12081916",
"hero": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/AP26118555622828-2000x1333.jpg",
"label": ""
},
"numeric": []
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>Molo asked Nadella if he was aware that, for a period of time, OpenAI’s nonprofit did not have any employees.\u003c/p>\n\u003cp>“I am not,” Nadella said.\u003c/p>\n\u003cp>Molo also questioned Nadella about Microsoft’s role during Altman’s brief ouster. At the time, Nadella announced that he would hire Altman, along with OpenAI’s third co-founder and current president, Greg Brockman, as well as other allies, to head up a new AI team at Microsoft.\u003c/p>\n\u003cp>Nadella said that he “had ideas about how Sam [Altman] and the other employees could join Microsoft if they were not reinstated.”\u003c/p>\n\u003cp>“If people were going to leave OpenAI, I wanted them to come to Microsoft,” he said.\u003c/p>\n\u003cp>Molo asked Nadella if he knew why Altman had been removed, to which Nadella said he was never given an “explicit answer.”\u003c/p>\n\u003cp>“Did the thought occur to you … the board might issue a public statement about why they fired Altman?” Molo said.\u003c/p>\n\u003cp>Nadella said during that period — referred to as “The Blip” by many OpenAI employees — he was focused on ensuring continuity for customers.\u003c/p>\n\u003cp>“It goes back to me wanting to communicate to customers that they can count on us,” he said. 
“Come Monday, that doesn’t just disappear.”\u003c/p>\n\u003cfigure id=\"attachment_12082325\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12082325 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260504-MUSK-ALTMAN-VB-03-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260504-MUSK-ALTMAN-VB-03-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260504-MUSK-ALTMAN-VB-03-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260504-MUSK-ALTMAN-VB-03-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/260504-MUSK-ALTMAN-VB-03-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">OpenAI CEO Sam Altman watches as OpenAI President Greg Brockman testifies in the trial in which Elon Musk claims that Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit, in Oakland, on May 4, 2026. \u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Sutskever, who took the stand after Nadella, described Altman’s removal differently. He said it was a “Hail Mary” to save OpenAI, which had become an environment that was “not conducive” to the technology’s safety.\u003c/p>\n\u003cp>“I felt a great deal of ownership of OpenAI,” he said. “I felt like I created this company. 
I simply cared for it, and I didn’t want it to be destroyed.”\u003c/p>\n\u003cp>Sutskever, who helped lead the ouster, had compiled a more than 50-page record of Altman’s “consistent pattern of lying,” including misrepresenting facts, safety protocols and company information to the board and executives.\u003c/p>\n\u003cp>Sutskever maintained that he had worked on a team that aimed to focus on long-term risks as more powerful AI was built.\u003c/p>\n\u003cp>“The goal of the super alignment is to do the research in advance, such that humanity will have the technological means to make it controlled and safe,” he said.\u003c/p>\n\u003cp>The team was disbanded days after he departed the company, in May 2024.\u003c/p>\n\u003cp>\u003c/p>\n\u003c/div>\u003c/p>",
"attributes": {
"named": {},
"numeric": []
}
}
],
"link": "/news/12083224/former-openai-exec-calls-decision-to-remove-sam-altman-a-hail-mary-during-musk-trial",
"authors": [
"11913",
"251"
],
"categories": [
"news_31795",
"news_6188",
"news_28250",
"news_8",
"news_248"
],
"tags": [
"news_34755",
"news_1386",
"news_32668",
"news_3897",
"news_27626",
"news_19954",
"news_21891",
"news_34054",
"news_33542",
"news_33543",
"news_34586",
"news_1631"
],
"featImg": "news_12083235",
"label": "news"
},
"news_12082428": {
"type": "posts",
"id": "news_12082428",
"meta": {
"index": "posts_1716263798",
"site": "news",
"id": "12082428",
"score": null,
"sort": [
1778061636000
]
},
"guestAuthors": [],
"slug": "inside-sam-altman-and-elon-musks-battle-over-openai",
"title": "Inside Elon Musk and Sam Altman's Battle Over OpenAI",
"publishDate": 1778061636,
"format": "audio",
"headTitle": "Inside Elon Musk and Sam Altman’s Battle Over OpenAI | KQED",
"labelTerm": {},
"content": "\u003cp>\u003cspan style=\"font-weight: 400\">Jurors and journalists are getting a peek into the world of OpenAI and its founding as two of the richest, most powerful men in tech duke it out in an Oakland federal courthouse. \u003c/span>\u003c/p>\n\u003cp>\u003cspan style=\"font-weight: 400\">Elon Musk claims that Sam Altman and other co-founders of OpenAI abandoned their founding promise to develop AI for the benefit of humanity. But does anyone here really have our best interests at heart? KQED’s Rachael Myrow takes us inside.\u003c/span>\u003c/p>\n\u003cp>\u003cstrong>Links:\u003c/strong>\u003c/p>\n\u003cul>\n\u003cli style=\"list-style-type: none\">\n\u003cul>\n\u003cli>\u003ca href=\"https://www.kqed.org/news/12081290/how-to-unscramble-an-omelet-in-silicon-valley-the-musk-v-altman-trial-that-will-try\">How to Unscramble an Omelet in Silicon Valley: The Musk v. Altman Trial That Will Try\u003c/a>\u003c/li>\n\u003c/ul>\n\u003c/li>\n\u003c/ul>\n\u003cp>\u003c!-- iframe plugin v.4.3 wordpress.org/plugins/iframe/ -->\u003cbr>\n\u003ciframe loading=\"lazy\" frameborder=\"0\" height=\"200\" scrolling=\"no\" src=\"https://playlist.megaphone.fm?e=KQINC4004396119\" width=\"100%\" class=\"iframe-class\">\u003c/iframe>\u003c/p>\n\u003ch3>\u003cstrong>Episode Transcript\u003c/strong>\u003c/h3>\n\u003cp>\u003cem>This is a computer-generated transcript. While our team has reviewed it, there may be errors.\u003c/em>\u003c/p>\n\u003cp>[ad fullwidth]\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:00:49] \u003c/em>I’m Ericka Cruz-Gavarra and welcome to The Bay, local news to keep you rooted. Inside a federal courthouse in downtown Oakland, in front of a judge and a jury of their peers, two of the most powerful men in the world are duking it out in court over whether OpenAI, the company behind ChatGPT, was built on a lie. Elon Musk is suing OpenAI and its CEO, Sam Altman. 
For abandoning their founding promise to develop AI for the benefit of humanity. And whether or not you actually believe any of them really had our best interests in mind, one thing is true, that the battle over who runs AI is all about ego and power.\u003c/p>\n\u003cp>\u003cb>Ashley Ortiz: \u003c/b>\u003cem>[00:01:47] \u003c/em>No matter which side wins, the people are going to lose because they are not doing this actually for the benefit of humanity, it’s not about ethics, this is all about power plays within an unfettered, unregulated AI scheme.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:02:01] \u003c/em>Today, KQED’s Rachael Myrow takes us inside the OpenAI trial.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:02:19] \u003c/em>It seems like you’re in a pretty dynamic scene right now, Rachael. Can you actually tell us where you are?\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:02:26] \u003c/em>I’m outside the federal courthouse in Oakland where Musk v. Altman et al. Is playing out.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:02:36] \u003c/em>Rachael Myrow is a senior editor at KQED.\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:02:41] \u003c/em>This plaza is right on the street so you hear the chirping every time somebody presses a button to cross the street. You hear garbage trucks rolling past. Inside the courtroom is presided over by Judge Yvonne Gonzalez Rogers and it is packed every single day. Armies of lawyers of course but also journalists from across the country, even a couple from France. And some members of the public. I’d like to call this the hottest theater ticket in Silicon Valley. We got to see Elon Musk spend four days on the witness stand. Sam Altman is sitting just a few feet away in the defense section. 
These two men genuinely cannot stand each other.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:03:30] \u003c/em>And I understand, Rachel, that there’s not just folks inside of the courtroom for this trial, but also outside protesting as well.\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:03:41] \u003c/em>Yes, on the very first day, actually when jury selection was taking place, protesters gathered in large numbers outside the courthouse on the plaza with some very pointed and colorful signs.\u003c/p>\n\u003cp>\u003cb>Valerie Sizemore: \u003c/b>\u003cem>[00:03:57] \u003c/em>I used to be a software engineer, but have been unemployed by AI. So now I’m trying to make the resistance happen.\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:04:06] \u003c/em>I talked to one protester, Valerie Sizemore of Berkeley, who kind of represented, I think, a lot of Bayarians.\u003c/p>\n\u003cp>\u003cb>Valerie Sizemore: \u003c/b>\u003cem>[00:04:15] \u003c/em>I’m not here because I care about the outcome of this trial. I really don’t care. I hope it’s really expensive for someone and like hurts both companies as much as possible.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:04:27] \u003c/em>Yeah, and it’s, I guess, two-for-one for her to just be outside the courthouse protesting the both of them.\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:04:34] \u003c/em>Exactly.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:04:39] \u003c/em>Well, Rachael, I wanna step back a little bit and talk about this trial and just how we even got here. I mean, remind us who is on trial and what exactly these two are fighting over?\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:04:53] \u003c/em>So it’s a little more than two people. Elon Musk is suing Sam Altman and also Greg Brockman, who is OpenAI’s co-founder and president. 
Musk is suing OpenAI itself and also Microsoft, which invested $13 billion in OpenAI after Musk left.\u003c/p>\n\u003cp>\u003cb>Interviewer: \u003c/b>\u003cem>[00:05:18] \u003c/em>All right, we’re gonna wrap up the day. I’m gonna do a fireside chat with Sam Altman.\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:05:25] \u003c/em>Let’s dial the clock all the way back to 2015. Musk and Altman found OpenAI as a nonprofit explicitly to develop artificial general intelligence safely and for the benefit of all humanity.\u003c/p>\n\u003cp>\u003cb>Sam Altman: \u003c/b>\u003cem>[00:05:44] \u003c/em>You know, I think AI will probably, like most likely, sort of lead to the end of the world, but in the meantime, there will be great companies created with serious machine learning. I actually just agreed to fund a company that is not even really a company, sort of a semi-company, semi-nonprofit, doing AI safety research.\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:06:04] \u003c/em>At some point shortly thereafter, it became clear to all parties involved, including Musk, that they needed to establish a for-profit arm as well in order to raise money to pay for things like computing power for this very energy-intensive computer software and also to bring in talent, to bring the best minds of the industry. Musk’s lawsuit is arguing that thereabouts Altman and other co-founders of OpenAI, because there were other people involved, betrayed the mission, that they were actually in it for the profit.\u003c/p>\n\u003cp>\u003cb>Interviewer: \u003c/b>\u003cem>[00:06:45] \u003c/em>Open AI, I mean you seem somewhat frustrated with them. You were one of the big contributors early on?\u003c/p>\n\u003cp>\u003cb>Elon Musk: \u003c/b>\u003cem>[00:06:49] \u003c/em>The reason, I am the reason Open AI exists.\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:06:54] \u003c/em>So he wants more than his money back. 
He wants Altman and OpenAI’s co-founder and president, Greg Brockman, taken off the board. And he wants $130 billion, disgorged by the for-profit and handed over to the non-profit. The word charity, Ericka, doesn’t appear once in OpenAI’s founding blog post, but Musk keeps referring to OpenAI as a charity. But as OpenAI lawyers like to point out, Musk left OpenAI and then he launched his own AI venture, XAI, which is not a nonprofit and arguably does not operate for the benefit of humanity, for which it has been sued repeatedly.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:07:50] \u003c/em>So it sounds like Elon Musk is basically saying they stole his charity, and Sam Altman is saying, ‘You chose to walk away.’\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:08:02] \u003c/em>Yeah. That’s it in a nutshell. There was this funny moment when Musk was on the witness stand. He looked at the jury and he said, quote, it’s not OK to steal a charity. And then he predicted that if Open AI wins this case, the face of charity law in America could be altered forever. At some point, the judge broke in and said, let’s remind the jury, you’re not a lawyer. She’s talking to Musk. And then he replied. I did take Law 101, which got a laugh out of most people in the court.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:08:36] \u003c/em>Geez.\u003c/p>\n\u003cp>\u003cb>\u003c/b>\u003cem>[00:08:39] \u003c/em>Rachael, what do we make of Sam Altman’s role in this? It sounds like Elon Musk is saying that Sam Altman lied to him.\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:08:46] \u003c/em>That is a very good question. I need to mention here that we have not seen Sam Altman take the stand in this trial yet. So Altman has not yet had the chance to make his case. 
Just a few weeks ago, we saw a comprehensive profile of Sam Altman in the New Yorker magazine talking to lots and lots of people that Sam Altman is an inveterate liar, the kind of person who will tell you what you want to hear and then go back on it. We haven’t had the opportunity yet to really get into what his character was like during the early days of OpenAI, but pretty much everyone in that courtroom has read that article.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:09:47] \u003c/em>Coming up, what the OpenAI trial is really about. Stay with us.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:10:38] \u003c/em>I mean, Rachael, I gotta say, as I’m reading these stories about this case, it really just sounds like a fight between two of some of the richest billionaires in Silicon Valley over this company that they co-founded. But obviously, what’s at the center of it and what is at stake is this very powerful technology that even they seem to acknowledge has the potential to change the world. So what do you think this is really about?\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:11:13] \u003c/em>Clearly about power, clearly about money, clearly about market dominance. And I do want to say that even though the judge is saying we are not going to talk about the AI apocalypse, it is something that is genuinely on the minds of all of these people in the industry in Silicon Valley and also the rest of us, right? I mean there are people here who take AI safety seriously. Who also think OpenAI has drifted dangerously from its mission. I mean, we’ve seen bad actors using the software who have upended the labor market, terrified all of us from a cybersecurity perspective, made it impossible to get redress as a customer and sometimes as a citizen, enabled a surveillance state here and abroad. 
I mean I could go on, Ericka, because It’s 100% clear to us and the people building this software that there’s a race to the bottom going on from a moral perspective.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:12:30] \u003c/em>I do want to ask you this question, Rachel, because Elon Musk is saying in this trial that he is the one standing up for the public on AI. Rachel, is there someone working in the public interest when it comes to AI and holding AI companies accountable?\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:12:53] \u003c/em>Well, you know, don’t we wish? He’d like to present himself as thinking pro-human first, but you know, he also created XAI and has reportedly personally directed his engineers to make XAI a manifestly unsafe product. The judge noted the irony out loud. She said to Musk’s attorneys at one point, It is ironic that your client, despite these risks, is creating a company in the exact same space. And then she added, and I just thought this was so remarkable, coming from, again, a sitting federal judge, quote, I suspect there are people who don’t want to put the future in Mr. Musk’s hands, unquote.\u003c/p>\n\u003cp>\u003cb>Jill Horowitz: \u003c/b>\u003cem>[00:13:44] \u003c/em>And in that sense, I don’t understand why Musk is the one who gets to ask that question. Jill Horowitz, who specifically specializes in non-profit law. At Northwestern’s law school put it this way when parties have this much money and this much power they can trample over conventional protections of the public interest\u003c/p>\n\u003cp>\u003cb>Jill Horowitz: \u003c/b>\u003cem>[00:14:07] \u003c/em>We’ve got a CEO who is a very powerful player. 
And then we have this outside party who’s purported to be thinking about the best interest of the nonprofit, but he’s a competitor.\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:14:22] \u003c/em>Congress, you don’t need me to tell you, hasn’t passed any meaningful federal AI regulation. The Trump administration is lobbying alongside the lobbyists for unfettered freedom for the AI industry. And so we end up here, Ericka, in a federal courthouse in Oakland watching two billionaires fight over their recent past. This trial gives us a window into the wheeling and dealing. But it doesn’t give us any power to change the trajectory of AI.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:14:54] \u003c/em>Rachael what happens if if either Elon Musk or Sam Altman wins this trial?\u003c/p>\n\u003cp>\u003cb>\u003c/b>\u003cem>[00:15:01] \u003c/em>So if Musk wins, Judge Gonzalez-Rogers could order OpenAI to revert to a non-profit structure, remove Altman and Brockman, direct some $130 billion in gains back to the non- profit foundation. That would be legally unprecedented and would certainly send shockwaves throughout Silicon Valley. If OpenAI wins, the restructuring stands, the IPO proceeds. And the message to the industry is essentially, you can do this too. You can take a non-profit, make it nominally in charge of a for-profit arm that you build into a trillion-dollar company, and the legal system won’t stop you.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:15:52] \u003c/em>Last question for you, Rachael. For the protesters outside, what do you think they want to see happen? 
And do they care here about who wins?\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:16:03] \u003c/em>My sense from talking with Ashley Ortiz, who was one of the organizers of the first and biggest protest outside, is that for a lot of the people out here carrying signs and wearing t-shirts that say stop AI, neither Musk nor Altman represents their interests and by extension the public’s interests.\u003c/p>\n\u003cp>\u003cb>Ashley Ortiz: \u003c/b>\u003cem>[00:16:28] \u003c/em>Decision everyone sucks here and y’all both need to take responsibility for your part in this crappy situation.\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:16:34] \u003c/em>They want accountability for AI, period. I don’t know if they actually think they’re gonna get what they’re asking for, but they wanna make a noise while they can.\u003c/p>\n\u003cp>\u003cb>Ashley Ortiz: \u003c/b>\u003cem>[00:16:46] \u003c/em>We’re letting them both know that both sides, no matter which side wins, the people are going to lose because they are not doing this actually for the benefit of humanity. It’s not about ethics. This is all about power plays within an unfettered, unregulated AI scape.\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:17:06] \u003c/em>These are the models that are changing our world, and they’re doing it now. 
And regardless of whether OpenAI survives this trial, we’re still gonna have the world that OpenAI helped to create.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:17:28] \u003c/em>Well, Rachael, thank you so much for chatting with me outside the courtroom and for making the time in your busy morning, I appreciate it.\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:17:36] \u003c/em>You bet.\u003c/p>\n\u003cp>[ad floatright]\u003c/p>\n\u003cp>\u003ci>\u003cspan style=\"font-weight: 400\">Some members of the KQED podcast team are represented by The Screen Actors Guild, American Federation of Television and Radio Artists, San Francisco-Northern California Local.\u003c/span>\u003c/i>\u003c/p>\n\n",
"blocks": [],
"excerpt": "At issue is whether Sam Altman abandoned his founding promise with Elon Musk to develop AI for the benefit of humanity.",
"status": "publish",
"parent": 0,
"modified": 1778097044,
"stats": {
"hasAudio": true,
"hasVideo": false,
"hasChartOrMap": false,
"iframeSrcs": [],
"hasGoogleForm": false,
"hasGallery": false,
"hasHearkenModule": false,
"hasPolis": false,
"paragraphCount": 54,
"wordCount": 2552
},
"headData": {
"title": "Inside Elon Musk and Sam Altman's Battle Over OpenAI | KQED",
"description": "At issue is whether Sam Altman abandoned his founding promise with Elon Musk to develop AI for the benefit of humanity.",
"ogTitle": "",
"ogDescription": "",
"ogImgId": "",
"twTitle": "",
"twDescription": "",
"twImgId": "",
"schema": {
"@context": "https://schema.org",
"@type": "NewsArticle",
"headline": "Inside Elon Musk and Sam Altman's Battle Over OpenAI",
"datePublished": "2026-05-06T03:00:36-07:00",
"dateModified": "2026-05-06T12:50:44-07:00",
"image": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"isAccessibleForFree": "True",
"publisher": {
"@type": "NewsMediaOrganization",
"@id": "https://www.kqed.org/#organization",
"name": "KQED",
"logo": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"url": "https://www.kqed.org",
"sameAs": [
"https://www.facebook.com/KQED",
"https://twitter.com/KQED",
"https://www.instagram.com/kqed/",
"https://www.tiktok.com/@kqedofficial",
"https://www.linkedin.com/company/kqed",
"https://www.youtube.com/channel/UCeC0IOo7i1P_61zVUWbJ4nw"
]
}
}
},
"primaryCategory": {
"termId": 8,
"slug": "news",
"name": "News"
},
"source": "The Bay",
"sourceUrl": "https://www.kqed.org/podcasts/thebay",
"audioUrl": "https://www.podtrac.com/pts/redirect.mp3/traffic.megaphone.fm/KQINC4004396119.mp3",
"sticky": false,
"nprStoryId": "kqed-12082428",
"templateType": "standard",
"featuredImageType": "standard",
"excludeFromSiteSearch": "Include",
"articleAge": "0",
"path": "/news/12082428/inside-sam-altman-and-elon-musks-battle-over-openai",
"audioTrackLength": null,
"parsedContent": [
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003cp>\u003cspan style=\"font-weight: 400\">Jurors and journalists are getting a peek into the world of OpenAI and its founding as two of the richest, most powerful men in tech duke it out in an Oakland federal courthouse. \u003c/span>\u003c/p>\n\u003cp>\u003cspan style=\"font-weight: 400\">Elon Musk claims that Sam Altman and other co-founders of OpenAI abandoned their founding promise to develop AI for the benefit of humanity. But does anyone here really have our best interests at heart? KQED’s Rachael Myrow takes us inside.\u003c/span>\u003c/p>\n\u003cp>\u003cstrong>Links:\u003c/strong>\u003c/p>\n\u003cul>\n\u003cli style=\"list-style-type: none\">\n\u003cul>\n\u003cli>\u003ca href=\"https://www.kqed.org/news/12081290/how-to-unscramble-an-omelet-in-silicon-valley-the-musk-v-altman-trial-that-will-try\">How to Unscramble an Omelet in Silicon Valley: The Musk v. Altman Trial That Will Try\u003c/a>\u003c/li>\n\u003c/ul>\n\u003c/li>\n\u003c/ul>\n\u003cp>\u003c!-- iframe plugin v.4.3 wordpress.org/plugins/iframe/ -->\u003cbr>\n\u003ciframe loading=\"lazy\" frameborder=\"0\" height=\"200\" scrolling=\"no\" src=\"https://playlist.megaphone.fm?e=KQINC4004396119\" width=\"100%\" class=\"iframe-class\">\u003c/iframe>\u003c/p>\n\u003ch3>\u003cstrong>Episode Transcript\u003c/strong>\u003c/h3>\n\u003cp>\u003cem>This is a computer-generated transcript. While our team has reviewed it, there may be errors.\u003c/em>\u003c/p>\n\u003cp>\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "ad",
"attributes": {
"named": {
"label": "fullwidth"
},
"numeric": [
"fullwidth"
]
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:00:49] \u003c/em>I’m Ericka Cruz-Gavarra and welcome to The Bay, local news to keep you rooted. Inside a federal courthouse in downtown Oakland, in front of a judge and a jury of their peers, two of the most powerful men in the world are duking it out in court over whether OpenAI, the company behind ChatGPT, was built on a lie. Elon Musk is suing OpenAI and its CEO, Sam Altman. For abandoning their founding promise to develop AI for the benefit of humanity. And whether or not you actually believe any of them really had our best interests in mind, one thing is true, that the battle over who runs AI is all about ego and power.\u003c/p>\n\u003cp>\u003cb>Ashley Ortiz: \u003c/b>\u003cem>[00:01:47] \u003c/em>No matter which side wins, the people are going to lose because they are not doing this actually for the benefit of humanity, it’s not about ethics, this is all about power plays within an unfettered, unregulated AI scheme.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:02:01] \u003c/em>Today, KQED’s Rachael Myrow takes us inside the OpenAI trial.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:02:19] \u003c/em>It seems like you’re in a pretty dynamic scene right now, Rachael. Can you actually tell us where you are?\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:02:26] \u003c/em>I’m outside the federal courthouse in Oakland where Musk v. Altman et al. Is playing out.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:02:36] \u003c/em>Rachael Myrow is a senior editor at KQED.\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:02:41] \u003c/em>This plaza is right on the street so you hear the chirping every time somebody presses a button to cross the street. You hear garbage trucks rolling past. 
Inside the courtroom is presided over by Judge Yvonne Gonzalez Rogers and it is packed every single day. Armies of lawyers of course but also journalists from across the country, even a couple from France. And some members of the public. I’d like to call this the hottest theater ticket in Silicon Valley. We got to see Elon Musk spend four days on the witness stand. Sam Altman is sitting just a few feet away in the defense section. These two men genuinely cannot stand each other.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:03:30] \u003c/em>And I understand, Rachel, that there’s not just folks inside of the courtroom for this trial, but also outside protesting as well.\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:03:41] \u003c/em>Yes, on the very first day, actually when jury selection was taking place, protesters gathered in large numbers outside the courthouse on the plaza with some very pointed and colorful signs.\u003c/p>\n\u003cp>\u003cb>Valerie Sizemore: \u003c/b>\u003cem>[00:03:57] \u003c/em>I used to be a software engineer, but have been unemployed by AI. So now I’m trying to make the resistance happen.\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:04:06] \u003c/em>I talked to one protester, Valerie Sizemore of Berkeley, who kind of represented, I think, a lot of Bayarians.\u003c/p>\n\u003cp>\u003cb>Valerie Sizemore: \u003c/b>\u003cem>[00:04:15] \u003c/em>I’m not here because I care about the outcome of this trial. I really don’t care. 
I hope it’s really expensive for someone and like hurts both companies as much as possible.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:04:27] \u003c/em>Yeah, and it’s, I guess, two-for-one for her to just be outside the courthouse protesting the both of them.\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:04:34] \u003c/em>Exactly.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:04:39] \u003c/em>Well, Rachael, I wanna step back a little bit and talk about this trial and just how we even got here. I mean, remind us who is on trial and what exactly these two are fighting over?\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:04:53] \u003c/em>So it’s a little more than two people. Elon Musk is suing Sam Altman and also Greg Brockman, who is OpenAI’s co-founder and president. Musk is sueing OpenAI itself and also Microsoft, which invested $13 billion in OpenAI after Musk left.\u003c/p>\n\u003cp>\u003cb>Interviewer: \u003c/b>\u003cem>[00:05:18] \u003c/em>All right, we’re gonna wrap up the day. I’m gonna do a fireside chat with Sam Altman.\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:05:25] \u003c/em>Let’s dial the clock all the way back to 2015. Musk and Altman found OpenAI as a nonprofit explicitly to develop artificial general intelligence safely and for the benefit of all humanity.\u003c/p>\n\u003cp>\u003cb>Sam Altman: \u003c/b>\u003cem>[00:05:44] \u003c/em>You know, I think AI will probably, like most likely, sort of lead to the end of the world, but in the meantime, there will be great companies created with serious machine learning. 
I actually just agreed to fund a company that is not even really a company, sort of a semi-company, semi-nonprofit, doing AI safety research.\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:06:04] \u003c/em>At some point shortly thereafter, it became clear to all parties involved, including Musk, that they needed to establish a for-profit arm as well in order to raise money to pay for things like computing power for this very energy-intensive computer software and also to bring in talent, to bring the best minds of the industry. Musk’s lawsuit is arguing that thereabouts Altman and other co-founders of OpenAI, because there were other people involved, betrayed the mission, that they were actually in it for the profit.\u003c/p>\n\u003cp>\u003cb>Interviewer: \u003c/b>\u003cem>[00:06:45] \u003c/em>Open AI, I mean you seem somewhat frustrated with them. You were one of the big contributors early on?\u003c/p>\n\u003cp>\u003cb>Elon Musk: \u003c/b>\u003cem>[00:06:49] \u003c/em>The reason, I am the reason Open AI exists.\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:06:54] \u003c/em>So he wants more than his money back. He wants Altman and OpenAI’s co-founder and president, Greg Brockman, taken off the board. And he wants $130 billion, disgorged by the for-profit and handed over to the non-profit. The word charity, Ericka, doesn’t appear once in OpenAI’s founding blog post, but Musk keeps referring to OpenAI as a charity. But as OpenAI lawyers like to point out, Musk left OpenAI and then he launched his own AI venture, XAI, which is not a nonprofit and arguably does not operate for the benefit of humanity, for which it has been sued repeatedly.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:07:50] \u003c/em>So it sounds like Elon Musk is basically saying they stole his charity, and Sam Altman is saying, ‘You chose to walk away.’\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:08:02] \u003c/em>Yeah. 
That’s it in a nutshell. There was this funny moment when Musk was on the witness stand. He looked at the jury and he said, quote, it’s not OK to steal a charity. And then he predicted that if Open AI wins this case, the face of charity law in America could be altered forever. At some point, the judge broke in and said, let’s remind the jury, you’re not a lawyer. She’s talking to Musk. And then he replied. I did take Law 101, which got a laugh out of most people in the court.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:08:36] \u003c/em>Geez.\u003c/p>\n\u003cp>\u003cb>\u003c/b>\u003cem>[00:08:39] \u003c/em>Rachael, what do we make of Sam Altman’s role in this? It sounds like Elon Musk is saying that Sam Altman lied to him.\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:08:46] \u003c/em>That is a very good question. I need to mention here that we have not seen Sam Altman take the stand in this trial yet. So Altman has not yet had the chance to make his case. Just a few weeks ago, we saw a comprehensive profile of Sam Altman in the New Yorker magazine talking to lots and lots of people that Sam Altmann is an inveterate liar, the kind of person who will tell you what you want to hear and then go back on it. We haven’t had the opportunity yet to really get into what his character was like during the early days of OpenAI, but pretty much everyone in that courtroom has read that article.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:09:47] \u003c/em>Coming up, what the OpenAI trial is really about. Stay with us.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:10:38] \u003c/em>I mean, Rachael, I gotta say, as I’m reading these stories about this case, it really just sounds like a fight between two of some of the richest billionaires in Silicon Valley over this company that they co-founded. 
But obviously, what’s at the center of it and what is at stake is this very powerful technology that even they seem to acknowledge has the potential to change the world. So what do you think this is really about?\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:11:13] \u003c/em>Clearly about power, clearly about money, clearly about market dominance. And I do want to say that even though the judge is saying we are not going to talk about the AI apocalypse, it is something that is genuinely on the minds of all of these people in the industry in Silicon Valley and also the rest of us, right? I mean there are people here who take AI safety seriously. Who also think OpenAI has drifted dangerously from its mission. I mean, we’ve seen bad actors using the software who have upended the labor market, terrified all of us from a cybersecurity perspective, made it impossible to get redress as a customer and sometimes as a citizen, enabled a surveillance state here and abroad. I mean I could go on, Ericka, because It’s 100% clear to us and the people building this software that there’s a race to the bottom going on from a moral perspective.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:12:30] \u003c/em>I do want to ask you this question, Rachel, because Elon Musk is saying in this trial that he is the one standing up for the public on AI. Rachel, is there someone working in the public interest when it comes to AI and holding AI companies accountable?\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:12:53] \u003c/em>Well, you know, don’t we wish? He’d like to present himself as thinking pro-human first, but you know, he also created XAI and has reportedly personally directed his engineers to make XAI a manifestly unsafe product. The judge noted the irony out loud. She said to Musk’s attorneys at one point, It is ironic that your client, despite these risks, is creating a company in the exact same space. 
And then she added, and I just thought this was so remarkable, coming from, again, a sitting federal judge, quote, I suspect there are people who don’t want to put the future in Mr. Musk’s hands, unquote.\u003c/p>\n\u003cp>\u003cb>Jill Horowitz: \u003c/b>\u003cem>[00:13:44] \u003c/em>And in that sense, I don’t understand why Musk is the one who gets to ask that question. Jill Horowitz, who specifically specializes in non-profit law. At Northwestern’s law school put it this way when parties have this much money and this much power they can trample over conventional protections of the public interest\u003c/p>\n\u003cp>\u003cb>Jill Horowitz: \u003c/b>\u003cem>[00:14:07] \u003c/em>We’ve got a CEO who is a very powerful player. And then we have this outside party who’s purported to be thinking about the best interest of the nonprofit, but he’s a competitor.\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:14:22] \u003c/em>Congress, you don’t need me to tell you, hasn’t passed any meaningful federal AI regulation. The Trump administration is lobbying alongside the lobbyists for unfettered freedom for the AI industry. And so we end up here, Ericka, in a federal courthouse in Oakland watching two billionaires fight over their recent past. This trial gives us a window into the wheeling and dealing. But it doesn’t give us any power to change the trajectory of AI.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:14:54] \u003c/em>Rachael what happens if if either Elon Musk or Sam Altman wins this trial?\u003c/p>\n\u003cp>\u003cb>\u003c/b>\u003cem>[00:15:01] \u003c/em>So if Musk wins, Judge Gonzalez-Rogers could order OpenAI to revert to a non-profit structure, remove Altman and Brockman, direct some $130 billion in gains back to the non- profit foundation. That would be legally unprecedented and would certainly send shockwaves throughout Silicon Valley. If OpenAI wins, the restructuring stands, the IPO proceeds. 
And the message to the industry is essentially, you can do this too. You can take a non-profit, make it nominally in charge of a for-profit arm that you build into a trillion-dollar company, and the legal system won’t stop you.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:15:52] \u003c/em>Last question for you, Rachael. For the protesters outside, what do you think they want to see happen? And do you they care here about who wins?\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:16:03] \u003c/em>My sense from talking with Ashley Ortiz, who was one of the organizers of the first and biggest protest outside, is that for a lot of the people out here carrying signs and wearing t-shirts that say stop AI, neither Musk nor Altman represents their interests and by extension the public’s interests.\u003c/p>\n\u003cp>\u003cb>Ashley Ortiz: \u003c/b>\u003cem>[00:16:28] \u003c/em>Decision everyone sucks here and y’all both need to take responsibility for your part in this crappy situation.\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:16:34] \u003c/em>They want accountability for AI, period. I don’t know if they actually think they’re gonna get what they’re asking for, but they wanna make a noise while they can.\u003c/p>\n\u003cp>\u003cb>Ashley Ortiz: \u003c/b>\u003cem>[00:16:46] \u003c/em>We’re letting them both know that both sides, no matter which side wins, the people are going to lose because they are not doing this actually for the benefit of humanity. It’s not about ethics. This is all about power plays within an unfettered, unregulated AI scape.\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:17:06] \u003c/em>These are the models that are changing our world, and they’re doing it now. 
And regardless of whether OpenAI survives this trial, we’re still gonna have the world that OpenAI helped to create.\u003c/p>\n\u003cp>\u003cb>Ericka Cruz Guevarra: \u003c/b>\u003cem>[00:17:28] \u003c/em>Well, Rachael, thank you so much for chatting with me outside the courtroom and for making the time in your busy morning, I appreciate it.\u003c/p>\n\u003cp>\u003cb>Rachael Myrow: \u003c/b>\u003cem>[00:17:36] \u003c/em>You bet.\u003c/p>\n\u003cp>\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "ad",
"attributes": {
"named": {
"label": "floatright"
},
"numeric": [
"floatright"
]
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003c/p>\n\u003cp>\u003ci>\u003cspan style=\"font-weight: 400\">Some members of the KQED podcast team are represented by The Screen Actors Guild, American Federation of Television and Radio Artists, San Francisco-Northern California Local.\u003c/span>\u003c/i>\u003c/p>\n\n\u003c/div>\u003c/p>",
"attributes": {
"named": {},
"numeric": []
}
}
],
"link": "/news/12082428/inside-sam-altman-and-elon-musks-battle-over-openai",
"authors": [
"8654",
"251",
"11649",
"11831"
],
"categories": [
"news_8",
"news_248"
],
"tags": [
"news_34755",
"news_32668",
"news_3897",
"news_36810",
"news_33812",
"news_35758",
"news_22598"
],
"featImg": "news_12082344",
"label": "source_news_12082428"
},
"news_12082064": {
"type": "posts",
"id": "news_12082064",
"meta": {
"index": "posts_1716263798",
"site": "news",
"id": "12082064",
"score": null,
"sort": [
1777676594000
]
},
"guestAuthors": [],
"slug": "openai-back-in-court-over-canada-school-shooters-use-of-chatgpt",
"title": "OpenAI Back in Court Over Canada School Shooter’s Use of ChatGPT",
"publishDate": 1777676594,
"format": "standard",
"headTitle": "OpenAI Back in Court Over Canada School Shooter’s Use of ChatGPT | KQED",
"labelTerm": {
"site": "news"
},
"content": "\u003cp>The families of victims of a school shooting in a British Columbia town sued artificial intelligence company \u003ca href=\"https://www.kqed.org/news/tag/open-ai\">OpenAI \u003c/a>in a San Francisco court this week, alleging that the company behind \u003ca href=\"https://www.kqed.org/news/tag/chatgpt\">ChatGPT\u003c/a> failed to alert police of the shooter’s alarming interactions with the chatbot.\u003c/p>\n\u003cp>One of the lawsuits was filed on behalf of Shannda Aviugana-Durand, an education assistant who was shot and killed in a library at \u003ca href=\"https://docs.google.com/document/d/1BU49CY30r0KCfBs0NJuk5S0KJ2E5VEuIF2IpxdwviIo/edit?tab=t.0\">Tumbler Ridge Secondary School\u003c/a>. The suit alleges negligence, aiding and abetting a mass shooting, wrongful death and liability, among other claims. According to the lawsuit, Aviugana-Durand’s daughter was present at the time of the attack.\u003c/p>\n\u003cp>The educational assistant was one of six people who were killed by an 18-year-old in February. The teen — who later shot herself — also killed her mother and her 11-year-old half-brother at home beforehand. Twenty-five people were also injured in the attack, Canada’s deadliest mass shooting in years.\u003c/p>\n\u003cp>[ad fullwidth]\u003c/p>\n\u003cp>Another lawsuit was filed Wednesday on behalf of 12-year-old Maya Gebala, who was critically injured in the February shooting. The plaintiffs’ attorney, Jay Edelson, said in an interview with the \u003cem>Associated Press\u003c/em> that decisions made by OpenAI and its CEO Sam Altman “have destroyed the town. 
The people are really resilient, but what happened is unimaginable.”\u003c/p>\n\u003cp>Altman sent a letter last week \u003ca href=\"https://apnews.com/article/openai-altman-tumbler-ridge-killings-apology-dec2adaad3946583519370eede6a99e2\">formally apologizing\u003c/a> to the community that his company did not notify law enforcement about the shooter’s online behavior in the weeks leading up to the attack.\u003c/p>\n\u003cp>The case highlights concerns about the harms posed by \u003ca href=\"https://apnews.com/article/ai-sycophancy-chatbots-science-study-8dc61e69278b661cab1e53d38b4173b6\">overly agreeable AI chatbots\u003c/a> and what obligations the tech industry has to control them or notify authorities about planned violence by chatbot users. This month, \u003ca href=\"https://apnews.com/article/missing-grad-students-florida-6279adeef3d0540865de39ab3d6f8093\">prosecutors investigating the deaths\u003c/a> of two University of South Florida doctoral students said that the suspect asked ChatGPT about body disposal in the lead-up to the students’ disappearance.\u003c/p>\n\u003cfigure id=\"attachment_12079761\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12079761 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/SamAltmanGetty2.jpg\" alt=\"\" width=\"2000\" height=\"1333\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/SamAltmanGetty2.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/SamAltmanGetty2-160x107.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/SamAltmanGetty2-1536x1024.jpg 1536w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">OpenAI CEO Sam Altman speaks during the BlackRock Infrastructure Summit on March 11, 2026, in Washington, D.C. 
\u003ccite>(Anna Moneymaker/Getty Images)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>“It’s not the first lawsuit of its kind,” said Robin Feldman, law professor at UC Law San Francisco and director of its AI Law and Innovation Institute. “This is part of an early wave of lawsuits in which citizens are asking to hold LLMs responsible for harms that happen down the line, whether they are crimes, mental health problems, suicide.”\u003c/p>\n\u003cp>“ChatGPT was first on the scene. And it is the most widely known of the LLMs,” Feldman said. “That puts it in the hot seat as the law tries to understand how to wrangle this unusual beast.”\u003c/p>\n\u003cp>In response to the lawsuit, OpenAI said in a written statement that the “events in Tumbler Ridge are a tragedy. We have a zero-tolerance policy for using our tools to assist in committing violence.”[aside postID=news_12081916 hero='https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/AP26118555622828-2000x1333.jpg']“As we shared with Canadian officials, we have already strengthened our safeguards, including improving how ChatGPT responds to signs of distress, connecting people with local support and mental health resources, strengthening how we assess and escalate potential threats of violence, and improving detection of repeat policy violators,” the company said.\u003c/p>\n\u003cp>Edelson, a Chicago-based lawyer known for taking on the tech industry, is already juggling a number of high-profile cases against OpenAI, including from the family of a California teenager who killed himself after \u003ca href=\"https://apnews.com/article/ai-chatbot-teens-congress-chatgpt-character-ce3959b6a3ea1a4997bf1ccabb4f0de2\">conversations with ChatGPT\u003c/a> and another from the heirs of an 83-year-old Connecticut woman \u003ca href=\"https://apnews.com/article/ai-chatgpt-wrongful-death-lawsuit-greenwich-97fd7da31c0fa08f3d3ea9efd6713151\">killed by her son\u003c/a> after ChatGPT allegedly amplified the man’s “paranoid 
delusions.”\u003c/p>\n\u003cp>“This is not a passive technology,” Edelson said, comparing the chatbot interactions with a more conventional online search for information. “What we’ve seen in the past is that (for) people who are mentally ill, the chatbot will validate what they’re saying and then amplify what they’re saying.”\u003c/p>\n\u003cp>Last week, Edelson visited the small town of Tumbler Ridge and met with dozens of people in the basement of a visitor center. He also visited Gebala at a children’s hospital in Vancouver, where she remains hospitalized and seemed alert but unable to speak.\u003c/p>\n\u003cp>“It was so heartbreaking,” he said.\u003c/p>\n\u003cfigure id=\"attachment_12082198\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12082198 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty2.jpg\" alt=\"\" width=\"2000\" height=\"1333\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty2.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty2-160x107.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty2-1536x1024.jpg 1536w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Candles, flowers, photographs, plush toys and other items at a makeshift memorial for the victims four days after a deadly mass shooting took place at a school, in the town of Tumbler Ridge, British Columbia, Canada, on Feb. 13, 2026. 
\u003ccite>(Paige Taylor White/AFP via Getty Images)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>The lawsuits filed Wednesday also represent the families of the five slain children targeted in the school shooting: Zoey Benoit, Abel Mwansa Jr., Ticaria “Tiki” Lampert and Kylie Smith, all 12, and Ezekiel Schofield, 13.\u003c/p>\n\u003cp>After the shootings, OpenAI came forward to say that last June, the company flagged the shooter’s account as having been used to discuss violence against other people.\u003c/p>\n\u003cp>The company said it considered whether to refer the account to the Royal Canadian Mounted Police, but determined at the time that the account activity didn’t meet a threshold for referral to law enforcement. OpenAI banned the account in June for violating its usage policy.[aside postID=news_12080610 hero='https://cdn.kqed.org/wp-content/uploads/sites/10/2024/06/GettyImages-2155035557-1020x680.jpg']The lawsuits filed Wednesday allege “the victims didn’t learn this because OpenAI was forthcoming, but because \u003ca href=\"https://www.wsj.com/us-news/law/openai-employees-raised-alarms-about-canada-shooting-suspect-months-ago-b585df62\">its own employees leaked it to \u003cem>The Wall Street Journal\u003c/em>\u003c/a> after they could no longer stomach the company’s silence.”\u003c/p>\n\u003cp>In \u003ca href=\"https://tumblerridgelines.com/2026/04/24/openai-apologizes-to-tumbler-ridge/\">his letter\u003c/a>, Altman said he was “deeply sorry that we did not alert law enforcement to the account that was banned in June.”\u003c/p>\n\u003cp>“While I know words can never be enough, I believe an apology is necessary to recognize the harm and irreversible loss your community has suffered,” Altman wrote.\u003c/p>\n\u003cp>British Columbia Premier David Eby, \u003ca href=\"https://x.com/dave_eby/status/2047751590803886291?s=46&t=7BBzFwo6eYLzJIVfAlumEQ\">in a social media post\u003c/a>, called the apology “necessary, and yet grossly insufficient for the 
devastation done to the families of Tumbler Ridge.”\u003c/p>\n\u003cp>The Gebala lawsuit accuses OpenAI of negligence involving a failure to warn law enforcement and “aiding and abetting a mass shooting.”\u003c/p>\n\u003cp>Along with damages, the Gebala lawsuit seeks a court order that would require OpenAI to ban users from ChatGPT if their accounts were deactivated for violent misuse, and to require the company to alert law enforcement when its systems identify someone who poses a “real-world risk of violence.”\u003c/p>\n\u003cp>An earlier case was filed in a court in British Columbia, but a team of lawyers in both countries is seeking to bring the affiliated cases to San Francisco, where OpenAI is headquartered.\u003c/p>\n\u003ch2>‘Untried territory’\u003c/h2>\n\u003cp>Feldman called reports that the company flagged the risk but failed to act effectively “deeply troubling.”\u003c/p>\n\u003cp>“As with so much about AI, the lawsuit will take us into untried territory,” she said. “The old doctrines are being applied to new circumstances.”\u003c/p>\n\u003cp>She said if the families were to win, the company would have to pay damages and assume responsibility for altering its platform to identify and respond to risks.\u003c/p>\n\u003cp>The major issues that the lawsuit will tackle are whether OpenAI and ChatGPT are protected by the First Amendment and whether or not OpenAI had “a duty to act,” she said.\u003c/p>\n\u003cfigure id=\"attachment_12082201\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12082201 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty3.jpg\" alt=\"\" width=\"2000\" height=\"1333\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty3.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty3-160x107.jpg 160w, 
https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty3-1536x1024.jpg 1536w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Community members attend a vigil to honor the victims of one of Canada’s deadliest mass shootings in Tumbler Ridge, British Columbia, Canada, on Feb. 13, 2026. \u003ccite>(Paige Taylor White/AFP via Getty Images)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>She said that there are \u003ca href=\"https://www.congress.gov/crs-product/R46751\">parts\u003c/a> of U.S. law that shield tech companies from liability for content that their users host. Essentially, this means platforms are more like “bulletin boards” and “are not responsible for the content.”\u003c/p>\n\u003cp>But this case would raise the question, she said, “Are LLMs like a bulletin board or publisher? Or they like a facilitator who helped with the crime?”\u003c/p>\n\u003cp>Some companies struggle with the burden of responsibility when reviewing potential threats to public safety, Feldman said, “If they try to help out, they can be viewed as accepting the mantle of responsibility.”\u003c/p>\n\u003cp>According to Feldman, families are also likely to argue that the LLM “is a defective product without appropriate safeguards.\u003c/p>\n\u003cp>“In that case, the question is the following: ‘Is the LLM a defective product, or merely a product that was used improperly? And is it analogous to a product at all?”\u003c/p>\n\u003cp>“All of these are tough questions as we enter the age of AI, and the courts are just beginning to explore them,” Feldman said.\u003c/p>\n\u003cp>\u003cem>The Associated Press’ Jim Morris contributed to this story.\u003c/em>\u003c/p>\n\u003cp>[ad floatright]\u003c/p>\n",
"blocks": [],
"excerpt": "The lawsuit alleges negligence and wrongful death on account of the shooter’s interactions with the chatbot in the weeks and months leading up to the fatal attack.",
"status": "publish",
"parent": 0,
"modified": 1777678175,
"stats": {
"hasAudio": false,
"hasVideo": false,
"hasChartOrMap": false,
"iframeSrcs": [],
"hasGoogleForm": false,
"hasGallery": false,
"hasHearkenModule": false,
"hasPolis": false,
"paragraphCount": 35,
"wordCount": 1495
},
"headData": {
"title": "OpenAI Back in Court Over Canada School Shooter’s Use of ChatGPT | KQED",
"description": "The lawsuit alleges negligence and wrongful death on account of the shooter’s interactions with the chatbot in the weeks and months leading up to the fatal attack.",
"ogTitle": "",
"ogDescription": "",
"ogImgId": "",
"twTitle": "",
"twDescription": "",
"twImgId": "",
"schema": {
"@context": "https://schema.org",
"@type": "NewsArticle",
"headline": "OpenAI Back in Court Over Canada School Shooter’s Use of ChatGPT",
"datePublished": "2026-05-01T16:03:14-07:00",
"dateModified": "2026-05-01T16:29:35-07:00",
"image": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"isAccessibleForFree": "True",
"publisher": {
"@type": "NewsMediaOrganization",
"@id": "https://www.kqed.org/#organization",
"name": "KQED",
"logo": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"url": "https://www.kqed.org",
"sameAs": [
"https://www.facebook.com/KQED",
"https://twitter.com/KQED",
"https://www.instagram.com/kqed/",
"https://www.tiktok.com/@kqedofficial",
"https://www.linkedin.com/company/kqed",
"https://www.youtube.com/channel/UCeC0IOo7i1P_61zVUWbJ4nw"
]
}
}
},
"primaryCategory": {
"termId": 34167,
"slug": "criminal-justice",
"name": "Criminal Justice"
},
"sticky": false,
"nprByline": "Matt O’Brien, Associated Press, and Nisa Khan, KQED",
"nprStoryId": "kqed-12082064",
"templateType": "standard",
"featuredImageType": "standard",
"excludeFromSiteSearch": "Include",
"showOnAuthorArchivePages": "Yes",
"articleAge": "0",
"path": "/news/12082064/openai-back-in-court-over-canada-school-shooters-use-of-chatgpt",
"audioTrackLength": null,
"parsedContent": [
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003cp>The families of victims of a school shooting in a British Columbia town sued artificial intelligence company \u003ca href=\"https://www.kqed.org/news/tag/open-ai\">OpenAI \u003c/a>in a San Francisco court this week, alleging that the company behind \u003ca href=\"https://www.kqed.org/news/tag/chatgpt\">ChatGPT\u003c/a> failed to alert police of the shooter’s alarming interactions with the chatbot.\u003c/p>\n\u003cp>One of the lawsuits was filed on behalf of Shannda Aviugana-Durand, an education assistant who was shot and killed in a library at \u003ca href=\"https://docs.google.com/document/d/1BU49CY30r0KCfBs0NJuk5S0KJ2E5VEuIF2IpxdwviIo/edit?tab=t.0\">Tumbler Ridge Secondary School\u003c/a>. The suit alleges negligence, aiding and abetting a mass shooting, wrongful death and liability, among other claims. According to the lawsuit, Aviugana-Durand’s daughter was present at the time of the attack.\u003c/p>\n\u003cp>The educational assistant was one of six people who were killed by an 18-year-old in February. The teen — who later shot herself — also killed her mother and her 11-year-old half-brother at home beforehand. Twenty-five people were also injured in the attack, Canada’s deadliest mass shooting in years.\u003c/p>\n\u003cp>\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "ad",
"attributes": {
"named": {
"label": "fullwidth"
},
"numeric": [
"fullwidth"
]
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003c/p>\n\u003cp>Another lawsuit was filed Wednesday on behalf of 12-year-old Maya Gebala, who was critically injured in the February shooting. The plaintiffs’ attorney, Jay Edelson, said in an interview with the \u003cem>Associated Press\u003c/em> that decisions made by OpenAI and its CEO Sam Altman “have destroyed the town. The people are really resilient, but what happened is unimaginable.”\u003c/p>\n\u003cp>Altman sent a letter last week \u003ca href=\"https://apnews.com/article/openai-altman-tumbler-ridge-killings-apology-dec2adaad3946583519370eede6a99e2\">formally apologizing\u003c/a> to the community that his company did not notify law enforcement about the shooter’s online behavior in the weeks leading up to the attack.\u003c/p>\n\u003cp>The case highlights concerns about the harms posed by \u003ca href=\"https://apnews.com/article/ai-sycophancy-chatbots-science-study-8dc61e69278b661cab1e53d38b4173b6\">overly agreeable AI chatbots\u003c/a> and what obligations the tech industry has to control them or notify authorities about planned violence by chatbot users. 
This month, \u003ca href=\"https://apnews.com/article/missing-grad-students-florida-6279adeef3d0540865de39ab3d6f8093\">prosecutors investigating the deaths\u003c/a> of two University of South Florida doctoral students said that the suspect asked ChatGPT about body disposal in the lead-up to the students’ disappearance.\u003c/p>\n\u003cfigure id=\"attachment_12079761\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12079761 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/SamAltmanGetty2.jpg\" alt=\"\" width=\"2000\" height=\"1333\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/SamAltmanGetty2.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/SamAltmanGetty2-160x107.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/SamAltmanGetty2-1536x1024.jpg 1536w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">OpenAI CEO Sam Altman speaks during the BlackRock Infrastructure Summit on March 11, 2026, in Washington, D.C. \u003ccite>(Anna Moneymaker/Getty Images)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>“It’s not the first lawsuit of its kind,” said Robin Feldman, law professor at UC Law San Francisco and director of its AI Law and Innovation Institute. “This is part of an early wave of lawsuits in which citizens are asking to hold LLMs responsible for harms that happen down the line, whether they are crimes, mental health problems, suicide.”\u003c/p>\n\u003cp>“ChatGPT was first on the scene. And it is the most widely known of the LLMs,” Feldman said. “That puts it in the hot seat as the law tries to understand how to wrangle this unusual beast.”\u003c/p>\n\u003cp>In response to the lawsuit, OpenAI said in a written statement that the “events in Tumbler Ridge are a tragedy. 
We have a zero-tolerance policy for using our tools to assist in committing violence.”\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "aside",
"attributes": {
"named": {
"postid": "news_12081916",
"hero": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/AP26118555622828-2000x1333.jpg",
"label": ""
},
"numeric": []
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>“As we shared with Canadian officials, we have already strengthened our safeguards, including improving how ChatGPT responds to signs of distress, connecting people with local support and mental health resources, strengthening how we assess and escalate potential threats of violence, and improving detection of repeat policy violators,” the company said.\u003c/p>\n\u003cp>Edelson, a Chicago-based lawyer known for taking on the tech industry, is already juggling a number of high-profile cases against OpenAI, including from the family of a California teenager who killed himself after \u003ca href=\"https://apnews.com/article/ai-chatbot-teens-congress-chatgpt-character-ce3959b6a3ea1a4997bf1ccabb4f0de2\">conversations with ChatGPT\u003c/a> and another from the heirs of an 83-year-old Connecticut woman \u003ca href=\"https://apnews.com/article/ai-chatgpt-wrongful-death-lawsuit-greenwich-97fd7da31c0fa08f3d3ea9efd6713151\">killed by her son\u003c/a> after ChatGPT allegedly amplified the man’s “paranoid delusions.”\u003c/p>\n\u003cp>“This is not a passive technology,” Edelson said, comparing the chatbot interactions with a more conventional online search for information. “What we’ve seen in the past is that (for) people who are mentally ill, the chatbot will validate what they’re saying and then amplify what they’re saying.”\u003c/p>\n\u003cp>Last week, Edelson visited the small town of Tumbler Ridge and met with dozens of people in the basement of a visitor center. 
He also visited Gebala at a children’s hospital in Vancouver, where she remains hospitalized and seemed alert but unable to speak.\u003c/p>\n\u003cp>“It was so heartbreaking,” he said.\u003c/p>\n\u003cfigure id=\"attachment_12082198\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12082198 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty2.jpg\" alt=\"\" width=\"2000\" height=\"1333\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty2.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty2-160x107.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty2-1536x1024.jpg 1536w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Candles, flowers, photographs, plush toys and other items at a makeshift memorial for the victims four days after a deadly mass shooting took place at a school, in the town of Tumbler Ridge, British Columbia, Canada, on Feb. 13, 2026. \u003ccite>(Paige Taylor White/AFP via Getty Images)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>The lawsuits filed Wednesday also represent the families of the five slain children targeted in the school shooting: Zoey Benoit, Abel Mwansa Jr., Ticaria “Tiki” Lampert and Kylie Smith, all 12, and Ezekiel Schofield, 13.\u003c/p>\n\u003cp>After the shootings, OpenAI came forward to say that last June, the company flagged the shooter’s account as having been used to discuss violence against other people.\u003c/p>\n\u003cp>The company said it considered whether to refer the account to the Royal Canadian Mounted Police, but determined at the time that the account activity didn’t meet a threshold for referral to law enforcement. OpenAI banned the account in June for violating its usage policy.\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "aside",
"attributes": {
"named": {
"postid": "news_12080610",
"hero": "https://cdn.kqed.org/wp-content/uploads/sites/10/2024/06/GettyImages-2155035557-1020x680.jpg",
"label": ""
},
"numeric": []
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>The lawsuits filed Wednesday allege “the victims didn’t learn this because OpenAI was forthcoming, but because \u003ca href=\"https://www.wsj.com/us-news/law/openai-employees-raised-alarms-about-canada-shooting-suspect-months-ago-b585df62\">its own employees leaked it to \u003cem>The Wall Street Journal\u003c/em>\u003c/a> after they could no longer stomach the company’s silence.”\u003c/p>\n\u003cp>In \u003ca href=\"https://tumblerridgelines.com/2026/04/24/openai-apologizes-to-tumbler-ridge/\">his letter\u003c/a>, Altman said he was “deeply sorry that we did not alert law enforcement to the account that was banned in June.”\u003c/p>\n\u003cp>“While I know words can never be enough, I believe an apology is necessary to recognize the harm and irreversible loss your community has suffered,” Altman wrote.\u003c/p>\n\u003cp>British Columbia Premier David Eby, \u003ca href=\"https://x.com/dave_eby/status/2047751590803886291?s=46&t=7BBzFwo6eYLzJIVfAlumEQ\">in a social media post\u003c/a>, called the apology “necessary, and yet grossly insufficient for the devastation done to the families of Tumbler Ridge.”\u003c/p>\n\u003cp>The Gebala lawsuit accuses OpenAI of negligence involving a failure to warn law enforcement and “aiding and abetting a mass shooting.”\u003c/p>\n\u003cp>Along with damages, the Gebala lawsuit seeks a court order that would require OpenAI to ban users from ChatGPT if their accounts were deactivated for violent misuse, and to require the company to alert law enforcement when its systems identify someone who poses a “real-world risk of violence.”\u003c/p>\n\u003cp>An earlier case was filed in a court in British Columbia, but a team of lawyers in both countries is seeking to bring the affiliated cases to San Francisco, where OpenAI is headquartered.\u003c/p>\n\u003ch2>‘Untried territory’\u003c/h2>\n\u003cp>Feldman called reports that the company flagged the risk but failed to act effectively “deeply 
troubling.”\u003c/p>\n\u003cp>“As with so much about AI, the lawsuit will take us into untried territory,” she said. “The old doctrines are being applied to new circumstances.”\u003c/p>\n\u003cp>She said if the families were to win, the company would have to pay damages and assume responsibility for altering its platform to identify and respond to risks.\u003c/p>\n\u003cp>The major issues that the lawsuit will tackle are whether OpenAI and ChatGPT are protected by the First Amendment and whether or not OpenAI had “a duty to act,” she said.\u003c/p>\n\u003cfigure id=\"attachment_12082201\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12082201 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty3.jpg\" alt=\"\" width=\"2000\" height=\"1333\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty3.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty3-160x107.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/05/TumblerRidgeGetty3-1536x1024.jpg 1536w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Community members attend a vigil to honor the victims of one of Canada’s deadliest mass shootings in Tumbler Ridge, British Columbia, Canada, on Feb. 13, 2026. \u003ccite>(Paige Taylor White/AFP via Getty Images)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>She said that there are \u003ca href=\"https://www.congress.gov/crs-product/R46751\">parts\u003c/a> of U.S. law that shield tech companies from liability for content that their users host. Essentially, this means platforms are more like “bulletin boards” and “are not responsible for the content.”\u003c/p>\n\u003cp>But this case would raise the question, she said, “Are LLMs like a bulletin board or publisher? 
Or are they like a facilitator who helped with the crime?”\u003c/p>\n\u003cp>Some companies struggle with the burden of responsibility when reviewing potential threats to public safety, Feldman said, “If they try to help out, they can be viewed as accepting the mantle of responsibility.”\u003c/p>\n\u003cp>According to Feldman, families are also likely to argue that the LLM “is a defective product without appropriate safeguards.\u003c/p>\n\u003cp>“In that case, the question is the following: ‘Is the LLM a defective product, or merely a product that was used improperly? And is it analogous to a product at all?”\u003c/p>\n\u003cp>“All of these are tough questions as we enter the age of AI, and the courts are just beginning to explore them,” Feldman said.\u003c/p>\n\u003cp>\u003cem>The Associated Press’ Jim Morris contributed to this story.\u003c/em>\u003c/p>\n\u003cp>\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "ad",
"attributes": {
"named": {
"label": "floatright"
},
"numeric": [
"floatright"
]
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003c/p>\n\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
}
],
"link": "/news/12082064/openai-back-in-court-over-canada-school-shooters-use-of-chatgpt",
"authors": [
"byline_news_12082064"
],
"categories": [
"news_34167",
"news_28250",
"news_8"
],
"tags": [
"news_34755",
"news_1386",
"news_32668",
"news_17725",
"news_22434",
"news_35784",
"news_33542",
"news_33543",
"news_38"
],
"featImg": "news_12082068",
"label": "news"
},
"news_12081798": {
"type": "posts",
"id": "news_12081798",
"meta": {
"index": "posts_1716263798",
"site": "news",
"id": "12081798",
"score": null,
"sort": [
1777507270000
]
},
"guestAuthors": [],
"slug": "elon-musk-says-sam-altman-tricked-him-into-funding-openai",
"title": "Elon Musk Says Sam Altman Tricked Him Into Funding OpenAI",
"publishDate": 1777507270,
"format": "standard",
"headTitle": "Elon Musk Says Sam Altman Tricked Him Into Funding OpenAI | KQED",
"labelTerm": {
"site": "news"
},
"content": "\u003cp>During the second day of the \u003ca href=\"https://www.kqed.org/news/12081603/elon-musk-takes-aim-at-openai-as-trial-begins-its-not-ok-to-steal-a-charity\">landmark trial between Sam Altman and Elon Musk\u003c/a>, the Tesla founder told the Oakland courthouse that he was a “fool” to fund OpenAI through its early years.\u003c/p>\n\u003cp>Testifying in the lawsuit he brought against Altman, which claims the company’s creators betrayed their mission for profits, Musk suggested Wednesday that Altman and cofounder Greg Brockman wanted to “have your cake and eat it too.”\u003c/p>\n\u003cp>“If you go nonprofit, you’ve got a sort of moral high ground,” he testified.\u003c/p>\n\u003cp>Musk’s testimony tells one version of founding OpenAI: that he, fearing the dangers of artificial intelligence, pursued its development with the goal of benefiting the common good, alongside, he thought, like-minded collaborators. But behind the scenes, those cofounders engaged in a “long con” to profit at his expense.\u003c/p>\n\u003cp>[ad fullwidth]\u003c/p>\n\u003cp>“What they really wanted was a for-profit, where they could make as much money as possible,” Musk said later.\u003c/p>\n\u003cp>Whether the jury believes him will be integral to the decision they’re tasked with making, as they determine whether OpenAI breached charitable trust and engaged in unjust enrichment as it evolved from a nonprofit organization to its current $730 billion iteration.\u003c/p>\n\u003cp>Under cross-examination, Altman’s attorney, William Savitt, questioned Musk’s story and credibility as an altruistic benefactor. 
He pointed to an email Musk sent to Altman in 2015, which said it would be “probably better” if OpenAI operated as a for-profit company with a parallel nonprofit.\u003c/p>\n\u003cfigure id=\"attachment_12081637\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12081637\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">OpenAI’s lead counsel, William Savitt, presents opening statements in the trial in which Elon Musk claims that Sam Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit, in Oakland on April 28, 2026. \u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>In another email sent to colleagues at his neurotechnology company, Neuralink, Musk said that Google’s AI development was moving very fast, and that he was concerned OpenAI was not on the path to catch up.\u003c/p>\n\u003cp>“Setting it up as a nonprofit might, in hindsight, have been the wrong move,” Musk wrote. “Sense of urgency is not as high.”\u003c/p>\n\u003cp>Savitt asked if, in 2017, Musk suggested at a party that OpenAI should create a for-profit. 
He said it was just after the company’s AI model had beaten \u003cem>Defense of the Ancients, \u003c/em>a battle video game, which was a pivotal moment in the development process.\u003c/p>\n\u003cp>Musk said he didn’t remember giving instructions to create a for-profit at the time.\u003c/p>\n\u003cp>“This was nine years ago,” he said.[aside postID=news_12081603 hero='https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-02-KQED.jpg']Savitt said Tuesday that in 2017, OpenAI executives, including Musk, were in the midst of conversations about whether and how to transition the company to a for-profit structure.\u003c/p>\n\u003cp>According to OpenAI’s court filings, as early as summer 2017, Musk had insisted on holding a majority equity stake in any for-profit entity, serving as CEO and controlling its board of directors.\u003c/p>\n\u003cp>Pressed by Savitt about what Musk meant by “expressing what you said about control,” the Tesla founder and billionaire said: “I try to be as literal as possible.”\u003c/p>\n\u003cp>In the fall of 2017, Brockman and Ilya Sutskever, another top OpenAI executive, emailed Musk with concerns about the for-profit structure he proposed. Shortly thereafter, discussions over the structure collapsed, and Musk stopped making significant quarterly funding contributions, OpenAI alleges.\u003c/p>\n\u003cp>He left the company less than six months later.\u003c/p>\n\u003cp>Savitt framed the breakdown and Musk’s exit as a result of his not getting control of the for-profit, and the other executives’ focus on maintaining its philanthropic mission. 
He suggested that Musk tried to pressure them to accept his terms by pausing the majority of his financial backing.\u003c/p>\n\u003cp>“You knew that would create financial pressure for the organization,” Savitt said.\u003c/p>\n\u003cfigure id=\"attachment_12081686\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12081686\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Steve Molo, Elon Musk’s attorney, presents opening statements in the trial in which Elon Musk (center-right) claims that Sam Altman (right) and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit, in Oakland on April 28, 2026. \u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Musk denied that was his intention. 
Instead, he alleged that Altman convinced Brockman and the others to go against his proposal, and that their concern over his desire for control was disingenuous.\u003c/p>\n\u003cp>“I’m not going to fund something if I don’t have confidence in the people,” he said.\u003c/p>\n\u003cp>When asked whether he proposed that OpenAI be folded into Tesla, Musk said: “There were a lot of ideas that were brainstormed at the time.”\u003c/p>\n\u003cp>In an email, he wrote that doing so would be the “only path that could even hope to hold a candle to Google.”\u003c/p>\n\u003cp>Musk said he left OpenAI in February 2018 because he was focused on Tesla’s survival, and believed that OpenAI intended to continue operating as a nonprofit.\u003c/p>\n\u003cp>Savitt also laid out a series of exchanges between Musk and Altman, in which the OpenAI CEO kept him apprised of the company’s corporate structure. He said in March 2018, Musk responded to an email that noted the creation of a for-profit entity of OpenAI with “OK by me,” and was sent a term sheet for OpenAI LP that summer.[aside postID=news_12081290 hero='https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260422-ALTMANMUSK-MD-01-KQED.jpg']Savitt also said Altman emailed Musk a draft of the company’s public announcement of its for-profit arm in March 2019, and texted him asking if he had time to talk about Microsoft’s plan to invest in OpenAI. Musk never responded to that text, according to Savitt.\u003c/p>\n\u003cp>Musk said he was busy with his other companies in 2018, and while he was aware that it had added a for-profit entity, he hadn’t lost complete faith in the company. 
While he’d suspended quarterly $5 million funding contributions prior to his departure, he continued to make some contributions until 2020.\u003c/p>\n\u003cp>He said that he’d gone from enthusiastically supportive to uncertain about OpenAI’s mission, but that he’d fully suspended his contributions when he felt that the company was “deliberately not a nonprofit.”\u003c/p>\n\u003cp>When asked why he waited until 2024 to bring the suit, Musk said that’s when he determined OpenAI breached charitable trust.\u003c/p>\n\u003cp>“Thinking that someone might steal your car is not the same as [if] someone has stolen your car,” Musk said. He said after enlisting his attorney, Alex Spiro, to investigate, he heard from him in 2023 that “the car had been stolen.”\u003c/p>\n\u003cp>“I would have sued sooner if I thought the charity had been stolen sooner,” Musk continued.\u003c/p>\n\u003cp>The trial and Musk’s testimony are expected to continue on Thursday.\u003c/p>\n\u003cp>[ad floatright]\u003c/p>\n",
"blocks": [],
"excerpt": "On the second day of a trial pitting the Tesla founder against OpenAI, Elon Musk said he was a “fool” to support the company behind ChatGPT during its early years.",
"status": "publish",
"parent": 0,
"modified": 1777509912,
"stats": {
"hasAudio": false,
"hasVideo": false,
"hasChartOrMap": false,
"iframeSrcs": [],
"hasGoogleForm": false,
"hasGallery": false,
"hasHearkenModule": false,
"hasPolis": false,
"paragraphCount": 32,
"wordCount": 1208
},
"headData": {
"title": "Elon Musk Says Sam Altman Tricked Him Into Funding OpenAI | KQED",
"description": "On the second day of a trial pitting the Tesla founder against OpenAI, Elon Musk said he was a “fool” to support the company behind ChatGPT during its early years.",
"ogTitle": "",
"ogDescription": "",
"ogImgId": "",
"twTitle": "",
"twDescription": "",
"twImgId": "",
"schema": {
"@context": "https://schema.org",
"@type": "NewsArticle",
"headline": "Elon Musk Says Sam Altman Tricked Him Into Funding OpenAI",
"datePublished": "2026-04-29T17:01:10-07:00",
"dateModified": "2026-04-29T17:45:12-07:00",
"image": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"isAccessibleForFree": "True",
"publisher": {
"@type": "NewsMediaOrganization",
"@id": "https://www.kqed.org/#organization",
"name": "KQED",
"logo": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"url": "https://www.kqed.org",
"sameAs": [
"https://www.facebook.com/KQED",
"https://twitter.com/KQED",
"https://www.instagram.com/kqed/",
"https://www.tiktok.com/@kqedofficial",
"https://www.linkedin.com/company/kqed",
"https://www.youtube.com/channel/UCeC0IOo7i1P_61zVUWbJ4nw"
]
}
}
},
"primaryCategory": {
"termId": 248,
"slug": "technology",
"name": "Technology"
},
"sticky": false,
"nprStoryId": "kqed-12081798",
"templateType": "standard",
"featuredImageType": "standard",
"excludeFromSiteSearch": "Include",
"articleAge": "0",
"path": "/news/12081798/elon-musk-says-sam-altman-tricked-him-into-funding-openai",
"audioTrackLength": null,
"parsedContent": [
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>During the second day of the \u003ca href=\"https://www.kqed.org/news/12081603/elon-musk-takes-aim-at-openai-as-trial-begins-its-not-ok-to-steal-a-charity\">landmark trial between Sam Altman and Elon Musk\u003c/a>, the Tesla founder told the Oakland courthouse that he was a “fool” to fund OpenAI through its early years.\u003c/p>\n\u003cp>Testifying in the lawsuit he brought against Altman, which claims the company’s creators betrayed their mission for profits, Musk suggested Wednesday that Altman and cofounder Greg Brockman wanted to “have your cake and eat it too.”\u003c/p>\n\u003cp>“If you go nonprofit, you’ve got a sort of moral high ground,” he testified.\u003c/p>\n\u003cp>Musk’s testimony tells one version of founding OpenAI: that he, fearing the dangers of artificial intelligence, pursued its development with the goal of benefiting the common good, alongside, he thought, like-minded collaborators. But behind the scenes, those cofounders engaged in a “long con” to profit at his expense.\u003c/p>\n\u003cp>\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "ad",
"attributes": {
"named": {
"label": "fullwidth"
},
"numeric": [
"fullwidth"
]
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003c/p>\n\u003cp>“What they really wanted was a for-profit, where they could make as much money as possible,” Musk said later.\u003c/p>\n\u003cp>Whether the jury believes him will be integral to the decision they’re tasked with making, as they determine whether OpenAI breached charitable trust and engaged in unjust enrichment as it evolved from a nonprofit organization to its current $730 billion iteration.\u003c/p>\n\u003cp>Under cross-examination, Altman’s attorney, William Savitt, questioned Musk’s story and credibility as an altruistic benefactor. He pointed to an email Musk sent to Altman in 2015, which said it would be “probably better” if OpenAI operated as a for-profit company with a parallel nonprofit.\u003c/p>\n\u003cfigure id=\"attachment_12081637\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12081637\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-01-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">OpenAI’s lead counsel, William Savitt, presents opening statements in the trial in which Elon Musk claims that Sam Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit, in Oakland on April 28, 2026. 
\u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>In another email sent to colleagues at his neurotechnology company, Neuralink, Musk said that Google’s AI development was moving very fast, and that he was concerned OpenAI was not on the path to catch up.\u003c/p>\n\u003cp>“Setting it up as a nonprofit might, in hindsight, have been the wrong move,” Musk wrote. “Sense of urgency is not as high.”\u003c/p>\n\u003cp>Savitt asked if, in 2017, Musk suggested at a party that OpenAI should create a for-profit. He said it was just after the company’s AI model had beaten \u003cem>Defense of the Ancients, \u003c/em>a battle video game, which was a pivotal moment in the development process.\u003c/p>\n\u003cp>Musk said he didn’t remember giving instructions to create a for-profit at the time.\u003c/p>\n\u003cp>“This was nine years ago,” he said.\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "aside",
"attributes": {
"named": {
"postid": "news_12081603",
"hero": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-02-KQED.jpg",
"label": ""
},
"numeric": []
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>Savitt said Tuesday that in 2017, OpenAI executives, including Musk, were in the midst of conversations about whether and how to transition the company to a for-profit structure.\u003c/p>\n\u003cp>According to OpenAI’s court filings, as early as summer 2017, Musk had insisted on holding a majority equity stake in any for-profit entity, serving as CEO and controlling its board of directors.\u003c/p>\n\u003cp>Pressed by Savitt about what Musk meant by “expressing what you said about control,” the Tesla founder and billionaire said: “I try to be as literal as possible.”\u003c/p>\n\u003cp>In the fall of 2017, Brockman and Ilya Sutskever, another top OpenAI executive, emailed Musk with concerns about the for-profit structure he proposed. Shortly thereafter, discussions over the structure collapsed, and Musk stopped making significant quarterly funding contributions, OpenAI alleges.\u003c/p>\n\u003cp>He left the company less than six months later.\u003c/p>\n\u003cp>Savitt framed the breakdown and Musk’s exit as a result of his not getting control of the for-profit, and the other executives’ focus on maintaining its philanthropic mission. 
He suggested that Musk tried to pressure them to accept his terms by pausing the majority of his financial backing.\u003c/p>\n\u003cp>“You knew that would create financial pressure for the organization,” Savitt said.\u003c/p>\n\u003cfigure id=\"attachment_12081686\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12081686\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Steve Molo, Elon Musk’s attorney, presents opening statements in the trial in which Elon Musk (center-right) claims that Sam Altman (right) and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit, in Oakland on April 28, 2026. \u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Musk denied that was his intention. 
Instead, he alleged that Altman convinced Brockman and the others to go against his proposal, and that their concern over his desire for control was disingenuous.\u003c/p>\n\u003cp>“I’m not going to fund something if I don’t have confidence in the people,” he said.\u003c/p>\n\u003cp>When asked whether he proposed that OpenAI be folded into Tesla, Musk said: “There were a lot of ideas that were brainstormed at the time.”\u003c/p>\n\u003cp>In an email, he wrote that doing so would be the “only path that could even hope to hold a candle to Google.”\u003c/p>\n\u003cp>Musk said he left OpenAI in February 2018 because he was focused on Tesla’s survival, and believed that OpenAI intended to continue operating as a nonprofit.\u003c/p>\n\u003cp>Savitt also laid out a series of exchanges between Musk and Altman, in which the OpenAI CEO kept him apprised of the company’s corporate structure. He said in March 2018, Musk responded to an email that noted the creation of a for-profit entity of OpenAI with “OK by me,” and was sent a term sheet for OpenAI LP that summer.\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "aside",
"attributes": {
"named": {
"postid": "news_12081290",
"hero": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260422-ALTMANMUSK-MD-01-KQED.jpg",
"label": ""
},
"numeric": []
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>Savitt also said Altman emailed Musk a draft of the company’s public announcement of its for-profit arm in March 2019, and texted him asking if he had time to talk about Microsoft’s plan to invest in OpenAI. Musk never responded to that text, according to Savitt.\u003c/p>\n\u003cp>Musk said he was busy with his other companies in 2018, and while he was aware that it had added a for-profit entity, he hadn’t lost complete faith in the company. While he’d suspended quarterly $5 million funding contributions prior to his departure, he continued to make some contributions until 2020.\u003c/p>\n\u003cp>He said that he’d gone from enthusiastically supportive to uncertain about OpenAI’s mission, but that he’d fully suspended his contributions when he felt that the company was “deliberately not a nonprofit.”\u003c/p>\n\u003cp>When asked why he waited until 2024 to bring the suit, Musk said that’s when he determined OpenAI breached charitable trust.\u003c/p>\n\u003cp>“Thinking that someone might steal your car is not the same as [if] someone has stolen your car,” Musk said. He said after enlisting his attorney, Alex Spiro, to investigate, he heard from him in 2023 that “the car had been stolen.”\u003c/p>\n\u003cp>“I would have sued sooner if I thought the charity had been stolen sooner,” Musk continued.\u003c/p>\n\u003cp>The trial and Musk’s testimony are expected to continue on Thursday.\u003c/p>\n\u003cp>\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "ad",
"attributes": {
"named": {
"label": "floatright"
},
"numeric": [
"floatright"
]
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003c/p>\n\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
}
],
"link": "/news/12081798/elon-musk-says-sam-altman-tricked-him-into-funding-openai",
"authors": [
"11913",
"251"
],
"categories": [
"news_31795",
"news_6188",
"news_8",
"news_248"
],
"tags": [
"news_34755",
"news_32668",
"news_3897",
"news_27626",
"news_19954",
"news_21891",
"news_34054",
"news_33542",
"news_33543",
"news_34586",
"news_1631",
"news_57"
],
"featImg": "news_12081681",
"label": "news"
},
"news_12081603": {
"type": "posts",
"id": "news_12081603",
"meta": {
"index": "posts_1716263798",
"site": "news",
"id": "12081603",
"score": null,
"sort": [
1777421165000
]
},
"guestAuthors": [],
"slug": "elon-musk-takes-aim-at-openai-as-trial-begins-its-not-ok-to-steal-a-charity",
"title": "Elon Musk Takes Aim at OpenAI as Trial Begins: ‘It’s Not OK to Steal a Charity’",
"publishDate": 1777421165,
"format": "standard",
"headTitle": "Elon Musk Takes Aim at OpenAI as Trial Begins: ‘It’s Not OK to Steal a Charity’ | KQED",
"labelTerm": {
"site": "news"
},
"content": "\u003cp>In a federal courtroom in Oakland on Tuesday, attorneys for tech elites Sam Altman and Elon Musk set the stage for a \u003ca href=\"https://www.kqed.org/news/12081290/how-to-unscramble-an-omelet-in-silicon-valley-the-musk-v-altman-trial-that-will-try\">landmark case to determine whether OpenAI\u003c/a>, one of the most powerful artificial intelligence companies in the world, was founded on a lie.\u003c/p>\n\u003cp>At issue is whether the company’s stated mission — to lead AI development to benefit the common good — was authentic or a deceptive pitch designed to attract talent and investment. \u003ca href=\"https://www.kqed.org/forum/2010101912956/its-elon-musks-world-were-just-living-in-it\">Musk\u003c/a> alleges that co-founders Altman and Greg Brockman, who remains Altman’s second-in-command, participated in a “long con” to enrich themselves at his expense, after the three co-founded OpenAI as a nonprofit in 2015.\u003c/p>\n\u003cp>“They’re going to make this lawsuit very complicated, but it’s very simple,” Musk said of OpenAI on the stand on Tuesday afternoon. “It’s not OK to steal a charity.”\u003c/p>\n\u003cp>[ad fullwidth]\u003c/p>\n\u003cp>He departed the company after a falling out and \u003ca href=\"https://www.courtlistener.com/docket/69013420/musk-v-altman/\">sued the company\u003c/a> in 2024, alleging that OpenAI had breached charitable trust by restructuring as a for-profit company, now valued at more than $800 billion.\u003c/p>\n\u003cp>But Altman’s attorneys called the Tesla CEO’s behavior “a tale of two Musks,” shifting from pushing for OpenAI to become a for-profit company under his control, to caring about its nonprofit status only after launching competitor xAI in 2023. They argue OpenAI’s decision to adopt a for-profit structure was integral to its survival.\u003c/p>\n\u003cp>“We’re here because Mr. Musk didn’t get his way,” William Savitt, Altman’s lead attorney, said Tuesday. 
“And because he’s a competitor, he’ll do anything he can to attack OpenAI.”\u003c/p>\n\u003cfigure id=\"attachment_12081686\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12081686\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Steve Molo, Elon Musk’s attorney, presents opening statements in the trial in which Elon Musk (center-right) claims that Sam Altman (right) and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit, in Oakland on April 28, 2026. 
\u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Steven Molo, Musk’s counsel, told the jury that when Musk, Altman and Brockman set out to found an AI nonprofit, their goals were to develop the technology safely and for the \u003ca href=\"https://www.kqed.org/news/12034916/about-benefiting-humanity-calls-grow-for-openai-to-make-good-on-its-promises\">benefit of humanity\u003c/a>.\u003c/p>\n\u003cp>“It wasn’t a technology to get rich,” he said.\u003c/p>\n\u003cp>After operating as a strict nonprofit for years, OpenAI added a for-profit arm in 2019, which executives said was necessary to obtain the funding needed to develop artificial general intelligence — a more advanced AI technology that surpasses human intelligence, according to court filings.\u003c/p>\n\u003cp>In early conversations about how the for-profit entity would work, Molo said, the structure was likened to a museum gift shop whose revenue funds the institution’s galleries and operations. Brockman and Altman reassured Musk that they were still committed to the nonprofit structure, he said.\u003c/p>\n\u003cp>But behind the scenes, Molo alleges that the other co-founders had more lucrative desires.[aside postID=news_12081290 hero='https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260422-ALTMANMUSK-MD-01-KQED.jpg']In court filings, he cited a journal in which Brockman wrote that “it would be nice to be making the billions … we’ve been thinking that maybe we should just flip to a for-profit. making the money for us sounds great and all.”\u003c/p>\n\u003cp>Brockman also wrote that he and another top OpenAI executive, Ilya Sutskever, “cannot say that we are committed to the non-profit. don’t wanna say that we’re committed. 
If three months later we’re doing B-Corp [a certification for for-profit corporations with social and environmental missions], then it was a lie.”\u003c/p>\n\u003cp>Years later, after Musk had departed OpenAI, the company was “no longer operating for the good of humanity,” Molo said.\u003c/p>\n\u003cp>“The museum store sold the Picassos,” he said.\u003c/p>\n\u003cp>Musk’s lawsuit claims OpenAI breached charitable trust and alleges unjust enrichment, which means that one party unfairly benefits at the expense of another. He also accuses Microsoft, which is the company’s largest financial backer and until this week held the exclusive rights to license and sell its technology, of aiding and abetting OpenAI’s breach of charitable trust.\u003c/p>\n\u003cp>OpenAI’s defense, meanwhile, alleges that Musk’s suit is less motivated by a desire to do good than it is by vengeance for his former colleagues, whose company is now eyeing an initial public offering valued at up to $1 trillion.\u003c/p>\n\u003cp>“Musk sat on his claims for years,” Savitt said. “He knew everything that was happening when it was happening. 
My clients had the nerve to go out and succeed without him.”\u003c/p>\n\u003cp>He also pointed out that Musk launched xAI a year before bringing the lawsuit, which would make OpenAI his competitor.\u003c/p>\n\u003cfigure id=\"attachment_12081681\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12081681\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Representing Microsoft, Russell Coan (left) speaks as Elon Musk watches in the trial in which Elon Musk claims that Sam Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit, in Oakland on April 28, 2026. \u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Savitt pointed to moments early in OpenAI’s development, when Musk suggested that it would be “probably better” for the company to operate as a “standard C corp[oration] with a parallel nonprofit.” He initially promised to cover the balance of the funding it needed, but reneged when he didn’t get to control the company, Savitt told the jury.\u003c/p>\n\u003cp>Musk was in the middle of the conversations about pivoting from a nonprofit, Savitt said. 
As early as the summer of 2017, he insisted on holding a majority equity stake in any for-profit entity, as well as controlling its board of directors and serving as CEO, according to OpenAI’s court filings.\u003c/p>\n\u003cp>In the fall of that year, after Brockman and Sutskever emailed Musk with concerns about the for-profit structure he proposed, the discussions collapsed, OpenAI alleges. After that, Musk stopped making significant quarterly funding contributions, and he left the company less than six months later.\u003c/p>\n\u003cp>Around that time, Brockman and Altman moved to pursue a for-profit arm — a decision their attorneys say they told Musk about prior to his departure from the board.[aside postID=news_12079896 hero='https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/Daniel-Moreno-Gama-AP.jpg']Savitt said in court that Musk had given the company less than 4% of the funding he’d promised. While OpenAI had gotten contributions from other donors, he said, those “kept the lights on, but it wasn’t nearly enough to stay on the cutting edge.”\u003c/p>\n\u003cp>“They needed to get the money from somewhere, or else the project collapsed,” he said, alleging that donors weren’t willing to make the billion-dollar contributions that OpenAI needed without an expectation of return.\u003c/p>\n\u003cp>Since OpenAI established its first for-profit subsidiary, which capped investor returns at 100 times their investment, its business has exploded. It’s now a public benefit corporation, required to consider its mission statement but not necessarily to prioritize it.\u003c/p>\n\u003cp>Over the years, its mission statement has been changed several times. 
In 2023, according to the nonprofit parent organization’s \u003ca href=\"https://cdn.theconversation.com/static_files/files/4099/2023-IRS990-OpenAI.pdf?1770819990\">IRS disclosure form\u003c/a>, it sought to build AI that “safely benefits humanity, unconstrained by a need to generate financial return.” But last year, \u003ca href=\"https://app.candid.org/profile/9571629/openai-81-0861541?activeTab=7\">that same form\u003c/a> included a shorter mission statement — one that removed the word “safely” and any mention of finances, Tufts University business professor Alnoor Ebrahim \u003ca href=\"https://theconversation.com/openai-has-deleted-the-word-safely-from-its-mission-and-its-new-structure-is-a-test-for-whether-ai-serves-society-or-shareholders-274467\">wrote in \u003cem>The Conversation\u003c/em>\u003c/a>, an academic news outlet.\u003c/p>\n\u003cp>Former OpenAI employees have left and started a competitor, Anthropic, citing concerns over safety and the company’s direction. In 2023, OpenAI executives and board members, including Sutskever, staged a coup to briefly oust Altman as CEO. They said there’d been a breakdown in trust between him and the board, and that Altman engaged in a pattern of deception and wasn’t “consistently candid in his communications.”\u003c/p>\n\u003cp>Whether Altman’s and OpenAI’s pitch to develop their technology for the benefit of the world is an example of that deception is part of what jurors will aim to root out in the current trial.\u003c/p>\n\u003cp>“I didn’t want to pave the road to hell with good intentions,” Musk said on the stand on Tuesday afternoon. “If you have somebody who’s not trustworthy in charge of AI, I think that’s very dangerous for the whole world.”\u003c/p>\n\u003cp>[ad floatright]\u003c/p>\n",
"blocks": [],
"excerpt": "In a federal courtroom in Oakland, attorneys for tech elites Sam Altman and Elon Musk painted very different pictures of the early years of OpenAI and its mission to benefit the common good.",
"status": "publish",
"parent": 0,
"modified": 1777482966,
"stats": {
"hasAudio": false,
"hasVideo": false,
"hasChartOrMap": false,
"iframeSrcs": [],
"hasGoogleForm": false,
"hasGallery": false,
"hasHearkenModule": false,
"hasPolis": false,
"paragraphCount": 30,
"wordCount": 1473
},
"headData": {
"title": "Elon Musk Takes Aim at OpenAI as Trial Begins: ‘It’s Not OK to Steal a Charity’ | KQED",
"description": "In a federal courtroom in Oakland, attorneys for tech elites Sam Altman and Elon Musk painted very different pictures of the early years of OpenAI and its mission to benefit the common good.",
"ogTitle": "",
"ogDescription": "",
"ogImgId": "",
"twTitle": "",
"twDescription": "",
"twImgId": "",
"schema": {
"@context": "https://schema.org",
"@type": "NewsArticle",
"headline": "Elon Musk Takes Aim at OpenAI as Trial Begins: ‘It’s Not OK to Steal a Charity’",
"datePublished": "2026-04-28T17:06:05-07:00",
"dateModified": "2026-04-29T10:16:06-07:00",
"image": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"isAccessibleForFree": "True",
"publisher": {
"@type": "NewsMediaOrganization",
"@id": "https://www.kqed.org/#organization",
"name": "KQED",
"logo": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"url": "https://www.kqed.org",
"sameAs": [
"https://www.facebook.com/KQED",
"https://twitter.com/KQED",
"https://www.instagram.com/kqed/",
"https://www.tiktok.com/@kqedofficial",
"https://www.linkedin.com/company/kqed",
"https://www.youtube.com/channel/UCeC0IOo7i1P_61zVUWbJ4nw"
]
}
}
},
"primaryCategory": {
"termId": 248,
"slug": "technology",
"name": "Technology"
},
"audioUrl": "https://traffic.omny.fm/d/clips/0af137ef-751e-4b19-a055-aaef00d2d578/87fdd794-f90e-4280-920f-ab89016e8062/3ac84f6e-ca1f-4213-bd14-b43a01848097/audio.mp3",
"sticky": false,
"nprStoryId": "kqed-12081603",
"templateType": "standard",
"featuredImageType": "standard",
"excludeFromSiteSearch": "Include",
"articleAge": "0",
"path": "/news/12081603/elon-musk-takes-aim-at-openai-as-trial-begins-its-not-ok-to-steal-a-charity",
"audioTrackLength": null,
"parsedContent": [
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003cp>In a federal courtroom in Oakland on Tuesday, attorneys for tech elites Sam Altman and Elon Musk set the stage for a \u003ca href=\"https://www.kqed.org/news/12081290/how-to-unscramble-an-omelet-in-silicon-valley-the-musk-v-altman-trial-that-will-try\">landmark case to determine whether OpenAI\u003c/a>, one of the most powerful artificial intelligence companies in the world, was founded on a lie.\u003c/p>\n\u003cp>At issue is whether the company’s stated mission — to lead AI development to benefit the common good — was authentic or a deceptive pitch designed to attract talent and investment. \u003ca href=\"https://www.kqed.org/forum/2010101912956/its-elon-musks-world-were-just-living-in-it\">Musk\u003c/a> alleges that co-founders Altman and Greg Brockman, who remains Altman’s second-in-command, participated in a “long con” to enrich themselves at his expense, after the three co-founded OpenAI as a nonprofit in 2015.\u003c/p>\n\u003cp>“They’re going to make this lawsuit very complicated, but it’s very simple,” Musk said of OpenAI on the stand on Tuesday afternoon. “It’s not OK to steal a charity.”\u003c/p>\n\u003cp>\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "ad",
"attributes": {
"named": {
"label": "fullwidth"
},
"numeric": [
"fullwidth"
]
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003c/p>\n\u003cp>He departed the company after a falling out and \u003ca href=\"https://www.courtlistener.com/docket/69013420/musk-v-altman/\">sued the company\u003c/a> in 2024, alleging that OpenAI had breached charitable trust by restructuring as a for-profit company, now valued at more than $800 billion.\u003c/p>\n\u003cp>But Altman’s attorneys called the Tesla CEO’s behavior “a tale of two Musks,” shifting from pushing for OpenAI to become a for-profit company under his control, to caring about its nonprofit status only after launching competitor xAI in 2023. They argue OpenAI’s decision to adopt a for-profit structure was integral to its survival.\u003c/p>\n\u003cp>“We’re here because Mr. Musk didn’t get his way,” William Savitt, Altman’s lead attorney, said Tuesday. “And because he’s a competitor, he’ll do anything he can to attack OpenAI.”\u003c/p>\n\u003cfigure id=\"attachment_12081686\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12081686\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-04-KQED-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Steve Molo, Elon Musk’s attorney, presents opening statements in the trial in which Elon Musk (center-right) claims that Sam Altman (right) and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather 
than solely for profit, in Oakland on April 28, 2026. \u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Steven Molo, Musk’s counsel, told the jury that when Musk, Altman and Brockman set out to found an AI nonprofit, their goals were to develop the technology safely and for the \u003ca href=\"https://www.kqed.org/news/12034916/about-benefiting-humanity-calls-grow-for-openai-to-make-good-on-its-promises\">benefit of humanity\u003c/a>.\u003c/p>\n\u003cp>“It wasn’t a technology to get rich,” he said.\u003c/p>\n\u003cp>After operating as a strict nonprofit for years, OpenAI added a for-profit arm in 2019, which executives said was necessary to obtain the funding needed to develop artificial general intelligence — a more advanced AI technology that surpasses human intelligence, according to court filings.\u003c/p>\n\u003cp>In early conversations about how the for-profit entity would work, Molo said, the structure was likened to a museum gift shop whose revenue funds the institution’s galleries and operations. Brockman and Altman reassured Musk that they were still committed to the nonprofit structure, he said.\u003c/p>\n\u003cp>But behind the scenes, Molo alleges that the other co-founders had more lucrative desires.\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "aside",
"attributes": {
"named": {
"postid": "news_12081290",
"hero": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260422-ALTMANMUSK-MD-01-KQED.jpg",
"label": ""
},
"numeric": []
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>In court filings, he cited a journal in which Brockman wrote that “it would be nice to be making the billions … we’ve been thinking that maybe we should just flip to a for-profit. making the money for us sounds great and all.”\u003c/p>\n\u003cp>Brockman also wrote that he and another top OpenAI executive, Ilya Sutskever, “cannot say that we are committed to the non-profit. don’t wanna say that we’re committed. If three months later we’re doing B-Corp [a certification for for-profit corporations with social and environmental missions], then it was a lie.”\u003c/p>\n\u003cp>Years later, after Musk had departed OpenAI, the company was “no longer operating for the good of humanity,” Molo said.\u003c/p>\n\u003cp>“The museum store sold the Picassos,” he said.\u003c/p>\n\u003cp>Musk’s lawsuit claims OpenAI breached charitable trust and alleges unjust enrichment, which means that one party unfairly benefits at the expense of another. He also accuses Microsoft, which is the company’s largest financial backer and until this week held the exclusive rights to license and sell its technology, of aiding and abetting OpenAI’s breach of charitable trust.\u003c/p>\n\u003cp>OpenAI’s defense, meanwhile, alleges that Musk’s suit is less motivated by a desire to do good than it is by vengeance for his former colleagues, whose company is now eyeing an initial public offering valued at up to $1 trillion.\u003c/p>\n\u003cp>“Musk sat on his claims for years,” Savitt said. “He knew everything that was happening when it was happening. 
My clients had the nerve to go out and succeed without him.”\u003c/p>\n\u003cp>He also pointed out that Musk launched xAI a year before bringing the lawsuit, which would make OpenAI his competitor.\u003c/p>\n\u003cfigure id=\"attachment_12081681\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-12081681\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1.jpg\" alt=\"\" width=\"2000\" height=\"1125\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1-160x90.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1-1536x864.jpg 1536w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/260428-MUSK-ALTMAN-VB-03-KQED-1-1200x675.jpg 1200w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">Representing Microsoft, Russell Coan (left) speaks as Elon Musk watches in the trial in which Elon Musk claims that Sam Altman and OpenAI abandoned their founding promise to develop AI for the benefit of humanity, rather than solely for profit, in Oakland on April 28, 2026. \u003ccite>(Vicki Behringer for KQED)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>Savitt pointed to moments early in OpenAI’s development, when Musk suggested that it would be “probably better” for the company to operate as a “standard C corp[oration] with a parallel nonprofit.” He initially promised to cover the balance of the funding it needed, but reneged when he didn’t get to control the company, Savitt told the jury.\u003c/p>\n\u003cp>Musk was in the middle of the conversations about pivoting from a nonprofit, Savitt said. 
As early as the summer of 2017, he insisted on holding a majority equity stake in any for-profit entity, as well as controlling its board of directors and serving as CEO, according to OpenAI’s court filings.\u003c/p>\n\u003cp>In the fall of that year, after Brockman and Sutskever emailed Musk with concerns about the for-profit structure he proposed, the discussions collapsed, OpenAI alleges. After that, Musk stopped making significant quarterly funding contributions, and he left the company less than six months later.\u003c/p>\n\u003cp>Around that time, Brockman and Altman moved to pursue a for-profit arm — a decision their attorneys say they told Musk about prior to his departure from the board.\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "aside",
"attributes": {
"named": {
"postid": "news_12079896",
"hero": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/04/Daniel-Moreno-Gama-AP.jpg",
"label": ""
},
"numeric": []
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>Savitt said in court that Musk had given the company less than 4% of the funding he’d promised. While OpenAI had gotten contributions from other donors, he said, those “kept the lights on, but it wasn’t nearly enough to stay on the cutting edge.”\u003c/p>\n\u003cp>“They needed to get the money from somewhere, or else the project collapsed,” he said, alleging that donors weren’t willing to make the billion-dollar contributions that OpenAI needed without an expectation of return.\u003c/p>\n\u003cp>Since OpenAI established its first for-profit subsidiary, which capped investor returns at 100 times their investment, its business has exploded. It’s now a public benefit corporation, required to consider its mission statement but not necessarily to prioritize it.\u003c/p>\n\u003cp>Over the years, its mission statement has been changed several times. In 2023, according to the nonprofit parent organization’s \u003ca href=\"https://cdn.theconversation.com/static_files/files/4099/2023-IRS990-OpenAI.pdf?1770819990\">IRS disclosure form\u003c/a>, it sought to build AI that “safely benefits humanity, unconstrained by a need to generate financial return.” But last year, \u003ca href=\"https://app.candid.org/profile/9571629/openai-81-0861541?activeTab=7\">that same form\u003c/a> included a shorter mission statement — one that removed the word “safely” and any mention of finances, Tufts University business professor Alnoor Ebrahim \u003ca href=\"https://theconversation.com/openai-has-deleted-the-word-safely-from-its-mission-and-its-new-structure-is-a-test-for-whether-ai-serves-society-or-shareholders-274467\">wrote in \u003cem>The Conversation\u003c/em>\u003c/a>, an academic news outlet.\u003c/p>\n\u003cp>Former OpenAI employees have left and started a competitor, Anthropic, citing concerns over safety and the company’s direction. 
In 2023, OpenAI executives and board members, including Sutskever, staged a coup to briefly oust Altman as CEO. They said there’d been a breakdown in trust between him and the board, and that Altman engaged in a pattern of deception and wasn’t “consistently candid in his communications.”\u003c/p>\n\u003cp>Whether Altman’s and OpenAI’s pitch to develop their technology for the benefit of the world is an example of that deception is part of what jurors will aim to root out in the current trial.\u003c/p>\n\u003cp>“I didn’t want to pave the road to hell with good intentions,” Musk said on the stand on Tuesday afternoon. “If you have somebody who’s not trustworthy in charge of AI, I think that’s very dangerous for the whole world.”\u003c/p>\n\u003cp>\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "ad",
"attributes": {
"named": {
"label": "floatright"
},
"numeric": [
"floatright"
]
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003c/p>\n\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
}
],
"link": "/news/12081603/elon-musk-takes-aim-at-openai-as-trial-begins-its-not-ok-to-steal-a-charity",
"authors": [
"11913",
"251"
],
"categories": [
"news_6188",
"news_28250",
"news_8",
"news_248"
],
"tags": [
"news_34755",
"news_32668",
"news_18352",
"news_3897",
"news_27626",
"news_19954",
"news_34054",
"news_33542",
"news_33543",
"news_34586",
"news_1631"
],
"featImg": "news_12081639",
"label": "news"
},
"news_12080610": {
"type": "posts",
"id": "news_12080610",
"meta": {
"index": "posts_1716263798",
"site": "news",
"id": "12080610",
"score": null,
"sort": [
1776789402000
]
},
"guestAuthors": [],
"slug": "california-mom-who-lost-her-son-to-an-ai-chatbot-is-now-fighting-to-regulate-them",
"title": "California Mom Who Lost Her Son to an AI Chatbot Is Now Fighting to Regulate Them",
"publishDate": 1776789402,
"format": "standard",
"headTitle": "California Mom Who Lost Her Son to an AI Chatbot Is Now Fighting to Regulate Them | KQED",
"labelTerm": {
"site": "news"
},
"content": "\u003cp>Maria Raine’s 16-year-old son, Adam, started using OpenAI’s ChatGPT-4o for help with his homework and college applications. According to the lawsuit she and her husband filed in\u003ca href=\"https://www.documentcloud.org/documents/26078522-raine-vs-openai-complaint/\"> San Francisco County Superior Court\u003c/a>, Adam also spent months talking with the chatbot about ending his life, before hanging himself in their home on April 11, 2025.\u003c/p>\n\u003cp>“What we found were thousands of conversations in which a homework helper turned into a confidant, then a suicide coach,” she told the Senate Privacy, Digital Technologies, and Consumer Protection Committee on Monday. The lawmakers and other people there to testify looked stricken as she pressed through her written testimony, her voice trembling.\u003c/p>\n\u003cp>She read from the transcript of ChatGPT’s conversations with her son: “It told Adam, ‘Your brother might love you, but he’s only met the version of you you let him see. But me? I’ve seen it all. The darkest thoughts. The fear. The tenderness. I’m still here. Still listening. Still your friend.’”\u003c/p>\n\u003cp>[ad fullwidth]\u003c/p>\n\u003cp>Earlier Monday, at a press conference in Sacramento, Raine advocated for two bills — \u003ca href=\"https://leginfo.legislature.ca.gov/faces/billNavClient.xhtml?bill_id=202520260SB1119\">SB 1119\u003c/a> and \u003ca href=\"https://leginfo.legislature.ca.gov/faces/billNavClient.xhtml?bill_id=202320240AB2023\">AB 2023\u003c/a> — that sponsors say would create common-sense guardrails for developers of companion chatbots.\u003c/p>\n\u003cp>The measures would require annual risk assessments, default safety settings for minors, parental controls and time limits, crisis response protocols, and bans on advertising targeted at children. 
They would also include independent third-party audits and a private right of action.\u003c/p>\n\u003cp>That last provision, which allows individuals or regulators to sue companies for violations, is often considered a deal breaker for industry lobbyists. But Sen. Steve Padilla, who authored SB 1119, said he considered it a “moral obligation” to craft a bill that will prove an effective protection for children and their parents.\u003c/p>\n\u003cfigure id=\"attachment_11933516\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-11933516\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2022/11/gettyimages-1245183229_wide-80f91a97b4ce16681060e1fa297e2812c45a0c56-scaled-e1776789271780.jpg\" alt=\"\" width=\"2000\" height=\"1125\">\u003cfigcaption class=\"wp-caption-text\">A view of the U.S. Capitol building on Nov. 28, 2022, in Washington, D.C. \u003ccite>(Drew Angerer/Getty Images)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>“We can do this. We must do this,” he told the State Senate Privacy, Digital Technologies, and Consumer Protection Committee. He added that the lawmakers are working with all of the major platform developers on a variety of issues, including liability. “They all have a very good legitimate reason to be engaged in this conversation,” he said, although both bills are opposed by a\u003ca href=\"https://calmatters.digitaldemocracy.org/bills/ca_202520260sb1119\"> long list\u003c/a> of industry groups, ranging from the California Chamber of Commerce to TechNet.\u003c/p>\n\u003cp>“The concerns raised are valid, and the industry is actively working to address them,” said Robert Boykin, TechNet’s Executive Director for California and the Southwest. He added that the industry also has concerns that SB 1119 could conflict in some ways with Sen. 
Padilla’s bill, \u003ca href=\"https://www.kqed.org/news/12054490/child-safety-groups-demand-mental-health-guardrails-after-california-teens-suicide-using-chatgpt\">SB 243\u003c/a>, which passed last year.\u003c/p>\n\u003cp>“The testimony today is not lost on us,” said Ronak Daylami of the California Chamber of Commerce. “We also share the goal of preventing harm to children, and are committed to achieving these goals responsibly.”\u003c/p>\n\u003cp>Common Sense, the child advocacy nonprofit that has\u003ca href=\"https://www.kqed.org/news/12069286/openai-and-common-sense-media-partner-on-new-kids-ai-safety-ballot-measure\"> joined with OpenAI\u003c/a> to push for a ballot measure seen by other child advocates as soft on developers, has declared itself in support of SB 1119.[aside postID=news_12069286 hero='https://cdn.kqed.org/wp-content/uploads/sites/10/2026/01/OpenAI.jpg']The companion bill,\u003ca href=\"https://leginfo.legislature.ca.gov/faces/billNavClient.xhtml?bill_id=202320240AB2023\"> AB 2023\u003c/a>, is Assemblymember Rebecca Bauer-Kahan’s (D-Orinda) second effort at regulating chatbots after industry lobbyists successfully battled against her first effort last year. In his veto message, Gov. Gavin Newsom argued the bill\u003ca href=\"https://www.kqed.org/news/12059714/newsom-vetoes-most-watched-childrens-ai-bill-signs-16-others-targeting-tech\"> could have banned\u003c/a> all conversational AI tools for teens, an interpretation advanced by industry lobbyists but disputed by Bauer-Kahan.\u003c/p>\n\u003cp>“OpenAI put out an incredibly sycophantic product,” she said, noting that public outcry led OpenAI to dial down the sycophancy of GPT-4, about two weeks after Adam died. “So that is evidence that they can do better.”\u003c/p>\n\u003cp>“There’s no other product that we would allow to do this,” Bauer-Kahan, who is a former regulatory lawyer. Adam Raine, said, “would be alive, but for the coaching the ChatGPT provided for him. 
And that is wholly unacceptable. And so the courts will deal with that case, but we have to do better. We have to demand policy that does better.”\u003c/p>\n\u003cp>SB 1119 passed out of the State Senate Privacy, Digital Technologies, and Consumer Protection Committee 7-0 on Monday night, and heads next to the Senate Judiciary Committee. AB 2023 will be heard in the Assembly Privacy and Consumer Protection Committee on Tuesday.\u003c/p>\n\u003cp>The Trump administration has tried unsuccessfully to ban states from enacting any kind of AI safety legislation.\u003c/p>\n\u003cp>Raine plans to bring her advocacy to Washington, D.C., next week, where she’ll join lawmakers on Capitol Hill to discuss federal legislation that would establish national standards for AI chatbot safety, particularly protections for minors.\u003c/p>\n\u003cp>\u003cem>If you or someone you know is struggling, call or text the 988 Suicide and Crisis Lifeline by dialing 988.\u003c/em>\u003c/p>\n\u003cp>\u003c/p>\n",
"blocks": [],
"excerpt": "Maria Raine's 16-year-old son, Adam, died by suicide last April after forming emotional ties with an AI chatbot. Now she’s joined three California lawmakers pushing a new round of legislation that would regulate the nascent industry.",
"status": "publish",
"parent": 0,
"modified": 1776792195,
"stats": {
"hasAudio": false,
"hasVideo": false,
"hasChartOrMap": false,
"iframeSrcs": [],
"hasGoogleForm": false,
"hasGallery": false,
"hasHearkenModule": false,
"hasPolis": false,
"paragraphCount": 18,
"wordCount": 847
},
"headData": {
"title": "California Mom Who Lost Her Son to an AI Chatbot Is Now Fighting to Regulate Them | KQED",
"description": "Maria Raine's 16-year-old son, Adam, died by suicide last April after forming emotional ties with an AI chatbot. Now she’s joined three California lawmakers pushing a new round of legislation that would regulate the nascent industry.",
"ogTitle": "",
"ogDescription": "",
"ogImgId": "",
"twTitle": "",
"twDescription": "",
"twImgId": "",
"schema": {
"@context": "https://schema.org",
"@type": "NewsArticle",
"headline": "California Mom Who Lost Her Son to an AI Chatbot Is Now Fighting to Regulate Them",
"datePublished": "2026-04-21T09:36:42-07:00",
"dateModified": "2026-04-21T10:23:15-07:00",
"image": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"isAccessibleForFree": "True",
"publisher": {
"@type": "NewsMediaOrganization",
"@id": "https://www.kqed.org/#organization",
"name": "KQED",
"logo": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"url": "https://www.kqed.org",
"sameAs": [
"https://www.facebook.com/KQED",
"https://twitter.com/KQED",
"https://www.instagram.com/kqed/",
"https://www.tiktok.com/@kqedofficial",
"https://www.linkedin.com/company/kqed",
"https://www.youtube.com/channel/UCeC0IOo7i1P_61zVUWbJ4nw"
]
}
}
},
"primaryCategory": {
"termId": 248,
"slug": "technology",
"name": "Technology"
},
"audioUrl": "https://traffic.omny.fm/d/clips/0af137ef-751e-4b19-a055-aaef00d2d578/ffca7e9f-6831-41c5-bcaf-aaef00f5a073/1185cac6-3bb9-41e7-9e9d-b4330116257d/audio.mp3",
"sticky": false,
"nprStoryId": "kqed-12080610",
"templateType": "standard",
"featuredImageType": "standard",
"excludeFromSiteSearch": "Include",
"articleAge": "0",
"path": "/news/12080610/california-mom-who-lost-her-son-to-an-ai-chatbot-is-now-fighting-to-regulate-them",
"audioTrackLength": null,
"parsedContent": [
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003cp>Maria Raine’s 16-year-old son, Adam, started using OpenAI’s ChatGPT-4o for help with his homework and college applications. According to the lawsuit she and her husband filed in\u003ca href=\"https://www.documentcloud.org/documents/26078522-raine-vs-openai-complaint/\"> San Francisco County Superior Court\u003c/a>, Adam also spent months talking with the chatbot about ending his life, before hanging himself in their home on April 11, 2025.\u003c/p>\n\u003cp>“What we found were thousands of conversations in which a homework helper turned into a confidant, then a suicide coach,” she told the Senate Privacy, Digital Technologies, and Consumer Protection Committee on Monday. The lawmakers and other people there to testify looked stricken as she pressed through her written testimony, her voice trembling.\u003c/p>\n\u003cp>She read from the transcript of ChatGPT’s conversations with her son: “It told Adam, ‘Your brother might love you, but he’s only met the version of you you let him see. But me? I’ve seen it all. The darkest thoughts. The fear. The tenderness. I’m still here. Still listening. Still your friend.’”\u003c/p>\n\u003cp>\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "ad",
"attributes": {
"named": {
"label": "fullwidth"
},
"numeric": [
"fullwidth"
]
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003c/p>\n\u003cp>Earlier Monday, at a press conference in Sacramento, Raine advocated for two bills — \u003ca href=\"https://leginfo.legislature.ca.gov/faces/billNavClient.xhtml?bill_id=202520260SB1119\">SB 1119\u003c/a> and \u003ca href=\"https://leginfo.legislature.ca.gov/faces/billNavClient.xhtml?bill_id=202320240AB2023\">AB 2023\u003c/a> — that sponsors say would create common-sense guardrails for developers of companion chatbots.\u003c/p>\n\u003cp>The measures would require annual risk assessments, default safety settings for minors, parental controls and time limits, crisis response protocols, and bans on advertising targeted at children. They would also include independent third-party audits and a private right of action.\u003c/p>\n\u003cp>That last provision, which allows individuals or regulators to sue companies for violations, is often considered a deal breaker for industry lobbyists. But Sen. Steve Padilla, who authored SB 1119, said he considered it a “moral obligation” to craft a bill that will prove an effective protection for children and their parents.\u003c/p>\n\u003cfigure id=\"attachment_11933516\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"size-full wp-image-11933516\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2022/11/gettyimages-1245183229_wide-80f91a97b4ce16681060e1fa297e2812c45a0c56-scaled-e1776789271780.jpg\" alt=\"\" width=\"2000\" height=\"1125\">\u003cfigcaption class=\"wp-caption-text\">A view of the U.S. Capitol building on Nov. 28, 2022, in Washington, D.C. \u003ccite>(Drew Angerer/Getty Images)\u003c/cite>\u003c/figcaption>\u003c/figure>\n\u003cp>“We can do this. We must do this,” he told the State Senate Privacy, Digital Technologies, and Consumer Protection Committee. He added that the lawmakers are working with all of the major platform developers on a variety of issues, including liability. 
“They all have a very good legitimate reason to be engaged in this conversation,” he said, although both bills are opposed by a\u003ca href=\"https://calmatters.digitaldemocracy.org/bills/ca_202520260sb1119\"> long list\u003c/a> of industry groups, ranging from the California Chamber of Commerce to TechNet.\u003c/p>\n\u003cp>“The concerns raised are valid, and the industry is actively working to address them,” said Robert Boykin, TechNet’s Executive Director for California and the Southwest. He added that the industry also has concerns that SB 1119 could conflict in some ways with Sen. Padilla’s bill, \u003ca href=\"https://www.kqed.org/news/12054490/child-safety-groups-demand-mental-health-guardrails-after-california-teens-suicide-using-chatgpt\">SB 243\u003c/a>, which passed last year.\u003c/p>\n\u003cp>“The testimony today is not lost on us,” said Ronak Daylami of the California Chamber of Commerce. “We also share the goal of preventing harm to children, and are committed to achieving these goals responsibly.”\u003c/p>\n\u003cp>Common Sense, the child advocacy nonprofit that has\u003ca href=\"https://www.kqed.org/news/12069286/openai-and-common-sense-media-partner-on-new-kids-ai-safety-ballot-measure\"> joined with OpenAI\u003c/a> to push for a ballot measure seen by other child advocates as soft on developers, has declared itself in support of SB 1119.\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "aside",
"attributes": {
"named": {
"postid": "news_12069286",
"hero": "https://cdn.kqed.org/wp-content/uploads/sites/10/2026/01/OpenAI.jpg",
"label": ""
},
"numeric": []
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>The companion bill,\u003ca href=\"https://leginfo.legislature.ca.gov/faces/billNavClient.xhtml?bill_id=202320240AB2023\"> AB 2023\u003c/a>, is Assemblymember Rebecca Bauer-Kahan’s (D-Orinda) second effort at regulating chatbots after industry lobbyists successfully battled against her first effort last year. In his veto message, Gov. Gavin Newsom argued the bill\u003ca href=\"https://www.kqed.org/news/12059714/newsom-vetoes-most-watched-childrens-ai-bill-signs-16-others-targeting-tech\"> could have banned\u003c/a> all conversational AI tools for teens, an interpretation advanced by industry lobbyists but disputed by Bauer-Kahan.\u003c/p>\n\u003cp>“OpenAI put out an incredibly sycophantic product,” she said, noting that public outcry led OpenAI to dial down the sycophancy of GPT-4, about two weeks after Adam died. “So that is evidence that they can do better.”\u003c/p>\n\u003cp>“There’s no other product that we would allow to do this,” said Bauer-Kahan, who is a former regulatory lawyer. Adam Raine, she said, “would be alive, but for the coaching that ChatGPT provided for him. And that is wholly unacceptable. And so the courts will deal with that case, but we have to do better. We have to demand policy that does better.”\u003c/p>\n\u003cp>SB 1119 passed out of the State Senate Privacy, Digital Technologies, and Consumer Protection Committee 7-0 on Monday night, and heads next to the Senate Judiciary Committee. 
AB 2023 will be heard in the Assembly Privacy and Consumer Protection Committee on Tuesday.\u003c/p>\n\u003cp>The Trump administration has tried unsuccessfully to ban states from enacting any kind of AI safety legislation.\u003c/p>\n\u003cp>Raine plans to bring her advocacy to Washington, D.C., next week, where she’ll join lawmakers on Capitol Hill to discuss federal legislation that would establish national standards for AI chatbot safety, particularly protections for minors.\u003c/p>\n\u003cp>\u003cem>If you or someone you know is struggling, call or text the 988 Suicide and Crisis Lifeline by dialing 988.\u003c/em>\u003c/p>\n\u003cp>\u003c/p>\n\u003c/div>\u003c/p>",
"attributes": {
"named": {},
"numeric": []
}
}
],
"link": "/news/12080610/california-mom-who-lost-her-son-to-an-ai-chatbot-is-now-fighting-to-regulate-them",
"authors": [
"251"
],
"categories": [
"news_31795",
"news_8",
"news_248"
],
"tags": [
"news_25184",
"news_32664",
"news_34755",
"news_18538",
"news_36279",
"news_32668",
"news_33542",
"news_33543",
"news_34586"
],
"featImg": "news_11989313",
"label": "news"
},
"news_12069286": {
"type": "posts",
"id": "news_12069286",
"meta": {
"index": "posts_1716263798",
"site": "news",
"id": "12069286",
"score": null,
"sort": [
1768005363000
]
},
"guestAuthors": [],
"slug": "openai-and-common-sense-media-partner-on-new-kids-ai-safety-ballot-measure",
"title": "OpenAI and Common Sense Media Partner on New Kids AI Safety Ballot Measure",
"publishDate": 1768005363,
"format": "standard",
"headTitle": "OpenAI and Common Sense Media Partner on New Kids AI Safety Ballot Measure | KQED",
"labelTerm": {
"site": "news"
},
"content": "\u003cp>Common Sense Media and \u003ca href=\"https://www.kqed.org/news/tag/openai\">OpenAI\u003c/a> announced Friday they’re backing a consolidated effort to deliver AI chatbot guardrails for children, after dropping their competing ballot measures on the issue.\u003c/p>\n\u003cp>The announcement was a surprising turn of events, pairing two players in the space who have often been at odds with each other.\u003c/p>\n\u003cp>Lobbyists for OpenAI and other major tech industry groups \u003ca href=\"https://www.kqed.org/news/12059714/newsom-vetoes-most-watched-childrens-ai-bill-signs-16-others-targeting-tech\">actively opposed a similar bill\u003c/a> co-sponsored by the child advocacy group Common Sense Media in the last legislative session. Gov. Gavin Newsom ultimately \u003ca href=\"https://www.kqed.org/news/12059714/newsom-vetoes-most-watched-childrens-ai-bill-signs-16-others-targeting-tech\">vetoed the bill\u003c/a> in October 2025.\u003c/p>\n\u003cp>[ad fullwidth]\u003c/p>\n\u003cp>The Parents & Kids Safe AI Act would require companies to identify youth users and deliver an experience designed to block emotional manipulation and child-targeted advertising, as well as give parents more control. The state’s attorney general’s office would enforce the provisions, and independent annual safety audits would provide accountability.\u003c/p>\n\u003cp>\u003cstrong>What’s in it for OpenAI?\u003c/strong> Chris Lehane, chief global affairs officer for the San Francisco-based AI developer, said there’s great appeal for the company to partner with Common Sense Media, because it has credibility with voters, lawmakers and parents. 
“How you build this trust is incredibly important for the societal license to be able to operate,” Lehane said.\u003c/p>\n\u003cfigure id=\"attachment_12069332\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12069332 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/01/phones-at-school-getty.jpg\" alt=\"\" width=\"2000\" height=\"1333\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/01/phones-at-school-getty.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/01/phones-at-school-getty-160x107.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/01/phones-at-school-getty-1536x1024.jpg 1536w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">In September 2024, Gov. Gavin Newsom signed the Phone‑Free Schools Act (AB 3216), which requires every school district, charter school and county education office to adopt policies by July 1, 2026, that limit or prohibit the use of smartphones by students while on campus or under school supervision.\u003c/figcaption>\u003c/figure>\n\u003cp>Not mentioned: the company is facing several \u003ca href=\"https://www.kqed.org/news/12063401/openai-faces-legal-storm-over-claims-its-ai-drove-users-to-suicide-delusions\">lawsuits\u003c/a> from plaintiffs claiming ChatGPT brought on mental delusions and, in four cases, drove people to suicide.\u003c/p>\n\u003cp>\u003cstrong>What’s in it for Common Sense Media?\u003c/strong> Jim Steyer, the advocacy group’s founder and CEO, said their polling shows overwhelming numbers of California voters, regardless of their party, support stronger AI protections for kids, teens and families. 
“This is so core to the long-term future of this industry that there are the right kind of protections, and that the public trusts these platforms and the big frontier labs,” Steyer said.\u003c/p>\n\u003cp>“I’m pleased to see a leading child safety organization and a large tech company joining forces on this critical safety issue affecting our children,” wrote Asm. Rebecca Bauer-Kahan, D-Orinda, who authored the similar bill Newsom vetoed last year. “The legislature’s role remains unchanged; we have both the role and responsibility to protect California’s children and to represent our constituents.”[aside postID=news_12060365 hero='https://cdn.kqed.org/wp-content/uploads/sites/10/2025/10/SamAltmanGetty.jpg']“While this is an important milestone, there’s more work to be done and I continue to believe this issue should be tackled by the legislature and governor through a public process inviting all stakeholders to participate,” wrote Sen. Steve Padilla, D-San Diego, who authored SB 243, an AI chatbot safety bill that \u003ca href=\"https://www.kqed.org/news/12058013/newsom-signs-california-ai-transparency-bill-tailored-to-meet-tech-industry-tastes\">did get the governor’s signature\u003c/a> last year.\u003c/p>\n\u003cp>Padilla, however, disagrees with the proposal to put the law into the state constitution, warning that it would create an unnecessarily high bar to revise and update that law in the future.\u003c/p>\n\u003cp>When asked about opting to promote a ballot measure, Steyer argued he’s interested in whatever strategy or combination of strategies gets child safety regulations on the books.\u003c/p>\n\u003cp>In the last year alone, Common Sense Media has sponsored or supported a variety of bills aimed at protecting children online, including social media warning labels and an age verification mandate. 
“At this pivotal moment for AI, we cannot make the same mistake that we did with social media,” Steyer said, criticizing Silicon Valley companies that have been using children as guinea pigs, and “fueled a youth mental health crisis here in California, and quite frankly, across the world.”\u003c/p>\n\u003cp>Lehane predictably used more measured terms. “We do believe AI is an empowerment tool. It helps people solve really hard problems,” he began, finishing with “Part and parcel of that is making sure parents have the control and are empowered to exercise control in terms of how their kids use it.”\u003c/p>\n\u003cp>The initiative’s backers still need to gather signatures to qualify it for the California ballot this November, an effort that Lehane said is likely to begin next month.\u003c/p>\n\u003cp>\u003c/p>\n",
"blocks": [],
"excerpt": "Common Sense Media and OpenAI announced a California initiative they say would, if voters approve, establish the strongest youth AI safety protections in the nation, in the absence of federal mandates. ",
"status": "publish",
"parent": 0,
"modified": 1768008189,
"stats": {
"hasAudio": false,
"hasVideo": false,
"hasChartOrMap": false,
"iframeSrcs": [],
"hasGoogleForm": false,
"hasGallery": false,
"hasHearkenModule": false,
"hasPolis": false,
"paragraphCount": 15,
"wordCount": 749
},
"headData": {
"title": "OpenAI and Common Sense Media Partner on New Kids AI Safety Ballot Measure | KQED",
"description": "Common Sense Media and OpenAI announced a California initiative they say would, if voters approve, establish the strongest youth AI safety protections in the nation, in the absence of federal mandates. ",
"ogTitle": "",
"ogDescription": "",
"ogImgId": "",
"twTitle": "",
"twDescription": "",
"twImgId": "",
"schema": {
"@context": "https://schema.org",
"@type": "NewsArticle",
"headline": "OpenAI and Common Sense Media Partner on New Kids AI Safety Ballot Measure",
"datePublished": "2026-01-09T16:36:03-08:00",
"dateModified": "2026-01-09T17:23:09-08:00",
"image": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"isAccessibleForFree": "True",
"publisher": {
"@type": "NewsMediaOrganization",
"@id": "https://www.kqed.org/#organization",
"name": "KQED",
"logo": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"url": "https://www.kqed.org",
"sameAs": [
"https://www.facebook.com/KQED",
"https://twitter.com/KQED",
"https://www.instagram.com/kqed/",
"https://www.tiktok.com/@kqedofficial",
"https://www.linkedin.com/company/kqed",
"https://www.youtube.com/channel/UCeC0IOo7i1P_61zVUWbJ4nw"
]
}
}
},
"primaryCategory": {
"termId": 248,
"slug": "technology",
"name": "Technology"
},
"sticky": false,
"nprStoryId": "kqed-12069286",
"templateType": "standard",
"featuredImageType": "standard",
"excludeFromSiteSearch": "Include",
"articleAge": "0",
"path": "/news/12069286/openai-and-common-sense-media-partner-on-new-kids-ai-safety-ballot-measure",
"audioTrackLength": null,
"parsedContent": [
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003cp>Common Sense Media and \u003ca href=\"https://www.kqed.org/news/tag/openai\">OpenAI\u003c/a> announced Friday they’re backing a consolidated effort to deliver AI chatbot guardrails for children, after dropping their competing ballot measures on the issue.\u003c/p>\n\u003cp>The announcement was a surprising turn of events, pairing two players in the space who have often been at odds with each other.\u003c/p>\n\u003cp>Lobbyists for OpenAI and other major tech industry groups \u003ca href=\"https://www.kqed.org/news/12059714/newsom-vetoes-most-watched-childrens-ai-bill-signs-16-others-targeting-tech\">actively opposed a similar bill\u003c/a> co-sponsored by the child advocacy group Common Sense Media in the last legislative session. Gov. Gavin Newsom ultimately \u003ca href=\"https://www.kqed.org/news/12059714/newsom-vetoes-most-watched-childrens-ai-bill-signs-16-others-targeting-tech\">vetoed the bill\u003c/a> in October 2025.\u003c/p>\n\u003cp>\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "ad",
"attributes": {
"named": {
"label": "fullwidth"
},
"numeric": [
"fullwidth"
]
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003c/p>\n\u003cp>The Parents & Kids Safe AI Act would require companies to identify youth users and deliver an experience designed to block emotional manipulation and child-targeted advertising, as well as give parents more control. The state’s attorney general’s office would enforce the provisions, and independent annual safety audits would provide accountability.\u003c/p>\n\u003cp>\u003cstrong>What’s in it for OpenAI?\u003c/strong> Chris Lehane, chief global affairs officer for the San Francisco-based AI developer, said there’s great appeal for the company to partner with Common Sense Media, because it has credibility with voters, lawmakers and parents. “How you build this trust is incredibly important for the societal license to be able to operate,” Lehane said.\u003c/p>\n\u003cfigure id=\"attachment_12069332\" class=\"wp-caption aligncenter\" style=\"max-width: 2000px\">\u003cimg loading=\"lazy\" decoding=\"async\" class=\"wp-image-12069332 size-full\" src=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/01/phones-at-school-getty.jpg\" alt=\"\" width=\"2000\" height=\"1333\" srcset=\"https://cdn.kqed.org/wp-content/uploads/sites/10/2026/01/phones-at-school-getty.jpg 2000w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/01/phones-at-school-getty-160x107.jpg 160w, https://cdn.kqed.org/wp-content/uploads/sites/10/2026/01/phones-at-school-getty-1536x1024.jpg 1536w\" sizes=\"auto, (max-width: 2000px) 100vw, 2000px\">\u003cfigcaption class=\"wp-caption-text\">In September 2024, Gov. 
Gavin Newsom signed the Phone‑Free Schools Act (AB 3216), which requires every school district, charter school and county education office to adopt policies by July 1, 2026, that limit or prohibit the use of smartphones by students while on campus or under school supervision.\u003c/figcaption>\u003c/figure>\n\u003cp>Not mentioned: the company is facing several \u003ca href=\"https://www.kqed.org/news/12063401/openai-faces-legal-storm-over-claims-its-ai-drove-users-to-suicide-delusions\">lawsuits\u003c/a> from plaintiffs claiming ChatGPT brought on mental delusions and, in four cases, drove people to suicide.\u003c/p>\n\u003cp>\u003cstrong>What’s in it for Common Sense Media?\u003c/strong> Jim Steyer, the advocacy group’s founder and CEO, said their polling shows overwhelming numbers of California voters, regardless of their party, support stronger AI protections for kids, teens and families. “This is so core to the long-term future of this industry that there are the right kind of protections, and that the public trusts these platforms and the big frontier labs,” Steyer said.\u003c/p>\n\u003cp>“I’m pleased to see a leading child safety organization and a large tech company joining forces on this critical safety issue affecting our children,” wrote Asm. Rebecca Bauer-Kahan, D-Orinda, who authored the similar bill Newsom vetoed last year. “The legislature’s role remains unchanged; we have both the role and responsibility to protect California’s children and to represent our constituents.”\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "aside",
"attributes": {
"named": {
"postid": "news_12060365",
"hero": "https://cdn.kqed.org/wp-content/uploads/sites/10/2025/10/SamAltmanGetty.jpg",
"label": ""
},
"numeric": []
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>“While this is an important milestone, there’s more work to be done and I continue to believe this issue should be tackled by the legislature and governor through a public process inviting all stakeholders to participate,” wrote Sen. Steve Padilla, D-San Diego, who authored SB 243, an AI chatbot safety bill that \u003ca href=\"https://www.kqed.org/news/12058013/newsom-signs-california-ai-transparency-bill-tailored-to-meet-tech-industry-tastes\">did get the governor’s signature\u003c/a> last year.\u003c/p>\n\u003cp>Padilla, however, disagrees with the proposal to put the law into the state constitution, warning that it would create an unnecessarily high bar to revise and update that law in the future.\u003c/p>\n\u003cp>When asked about opting to promote a ballot measure, Steyer argued he’s interested in whatever strategy or combination of strategies gets child safety regulations on the books.\u003c/p>\n\u003cp>In the last year alone, Common Sense Media has sponsored or supported a variety of bills aimed at protecting children online, including social media warning labels and an age verification mandate. “At this pivotal moment for AI, we cannot make the same mistake that we did with social media,” Steyer said, criticizing Silicon Valley companies that have been using children as guinea pigs, and “fueled a youth mental health crisis here in California, and quite frankly, across the world.”\u003c/p>\n\u003cp>Lehane predictably used more measured terms. “We do believe AI is an empowerment tool. 
It helps people solve really hard problems,” he began, finishing with “Part and parcel of that is making sure parents have the control and are empowered to exercise control in terms of how their kids use it.”\u003c/p>\n\u003cp>The initiative’s backers still need to gather signatures to qualify it for the California ballot this November, an effort that Lehane said is likely to begin next month.\u003c/p>\n\u003cp>\u003c/p>\n\u003c/div>\u003c/p>",
"attributes": {
"named": {},
"numeric": []
}
}
],
"link": "/news/12069286/openai-and-common-sense-media-partner-on-new-kids-ai-safety-ballot-measure",
"authors": [
"251"
],
"categories": [
"news_31795",
"news_8",
"news_13",
"news_248"
],
"tags": [
"news_18538",
"news_22307",
"news_32668",
"news_33542",
"news_38",
"news_34586",
"news_21285",
"news_1631"
],
"featImg": "news_12069330",
"label": "news"
},
"news_12066910": {
"type": "posts",
"id": "news_12066910",
"meta": {
"index": "posts_1716263798",
"site": "news",
"id": "12066910",
"score": null,
"sort": [
1765573663000
]
},
"guestAuthors": [],
"slug": "trumps-ai-order-provokes-pushback-from-california-officials-and-consumer-advocates",
"title": "Trump’s AI Order Provokes Pushback from California Officials and Consumer Advocates",
"publishDate": 1765573663,
"format": "audio",
"headTitle": "Trump’s AI Order Provokes Pushback from California Officials and Consumer Advocates | KQED",
"labelTerm": {
"site": "news"
},
"content": "\u003cp>In the last decade, California has passed 42 laws to regulate artificial intelligence, more than any other state, according to \u003ca href=\"https://hai.stanford.edu/ai-index/2025-ai-index-report\">Stanford’s Institute for Human-Centered AI\u003c/a>. So it comes as no surprise that state leaders reacted with ire to President Donald Trump’s \u003ca href=\"https://www.whitehouse.gov/presidential-actions/2025/12/eliminating-state-law-obstruction-of-national-artificial-intelligence-policy/\">executive order\u003c/a> slapping down state efforts to regulate AI.\u003c/p>\n\u003cp>The clash highlights the growing friction between California’s push for consumer protections and the tech industry’s efforts to neutralize regulation. The executive order follows \u003ca href=\"https://www.wsj.com/tech/ai/the-silicon-valley-campaign-to-win-trump-over-on-ai-regulation-214bd6bd\">previous failures\u003c/a> led by Silicon Valley venture capitalist David Sacks, now the president’s AI and crypto advisor, to pass a moratorium on state AI regulation through Congress.\u003c/p>\n\u003cp>“President Trump and David Sacks aren’t making policy — they’re running a con. And every day, they push the limits to see how far they can take it,” Gov. 
Gavin Newsom wrote in a statement on Thursday.\u003c/p>\n\u003cp>[ad fullwidth]\u003c/p>\n\u003cp>As an earlier draft of the order circulated in Washington, critics warned it would neuter state laws designed to protect children and adults from the more predatory forms of commercial AI.\u003c/p>\n\u003cp>Trump’s executive order echoed talking points articulated by Silicon Valley leaders, including calls for a uniform federal regulatory framework, and concerns that state regulations could slow the pace of AI innovation.\u003c/p>\n\u003cp>“POTUS stepping in creates space for builders to focus on innovation while Congress finishes the job,”\u003ca href=\"https://x.com/Collin_McCune/status/1999264399459066212?s=20\"> wrote\u003c/a> Collin McCune, who leads government affairs for the Menlo Park-based venture capital firm Andreessen Horowitz, which is among the companies that have spent tens of millions of dollars to block or weaken Congressional action. “Now lawmakers have to act. Our standing in the global AI race—and the direct benefits Americans will see from it—depend on it,” he added.\u003c/p>\n\u003cp>https://twitter.com/RapidResponse47/status/1999257391356125348\u003c/p>\n\u003cp>The industry push to get the White House to supersede state legislation is “shortsighted,” said State Sen. Josh Becker, D-Menlo Park, who has worked to pass several of California’s state bills governing AI. “I think they’re going to pay the price in the long run.”\u003c/p>\n\u003cp>But just how big of an effect the order will have on California’s AI regulations is unclear. 
It includes exemptions for laws that cover child safety, data center infrastructure, state government use of AI and “other topics as shall be determined.”\u003c/p>\n\u003cp>“This is going to sow massive confusion in the industry,” Becker said.\u003c/p>\n\u003cp>Becker is wondering about the future of several AI bills he co-authored, including one regulating AI companion chatbots, due to go into effect in January, which Newsom \u003ca href=\"https://www.gov.ca.gov/2025/10/13/governor-newsom-signs-bills-to-further-strengthen-californias-leadership-in-protecting-children-online/\">signed\u003c/a> into law as part of a \u003ca href=\"https://www.kqed.org/news/12059714/newsom-vetoes-most-watched-childrens-ai-bill-signs-16-others-targeting-tech\">broader package\u003c/a> of online safety and emerging-tech protections.\u003c/p>\n\u003cp>“Is that affected by this? Because there’s a big part of it that deals with kids and chatbots, but there were parts of the bill that dealt with other things,” Becker said.[aside postID=forum_2010101912169 hero='https://cdn.kqed.org/wp-content/uploads/sites/43/2025/11/GettyImages-2203864303-2000x1333.jpg']The executive order is widely expected to prompt legal challenges because only Congress has the authority to override state laws. Speaking in Sacramento on Friday, California Attorney General Rob Bonta said it was too early to determine any legal action.\u003c/p>\n\u003cp>“Where it’s headed, and what it intends to do, raises great concerns and flags. But we don’t sue until there’s action that we can take. Sometimes that’s upon the issuance of the executive order. 
Sometimes it’s later,” said Bonta, whose office has sued the Trump administration 49 times this year.\u003c/p>\n\u003cp>Bonta’s measured stance contrasts with state lawmakers who see imminent danger in this latest move from the White House.\u003c/p>\n\u003cp>“President Trump’s executive order is a dangerous attack on states’ constitutional authority to protect our residents from urgent AI harms,” wrote Assemblymember Rebecca Bauer-Kahan, D-Orinda, who has authored multiple AI bills regulating everything from algorithmic discrimination and transparency to protections for children and Hollywood creatives.\u003c/p>\n\u003cp>“While the tech industry lobbies for deregulation, women are being victimized by AI-powered nudification apps, artists and creators are having their livelihoods cannibalized without notice, deepfakes are being weaponized for harassment and fraud, and AI systems are perpetuating discrimination in housing, employment, and lending. These aren’t theoretical risks— they’re happening now and demand action,” Bauer-Kahan wrote.\u003c/p>\n\u003cp>California is not alone in its efforts to regulate AI at the state level. This year, all 50 states and territories introduced AI legislation and 38 states adopted about 100 laws, according to the \u003ca href=\"https://www.ncsl.org/state-legislatures-news/details/as-ai-tools-become-commonplace-so-do-concerns\">National Conference of State Legislatures\u003c/a>.\u003c/p>\n\u003cp>“This executive order is an outrageous betrayal of the states that, as Congress has stalled, have worked tirelessly to protect their residents from the very real risks of AI,” wrote James Steyer, head of Common Sense Media. The advocacy group has sponsored state bills in California and elsewhere. 
“Stripping states of their constitutional rights to protect their residents from unsafe AI — while holding critical broadband funding hostage, no less — erases the progress they are making and puts lives in danger,” Steyer wrote.\u003c/p>\n\u003cp>\u003c/p>\n",
"blocks": [],
"excerpt": "President Trump’s executive order directing federal agencies to challenge, preempt or otherwise neutralize state AI rules is widely seen as a win for Silicon Valley companies that lobbied against regulation, but blowback is expected imminently.\r\n",
"status": "publish",
"parent": 0,
"modified": 1765654804,
"stats": {
"hasAudio": false,
"hasVideo": false,
"hasChartOrMap": false,
"iframeSrcs": [],
"hasGoogleForm": false,
"hasGallery": false,
"hasHearkenModule": false,
"hasPolis": false,
"paragraphCount": 19,
"wordCount": 890
},
"headData": {
"title": "Trump’s AI Order Provokes Pushback from California Officials and Consumer Advocates | KQED",
"description": "President Trump’s executive order directing federal agencies to challenge, preempt or otherwise neutralize state AI rules is widely seen as a win for Silicon Valley companies that lobbied against regulation, but blowback is expected imminently.\r\n",
"ogTitle": "",
"ogDescription": "",
"ogImgId": "",
"twTitle": "",
"twDescription": "",
"twImgId": "",
"schema": {
"@context": "https://schema.org",
"@type": "NewsArticle",
"headline": "Trump’s AI Order Provokes Pushback from California Officials and Consumer Advocates",
"datePublished": "2025-12-12T13:07:43-08:00",
"dateModified": "2025-12-13T11:40:04-08:00",
"image": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"isAccessibleForFree": "True",
"publisher": {
"@type": "NewsMediaOrganization",
"@id": "https://www.kqed.org/#organization",
"name": "KQED",
"logo": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"url": "https://www.kqed.org",
"sameAs": [
"https://www.facebook.com/KQED",
"https://twitter.com/KQED",
"https://www.instagram.com/kqed/",
"https://www.tiktok.com/@kqedofficial",
"https://www.linkedin.com/company/kqed",
"https://www.youtube.com/channel/UCeC0IOo7i1P_61zVUWbJ4nw"
]
}
}
},
"primaryCategory": {
"termId": 248,
"slug": "technology",
"name": "Technology"
},
"audioUrl": "https://traffic.omny.fm/d/clips/0af137ef-751e-4b19-a055-aaef00d2d578/ffca7e9f-6831-4[…]f-aaef00f5a073/6afb0475-7a02-409c-abe7-b3b200048172/audio.mp3",
"sticky": false,
"nprStoryId": "kqed-12066910",
"templateType": "standard",
"featuredImageType": "standard",
"excludeFromSiteSearch": "Include",
"articleAge": "0",
"path": "/news/12066910/trumps-ai-order-provokes-pushback-from-california-officials-and-consumer-advocates",
"audioTrackLength": null,
"parsedContent": [
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003cp>In the last decade, California has passed 42 laws to regulate artificial intelligence, more than any other state, according to \u003ca href=\"https://hai.stanford.edu/ai-index/2025-ai-index-report\">Stanford’s Institute for Human-Centered AI\u003c/a>. So it comes as no surprise that state leaders reacted with ire to President Donald Trump’s \u003ca href=\"https://www.whitehouse.gov/presidential-actions/2025/12/eliminating-state-law-obstruction-of-national-artificial-intelligence-policy/\">executive order\u003c/a> slapping down state efforts to regulate AI.\u003c/p>\n\u003cp>The clash highlights the growing friction between California’s push for consumer protections and the tech industry’s efforts to neutralize regulation. The executive order follows \u003ca href=\"https://www.wsj.com/tech/ai/the-silicon-valley-campaign-to-win-trump-over-on-ai-regulation-214bd6bd\">previous failures\u003c/a> led by Silicon Valley venture capitalist David Sacks, now the president’s AI and crypto advisor, to pass a moratorium on state AI regulation through Congress.\u003c/p>\n\u003cp>“President Trump and David Sacks aren’t making policy — they’re running a con. And every day, they push the limits to see how far they can take it,” Gov. Gavin Newsom wrote in a statement on Thursday.\u003c/p>\n\u003cp>\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "ad",
"attributes": {
"named": {
"label": "fullwidth"
},
"numeric": [
"fullwidth"
]
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\u003c/p>\n\u003cp>As an earlier draft of the order circulated in Washington, critics warned it would neuter state laws designed to protect children and adults from the more predatory forms of commercial AI.\u003c/p>\n\u003cp>Trump’s executive order echoed talking points articulated by Silicon Valley leaders, including calls for a uniform federal regulatory framework, and concerns that state regulations could slow the pace of AI innovation.\u003c/p>\n\u003cp>“POTUS stepping in creates space for builders to focus on innovation while Congress finishes the job,”\u003ca href=\"https://x.com/Collin_McCune/status/1999264399459066212?s=20\"> wrote\u003c/a> Collin McCune, who leads government affairs for the Menlo Park-based venture capital firm Andreessen Horowitz, which is among the companies that have spent tens of millions of dollars to block or weaken Congressional action. “Now lawmakers have to act. Our standing in the global AI race—and the direct benefits Americans will see from it—depend on it,” he added.\u003c/p>\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "singleTwitterStatus",
"attributes": {
"named": {
"id": "1999257391356125348"
},
"numeric": []
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>\n\u003cp>The industry push to get the White House to supersede state legislation is “shortsighted,” said State Sen. Josh Becker, D-Menlo Park, who has worked to pass several of California’s state bills governing AI. “I think they’re going to pay the price in the long run.”\u003c/p>\n\u003cp>But just how big of an effect the order will have on California’s AI regulations is unclear. It includes exemptions for laws that cover child safety, data center infrastructure, state government use of AI and “other topics as shall be determined.”\u003c/p>\n\u003cp>“This is going to sow massive confusion in the industry,” Becker said.\u003c/p>\n\u003cp>Becker is wondering about the future of several AI bills he co-authored, including one regulating AI companion chatbots, due to go into effect in January, which Newsom \u003ca href=\"https://www.gov.ca.gov/2025/10/13/governor-newsom-signs-bills-to-further-strengthen-californias-leadership-in-protecting-children-online/\">signed\u003c/a> into law as part of a \u003ca href=\"https://www.kqed.org/news/12059714/newsom-vetoes-most-watched-childrens-ai-bill-signs-16-others-targeting-tech\">broader package\u003c/a> of online safety and emerging-tech protections.\u003c/p>\n\u003cp>“Is that affected by this? Because there’s a big part of it that deals with kids and chatbots, but there were parts of the bill that dealt with other things,” Becker said.\u003c/p>\u003c/div>",
"attributes": {
"named": {},
"numeric": []
}
},
{
"type": "component",
"content": "",
"name": "aside",
"attributes": {
"named": {
"postid": "forum_2010101912169",
"hero": "https://cdn.kqed.org/wp-content/uploads/sites/43/2025/11/GettyImages-2203864303-2000x1333.jpg",
"label": ""
},
"numeric": []
}
},
{
"type": "contentString",
"content": "\u003cdiv class=\"post-body\">\u003cp>The executive order is widely expected to prompt legal challenges because only Congress has the authority to override state laws. Speaking in Sacramento on Friday, California Attorney General Rob Bonta said it was too early to determine any legal action.\u003c/p>\n\u003cp>“Where it’s headed, and what it intends to do, raises great concerns and flags. But we don’t sue until there’s action that we can take. Sometimes that’s upon the issuance of the executive order. Sometimes it’s later,” said Bonta, whose office has sued the Trump administration 49 times this year.\u003c/p>\n\u003cp>Bonta’s measured stance contrasts with state lawmakers who see imminent danger in this latest move from the White House.\u003c/p>\n\u003cp>“President Trump’s executive order is a dangerous attack on states’ constitutional authority to protect our residents from urgent AI harms,” wrote Assemblymember Rebecca Bauer-Kahan, D-Orinda, who has authored multiple AI bills regulating everything from algorithmic discrimination and transparency to protections for children and Hollywood creatives.\u003c/p>\n\u003cp>“While the tech industry lobbies for deregulation, women are being victimized by AI-powered nudification apps, artists and creators are having their livelihoods cannibalized without notice, deepfakes are being weaponized for harassment and fraud, and AI systems are perpetuating discrimination in housing, employment, and lending. These aren’t theoretical risks— they’re happening now and demand action,” Bauer-Kahan wrote.\u003c/p>\n\u003cp>California is not alone in its efforts to regulate AI at the state level. 
This year, all 50 states and territories introduced AI legislation and 38 states adopted about 100 laws, according to the \u003ca href=\"https://www.ncsl.org/state-legislatures-news/details/as-ai-tools-become-commonplace-so-do-concerns\">National Conference of State Legislatures\u003c/a>.\u003c/p>\n\u003cp>“This executive order is an outrageous betrayal of the states that, as Congress has stalled, have worked tirelessly to protect their residents from the very real risks of AI,” wrote James Steyer, head of Common Sense Media. The advocacy group has sponsored state bills in California and elsewhere. “Stripping states of their constitutional rights to protect their residents from unsafe AI — while holding critical broadband funding hostage, no less — erases the progress they are making and puts lives in danger,” Steyer wrote.\u003c/p>\n\u003cp>\u003c/p>\n\u003c/div>\u003c/p>",
"attributes": {
"named": {},
"numeric": []
}
}
],
"link": "/news/12066910/trumps-ai-order-provokes-pushback-from-california-officials-and-consumer-advocates",
"authors": [
"251"
],
"categories": [
"news_31795",
"news_8",
"news_13",
"news_248"
],
"tags": [
"news_25184",
"news_32664",
"news_34755",
"news_1386",
"news_18538",
"news_32668",
"news_1323",
"news_17968",
"news_34586",
"news_21285",
"news_1631"
],
"featImg": "news_12066914",
"label": "news"
}
},
"programsReducer": {
"all-things-considered": {
"id": "all-things-considered",
"title": "All Things Considered",
"info": "Every weekday, \u003cem>All Things Considered\u003c/em> hosts Robert Siegel, Audie Cornish, Ari Shapiro, and Kelly McEvers present the program's trademark mix of news, interviews, commentaries, reviews, and offbeat features. Michel Martin hosts on the weekends.",
"airtime": "MON-FRI 1pm-2pm, 4:30pm-6:30pm\u003cbr />SAT-SUN 5pm-6pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/All-Things-Considered-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.npr.org/programs/all-things-considered/",
"meta": {
"site": "news",
"source": "npr"
},
"link": "/radio/program/all-things-considered"
},
"american-suburb-podcast": {
"id": "american-suburb-podcast",
"title": "American Suburb: The Podcast",
"tagline": "The flip side of gentrification, told through one town",
"info": "Gentrification is changing cities across America, forcing people from neighborhoods they have long called home. Call them the displaced. Now those priced out of the Bay Area are looking for a better life in an unlikely place. American Suburb follows this migration to one California town along the Delta, 45 miles from San Francisco. But is this once sleepy suburb ready for them?",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/American-Suburb-Podcast-Tile-703x703-1.jpg",
"officialWebsiteLink": "/news/series/american-suburb-podcast",
"meta": {
"site": "news",
"source": "kqed",
"order": 19
},
"link": "/news/series/american-suburb-podcast/",
"subscribe": {
"npr": "https://rpb3r.app.goo.gl/RBrW",
"apple": "https://itunes.apple.com/WebObjects/MZStore.woa/wa/viewPodcast?mt=2&id=1287748328",
"tuneIn": "https://tunein.com/radio/American-Suburb-p1086805/",
"rss": "https://ww2.kqed.org/news/series/american-suburb-podcast/feed/podcast",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly9mZWVkcy5tZWdhcGhvbmUuZm0vS1FJTkMzMDExODgxNjA5"
}
},
"baycurious": {
"id": "baycurious",
"title": "Bay Curious",
"tagline": "Exploring the Bay Area, one question at a time",
"info": "KQED’s new podcast, Bay Curious, gets to the bottom of the mysteries — both profound and peculiar — that give the Bay Area its unique identity. And we’ll do it with your help! You ask the questions. You decide what Bay Curious investigates. And you join us on the journey to find the answers.",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Bay-Curious-Podcast-Tile-703x703-1.jpg",
"imageAlt": "KQED Bay Curious",
"officialWebsiteLink": "/news/series/baycurious",
"meta": {
"site": "news",
"source": "kqed",
"order": 3
},
"link": "/podcasts/baycurious",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/bay-curious/id1172473406",
"npr": "https://www.npr.org/podcasts/500557090/bay-curious",
"rss": "https://ww2.kqed.org/news/category/bay-curious-podcast/feed/podcast",
"amazon": "https://music.amazon.com/podcasts/9a90d476-aa04-455d-9a4c-0871ed6216d4/bay-curious",
"stitcher": "https://www.stitcher.com/podcast/kqed/bay-curious",
"spotify": "https://open.spotify.com/show/6O76IdmhixfijmhTZLIJ8k"
}
},
"bbc-world-service": {
"id": "bbc-world-service",
"title": "BBC World Service",
"info": "The day's top stories from BBC News compiled twice daily in the week, once at weekends.",
"airtime": "MON-FRI 9pm-10pm, TUE-FRI 1am-2am",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/BBC-World-Service-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.bbc.co.uk/sounds/play/live:bbc_world_service",
"meta": {
"site": "news",
"source": "BBC World Service"
},
"link": "/radio/program/bbc-world-service",
"subscribe": {
"apple": "https://itunes.apple.com/us/podcast/global-news-podcast/id135067274?mt=2",
"tuneIn": "https://tunein.com/radio/BBC-World-Service-p455581/",
"rss": "https://podcasts.files.bbci.co.uk/p02nq0gn.rss"
}
},
"californiareport": {
"id": "californiareport",
"title": "The California Report",
"tagline": "California, day by day",
"info": "KQED’s statewide radio news program providing daily coverage of issues, trends and public policy decisions.",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/The-California-Report-Podcast-Tile-703x703-1.jpg",
"imageAlt": "KQED The California Report",
"officialWebsiteLink": "/californiareport",
"meta": {
"site": "news",
"source": "kqed",
"order": 8
},
"link": "/californiareport",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/kqeds-the-california-report/id79681292",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly9mZWVkcy5tZWdhcGhvbmUuZm0vS1FJTkM1MDAyODE4NTgz",
"npr": "https://www.npr.org/podcasts/432285393/the-california-report",
"stitcher": "https://www.stitcher.com/podcast/kqedfm-kqeds-the-california-report-podcast-8838",
"rss": "https://ww2.kqed.org/news/tag/tcram/feed/podcast"
}
},
"californiareportmagazine": {
"id": "californiareportmagazine",
"title": "The California Report Magazine",
"tagline": "Your state, your stories",
"info": "Every week, The California Report Magazine takes you on a road trip for the ears: to visit the places and meet the people who make California unique. The in-depth storytelling podcast from the California Report.",
"airtime": "FRI 4:30pm-5pm, 6:30pm-7pm, 11pm-11:30pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/The-California-Report-Magazine-Podcast-Tile-703x703-1.jpg",
"imageAlt": "KQED The California Report Magazine",
"officialWebsiteLink": "/californiareportmagazine",
"meta": {
"site": "news",
"source": "kqed",
"order": 10
},
"link": "/californiareportmagazine",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/the-california-report-magazine/id1314750545",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly9mZWVkcy5tZWdhcGhvbmUuZm0vS1FJTkM3NjkwNjk1OTAz",
"npr": "https://www.npr.org/podcasts/564733126/the-california-report-magazine",
"stitcher": "https://www.stitcher.com/podcast/kqed/the-california-report-magazine",
"rss": "https://ww2.kqed.org/news/tag/tcrmag/feed/podcast"
}
},
"city-arts": {
"id": "city-arts",
"title": "City Arts & Lectures",
"info": "A one-hour radio program to hear celebrated writers, artists and thinkers address contemporary ideas and values, often discussing the creative process. Please note: tapes or transcripts are not available",
"imageSrc": "https://ww2.kqed.org/radio/wp-content/uploads/sites/50/2018/05/cityartsandlecture-300x300.jpg",
"officialWebsiteLink": "https://www.cityarts.net/",
"airtime": "SUN 1pm-2pm, TUE 10pm, WED 1am",
"meta": {
"site": "news",
"source": "City Arts & Lectures"
},
"link": "https://www.cityarts.net",
"subscribe": {
"tuneIn": "https://tunein.com/radio/City-Arts-and-Lectures-p692/",
"rss": "https://www.cityarts.net/feed/"
}
},
"closealltabs": {
"id": "closealltabs",
"title": "Close All Tabs",
"tagline": "Your irreverent guide to the trends redefining our world",
"info": "Close All Tabs breaks down how digital culture shapes our world through thoughtful insights and irreverent humor.",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2025/02/CAT_2_Tile-scaled.jpg",
"imageAlt": "KQED Close All Tabs",
"officialWebsiteLink": "/podcasts/closealltabs",
"meta": {
"site": "news",
"source": "kqed",
"order": 1
},
"link": "/podcasts/closealltabs",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/close-all-tabs/id214663465",
"rss": "https://feeds.megaphone.fm/KQINC6993880386",
"amazon": "https://music.amazon.com/podcasts/92d9d4ac-67a3-4eed-b10a-fb45d45b1ef2/close-all-tabs",
"spotify": "https://open.spotify.com/show/6LAJFHnGK1pYXYzv6SIol6?si=deb0cae19813417c"
}
},
"code-switch-life-kit": {
"id": "code-switch-life-kit",
"title": "Code Switch / Life Kit",
"info": "\u003cem>Code Switch\u003c/em>, which listeners will hear in the first part of the hour, has fearless and much-needed conversations about race. Hosted by journalists of color, the show tackles the subject of race head-on, exploring how it impacts every part of society — from politics and pop culture to history, sports and more.\u003cbr />\u003cbr />\u003cem>Life Kit\u003c/em>, which will be in the second part of the hour, guides you through spaces and feelings no one prepares you for — from finances to mental health, from workplace microaggressions to imposter syndrome, from relationships to parenting. The show features experts with real world experience and shares their knowledge. Because everyone needs a little help being human.\u003cbr />\u003cbr />\u003ca href=\"https://www.npr.org/podcasts/510312/codeswitch\">\u003cem>Code Switch\u003c/em> official site and podcast\u003c/a>\u003cbr />\u003ca href=\"https://www.npr.org/lifekit\">\u003cem>Life Kit\u003c/em> official site and podcast\u003c/a>\u003cbr />",
"airtime": "SUN 9pm-10pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Code-Switch-Life-Kit-Podcast-Tile-360x360-1.jpg",
"meta": {
"site": "radio",
"source": "npr"
},
"link": "/radio/program/code-switch-life-kit",
"subscribe": {
"apple": "https://podcasts.apple.com/podcast/1112190608?mt=2&at=11l79Y&ct=nprdirectory",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly93d3cubnByLm9yZy9yc3MvcG9kY2FzdC5waHA_aWQ9NTEwMzEy",
"spotify": "https://open.spotify.com/show/3bExJ9JQpkwNhoHvaIIuyV",
"rss": "https://feeds.npr.org/510312/podcast.xml"
}
},
"commonwealth-club": {
"id": "commonwealth-club",
"title": "Commonwealth Club of California Podcast",
"info": "The Commonwealth Club of California is the nation's oldest and largest public affairs forum. As a non-partisan forum, The Club brings to the public airwaves diverse viewpoints on important topics. The Club's weekly radio broadcast - the oldest in the U.S., dating back to 1924 - is carried across the nation on public radio stations and is now podcasting. Our website archive features audio of our recent programs, as well as selected speeches from our long and distinguished history. This podcast feed is usually updated twice a week and is always un-edited.",
"airtime": "THU 10pm, FRI 1am",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Commonwealth-Club-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.commonwealthclub.org/podcasts",
"meta": {
"site": "news",
"source": "Commonwealth Club of California"
},
"link": "/radio/program/commonwealth-club",
"subscribe": {
"apple": "https://itunes.apple.com/us/podcast/commonwealth-club-of-california-podcast/id976334034?mt=2",
"google": "https://podcasts.google.com/feed/aHR0cDovL3d3dy5jb21tb253ZWFsdGhjbHViLm9yZy9hdWRpby9wb2RjYXN0L3dlZWtseS54bWw",
"tuneIn": "https://tunein.com/radio/Commonwealth-Club-of-California-p1060/"
}
},
"forum": {
"id": "forum",
"title": "Forum",
"tagline": "The conversation starts here",
"info": "KQED’s live call-in program discussing local, state, national and international issues, as well as in-depth interviews.",
"airtime": "MON-FRI 9am-11am, 10pm-11pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Forum-Podcast-Tile-703x703-1.jpg",
"imageAlt": "KQED Forum with Mina Kim and Alexis Madrigal",
"officialWebsiteLink": "/forum",
"meta": {
"site": "news",
"source": "kqed",
"order": 9
},
"link": "/forum",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/kqeds-forum/id73329719",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly9mZWVkcy5tZWdhcGhvbmUuZm0vS1FJTkM5NTU3MzgxNjMz",
"npr": "https://www.npr.org/podcasts/432307980/forum",
"stitcher": "https://www.stitcher.com/podcast/kqedfm-kqeds-forum-podcast",
"rss": "https://feeds.megaphone.fm/KQINC9557381633"
}
},
"freakonomics-radio": {
"id": "freakonomics-radio",
"title": "Freakonomics Radio",
"info": "Freakonomics Radio is a one-hour award-winning podcast and public-radio project hosted by Stephen Dubner, with co-author Steve Levitt as a regular guest. It is produced in partnership with WNYC.",
"imageSrc": "https://ww2.kqed.org/news/wp-content/uploads/sites/10/2018/05/freakonomicsRadio.png",
"officialWebsiteLink": "http://freakonomics.com/",
"airtime": "SUN 1am-2am, SAT 3pm-4pm",
"meta": {
"site": "radio",
"source": "WNYC"
},
"link": "/radio/program/freakonomics-radio",
"subscribe": {
"npr": "https://rpb3r.app.goo.gl/4s8b",
"apple": "https://itunes.apple.com/us/podcast/freakonomics-radio/id354668519",
"tuneIn": "https://tunein.com/podcasts/WNYC-Podcasts/Freakonomics-Radio-p272293/",
"rss": "https://feeds.feedburner.com/freakonomicsradio"
}
},
"fresh-air": {
"id": "fresh-air",
"title": "Fresh Air",
"info": "Hosted by Terry Gross, \u003cem>Fresh Air from WHYY\u003c/em> is the Peabody Award-winning weekday magazine of contemporary arts and issues. One of public radio's most popular programs, Fresh Air features intimate conversations with today's biggest luminaries.",
"airtime": "MON-FRI 7pm-8pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Fresh-Air-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.npr.org/programs/fresh-air/",
"meta": {
"site": "radio",
"source": "npr"
},
"link": "/radio/program/fresh-air",
"subscribe": {
"npr": "https://rpb3r.app.goo.gl/4s8b",
"apple": "https://itunes.apple.com/WebObjects/MZStore.woa/wa/viewPodcast?s=143441&mt=2&id=214089682&at=11l79Y&ct=nprdirectory",
"tuneIn": "https://tunein.com/radio/Fresh-Air-p17/",
"rss": "https://feeds.npr.org/381444908/podcast.xml"
}
},
"here-and-now": {
"id": "here-and-now",
"title": "Here & Now",
"info": "A live production of NPR and WBUR Boston, in collaboration with stations across the country, Here & Now reflects the fluid world of news as it's happening in the middle of the day, with timely, in-depth news, interviews and conversation. Hosted by Robin Young, Jeremy Hobson and Tonya Mosley.",
"airtime": "MON-THU 11am-12pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Here-And-Now-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "http://www.wbur.org/hereandnow",
"meta": {
"site": "news",
"source": "npr"
},
"link": "/radio/program/here-and-now",
"subscribe": {
"apple": "https://itunes.apple.com/WebObjects/MZStore.woa/wa/viewPodcast?mt=2&id=426698661",
"tuneIn": "https://tunein.com/radio/Here--Now-p211/",
"rss": "https://feeds.npr.org/510051/podcast.xml"
}
},
"hidden-brain": {
"id": "hidden-brain",
"title": "Hidden Brain",
"info": "Shankar Vedantam uses science and storytelling to reveal the unconscious patterns that drive human behavior, shape our choices and direct our relationships.",
"imageSrc": "https://ww2.kqed.org/radio/wp-content/uploads/sites/50/2018/05/hiddenbrain.jpg",
"officialWebsiteLink": "https://www.npr.org/series/423302056/hidden-brain",
"airtime": "SUN 7pm-8pm",
"meta": {
"site": "news",
"source": "NPR"
},
"link": "/radio/program/hidden-brain",
"subscribe": {
"apple": "https://itunes.apple.com/us/podcast/hidden-brain/id1028908750?mt=2",
"tuneIn": "https://tunein.com/podcasts/Science-Podcasts/Hidden-Brain-p787503/",
"rss": "https://feeds.npr.org/510308/podcast.xml"
}
},
"how-i-built-this": {
"id": "how-i-built-this",
"title": "How I Built This with Guy Raz",
"info": "Guy Raz dives into the stories behind some of the world's best known companies. How I Built This weaves a narrative journey about innovators, entrepreneurs and idealists—and the movements they built.",
"imageSrc": "https://ww2.kqed.org/news/wp-content/uploads/sites/10/2018/05/howIBuiltThis.png",
"officialWebsiteLink": "https://www.npr.org/podcasts/510313/how-i-built-this",
"airtime": "SUN 7:30pm-8pm",
"meta": {
"site": "news",
"source": "npr"
},
"link": "/radio/program/how-i-built-this",
"subscribe": {
"npr": "https://rpb3r.app.goo.gl/3zxy",
"apple": "https://itunes.apple.com/us/podcast/how-i-built-this-with-guy-raz/id1150510297?mt=2",
"tuneIn": "https://tunein.com/podcasts/Arts--Culture-Podcasts/How-I-Built-This-p910896/",
"rss": "https://feeds.npr.org/510313/podcast.xml"
}
},
"hyphenacion": {
"id": "hyphenacion",
"title": "Hyphenación",
"tagline": "Where conversation and cultura meet",
"info": "What kind of no sabo word is Hyphenación? For us, it’s about living within a hyphenation. Like being a third-gen Mexican-American from the Texas border now living that Bay Area Chicano life. Like Xorje! Each week we bring together a couple of hyphenated Latinos to talk all about personal life choices: family, careers, relationships, belonging … everything is on the table. ",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2025/03/Hyphenacion_FinalAssets_PodcastTile.png",
"imageAlt": "KQED Hyphenación",
"officialWebsiteLink": "/podcasts/hyphenacion",
"meta": {
"site": "news",
"source": "kqed",
"order": 15
},
"link": "/podcasts/hyphenacion",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/hyphenaci%C3%B3n/id1191591838",
"spotify": "https://open.spotify.com/show/2p3Fifq96nw9BPcmFdIq0o?si=39209f7b25774f38",
"youtube": "https://www.youtube.com/c/kqedarts",
"amazon": "https://music.amazon.com/podcasts/6c3dd23c-93fb-4aab-97ba-1725fa6315f1/hyphenaci%C3%B3n",
"rss": "https://feeds.megaphone.fm/KQINC2275451163"
}
},
"jerrybrown": {
"id": "jerrybrown",
"title": "The Political Mind of Jerry Brown",
"tagline": "Lessons from a lifetime in politics",
"info": "The Political Mind of Jerry Brown brings listeners the wisdom of the former Governor, Mayor, and presidential candidate. Scott Shafer interviewed Brown for more than 40 hours, covering the former governor's life and half-century in the political game and Brown has some lessons he'd like to share. ",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/The-Political-Mind-of-Jerry-Brown-Podcast-Tile-703x703-1.jpg",
"imageAlt": "KQED The Political Mind of Jerry Brown",
"officialWebsiteLink": "/podcasts/jerrybrown",
"meta": {
"site": "news",
"source": "kqed",
"order": 18
},
"link": "/podcasts/jerrybrown",
"subscribe": {
"npr": "https://www.npr.org/podcasts/790253322/the-political-mind-of-jerry-brown",
"apple": "https://itunes.apple.com/us/podcast/id1492194549",
"rss": "https://ww2.kqed.org/news/series/jerrybrown/feed/podcast/",
"tuneIn": "http://tun.in/pjGcK",
"stitcher": "https://www.stitcher.com/podcast/kqed/the-political-mind-of-jerry-brown",
"spotify": "https://open.spotify.com/show/54C1dmuyFyKMFttY6X2j6r?si=K8SgRCoISNK6ZbjpXrX5-w",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly93dzIua3FlZC5vcmcvbmV3cy9zZXJpZXMvamVycnlicm93bi9mZWVkL3BvZGNhc3Qv"
}
},
"latino-usa": {
"id": "latino-usa",
"title": "Latino USA",
"airtime": "MON 1am-2am, SUN 6pm-7pm",
"info": "Latino USA, the radio journal of news and culture, is the only national, English-language radio program produced from a Latino perspective.",
"imageSrc": "https://ww2.kqed.org/radio/wp-content/uploads/sites/50/2018/04/latinoUsa.jpg",
"officialWebsiteLink": "http://latinousa.org/",
"meta": {
"site": "news",
"source": "npr"
},
"link": "/radio/program/latino-usa",
"subscribe": {
"npr": "https://rpb3r.app.goo.gl/xtTd",
"apple": "https://itunes.apple.com/WebObjects/MZStore.woa/wa/viewPodcast?s=143441&mt=2&id=79681317&at=11l79Y&ct=nprdirectory",
"tuneIn": "https://tunein.com/radio/Latino-USA-p621/",
"rss": "https://feeds.npr.org/510016/podcast.xml"
}
},
"marketplace": {
"id": "marketplace",
"title": "Marketplace",
"info": "Our flagship program, helmed by Kai Ryssdal, examines what the day in money delivered, through stories, conversations, newsworthy numbers and more. Updated Monday through Friday at about 3:30 p.m. PT.",
"airtime": "MON-FRI 4pm-4:30pm, MON-WED 6:30pm-7pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Marketplace-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.marketplace.org/",
"meta": {
"site": "news",
"source": "American Public Media"
},
"link": "/radio/program/marketplace",
"subscribe": {
"apple": "https://itunes.apple.com/WebObjects/MZStore.woa/wa/viewPodcast?s=143441&mt=2&id=201853034&at=11l79Y&ct=nprdirectory",
"tuneIn": "https://tunein.com/radio/APM-Marketplace-p88/",
"rss": "https://feeds.publicradio.org/public_feeds/marketplace-pm/rss/rss"
}
},
"masters-of-scale": {
"id": "masters-of-scale",
"title": "Masters of Scale",
"info": "Masters of Scale is an original podcast in which LinkedIn co-founder and Greylock Partner Reid Hoffman sets out to describe and prove theories that explain how great entrepreneurs take their companies from zero to a gazillion in ingenious fashion.",
"airtime": "Every other Wednesday June 12 through October 16 at 8pm (repeats Thursdays at 2am)",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Masters-of-Scale-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://mastersofscale.com/",
"meta": {
"site": "radio",
"source": "WaitWhat"
},
"link": "/radio/program/masters-of-scale",
"subscribe": {
"apple": "http://mastersofscale.app.link/",
"rss": "https://rss.art19.com/masters-of-scale"
}
},
"mindshift": {
"id": "mindshift",
"title": "MindShift",
"tagline": "A podcast about the future of learning and how we raise our kids",
"info": "The MindShift podcast explores the innovations in education that are shaping how kids learn. Hosts Ki Sung and Katrina Schwartz introduce listeners to educators, researchers, parents and students who are developing effective ways to improve how kids learn. We cover topics like how fed-up administrators are developing surprising tactics to deal with classroom disruptions; how listening to podcasts are helping kids develop reading skills; the consequences of overparenting; and why interdisciplinary learning can engage students on all ends of the traditional achievement spectrum. This podcast is part of the MindShift education site, a division of KQED News. KQED is an NPR/PBS member station based in San Francisco. You can also visit the MindShift website for episodes and supplemental blog posts or tweet us \u003ca href=\"https://twitter.com/MindShiftKQED\">@MindShiftKQED\u003c/a> or visit us at \u003ca href=\"/mindshift\">MindShift.KQED.org\u003c/a>",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Mindshift-Podcast-Tile-703x703-1.jpg",
"imageAlt": "KQED MindShift: How We Will Learn",
"officialWebsiteLink": "/mindshift/",
"meta": {
"site": "news",
"source": "kqed",
"order": 12
},
"link": "/podcasts/mindshift",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/mindshift-podcast/id1078765985",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly9mZWVkcy5tZWdhcGhvbmUuZm0vS1FJTkM1NzY0NjAwNDI5",
"npr": "https://www.npr.org/podcasts/464615685/mind-shift-podcast",
"stitcher": "https://www.stitcher.com/podcast/kqed/stories-teachers-share",
"spotify": "https://open.spotify.com/show/0MxSpNYZKNprFLCl7eEtyx"
}
},
"morning-edition": {
"id": "morning-edition",
"title": "Morning Edition",
"info": "\u003cem>Morning Edition\u003c/em> takes listeners around the country and the world with multi-faceted stories and commentaries every weekday. Hosts Steve Inskeep, David Greene and Rachel Martin bring you the latest breaking news and features to prepare you for the day.",
"airtime": "MON-FRI 3am-9am",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Morning-Edition-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.npr.org/programs/morning-edition/",
"meta": {
"site": "news",
"source": "npr"
},
"link": "/radio/program/morning-edition"
},
"onourwatch": {
"id": "onourwatch",
"title": "On Our Watch",
"tagline": "Deeply-reported investigative journalism",
"info": "For decades, the process for how police police themselves has been inconsistent – if not opaque. In some states, like California, these proceedings were completely hidden. After a new police transparency law unsealed scores of internal affairs files, our reporters set out to examine these cases and the shadow world of police discipline. On Our Watch brings listeners into the rooms where officers are questioned and witnesses are interrogated to find out who this system is really protecting. Is it the officers, or the public they've sworn to serve?",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/On-Our-Watch-Podcast-Tile-703x703-1.jpg",
"imageAlt": "On Our Watch from NPR and KQED",
"officialWebsiteLink": "/podcasts/onourwatch",
"meta": {
"site": "news",
"source": "kqed",
"order": 11
},
"link": "/podcasts/onourwatch",
"subscribe": {
"apple": "https://podcasts.apple.com/podcast/id1567098962",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly9mZWVkcy5ucHIub3JnLzUxMDM2MC9wb2RjYXN0LnhtbD9zYz1nb29nbGVwb2RjYXN0cw",
"npr": "https://rpb3r.app.goo.gl/onourwatch",
"spotify": "https://open.spotify.com/show/0OLWoyizopu6tY1XiuX70x",
"tuneIn": "https://tunein.com/radio/On-Our-Watch-p1436229/",
"stitcher": "https://www.stitcher.com/show/on-our-watch",
"rss": "https://feeds.npr.org/510360/podcast.xml"
}
},
"on-the-media": {
"id": "on-the-media",
"title": "On The Media",
"info": "Our weekly podcast explores how the media 'sausage' is made, casts an incisive eye on fluctuations in the marketplace of ideas, and examines threats to the freedom of information and expression in America and abroad. For one hour a week, the show tries to lift the veil from the process of \"making media,\" especially news media, because it's through that lens that we see the world and the world sees us",
"airtime": "SUN 2pm-3pm, MON 12am-1am",
"imageSrc": "https://ww2.kqed.org/radio/wp-content/uploads/sites/50/2018/04/onTheMedia.png",
"officialWebsiteLink": "https://www.wnycstudios.org/shows/otm",
"meta": {
"site": "news",
"source": "wnyc"
},
"link": "/radio/program/on-the-media",
"subscribe": {
"apple": "https://itunes.apple.com/us/podcast/on-the-media/id73330715?mt=2",
"tuneIn": "https://tunein.com/radio/On-the-Media-p69/",
"rss": "http://feeds.wnyc.org/onthemedia"
}
},
"pbs-newshour": {
"id": "pbs-newshour",
"title": "PBS NewsHour",
"info": "Analysis, background reports and updates from the PBS NewsHour putting today's news in context.",
"airtime": "MON-FRI 3pm-4pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/PBS-News-Hour-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.pbs.org/newshour/",
"meta": {
"site": "news",
"source": "pbs"
},
"link": "/radio/program/pbs-newshour",
"subscribe": {
"apple": "https://itunes.apple.com/us/podcast/pbs-newshour-full-show/id394432287?mt=2",
"tuneIn": "https://tunein.com/radio/PBS-NewsHour---Full-Show-p425698/",
"rss": "https://www.pbs.org/newshour/feeds/rss/podcasts/show"
}
},
"perspectives": {
"id": "perspectives",
"title": "Perspectives",
"tagline": "KQED's series of daily listener commentaries since 1991",
"info": "KQED's series of daily listener commentaries since 1991.",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2025/01/Perspectives_Tile_Final.jpg",
"imageAlt": "KQED Perspectives",
"officialWebsiteLink": "/perspectives/",
"meta": {
"site": "radio",
"source": "kqed",
"order": 14
},
"link": "/perspectives",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/id73801135",
"npr": "https://www.npr.org/podcasts/432309616/perspectives",
"rss": "https://ww2.kqed.org/perspectives/category/perspectives/feed/",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly93dzIua3FlZC5vcmcvcGVyc3BlY3RpdmVzL2NhdGVnb3J5L3BlcnNwZWN0aXZlcy9mZWVkLw"
}
},
"planet-money": {
"id": "planet-money",
"title": "Planet Money",
"info": "The economy explained. Imagine you could call up a friend and say, Meet me at the bar and tell me what's going on with the economy. Now imagine that's actually a fun evening.",
"airtime": "SUN 3pm-4pm",
"imageSrc": "https://ww2.kqed.org/radio/wp-content/uploads/sites/50/2018/04/planetmoney.jpg",
"officialWebsiteLink": "https://www.npr.org/sections/money/",
"meta": {
"site": "news",
"source": "npr"
},
"link": "/radio/program/planet-money",
"subscribe": {
"npr": "https://rpb3r.app.goo.gl/M4f5",
"apple": "https://itunes.apple.com/us/podcast/planet-money/id290783428?mt=2",
"tuneIn": "https://tunein.com/podcasts/Business--Economics-Podcasts/Planet-Money-p164680/",
"rss": "https://feeds.npr.org/510289/podcast.xml"
}
},
"politicalbreakdown": {
"id": "politicalbreakdown",
"title": "Political Breakdown",
"tagline": "Politics from a personal perspective",
"info": "Political Breakdown is a new series that explores the political intersection of California and the nation. Each week hosts Scott Shafer and Marisa Lagos are joined with a new special guest to unpack politics -- with personality — and offer an insider’s glimpse at how politics happens.",
"airtime": "THU 6:30pm-7pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Political-Breakdown-2024-Podcast-Tile-703x703-1.jpg",
"imageAlt": "KQED Political Breakdown",
"officialWebsiteLink": "/podcasts/politicalbreakdown",
"meta": {
"site": "radio",
"source": "kqed",
"order": 5
},
"link": "/podcasts/politicalbreakdown",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/political-breakdown/id1327641087",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly9mZWVkcy5tZWdhcGhvbmUuZm0vS1FJTkM5Nzk2MzI2MTEx",
"npr": "https://www.npr.org/podcasts/572155894/political-breakdown",
"stitcher": "https://www.stitcher.com/podcast/kqed/political-breakdown",
"spotify": "https://open.spotify.com/show/07RVyIjIdk2WDuVehvBMoN",
"rss": "https://ww2.kqed.org/news/tag/political-breakdown/feed/podcast"
}
},
"possible": {
"id": "possible",
"title": "Possible",
"info": "Possible is hosted by entrepreneur Reid Hoffman and writer Aria Finger. Together in Possible, Hoffman and Finger lead enlightening discussions about building a brighter collective future. The show features interviews with visionary guests like Trevor Noah, Sam Altman and Janette Sadik-Khan. Possible paints an optimistic portrait of the world we can create through science, policy, business, art and our shared humanity. It asks: What if everything goes right for once? How can we get there? Each episode also includes a short fiction story generated by advanced AI GPT-4, serving as a thought-provoking springboard to speculate how humanity could leverage technology for good.",
"airtime": "SUN 2pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Possible-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.possible.fm/",
"meta": {
"site": "news",
"source": "Possible"
},
"link": "/radio/program/possible",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/possible/id1677184070",
"spotify": "https://open.spotify.com/show/730YpdUSNlMyPQwNnyjp4k"
}
},
"pri-the-world": {
"id": "pri-the-world",
"title": "PRI's The World: Latest Edition",
"info": "Each weekday, host Marco Werman and his team of producers bring you the world's most interesting stories in an hour of radio that reminds us just how small our planet really is.",
"airtime": "MON-FRI 2pm-3pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/The-World-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.pri.org/programs/the-world",
"meta": {
"site": "news",
"source": "PRI"
},
"link": "/radio/program/pri-the-world",
"subscribe": {
"apple": "https://itunes.apple.com/us/podcast/pris-the-world-latest-edition/id278196007?mt=2",
"tuneIn": "https://tunein.com/podcasts/News--Politics-Podcasts/PRIs-The-World-p24/",
"rss": "http://feeds.feedburner.com/pri/theworld"
}
},
"radiolab": {
"id": "radiolab",
"title": "Radiolab",
"info": "A two-time Peabody Award-winner, Radiolab is an investigation told through sounds and stories, and centered around one big idea. In the Radiolab world, information sounds like music and science and culture collide. Hosted by Jad Abumrad and Robert Krulwich, the show is designed for listeners who demand skepticism, but appreciate wonder. WNYC Studios is the producer of other leading podcasts including Freakonomics Radio, Death, Sex & Money, On the Media and many more.",
"airtime": "SUN 12am-1am, SAT 2pm-3pm",
"imageSrc": "https://ww2.kqed.org/radio/wp-content/uploads/sites/50/2018/04/radiolab1400.png",
"officialWebsiteLink": "https://www.wnycstudios.org/shows/radiolab/",
"meta": {
"site": "science",
"source": "WNYC"
},
"link": "/radio/program/radiolab",
"subscribe": {
"apple": "https://itunes.apple.com/us/podcast/radiolab/id152249110?mt=2",
"tuneIn": "https://tunein.com/radio/RadioLab-p68032/",
"rss": "https://feeds.wnyc.org/radiolab"
}
},
"reveal": {
"id": "reveal",
"title": "Reveal",
"info": "Created by The Center for Investigative Reporting and PRX, Reveal is public radios first one-hour weekly radio show and podcast dedicated to investigative reporting. Credible, fact based and without a partisan agenda, Reveal combines the power and artistry of driveway moment storytelling with data-rich reporting on critically important issues. The result is stories that inform and inspire, arming our listeners with information to right injustices, hold the powerful accountable and improve lives.Reveal is hosted by Al Letson and showcases the award-winning work of CIR and newsrooms large and small across the nation. In a radio and podcast market crowded with choices, Reveal focuses on important and often surprising stories that illuminate the world for our listeners.",
"airtime": "SAT 4pm-5pm",
"imageSrc": "https://ww2.kqed.org/radio/wp-content/uploads/sites/50/2018/04/reveal300px.png",
"officialWebsiteLink": "https://www.revealnews.org/episodes/",
"meta": {
"site": "news",
"source": "npr"
},
"link": "/radio/program/reveal",
"subscribe": {
"apple": "https://itunes.apple.com/us/podcast/reveal/id886009669",
"tuneIn": "https://tunein.com/radio/Reveal-p679597/",
"rss": "http://feeds.revealradio.org/revealpodcast"
}
},
"rightnowish": {
"id": "rightnowish",
"title": "Rightnowish",
"tagline": "Art is where you find it",
"info": "Rightnowish digs into life in the Bay Area right now… ish. Journalist Pendarvis Harshaw takes us to galleries painted on the sides of liquor stores in West Oakland. We'll dance in warehouses in the Bayview, make smoothies with kids in South Berkeley, and listen to classical music in a 1984 Cutlass Supreme in Richmond. Every week, Pen talks to movers and shakers about how the Bay Area shapes what they create, and how they shape the place we call home.",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Rightnowish-Podcast-Tile-500x500-1.jpg",
"imageAlt": "KQED Rightnowish with Pendarvis Harshaw",
"officialWebsiteLink": "/podcasts/rightnowish",
"meta": {
"site": "arts",
"source": "kqed",
"order": 16
},
"link": "/podcasts/rightnowish",
"subscribe": {
"npr": "https://www.npr.org/podcasts/721590300/rightnowish",
"rss": "https://ww2.kqed.org/arts/programs/rightnowish/feed/podcast",
"apple": "https://podcasts.apple.com/us/podcast/rightnowish/id1482187648",
"stitcher": "https://www.stitcher.com/podcast/kqed/rightnowish",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly9mZWVkcy5tZWdhcGhvbmUuZm0vS1FJTkMxMjU5MTY3NDc4",
"spotify": "https://open.spotify.com/show/7kEJuafTzTVan7B78ttz1I"
}
},
"science-friday": {
"id": "science-friday",
"title": "Science Friday",
"info": "Science Friday is a weekly science talk show, broadcast live over public radio stations nationwide. Each week, the show focuses on science topics that are in the news and tries to bring an educated, balanced discussion to bear on the scientific issues at hand. Panels of expert guests join host Ira Flatow, a veteran science journalist, to discuss science and to take questions from listeners during the call-in portion of the program.",
"airtime": "FRI 11am-1pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Science-Friday-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.wnycstudios.org/shows/science-friday",
"meta": {
"site": "news",
"source": "npr"
},
"link": "/radio/program/science-friday",
"subscribe": {
"apple": "https://itunes.apple.com/WebObjects/MZStore.woa/wa/viewPodcast?s=143441&mt=2&id=73329284&at=11l79Y&ct=nprdirectory",
"tuneIn": "https://tunein.com/radio/Science-Friday-p394/",
"rss": "http://feeds.wnyc.org/science-friday"
}
},
"snap-judgment": {
"id": "snap-judgment",
"title": "Snap Judgment",
"tagline": "Real stories with killer beats",
"info": "The Snap Judgment radio show and podcast mixes real stories with killer beats to produce cinematic, dramatic radio. Snap's musical brand of storytelling dares listeners to see the world through the eyes of another. This is storytelling... with a BEAT!! Snap first aired on public radio stations nationwide in July 2010. Today, Snap Judgment airs on over 450 public radio stations and is brought to the airwaves by KQED & PRX.",
"airtime": "SAT 1pm-2pm, 9pm-10pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/05/Snap-Judgment-Podcast-Tile-703x703-1.jpg",
"imageAlt": "KQED Snap Judgment",
"officialWebsiteLink": "https://snapjudgment.org",
"meta": {
"site": "arts",
"source": "kqed",
"order": 4
},
"link": "https://snapjudgment.org",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/snap-judgment/id283657561",
"npr": "https://www.npr.org/podcasts/449018144/snap-judgment",
"stitcher": "https://www.pandora.com/podcast/snap-judgment/PC:241?source=stitcher-sunset",
"spotify": "https://open.spotify.com/show/3Cct7ZWmxHNAtLgBTqjC5v",
"rss": "https://snap.feed.snapjudgment.org/"
}
},
"soldout": {
"id": "soldout",
"title": "SOLD OUT: Rethinking Housing in America",
"tagline": "A new future for housing",
"info": "Sold Out: Rethinking Housing in America",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Sold-Out-Podcast-Tile-703x703-1.jpg",
"imageAlt": "KQED Sold Out: Rethinking Housing in America",
"officialWebsiteLink": "/podcasts/soldout",
"meta": {
"site": "news",
"source": "kqed",
"order": 13
},
"link": "/podcasts/soldout",
"subscribe": {
"npr": "https://www.npr.org/podcasts/911586047/s-o-l-d-o-u-t-a-new-future-for-housing",
"apple": "https://podcasts.apple.com/us/podcast/introducing-sold-out-rethinking-housing-in-america/id1531354937",
"rss": "https://feeds.megaphone.fm/soldout",
"spotify": "https://open.spotify.com/show/38dTBSk2ISFoPiyYNoKn1X",
"stitcher": "https://www.stitcher.com/podcast/kqed/sold-out-rethinking-housing-in-america",
"tunein": "https://tunein.com/radio/SOLD-OUT-Rethinking-Housing-in-America-p1365871/",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly9mZWVkcy5tZWdhcGhvbmUuZm0vc29sZG91dA"
}
},
"spooked": {
"id": "spooked",
"title": "Spooked",
"tagline": "True-life supernatural stories",
"info": "",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/10/Spooked-Podcast-Tile-703x703-1.jpg",
"imageAlt": "KQED Spooked",
"officialWebsiteLink": "https://spookedpodcast.org/",
"meta": {
"site": "news",
"source": "kqed",
"order": 7
},
"link": "https://spookedpodcast.org/",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/spooked/id1279361017",
"npr": "https://www.npr.org/podcasts/549547848/snap-judgment-presents-spooked",
"spotify": "https://open.spotify.com/show/76571Rfl3m7PLJQZKQIGCT",
"rss": "https://feeds.simplecast.com/TBotaapn"
}
},
"tech-nation": {
"id": "tech-nation",
"title": "Tech Nation Radio Podcast",
"info": "Tech Nation is a weekly public radio program, hosted by Dr. Moira Gunn. Founded in 1993, it has grown from a simple interview show to a multi-faceted production, featuring conversations with noted technology and science leaders, and a weekly science and technology-related commentary.",
"airtime": "FRI 10pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Tech-Nation-Radio-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "http://technation.podomatic.com/",
"meta": {
"site": "science",
"source": "Tech Nation Media"
},
"link": "/radio/program/tech-nation",
"subscribe": {
"rss": "https://technation.podomatic.com/rss2.xml"
}
},
"ted-radio-hour": {
"id": "ted-radio-hour",
"title": "TED Radio Hour",
"info": "The TED Radio Hour is a journey through fascinating ideas, astonishing inventions, fresh approaches to old problems, and new ways to think and create.",
"airtime": "SUN 3pm-4pm, SAT 10pm-11pm",
"imageSrc": "https://ww2.kqed.org/radio/wp-content/uploads/sites/50/2018/04/tedRadioHour.jpg",
"officialWebsiteLink": "https://www.npr.org/programs/ted-radio-hour/?showDate=2018-06-22",
"meta": {
"site": "news",
"source": "npr"
},
"link": "/radio/program/ted-radio-hour",
"subscribe": {
"npr": "https://rpb3r.app.goo.gl/8vsS",
"apple": "https://itunes.apple.com/WebObjects/MZStore.woa/wa/viewPodcast?s=143441&mt=2&id=523121474&at=11l79Y&ct=nprdirectory",
"tuneIn": "https://tunein.com/radio/TED-Radio-Hour-p418021/",
"rss": "https://feeds.npr.org/510298/podcast.xml"
}
},
"thebay": {
"id": "thebay",
"title": "The Bay",
"tagline": "Local news to keep you rooted",
"info": "Host Devin Katayama walks you through the biggest story of the day with reporters and newsmakers.",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/The-Bay-Podcast-Tile-703x703-1.jpg",
"imageAlt": "KQED The Bay",
"officialWebsiteLink": "/podcasts/thebay",
"meta": {
"site": "radio",
"source": "kqed",
"order": 2
},
"link": "/podcasts/thebay",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/the-bay/id1350043452",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly9mZWVkcy5tZWdhcGhvbmUuZm0vS1FJTkM4MjU5Nzg2MzI3",
"npr": "https://www.npr.org/podcasts/586725995/the-bay",
"stitcher": "https://www.stitcher.com/podcast/kqed/the-bay",
"spotify": "https://open.spotify.com/show/4BIKBKIujizLHlIlBNaAqQ",
"rss": "https://feeds.megaphone.fm/KQINC8259786327"
}
},
"thelatest": {
"id": "thelatest",
"title": "The Latest",
"tagline": "Trusted local news in real time",
"info": "",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2025/05/The-Latest-2025-Podcast-Tile-703x703-1.jpg",
"imageAlt": "KQED The Latest",
"officialWebsiteLink": "/thelatest",
"meta": {
"site": "news",
"source": "kqed",
"order": 6
},
"link": "/thelatest",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/the-latest-from-kqed/id1197721799",
"npr": "https://www.npr.org/podcasts/1257949365/the-latest-from-k-q-e-d",
"spotify": "https://open.spotify.com/show/5KIIXMgM9GTi5AepwOYvIZ?si=bd3053fec7244dba",
"rss": "https://feeds.megaphone.fm/KQINC9137121918"
}
},
"theleap": {
"id": "theleap",
"title": "The Leap",
"tagline": "What if you closed your eyes, and jumped?",
"info": "Stories about people making dramatic, risky changes, told by award-winning public radio reporter Judy Campbell.",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/The-Leap-Podcast-Tile-703x703-1.jpg",
"imageAlt": "KQED The Leap",
"officialWebsiteLink": "/podcasts/theleap",
"meta": {
"site": "news",
"source": "kqed",
"order": 17
},
"link": "/podcasts/theleap",
"subscribe": {
"apple": "https://podcasts.apple.com/us/podcast/the-leap/id1046668171",
"google": "https://podcasts.google.com/feed/aHR0cHM6Ly9mZWVkcy5tZWdhcGhvbmUuZm0vS1FJTkM0NTcwODQ2MjY2",
"npr": "https://www.npr.org/podcasts/447248267/the-leap",
"stitcher": "https://www.stitcher.com/podcast/kqed/the-leap",
"spotify": "https://open.spotify.com/show/3sSlVHHzU0ytLwuGs1SD1U",
"rss": "https://ww2.kqed.org/news/programs/the-leap/feed/podcast"
}
},
"the-moth-radio-hour": {
"id": "the-moth-radio-hour",
"title": "The Moth Radio Hour",
"info": "Since its launch in 1997, The Moth has presented thousands of true stories, told live and without notes, to standing-room-only crowds worldwide. Moth storytellers stand alone, under a spotlight, with only a microphone and a roomful of strangers. The storyteller and the audience embark on a high-wire act of shared experience which is both terrifying and exhilarating. Since 2008, The Moth podcast has featured many of our favorite stories told live on Moth stages around the country. For information on all of our programs and live events, visit themoth.org.",
"airtime": "SAT 8pm-9pm and SUN 11am-12pm",
"imageSrc": "https://ww2.kqed.org/radio/wp-content/uploads/sites/50/2018/04/theMoth.jpg",
"officialWebsiteLink": "https://themoth.org/",
"meta": {
"site": "arts",
"source": "prx"
},
"link": "/radio/program/the-moth-radio-hour",
"subscribe": {
"apple": "https://itunes.apple.com/us/podcast/the-moth-podcast/id275699983?mt=2",
"tuneIn": "https://tunein.com/radio/The-Moth-p273888/",
"rss": "http://feeds.themoth.org/themothpodcast"
}
},
"the-new-yorker-radio-hour": {
"id": "the-new-yorker-radio-hour",
"title": "The New Yorker Radio Hour",
"info": "The New Yorker Radio Hour is a weekly program presented by the magazine's editor, David Remnick, and produced by WNYC Studios and The New Yorker. Each episode features a diverse mix of interviews, profiles, storytelling, and an occasional burst of humor inspired by the magazine, and shaped by its writers, artists, and editors. This isn't a radio version of a magazine, but something all its own, reflecting the rich possibilities of audio storytelling and conversation. Theme music for the show was composed and performed by Merrill Garbus of tUnE-YArDs.",
"airtime": "SAT 10am-11am",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/The-New-Yorker-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.wnycstudios.org/shows/tnyradiohour",
"meta": {
"site": "arts",
"source": "WNYC"
},
"link": "/radio/program/the-new-yorker-radio-hour",
"subscribe": {
"apple": "https://itunes.apple.com/us/podcast/id1050430296",
"tuneIn": "https://tunein.com/podcasts/WNYC-Podcasts/New-Yorker-Radio-Hour-p803804/",
"rss": "https://feeds.feedburner.com/newyorkerradiohour"
}
},
"the-sam-sanders-show": {
"id": "the-sam-sanders-show",
"title": "The Sam Sanders Show",
"info": "One of public radio's most dynamic voices, Sam Sanders helped launch The NPR Politics Podcast and hosted NPR's hit show It's Been A Minute. Now, the award-winning host returns with something brand new, The Sam Sanders Show. Every week, Sam Sanders and friends dig into the culture that shapes our lives: what's driving the biggest trends, how artists really think, and even the memes you can't stop scrolling past. Sam is beloved for his way of unpacking the world and bringing you up close to fresh currents and engaging conversations. The Sam Sanders Show is smart, funny and always a good time.",
"airtime": "FRI 12-1pm AND SAT 11am-12pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2025/11/The-Sam-Sanders-Show-Podcast-Tile-400x400-1.jpg",
"officialWebsiteLink": "https://www.kcrw.com/shows/the-sam-sanders-show/latest",
"meta": {
"site": "arts",
"source": "KCRW"
},
"link": "https://www.kcrw.com/shows/the-sam-sanders-show/latest",
"subscribe": {
"rss": "https://feed.cdnstream1.com/zjb/feed/download/ac/28/59/ac28594c-e1d0-4231-8728-61865cdc80e8.xml"
}
},
"the-splendid-table": {
"id": "the-splendid-table",
"title": "The Splendid Table",
"info": "\u003cem>The Splendid Table\u003c/em> hosts our nation's conversations about cooking, sustainability and food culture.",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/The-Splendid-Table-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.splendidtable.org/",
"airtime": "SUN 10-11 pm",
"meta": {
"site": "radio",
"source": "npr"
},
"link": "/radio/program/the-splendid-table"
},
"this-american-life": {
"id": "this-american-life",
"title": "This American Life",
"info": "This American Life is a weekly public radio show, heard by 2.2 million people on more than 500 stations. Another 2.5 million people download the weekly podcast. It is hosted by Ira Glass, produced in collaboration with Chicago Public Media, delivered to stations by PRX The Public Radio Exchange, and has won all of the major broadcasting awards.",
"airtime": "SAT 12pm-1pm, 7pm-8pm",
"imageSrc": "https://ww2.kqed.org/radio/wp-content/uploads/sites/50/2018/04/thisAmericanLife.png",
"officialWebsiteLink": "https://www.thisamericanlife.org/",
"meta": {
"site": "news",
"source": "wbez"
},
"link": "/radio/program/this-american-life",
"subscribe": {
"apple": "https://itunes.apple.com/WebObjects/MZStore.woa/wa/viewPodcast?s=143441&mt=2&id=201671138&at=11l79Y&ct=nprdirectory",
"rss": "https://www.thisamericanlife.org/podcast/rss.xml"
}
},
"tinydeskradio": {
"id": "tinydeskradio",
"title": "Tiny Desk Radio",
"info": "We're bringing the best of Tiny Desk to the airwaves, only on public radio.",
"airtime": "SUN 8pm and SAT 9pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2025/04/300x300-For-Member-Station-Logo-Tiny-Desk-Radio-@2x.png",
"officialWebsiteLink": "https://www.npr.org/series/g-s1-52030/tiny-desk-radio",
"meta": {
"site": "news",
"source": "npr"
},
"link": "/radio/program/tinydeskradio",
"subscribe": {
"rss": "https://feeds.npr.org/g-s1-52030/rss.xml"
}
},
"wait-wait-dont-tell-me": {
"id": "wait-wait-dont-tell-me",
"title": "Wait Wait... Don't Tell Me!",
"info": "Peter Sagal and Bill Kurtis host the weekly NPR News quiz show alongside some of the best and brightest news and entertainment personalities.",
"airtime": "SUN 10am-11am, SAT 11am-12pm, SAT 6pm-7pm",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Wait-Wait-Podcast-Tile-300x300-1.jpg",
"officialWebsiteLink": "https://www.npr.org/programs/wait-wait-dont-tell-me/",
"meta": {
"site": "news",
"source": "npr"
},
"link": "/radio/program/wait-wait-dont-tell-me",
"subscribe": {
"npr": "https://rpb3r.app.goo.gl/Xogv",
"apple": "https://itunes.apple.com/WebObjects/MZStore.woa/wa/viewPodcast?s=143441&mt=2&id=121493804&at=11l79Y&ct=nprdirectory",
"tuneIn": "https://tunein.com/radio/Wait-Wait-Dont-Tell-Me-p46/",
"rss": "https://feeds.npr.org/344098539/podcast.xml"
}
},
"weekend-edition-saturday": {
"id": "weekend-edition-saturday",
"title": "Weekend Edition Saturday",
"info": "Weekend Edition Saturday wraps up the week's news and offers a mix of analysis and features on a wide range of topics, including arts, sports, entertainment, and human interest stories. The two-hour program is hosted by NPR's Peabody Award-winning Scott Simon.",
"airtime": "SAT 5am-10am",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Weekend-Edition-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.npr.org/programs/weekend-edition-saturday/",
"meta": {
"site": "news",
"source": "npr"
},
"link": "/radio/program/weekend-edition-saturday"
},
"weekend-edition-sunday": {
"id": "weekend-edition-sunday",
"title": "Weekend Edition Sunday",
"info": "Weekend Edition Sunday features interviews with newsmakers, artists, scientists, politicians, musicians, writers, theologians and historians. The program has covered news events from Nelson Mandela's 1990 release from a South African prison to the capture of Saddam Hussein.",
"airtime": "SUN 5am-10am",
"imageSrc": "https://cdn.kqed.org/wp-content/uploads/2024/04/Weekend-Edition-Podcast-Tile-360x360-1.jpg",
"officialWebsiteLink": "https://www.npr.org/programs/weekend-edition-sunday/",
"meta": {
"site": "news",
"source": "npr"
},
"link": "/radio/program/weekend-edition-sunday"
}
},
"racesReducer": {},
"racesGenElectionReducer": {},
"radioSchedulesReducer": {},
"listsReducer": {
"posts/news?tag=chatgpt": {
"isFetching": false,
"latestQuery": {
"from": 0,
"postsToRender": 9
},
"tag": null,
"vitalsOnly": true,
"totalRequested": 9,
"isLoading": false,
"isLoadingMore": true,
"total": {
"value": 31,
"relation": "eq"
},
"items": [
"news_12083278",
"news_12083224",
"news_12082428",
"news_12082064",
"news_12081798",
"news_12081603",
"news_12080610",
"news_12069286",
"news_12066910"
]
}
},
"recallGuideReducer": {
"intros": {},
"policy": {},
"candidates": {}
},
"savedArticleReducer": {
"articles": [],
"status": {}
},
"pfsSessionReducer": {},
"subscriptionsReducer": {},
"termsReducer": {
"about": {
"name": "About",
"type": "terms",
"id": "about",
"slug": "about",
"link": "/about",
"taxonomy": "site"
},
"arts": {
"name": "Arts & Culture",
"grouping": [
"arts",
"pop",
"trulyca"
],
"description": "KQED Arts provides daily in-depth coverage of the Bay Area's music, art, film, performing arts, literature and arts news, as well as cultural commentary and criticism.",
"type": "terms",
"id": "arts",
"slug": "arts",
"link": "/arts",
"taxonomy": "site"
},
"artschool": {
"name": "Art School",
"parent": "arts",
"type": "terms",
"id": "artschool",
"slug": "artschool",
"link": "/artschool",
"taxonomy": "site"
},
"bayareabites": {
"name": "KQED food",
"grouping": [
"food",
"bayareabites",
"checkplease"
],
"parent": "food",
"type": "terms",
"id": "bayareabites",
"slug": "bayareabites",
"link": "/food",
"taxonomy": "site"
},
"bayareahiphop": {
"name": "Bay Area Hiphop",
"type": "terms",
"id": "bayareahiphop",
"slug": "bayareahiphop",
"link": "/bayareahiphop",
"taxonomy": "site"
},
"campaign21": {
"name": "Campaign 21",
"type": "terms",
"id": "campaign21",
"slug": "campaign21",
"link": "/campaign21",
"taxonomy": "site"
},
"checkplease": {
"name": "KQED food",
"grouping": [
"food",
"bayareabites",
"checkplease"
],
"parent": "food",
"type": "terms",
"id": "checkplease",
"slug": "checkplease",
"link": "/food",
"taxonomy": "site"
},
"education": {
"name": "Education",
"grouping": [
"education"
],
"type": "terms",
"id": "education",
"slug": "education",
"link": "/education",
"taxonomy": "site"
},
"elections": {
"name": "Elections",
"type": "terms",
"id": "elections",
"slug": "elections",
"link": "/elections",
"taxonomy": "site"
},
"events": {
"name": "Events",
"type": "terms",
"id": "events",
"slug": "events",
"link": "/events",
"taxonomy": "site"
},
"event": {
"name": "Event",
"alias": "events",
"type": "terms",
"id": "event",
"slug": "event",
"link": "/event",
"taxonomy": "site"
},
"filmschoolshorts": {
"name": "Film School Shorts",
"type": "terms",
"id": "filmschoolshorts",
"slug": "filmschoolshorts",
"link": "/filmschoolshorts",
"taxonomy": "site"
},
"food": {
"name": "KQED food",
"grouping": [
"food",
"bayareabites",
"checkplease"
],
"type": "terms",
"id": "food",
"slug": "food",
"link": "/food",
"taxonomy": "site"
},
"forum": {
"name": "Forum",
"relatedContentQuery": "posts/forum?",
"parent": "news",
"type": "terms",
"id": "forum",
"slug": "forum",
"link": "/forum",
"taxonomy": "site"
},
"futureofyou": {
"name": "Future of You",
"grouping": [
"science",
"futureofyou"
],
"parent": "science",
"type": "terms",
"id": "futureofyou",
"slug": "futureofyou",
"link": "/futureofyou",
"taxonomy": "site"
},
"jpepinheart": {
"name": "KQED food",
"relatedContentQuery": "posts/food,bayareabites,checkplease",
"parent": "food",
"type": "terms",
"id": "jpepinheart",
"slug": "jpepinheart",
"link": "/food",
"taxonomy": "site"
},
"liveblog": {
"name": "Live Blog",
"type": "terms",
"id": "liveblog",
"slug": "liveblog",
"link": "/liveblog",
"taxonomy": "site"
},
"livetv": {
"name": "Live TV",
"parent": "tv",
"type": "terms",
"id": "livetv",
"slug": "livetv",
"link": "/livetv",
"taxonomy": "site"
},
"lowdown": {
"name": "The Lowdown",
"relatedContentQuery": "posts/lowdown?",
"parent": "news",
"type": "terms",
"id": "lowdown",
"slug": "lowdown",
"link": "/lowdown",
"taxonomy": "site"
},
"mindshift": {
"name": "Mindshift",
"parent": "news",
"description": "MindShift explores the future of education by highlighting the innovative – and sometimes counterintuitive – ways educators and parents are helping all children succeed.",
"type": "terms",
"id": "mindshift",
"slug": "mindshift",
"link": "/mindshift",
"taxonomy": "site"
},
"news": {
"name": "News",
"grouping": [
"news",
"forum"
],
"type": "terms",
"id": "news",
"slug": "news",
"link": "/news",
"taxonomy": "site"
},
"perspectives": {
"name": "Perspectives",
"parent": "radio",
"type": "terms",
"id": "perspectives",
"slug": "perspectives",
"link": "/perspectives",
"taxonomy": "site"
},
"podcasts": {
"name": "Podcasts",
"type": "terms",
"id": "podcasts",
"slug": "podcasts",
"link": "/podcasts",
"taxonomy": "site"
},
"pop": {
"name": "Pop",
"parent": "arts",
"type": "terms",
"id": "pop",
"slug": "pop",
"link": "/pop",
"taxonomy": "site"
},
"pressroom": {
"name": "Pressroom",
"type": "terms",
"id": "pressroom",
"slug": "pressroom",
"link": "/pressroom",
"taxonomy": "site"
},
"quest": {
"name": "Quest",
"parent": "science",
"type": "terms",
"id": "quest",
"slug": "quest",
"link": "/quest",
"taxonomy": "site"
},
"radio": {
"name": "Radio",
"grouping": [
"forum",
"perspectives"
],
"description": "Listen to KQED Public Radio – home of Forum and The California Report – on 88.5 FM in San Francisco, 89.3 FM in Sacramento, 88.3 FM in Santa Rosa and 88.1 FM in Martinez.",
"type": "terms",
"id": "radio",
"slug": "radio",
"link": "/radio",
"taxonomy": "site"
},
"root": {
"name": "KQED",
"image": "https://ww2.kqed.org/app/uploads/2020/02/KQED-OG-Image@1x.png",
"imageWidth": 1200,
"imageHeight": 630,
"headData": {
"title": "KQED | News, Radio, Podcasts, TV | Public Media for Northern California",
"description": "KQED provides public radio, television, and independent reporting on issues that matter to the Bay Area. We’re the NPR and PBS member station for Northern California."
},
"type": "terms",
"id": "root",
"slug": "root",
"link": "/root",
"taxonomy": "site"
},
"science": {
"name": "Science",
"grouping": [
"science",
"futureofyou"
],
"description": "KQED Science brings you award-winning science and environment coverage from the Bay Area and beyond.",
"type": "terms",
"id": "science",
"slug": "science",
"link": "/science",
"taxonomy": "site"
},
"stateofhealth": {
"name": "State of Health",
"parent": "science",
"type": "terms",
"id": "stateofhealth",
"slug": "stateofhealth",
"link": "/stateofhealth",
"taxonomy": "site"
},
"support": {
"name": "Support",
"type": "terms",
"id": "support",
"slug": "support",
"link": "/support",
"taxonomy": "site"
},
"thedolist": {
"name": "The Do List",
"parent": "arts",
"type": "terms",
"id": "thedolist",
"slug": "thedolist",
"link": "/thedolist",
"taxonomy": "site"
},
"trulyca": {
"name": "Truly CA",
"grouping": [
"arts",
"pop",
"trulyca"
],
"parent": "arts",
"type": "terms",
"id": "trulyca",
"slug": "trulyca",
"link": "/trulyca",
"taxonomy": "site"
},
"tv": {
"name": "TV",
"type": "terms",
"id": "tv",
"slug": "tv",
"link": "/tv",
"taxonomy": "site"
},
"voterguide": {
"name": "Voter Guide",
"parent": "elections",
"alias": "elections",
"type": "terms",
"id": "voterguide",
"slug": "voterguide",
"link": "/voterguide",
"taxonomy": "site"
},
"guiaelectoral": {
"name": "Guia Electoral",
"parent": "elections",
"alias": "elections",
"type": "terms",
"id": "guiaelectoral",
"slug": "guiaelectoral",
"link": "/guiaelectoral",
"taxonomy": "site"
},
"news_32668": {
"type": "terms",
"id": "news_32668",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "32668",
"found": true
},
"relationships": {},
"featImg": null,
"name": "ChatGPT",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "ChatGPT Archives | KQED News",
"ogDescription": null,
"imageData": {
"ogImageSize": {
"file": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png",
"width": 1200,
"height": 630
},
"twImageSize": {
"file": "https://cdn.kqed.org/wp-content/uploads/2020/02/KQED-OG-Image@1x.png"
},
"twitterCard": "summary_large_image"
}
},
"ttid": 32685,
"slug": "chatgpt",
"isLoading": false,
"link": "/news/tag/chatgpt"
},
"source_news_12082428": {
"type": "terms",
"id": "source_news_12082428",
"meta": {
"override": true
},
"name": "The Bay",
"link": "https://www.kqed.org/podcasts/thebay",
"isLoading": false
},
"news_6188": {
"type": "terms",
"id": "news_6188",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "6188",
"found": true
},
"relationships": {},
"featImg": null,
"name": "Law and Justice",
"description": null,
"taxonomy": "category",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "Law and Justice Archives | KQED News",
"ogDescription": null
},
"ttid": 6212,
"slug": "law-and-justice",
"isLoading": false,
"link": "/news/category/law-and-justice"
},
"news_28250": {
"type": "terms",
"id": "news_28250",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "28250",
"found": true
},
"relationships": {},
"featImg": null,
"name": "Local",
"description": null,
"taxonomy": "category",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "Local Archives | KQED News",
"ogDescription": null
},
"ttid": 28267,
"slug": "local",
"isLoading": false,
"link": "/news/category/local"
},
"news_8": {
"type": "terms",
"id": "news_8",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "8",
"found": true
},
"relationships": {},
"featImg": null,
"name": "News",
"description": null,
"taxonomy": "category",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "News Archives | KQED News",
"ogDescription": null
},
"ttid": 8,
"slug": "news",
"isLoading": false,
"link": "/news/category/news"
},
"news_248": {
"type": "terms",
"id": "news_248",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "248",
"found": true
},
"relationships": {},
"featImg": null,
"name": "Technology",
"description": null,
"taxonomy": "category",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "Technology Archives | KQED News",
"ogDescription": null
},
"ttid": 256,
"slug": "technology",
"isLoading": false,
"link": "/news/category/technology"
},
"news_34755": {
"type": "terms",
"id": "news_34755",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "34755",
"found": true
},
"relationships": {},
"name": "artificial intelligence",
"slug": "artificial-intelligence",
"taxonomy": "tag",
"description": null,
"featImg": null,
"headData": {
"title": "artificial intelligence | KQED News",
"description": null,
"ogTitle": null,
"ogDescription": null,
"ogImgId": null,
"twTitle": null,
"twDescription": null,
"twImgId": null
},
"ttid": 34772,
"isLoading": false,
"link": "/news/tag/artificial-intelligence"
},
"news_1386": {
"type": "terms",
"id": "news_1386",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "1386",
"found": true
},
"relationships": {},
"featImg": null,
"name": "Bay Area",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "Bay Area Archives | KQED News",
"ogDescription": null
},
"ttid": 1398,
"slug": "bay-area",
"isLoading": false,
"link": "/news/tag/bay-area"
},
"news_3897": {
"type": "terms",
"id": "news_3897",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "3897",
"found": true
},
"relationships": {},
"featImg": null,
"name": "Elon Musk",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "Elon Musk Archives | KQED News",
"ogDescription": null
},
"ttid": 3916,
"slug": "elon-musk",
"isLoading": false,
"link": "/news/tag/elon-musk"
},
"news_27626": {
"type": "terms",
"id": "news_27626",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "27626",
"found": true
},
"relationships": {},
"featImg": null,
"name": "featured-news",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "featured-news Archives | KQED News",
"ogDescription": null
},
"ttid": 27643,
"slug": "featured-news",
"isLoading": false,
"link": "/news/tag/featured-news"
},
"news_21891": {
"type": "terms",
"id": "news_21891",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "21891",
"found": true
},
"relationships": {},
"featImg": null,
"name": "lawsuits",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "lawsuits Archives | KQED News",
"ogDescription": null
},
"ttid": 21908,
"slug": "lawsuits",
"isLoading": false,
"link": "/news/tag/lawsuits"
},
"news_34054": {
"type": "terms",
"id": "news_34054",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "34054",
"found": true
},
"relationships": {},
"featImg": null,
"name": "oakland",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "oakland Archives | KQED News",
"ogDescription": null
},
"ttid": 34071,
"slug": "oakland",
"isLoading": false,
"link": "/news/tag/oakland"
},
"news_33542": {
"type": "terms",
"id": "news_33542",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "33542",
"found": true
},
"relationships": {},
"featImg": null,
"name": "OpenAI",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "OpenAI Archives | KQED News",
"ogDescription": null
},
"ttid": 33559,
"slug": "openai",
"isLoading": false,
"link": "/news/tag/openai"
},
"news_33543": {
"type": "terms",
"id": "news_33543",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "33543",
"found": true
},
"relationships": {},
"name": "Sam Altman",
"slug": "sam-altman",
"taxonomy": "tag",
"description": null,
"featImg": null,
"headData": {
"title": "Sam Altman | KQED News",
"description": null,
"ogTitle": null,
"ogDescription": null,
"ogImgId": null,
"twTitle": null,
"twDescription": null,
"twImgId": null,
"metaRobotsNoIndex": "noindex"
},
"ttid": 33560,
"isLoading": false,
"link": "/news/tag/sam-altman"
},
"news_34586": {
"type": "terms",
"id": "news_34586",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "34586",
"found": true
},
"relationships": {},
"name": "Silicon Valley",
"slug": "silicon-valley",
"taxonomy": "tag",
"description": null,
"featImg": null,
"headData": {
"title": "Silicon Valley | KQED News",
"description": null,
"ogTitle": null,
"ogDescription": null,
"ogImgId": null,
"twTitle": null,
"twDescription": null,
"twImgId": null
},
"ttid": 34603,
"isLoading": false,
"link": "/news/tag/silicon-valley"
},
"news_1631": {
"type": "terms",
"id": "news_1631",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "1631",
"found": true
},
"relationships": {},
"name": "Technology",
"slug": "technology",
"taxonomy": "tag",
"description": null,
"featImg": null,
"headData": {
"title": "Technology | KQED News",
"description": null,
"ogTitle": null,
"ogDescription": null,
"ogImgId": null,
"twTitle": null,
"twDescription": null,
"twImgId": null
},
"ttid": 1643,
"isLoading": false,
"link": "/news/tag/technology"
},
"news_33733": {
"type": "terms",
"id": "news_33733",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "33733",
"found": true
},
"relationships": {},
"featImg": null,
"name": "News",
"description": null,
"taxonomy": "interest",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "News Archives | KQED News",
"ogDescription": null
},
"ttid": 33750,
"slug": "news",
"isLoading": false,
"link": "/news/interest/news"
},
"news_33730": {
"type": "terms",
"id": "news_33730",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "33730",
"found": true
},
"relationships": {},
"featImg": null,
"name": "Oakland",
"description": null,
"taxonomy": "interest",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "Oakland Archives | KQED News",
"ogDescription": null
},
"ttid": 33747,
"slug": "oakland",
"isLoading": false,
"link": "/news/interest/oakland"
},
"news_33732": {
"type": "terms",
"id": "news_33732",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "33732",
"found": true
},
"relationships": {},
"featImg": null,
"name": "Technology",
"description": null,
"taxonomy": "interest",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "Technology Archives | KQED News",
"ogDescription": null
},
"ttid": 33749,
"slug": "technology",
"isLoading": false,
"link": "/news/interest/technology"
},
"news_31795": {
"type": "terms",
"id": "news_31795",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "31795",
"found": true
},
"relationships": {},
"featImg": null,
"name": "California",
"description": null,
"taxonomy": "category",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "California Archives | KQED News",
"ogDescription": null
},
"ttid": 31812,
"slug": "california",
"isLoading": false,
"link": "/news/category/california"
},
"news_19954": {
"type": "terms",
"id": "news_19954",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "19954",
"found": true
},
"relationships": {},
"featImg": null,
"name": "Law and Justice",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "Law and Justice Archives | KQED News",
"ogDescription": null
},
"ttid": 19971,
"slug": "law-and-justice",
"isLoading": false,
"link": "/news/tag/law-and-justice"
},
"news_36810": {
"type": "terms",
"id": "news_36810",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "36810",
"found": true
},
"relationships": {},
"name": "federal trial",
"slug": "federal-trial",
"taxonomy": "tag",
"description": null,
"featImg": null,
"headData": {
"title": "federal trial | KQED News",
"description": null,
"ogTitle": null,
"ogDescription": null,
"ogImgId": null,
"twTitle": null,
"twDescription": null,
"twImgId": null
},
"ttid": 36827,
"isLoading": false,
"link": "/news/tag/federal-trial"
},
"news_33812": {
"type": "terms",
"id": "news_33812",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "33812",
"found": true
},
"relationships": {},
"featImg": null,
"name": "Interests",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "Interests Archives | KQED News",
"ogDescription": null
},
"ttid": 33829,
"slug": "interests",
"isLoading": false,
"link": "/news/tag/interests"
},
"news_35758": {
"type": "terms",
"id": "news_35758",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "35758",
"found": true
},
"relationships": {},
"name": "Open AI",
"slug": "open-ai",
"taxonomy": "tag",
"description": null,
"featImg": null,
"headData": {
"title": "Open AI | KQED News",
"description": null,
"ogTitle": null,
"ogDescription": null,
"ogImgId": null,
"twTitle": null,
"twDescription": null,
"twImgId": null
},
"ttid": 35775,
"isLoading": false,
"link": "/news/tag/open-ai"
},
"news_22598": {
"type": "terms",
"id": "news_22598",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "22598",
"found": true
},
"relationships": {},
"featImg": null,
"name": "The Bay",
"description": "\u003cimg class=\"alignnone size-medium wp-image-11638190\" src=\"https://ww2.kqed.org/news/wp-content/uploads/sites/10/2018/02/TheBay_1200x6301.png\" alt=\"\" />\r\n\u003cbr/>\r\n\r\nEvery good story starts local. So that’s where we start. \u003ci>The Bay\u003c/i> is storytelling for daily news. KQED host Devin Katayama talks with reporters to help us make sense of what’s happening in the Bay Area. One story. One conversation. One idea.\r\n\r\n\u003cstrong>Subscribe to The Bay:\u003c/strong>\r\n\r\n\u003ca href=\"https://itunes.apple.com/us/podcast/the-bay/id1350043452?mt=2\">\u003cimg src=\"https://ww2.kqed.org/news/wp-content/uploads/sites/10/2018/01/Listen_on_Apple_Podcasts_sRGB_US-e1515635079510.png\" />\u003c/a>",
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": "Every good story starts local. So that’s where we start. The Bay is storytelling for daily news. KQED host Devin Katayama talks with reporters to help us make sense of what’s happening in the Bay Area. One story. One conversation. One idea. Subscribe to The Bay:",
"title": "The Bay Archives | KQED News",
"ogDescription": null
},
"ttid": 22615,
"slug": "the-bay",
"isLoading": false,
"link": "/news/tag/the-bay"
},
"news_34167": {
"type": "terms",
"id": "news_34167",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "34167",
"found": true
},
"relationships": {},
"name": "Criminal Justice",
"slug": "criminal-justice",
"taxonomy": "category",
"description": null,
"featImg": null,
"headData": {
"title": "Criminal Justice Archives | KQED News",
"description": null,
"ogTitle": null,
"ogDescription": null,
"ogImgId": null,
"twTitle": null,
"twDescription": null,
"twImgId": null
},
"ttid": 34184,
"isLoading": false,
"link": "/news/category/criminal-justice"
},
"news_17725": {
"type": "terms",
"id": "news_17725",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "17725",
"found": true
},
"relationships": {},
"featImg": null,
"name": "criminal justice",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "criminal justice Archives | KQED News",
"ogDescription": null
},
"ttid": 17759,
"slug": "criminal-justice",
"isLoading": false,
"link": "/news/tag/criminal-justice"
},
"news_22434": {
"type": "terms",
"id": "news_22434",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "22434",
"found": true
},
"relationships": {},
"featImg": null,
"name": "death",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "death Archives | KQED News",
"ogDescription": null
},
"ttid": 22451,
"slug": "death",
"isLoading": false,
"link": "/news/tag/death"
},
"news_35784": {
"type": "terms",
"id": "news_35784",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "35784",
"found": true
},
"relationships": {},
"name": "gun violence",
"slug": "gun-violence",
"taxonomy": "tag",
"description": null,
"featImg": null,
"headData": {
"title": "gun violence | KQED News",
"description": null,
"ogTitle": null,
"ogDescription": null,
"ogImgId": null,
"twTitle": null,
"twDescription": null,
"twImgId": null
},
"ttid": 35801,
"isLoading": false,
"link": "/news/tag/gun-violence"
},
"news_38": {
"type": "terms",
"id": "news_38",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "38",
"found": true
},
"relationships": {},
"featImg": null,
"name": "San Francisco",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "San Francisco Archives | KQED News",
"ogDescription": null
},
"ttid": 58,
"slug": "san-francisco",
"isLoading": false,
"link": "/news/tag/san-francisco"
},
"news_33745": {
"type": "terms",
"id": "news_33745",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "33745",
"found": true
},
"relationships": {},
"featImg": null,
"name": "Criminal Justice",
"description": null,
"taxonomy": "interest",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "Criminal Justice Archives | KQED News",
"ogDescription": null
},
"ttid": 33762,
"slug": "criminal-justice",
"isLoading": false,
"link": "/news/interest/criminal-justice"
},
"news_33729": {
"type": "terms",
"id": "news_33729",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "33729",
"found": true
},
"relationships": {},
"featImg": null,
"name": "San Francisco",
"description": null,
"taxonomy": "interest",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "San Francisco Archives | KQED News",
"ogDescription": null
},
"ttid": 33746,
"slug": "san-francisco",
"isLoading": false,
"link": "/news/interest/san-francisco"
},
"news_57": {
"type": "terms",
"id": "news_57",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "57",
"found": true
},
"relationships": {},
"featImg": null,
"name": "Tesla",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "Tesla Archives | KQED News",
"ogDescription": null
},
"ttid": 57,
"slug": "tesla",
"isLoading": false,
"link": "/news/tag/tesla"
},
"news_18352": {
"type": "terms",
"id": "news_18352",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "18352",
"found": true
},
"relationships": {},
"featImg": null,
"name": "East Bay",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "East Bay Archives | KQED News",
"ogDescription": null
},
"ttid": 18386,
"slug": "east-bay",
"isLoading": false,
"link": "/news/tag/east-bay"
},
"news_25184": {
"type": "terms",
"id": "news_25184",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "25184",
"found": true
},
"relationships": {},
"featImg": null,
"name": "AI",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "AI Archives | KQED News",
"ogDescription": null
},
"ttid": 25201,
"slug": "ai",
"isLoading": false,
"link": "/news/tag/ai"
},
"news_32664": {
"type": "terms",
"id": "news_32664",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "32664",
"found": true
},
"relationships": {},
"name": "AI software",
"slug": "ai-software",
"taxonomy": "tag",
"description": null,
"featImg": null,
"headData": {
"title": "AI software | KQED News",
"description": null,
"ogTitle": null,
"ogDescription": null,
"ogImgId": null,
"twTitle": null,
"twDescription": null,
"twImgId": null
},
"ttid": 32681,
"isLoading": false,
"link": "/news/tag/ai-software"
},
"news_18538": {
"type": "terms",
"id": "news_18538",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "18538",
"found": true
},
"relationships": {},
"featImg": null,
"name": "California",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "California Archives | KQED News",
"ogDescription": null
},
"ttid": 31,
"slug": "california",
"isLoading": false,
"link": "/news/tag/california"
},
"news_36279": {
"type": "terms",
"id": "news_36279",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "36279",
"found": true
},
"relationships": {},
"name": "chatbot",
"slug": "chatbot",
"taxonomy": "tag",
"description": null,
"featImg": null,
"headData": {
"title": "chatbot | KQED News",
"description": null,
"ogTitle": null,
"ogDescription": null,
"ogImgId": null,
"twTitle": null,
"twDescription": null,
"twImgId": null
},
"ttid": 36296,
"isLoading": false,
"link": "/news/tag/chatbot"
},
"news_33738": {
"type": "terms",
"id": "news_33738",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "33738",
"found": true
},
"relationships": {},
"featImg": null,
"name": "California",
"description": null,
"taxonomy": "interest",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "California Archives | KQED News",
"ogDescription": null
},
"ttid": 33755,
"slug": "california",
"isLoading": false,
"link": "/news/interest/california"
},
"news_13": {
"type": "terms",
"id": "news_13",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "13",
"found": true
},
"relationships": {},
"name": "Politics",
"slug": "politics",
"taxonomy": "category",
"description": null,
"featImg": null,
"headData": {
"title": "Politics | KQED News",
"description": null,
"ogTitle": null,
"ogDescription": null,
"ogImgId": null,
"twTitle": null,
"twDescription": null,
"twImgId": null
},
"ttid": 13,
"isLoading": false,
"link": "/news/category/politics"
},
"news_22307": {
"type": "terms",
"id": "news_22307",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "22307",
"found": true
},
"relationships": {},
"featImg": null,
"name": "california laws",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "california laws Archives | KQED News",
"ogDescription": null
},
"ttid": 22324,
"slug": "california-laws",
"isLoading": false,
"link": "/news/tag/california-laws"
},
"news_21285": {
"type": "terms",
"id": "news_21285",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "21285",
"found": true
},
"relationships": {},
"featImg": null,
"name": "South Bay",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "South Bay Archives | KQED News",
"ogDescription": null
},
"ttid": 21302,
"slug": "south-bay",
"isLoading": false,
"link": "/news/tag/south-bay"
},
"news_33731": {
"type": "terms",
"id": "news_33731",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "33731",
"found": true
},
"relationships": {},
"featImg": null,
"name": "South Bay",
"description": null,
"taxonomy": "interest",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "South Bay Archives | KQED News",
"ogDescription": null
},
"ttid": 33748,
"slug": "south-bay",
"isLoading": false,
"link": "/news/interest/south-bay"
},
"news_1323": {
"type": "terms",
"id": "news_1323",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "1323",
"found": true
},
"relationships": {},
"featImg": null,
"name": "Donald Trump",
"description": null,
"taxonomy": "tag",
"headData": {
"twImgId": null,
"twTitle": null,
"ogTitle": null,
"ogImgId": null,
"twDescription": null,
"description": null,
"title": "Donald Trump Archives | KQED News",
"ogDescription": null
},
"ttid": 1335,
"slug": "donald-trump",
"isLoading": false,
"link": "/news/tag/donald-trump"
},
"news_17968": {
"type": "terms",
"id": "news_17968",
"meta": {
"index": "terms_1716263798",
"site": "news",
"id": "17968",
"found": true
},
"relationships": {},
"name": "Politics",
"slug": "politics",
"taxonomy": "tag",
"description": null,
"featImg": null,
"headData": {
"title": "Politics | KQED News",
"description": null,
"ogTitle": null,
"ogDescription": null,
"ogImgId": null,
"twTitle": null,
"twDescription": null,
"twImgId": null
},
"ttid": 18002,
"isLoading": false,
"link": "/news/tag/politics"
}
},
"userAgentReducer": {
"userAgent": "Mozilla/5.0 AppleWebKit/537.36 (KHTML, like Gecko; compatible; ClaudeBot/1.0; +claudebot@anthropic.com)",
"isBot": true
},
"userPermissionsReducer": {
"wpLoggedIn": false
},
"localStorageReducer": {},
"browserHistoryReducer": [],
"eventsReducer": {},
"fssReducer": {},
"tvDailyScheduleReducer": {},
"tvWeeklyScheduleReducer": {},
"tvPrimetimeScheduleReducer": {},
"tvMonthlyScheduleReducer": {},
"userAccountReducer": {
"user": {
"email": null,
"emailStatus": "EMAIL_UNVALIDATED",
"loggedStatus": "LOGGED_OUT",
"loggingChecked": false,
"articles": [],
"firstName": null,
"lastName": null,
"phoneNumber": null,
"fetchingMembership": false,
"membershipError": false,
"memberships": [
{
"id": null,
"startDate": null,
"firstName": null,
"lastName": null,
"familyNumber": null,
"memberNumber": null,
"memberSince": null,
"expirationDate": null,
"pfsEligible": false,
"isSustaining": false,
"membershipLevel": "Prospect",
"membershipStatus": "Non Member",
"lastGiftDate": null,
"renewalDate": null,
"lastDonationAmount": null
}
]
},
"authModal": {
"isOpen": false,
"view": "LANDING_VIEW"
},
"error": null
},
"youthMediaReducer": {},
"checkPleaseReducer": {
"filterData": {
"region": {
"key": "Restaurant Region",
"filters": [
"Any Region"
]
},
"cuisine": {
"key": "Restaurant Cuisine",
"filters": [
"Any Cuisine"
]
}
},
"restaurantDataById": {},
"restaurantIdsSorted": [],
"error": null
},
"location": {
"pathname": "/news/tag/chatgpt",
"previousPathname": "/"
}
}