Skip to content

Commit 9e9ec20

Browse files
walkerfrankenberg, jsanford8, and philcluff
authored
refactor: switch from mux/ai package to Mux Robots (#230)
* refactor: switch from mux/ai package to robots API * tweak * address comments, clean up * address comments * move to step * remove got * use the new sdk * make it work * fix a couple of things * revert next env change * add compatibility route * latest * update typescript SDK * update to use step * bump thresholds down a tiny bit * use latest sdk * remove jose transform ignore * Refactor to use hooks * hack around webhook shape being wrong * update to latest typescript sdk * This might work * Simplify * OK lets try timeouts again * Re-add waiting for captions * Not sure where this came from * Yoloing in sentry * This is a hack * Throw * Error? * Maybe this? * Meh, screw it * Update PII config in Sentry --------- Co-authored-by: Justin Sanford <justin.h.sanford@gmail.com> Co-authored-by: Phil <578330+philcluff@users.noreply.github.com>
1 parent cb08190 commit 9e9ec20

25 files changed

Lines changed: 5030 additions & 2664 deletions

.env.local.example

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,6 @@ AIRTABLE_BASE_ID=
33
MUX_TOKEN_ID=
44
MUX_TOKEN_SECRET=
55
MUX_WEBHOOK_SIGNATURE_SECRET=
6-
OPENAI_API_KEY=
7-
HIVE_API_KEY=
86
NEXT_PUBLIC_MUX_ENV_KEY=
97
SLACK_WEBHOOK_ASSET_READY=
108
SLACK_MODERATOR_PASSWORD=

.gitignore

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -32,3 +32,6 @@ npm-debug.log*
3232
.vercel
3333

3434
*.tsbuildinfo
35+
36+
# Sentry Config File
37+
.env.sentry-build-plugin

app/api/report/route.ts

Lines changed: 10 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,24 +1,26 @@
11
import { NextRequest, NextResponse } from 'next/server';
2-
import { RequestError } from 'got';
3-
import got from '../../../lib/got-client';
42
import { sendAbuseReport } from '../../../lib/slack-notifier';
53

64
const notify = async ({playbackId, reason, comment }: { playbackId: string, reason: string, comment?: string }) => {
75
if (process.env.AIRTABLE_KEY && process.env.AIRTABLE_BASE_ID) {
86
try {
9-
await got.post(`https://api.airtable.com/v0/${process.env.AIRTABLE_BASE_ID}/Reported`, {
7+
const res = await fetch(`https://api.airtable.com/v0/${process.env.AIRTABLE_BASE_ID}/Reported`, {
8+
method: 'POST',
109
headers: {
11-
Authorization: `Bearer ${process.env.AIRTABLE_KEY}`
10+
Authorization: `Bearer ${process.env.AIRTABLE_KEY}`,
11+
'Content-Type': 'application/json',
1212
},
13-
json: {
13+
body: JSON.stringify({
1414
records: [
1515
{fields: { playbackId, reason, comment, status: "Pending" } },
1616
]
17-
}
17+
}),
1818
});
19+
if (!res.ok) {
20+
console.error('Airtable responded with', res.status, await res.text()); // eslint-disable-line no-console
21+
}
1922
} catch (e) {
20-
const err = (e as RequestError);
21-
console.error('Error reporting to airtable', err.response?.body, e); // eslint-disable-line no-console
23+
console.error('Error reporting to airtable', e); // eslint-disable-line no-console
2224
}
2325
}
2426
try {

app/api/uploads/route.ts

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,6 @@ export async function POST(_request: NextRequest) {
1414
{
1515
generated_subtitles: [
1616
{
17-
// @ts-expect-error - generated_subtitles with language_code: 'auto' is supported but not in types yet
1817
language_code: 'auto',
1918
},
2019
],

app/api/webhooks/mux-ai/route.ts

Lines changed: 3 additions & 119 deletions
Original file line numberDiff line numberDiff line change
@@ -1,119 +1,3 @@
1-
import { NextRequest, NextResponse } from 'next/server';
2-
import Mux from '@mux/mux-node';
3-
import { start, resumeHook } from 'workflow/api';
4-
import { moderateAndSummarize, captionHookToken } from '../../../../workflows/process-mux-ai';
5-
import type { CaptionHookPayload } from '../../../../types';
6-
7-
const webhookSignatureSecret = process.env.MUX_WEBHOOK_SIGNATURE_SECRET;
8-
const mux = new Mux();
9-
10-
export async function POST(request: NextRequest) {
11-
// Get raw body as text (NOT json) for signature verification
12-
const rawBody = await request.text();
13-
14-
// Verify signature
15-
if (webhookSignatureSecret) {
16-
// Convert headers to plain object for Mux SDK
17-
const headers: Record<string, string> = {};
18-
request.headers.forEach((value, key) => {
19-
headers[key] = value;
20-
});
21-
22-
try {
23-
mux.webhooks.verifySignature(rawBody, headers, webhookSignatureSecret);
24-
} catch (e) {
25-
console.error('Error verifyWebhookSignature - is the correct signature secret set?', e);
26-
return NextResponse.json(
27-
{ message: (e as Error).message },
28-
{ status: 400 }
29-
);
30-
}
31-
} else {
32-
console.log('Skipping webhook sig verification because no secret is configured'); // eslint-disable-line no-console
33-
}
34-
35-
try {
36-
// Parse JSON inside try/catch to handle malformed payloads
37-
const jsonBody = JSON.parse(rawBody);
38-
const { data, type } = jsonBody;
39-
40-
// Handle video.asset.ready - start unified AI workflow
41-
if (type === 'video.asset.ready') {
42-
const assetId = data.id;
43-
44-
const workflowRun = await start(moderateAndSummarize, [assetId]);
45-
46-
return NextResponse.json({
47-
message: 'AI workflow started',
48-
asset_id: assetId,
49-
workflow_id: workflowRun.runId
50-
});
51-
}
52-
53-
// Handle video.asset.track.ready - resume caption hook
54-
if (type === 'video.asset.track.ready') {
55-
const track = data;
56-
57-
if (track.type === 'text' && track.text_type === 'subtitles' && track.text_source === 'generated_vod') {
58-
const assetId = track.asset_id;
59-
const token = captionHookToken(assetId);
60-
61-
try {
62-
await resumeHook<CaptionHookPayload>(token, { includeTranscript: true });
63-
} catch (e) {
64-
// Hook may not exist yet if captions arrived before workflow started
65-
console.log(`Could not resume caption hook for asset ${assetId}: ${(e as Error).message}`); // eslint-disable-line no-console
66-
}
67-
68-
return NextResponse.json({
69-
message: 'Caption hook resumed',
70-
asset_id: assetId,
71-
track_id: track.id,
72-
});
73-
}
74-
75-
return NextResponse.json({ message: 'Track type not relevant for AI processing' });
76-
}
77-
78-
// Handle video.asset.track.errored - resume caption hook with appropriate includeTranscript
79-
if (type === 'video.asset.track.errored') {
80-
const track = data;
81-
82-
if (track.type === 'text' && track.text_type === 'subtitles' && track.text_source === 'generated_vod') {
83-
const assetId = track.asset_id;
84-
const errorMessages: string[] = track.error?.messages || [];
85-
const token = captionHookToken(assetId);
86-
87-
// If error is due to no audio or failed generation, proceed without transcript
88-
const isExpectedError = errorMessages.includes('Asset does not have an audio track') ||
89-
errorMessages.includes('Failed to generate caption track');
90-
91-
if (isExpectedError) {
92-
console.log(`Track errored for asset ${assetId} (${errorMessages.join(', ')}), resuming hook without transcript`); // eslint-disable-line no-console
93-
} else {
94-
console.log(`Track errored for asset ${assetId} with unhandled error: ${errorMessages.join(', ')}`); // eslint-disable-line no-console
95-
}
96-
97-
try {
98-
await resumeHook<CaptionHookPayload>(token, { includeTranscript: false });
99-
} catch (e) {
100-
console.log(`Could not resume caption hook for asset ${assetId}: ${(e as Error).message}`); // eslint-disable-line no-console
101-
}
102-
103-
return NextResponse.json({
104-
message: 'Caption hook resumed (track errored)',
105-
asset_id: assetId,
106-
track_id: track.id,
107-
});
108-
}
109-
110-
return NextResponse.json({ message: 'Track type not relevant for AI processing' });
111-
}
112-
113-
// Event type not handled
114-
return NextResponse.json({ message: 'Event type not handled' });
115-
} catch (e) {
116-
console.error('Request error', e); // eslint-disable-line no-console
117-
return NextResponse.json({ error: 'Error handling webhook' }, { status: 500 });
118-
}
119-
}
1+
// Compatibility shim: Mux webhook configs pointing at /api/webhooks/mux-ai
2+
// continue to work after the handler moved to /api/webhooks/mux.
3+
export { POST } from '../mux/route';

app/api/webhooks/mux/route.ts

Lines changed: 189 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,189 @@
1+
import { NextRequest, NextResponse } from 'next/server';
2+
import Mux from '@mux/mux-node';
3+
import { start, resumeHook } from 'workflow/api';
4+
import {
5+
moderateAndSummarize,
6+
moderationHookToken,
7+
summarizeHookToken,
8+
askQuestionsHookToken,
9+
captionHookToken,
10+
} from '../../../../workflows/process-mux-ai';
11+
import type { RobotsJobHookPayload, CaptionHookPayload } from '../../../../types';
12+
13+
const webhookSignatureSecret = process.env.MUX_WEBHOOK_SIGNATURE_SECRET;
14+
const mux = new Mux();
15+
16+
type RobotsJobStatus = 'completed' | 'errored' | 'cancelled';
17+
18+
// Robots job webhook data — we only read the fields we need. Everything else is
19+
// ignored; the workflow calls `.retrieve()` for authoritative outputs rather than
20+
// trusting the webhook body shape.
21+
interface RobotsJobWebhookData {
22+
id: string;
23+
resources?: { assets: Array<{ id: string }> };
24+
errors?: Array<{ type: string; message: string; retryable?: boolean }>;
25+
}
26+
27+
function buildRobotsHookPayload(data: RobotsJobWebhookData, terminalStatus: RobotsJobStatus): RobotsJobHookPayload {
28+
if (terminalStatus === 'errored') {
29+
return {
30+
status: 'errored',
31+
errorMessage: data.errors?.[0]?.message ?? 'Unknown error',
32+
};
33+
}
34+
if (terminalStatus === 'cancelled') {
35+
return { status: 'cancelled' };
36+
}
37+
return { status: 'completed' };
38+
}
39+
40+
export async function POST(request: NextRequest) {
41+
// Get raw body as text (NOT json) for signature verification and parsing
42+
const rawBody = await request.text();
43+
44+
// Verify webhook signature (required in production, optional in development)
45+
if (webhookSignatureSecret) {
46+
// Convert headers to plain object for Mux SDK
47+
const headers: Record<string, string> = {};
48+
request.headers.forEach((value, key) => {
49+
headers[key] = value;
50+
});
51+
52+
try {
53+
mux.webhooks.verifySignature(rawBody, headers, webhookSignatureSecret);
54+
} catch (e) {
55+
console.error('Error verifyWebhookSignature - is the correct signature secret set?', e);
56+
return NextResponse.json(
57+
{ message: (e as Error).message },
58+
{ status: 400 }
59+
);
60+
}
61+
} else if (process.env.NODE_ENV === 'production') {
62+
console.error('MUX_WEBHOOK_SIGNATURE_SECRET is not set — rejecting webhook in production');
63+
return NextResponse.json(
64+
{ message: 'Webhook signature verification is required in production' },
65+
{ status: 500 }
66+
);
67+
} else {
68+
console.log('Skipping webhook sig verification because no secret is configured'); // eslint-disable-line no-console
69+
}
70+
71+
try {
72+
// Parse JSON inside try/catch to handle malformed payloads
73+
const jsonBody = JSON.parse(rawBody);
74+
const { data, type } = jsonBody;
75+
76+
// Handle video.asset.ready - start unified AI workflow
77+
if (type === 'video.asset.ready') {
78+
const assetId = data.id;
79+
80+
const workflowRun = await start(moderateAndSummarize, [assetId]);
81+
82+
return NextResponse.json({
83+
message: 'AI workflow started',
84+
asset_id: assetId,
85+
workflow_id: workflowRun.runId
86+
});
87+
}
88+
89+
// Handle Robots job terminal events — resume the workflow's matching hook.
90+
// Event type form: `robots.job.<workflow>.<status>`, e.g. `robots.job.moderate.completed`.
91+
// We only care about terminal statuses; pending/processing fall through.
92+
const robotsMatch = /^robots\.job\.(moderate|summarize|ask_questions)\.(completed|errored|cancelled)$/.exec(type);
93+
if (robotsMatch) {
94+
const [, workflow, status] = robotsMatch as unknown as [string, 'moderate' | 'summarize' | 'ask_questions', RobotsJobStatus];
95+
const jobData = data as RobotsJobWebhookData;
96+
const assetId = jobData.resources?.assets?.[0]?.id;
97+
98+
if (!assetId) {
99+
console.log(`Robots ${workflow} webhook missing resources.assets[0].id (job ${jobData.id})`); // eslint-disable-line no-console
100+
return NextResponse.json({ message: 'Robots event missing asset id' });
101+
}
102+
103+
const token =
104+
workflow === 'moderate' ? moderationHookToken(assetId)
105+
: workflow === 'summarize' ? summarizeHookToken(assetId)
106+
: askQuestionsHookToken(assetId);
107+
108+
const payload = buildRobotsHookPayload(jobData, status);
109+
110+
try {
111+
await resumeHook<RobotsJobHookPayload>(token, payload);
112+
} catch (e) {
113+
// Hook may not exist (stale workflow run, redelivered webhook after the workflow moved on, etc.)
114+
console.log(`Could not resume robots ${workflow} hook for asset ${assetId}: ${(e as Error).message}`); // eslint-disable-line no-console
115+
}
116+
117+
return NextResponse.json({
118+
message: `Robots ${workflow} hook resumed (${status})`,
119+
asset_id: assetId,
120+
job_id: jobData.id,
121+
});
122+
}
123+
124+
// Handle video.asset.track.ready — resume caption hook
125+
if (type === 'video.asset.track.ready') {
126+
const track = data;
127+
128+
if (track.type === 'text' && track.text_type === 'subtitles' && track.text_source === 'generated_vod') {
129+
const assetId = track.asset_id;
130+
const token = captionHookToken(assetId);
131+
132+
try {
133+
await resumeHook<CaptionHookPayload>(token, { includeTranscript: true });
134+
} catch (e) {
135+
// Hook may not exist yet if captions arrived before workflow reached this point
136+
console.log(`Could not resume caption hook for asset ${assetId}: ${(e as Error).message}`); // eslint-disable-line no-console
137+
}
138+
139+
return NextResponse.json({
140+
message: 'Caption hook resumed',
141+
asset_id: assetId,
142+
track_id: track.id,
143+
});
144+
}
145+
146+
return NextResponse.json({ message: 'Track type not relevant for AI processing' });
147+
}
148+
149+
// Handle video.asset.track.errored — resume caption hook without transcript
150+
if (type === 'video.asset.track.errored') {
151+
const track = data;
152+
153+
if (track.type === 'text' && track.text_type === 'subtitles' && track.text_source === 'generated_vod') {
154+
const assetId = track.asset_id;
155+
const errorMessages: string[] = track.error?.messages || [];
156+
const token = captionHookToken(assetId);
157+
158+
const isExpectedError = errorMessages.includes('Asset does not have an audio track') ||
159+
errorMessages.includes('Failed to generate caption track');
160+
161+
if (isExpectedError) {
162+
console.log(`Track errored for asset ${assetId} (${errorMessages.join(', ')}), resuming hook without transcript`); // eslint-disable-line no-console
163+
} else {
164+
console.log(`Track errored for asset ${assetId} with unhandled error: ${errorMessages.join(', ')}`); // eslint-disable-line no-console
165+
}
166+
167+
try {
168+
await resumeHook<CaptionHookPayload>(token, { includeTranscript: false });
169+
} catch (e) {
170+
console.log(`Could not resume caption hook for asset ${assetId}: ${(e as Error).message}`); // eslint-disable-line no-console
171+
}
172+
173+
return NextResponse.json({
174+
message: 'Caption hook resumed (track errored)',
175+
asset_id: assetId,
176+
track_id: track.id,
177+
});
178+
}
179+
180+
return NextResponse.json({ message: 'Track type not relevant for AI processing' });
181+
}
182+
183+
// Return 200 for unhandled event types to prevent Mux from retrying
184+
return NextResponse.json({ message: 'Event type not handled' });
185+
} catch (e) {
186+
console.error('Request error', e); // eslint-disable-line no-console
187+
return NextResponse.json({ error: 'Error handling webhook' }, { status: 500 });
188+
}
189+
}

app/global-error.tsx

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
"use client";
2+
3+
import * as Sentry from "@sentry/nextjs";
4+
import NextError from "next/error";
5+
import { useEffect } from "react";
6+
7+
export default function GlobalError({
8+
error,
9+
}: {
10+
error: Error & { digest?: string };
11+
}) {
12+
useEffect(() => {
13+
Sentry.captureException(error);
14+
}, [error]);
15+
16+
return (
17+
<html lang="en">
18+
<body>
19+
{/* `NextError` is the default Next.js error page component. Its type
20+
definition requires a `statusCode` prop. However, since the App Router
21+
does not expose status codes for errors, we simply pass 0 to render a
22+
generic error message. */}
23+
<NextError statusCode={0} />
24+
</body>
25+
</html>
26+
);
27+
}

0 commit comments

Comments
 (0)