Skip to content

Commit f465781

Browse files
committed
fix: resolve WAV export audio distortion issue
- Replace manual OfflineAudioContext with Tone.Offline() for proper offline rendering and transport synchronization
- Add master gain (0.8) and limiter (-1dB) to prevent clipping
- Simplify audio/MIDI clip scheduling for offline context
- Remove unused Tooltip imports from PianoRoll

Fixes #7
1 parent d4accd0 commit f465781

2 files changed

Lines changed: 53 additions & 111 deletions

File tree

components/compose/editors/PianoRoll.tsx

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -4,11 +4,6 @@ import { useCallback, useMemo, useRef, useState, useEffect, memo } from 'react';
44
import { ZoomIn, ZoomOut, AlertCircle } from 'lucide-react';
55
import { useProjectStore, useUIStore } from '@/lib/store';
66
import { Button } from '@/components/ui/button';
7-
import {
8-
Tooltip,
9-
TooltipContent,
10-
TooltipTrigger,
11-
} from '@/components/ui/tooltip';
127
import {
138
Select,
149
SelectContent,

lib/audio/offline-renderer.ts

Lines changed: 53 additions & 106 deletions
Original file line numberDiff line numberDiff line change
@@ -170,99 +170,72 @@ function createSynthForTrack(track: Track): SynthType {
170170
// ============================================
171171

172172
/**
173-
* Export project to WAV with real-time progress updates
174-
* Uses OfflineAudioContext with suspend/resume for accurate progress
173+
* Export project to WAV using Tone.Offline for proper offline rendering
174+
* Tone.Offline handles context switching and transport synchronization correctly
175175
*/
176176
export async function exportProjectToWav(
177177
project: Project,
178178
onProgress?: ProgressCallback,
179179
options: ExportOptions = {}
180180
): Promise<Blob> {
181181
const {
182-
sampleRate = 44100,
183182
tailSeconds = 2,
184183
} = options;
185184

186-
// Store original context to restore later
187-
const originalContext = Tone.getContext();
185+
// 1. Calculate total duration
186+
const maxBar = project.clips.reduce((max, clip) => {
187+
return Math.max(max, clip.startBar + clip.lengthBars);
188+
}, 0);
188189

189-
try {
190-
// 1. Calculate total duration
191-
const maxBar = project.clips.reduce((max, clip) => {
192-
return Math.max(max, clip.startBar + clip.lengthBars);
193-
}, 0);
194-
195-
const beatsPerBar = project.timeSignature[0];
196-
const duration = barsToSeconds(maxBar, project.bpm, beatsPerBar) + tailSeconds;
197-
198-
if (duration <= tailSeconds) {
199-
throw new Error('Project has no clips to export');
200-
}
201-
202-
203-
// 2. Create offline context
204-
const offlineCtx = new OfflineAudioContext(2, sampleRate * duration, sampleRate);
190+
const beatsPerBar = project.timeSignature[0];
191+
const duration = barsToSeconds(maxBar, project.bpm, beatsPerBar) + tailSeconds;
205192

206-
// 3. Set up progress checkpoints via suspend/resume
207-
const checkpointInterval = 1; // seconds
208-
const checkpoints: number[] = [];
209-
for (let t = checkpointInterval; t < duration; t += checkpointInterval) {
210-
checkpoints.push(t);
211-
}
212-
213-
// Schedule suspend points for progress tracking
214-
checkpoints.forEach(time => {
215-
offlineCtx.suspend(time).then(() => {
216-
const progress = Math.round((time / duration) * 100);
217-
onProgress?.(progress);
218-
offlineCtx.resume();
219-
});
220-
});
193+
if (duration <= tailSeconds) {
194+
throw new Error('Project has no clips to export');
195+
}
221196

222-
// 4. Switch Tone.js to offline context
223-
const toneOfflineCtx = new Tone.Context(offlineCtx);
224-
Tone.setContext(toneOfflineCtx);
197+
onProgress?.(0);
225198

226-
// Set up transport for offline context
227-
const transport = Tone.getTransport();
199+
// 2. Use Tone.Offline for proper offline rendering
200+
// This handles context switching and transport sync correctly
201+
const renderedBuffer = await Tone.Offline(async ({ transport }) => {
202+
// Set up transport
228203
transport.bpm.value = project.bpm;
229204
transport.timeSignature = project.timeSignature;
230205

231-
// 5. Build audio graph and schedule clips
232-
const nodesToDispose: Tone.ToneAudioNode[] = [];
206+
// Create master chain: masterGain -> limiter -> destination
207+
const masterLimiter = new Tone.Limiter(-1);
208+
masterLimiter.toDestination();
209+
210+
const masterGain = new Tone.Gain(0.8);
211+
masterGain.connect(masterLimiter);
233212

213+
// Process each track
234214
for (const track of project.tracks) {
235215
if (track.muted) continue;
236216

237-
// Build track chain: input -> effects -> gain -> pan -> destination
217+
// Build track chain: input -> effects -> gain -> pan -> masterGain
238218
const panner = new Tone.Panner(track.pan);
239-
panner.toDestination();
240-
nodesToDispose.push(panner);
219+
panner.connect(masterGain);
241220

242221
const gain = new Tone.Gain(track.volume);
243222
gain.connect(panner);
244-
nodesToDispose.push(gain);
245223

246224
// Build effects chain
247225
let chainInput: Tone.ToneAudioNode = gain;
248226
if (track.effects && track.effects.length > 0) {
249227
const activeEffects = track.effects.filter(e => e.active);
250228

251-
// Effects connect in reverse (last effect -> gain)
252229
const effectNodes: Tone.ToneAudioNode[] = [];
253230
for (const effect of activeEffects) {
254231
const node = await createEffectNode(effect);
255232
if (node) {
256233
effectNodes.push(node);
257-
nodesToDispose.push(node);
258234
}
259235
}
260236

261-
// Connect: entry -> effect1 -> effect2 -> ... -> gain
262237
if (effectNodes.length > 0) {
263238
const entryGain = new Tone.Gain(1);
264-
nodesToDispose.push(entryGain);
265-
266239
let current: Tone.ToneAudioNode = entryGain;
267240
for (const effectNode of effectNodes) {
268241
current.connect(effectNode);
@@ -280,70 +253,48 @@ export async function exportProjectToWav(
280253
const clipStartSeconds = barsToSeconds(clip.startBar, project.bpm, beatsPerBar);
281254

282255
if (clip.type === 'audio' && clip.activeTakeId) {
283-
// Schedule audio clip
284-
await scheduleAudioClipOffline(
256+
await scheduleAudioClipForOffline(
285257
clip,
286258
chainInput,
287259
clipStartSeconds,
288260
project,
289-
offlineCtx,
290-
nodesToDispose
261+
transport
291262
);
292263
} else if ((clip.type === 'midi' || clip.type === 'drum') && clip.notes) {
293-
// Schedule MIDI/Drum clip
294-
await scheduleMidiClipOffline(
264+
await scheduleMidiClipForOffline(
295265
clip,
296266
track,
297267
chainInput,
298268
clipStartSeconds,
299269
project,
300-
nodesToDispose
270+
transport
301271
);
302272
}
303273
}
304274
}
305275

306-
// 6. Start transport and render
276+
// Start transport - Tone.Offline will handle the rendering
307277
transport.start(0);
308278

309-
onProgress?.(0); // Initial progress
310-
const renderedBuffer = await offlineCtx.startRendering();
311-
onProgress?.(100); // Complete
312-
313-
314-
// 7. Cleanup offline nodes
315-
transport.stop();
316-
transport.cancel();
317-
nodesToDispose.forEach(node => {
318-
try {
319-
node.dispose();
320-
} catch (_e) {
321-
// Ignore disposal errors
322-
}
323-
});
324-
325-
// 8. Convert to WAV
326-
const wavBlob = audioBufferToWav(renderedBuffer);
279+
}, duration);
327280

328-
return wavBlob;
281+
onProgress?.(100);
329282

330-
} finally {
331-
// 9. ALWAYS restore original context
332-
Tone.setContext(originalContext);
333-
}
283+
// 3. Convert to WAV (get the underlying AudioBuffer from ToneAudioBuffer)
284+
const wavBlob = audioBufferToWav(renderedBuffer.get() as AudioBuffer);
285+
return wavBlob;
334286
}
335287

336288
// ============================================
337-
// Audio Clip Scheduling (Offline)
289+
// Audio Clip Scheduling (for Tone.Offline)
338290
// ============================================
339291

340-
async function scheduleAudioClipOffline(
292+
async function scheduleAudioClipForOffline(
341293
clip: Clip,
342294
destination: Tone.ToneAudioNode,
343295
startTime: number,
344-
project: Project,
345-
offlineCtx: OfflineAudioContext,
346-
nodesToDispose: Tone.ToneAudioNode[]
296+
_project: Project,
297+
_transport: typeof Tone.Transport
347298
): Promise<void> {
348299
if (!clip.activeTakeId) return;
349300

@@ -354,16 +305,18 @@ async function scheduleAudioClipOffline(
354305
}
355306

356307
try {
357-
// Decode audio in offline context - create a proper ArrayBuffer copy
308+
// Create a proper ArrayBuffer copy from Uint8Array
358309
const arrayBuffer = new ArrayBuffer(take.audioData.byteLength);
359310
new Uint8Array(arrayBuffer).set(take.audioData);
360-
const audioBuffer = await offlineCtx.decodeAudioData(arrayBuffer);
361311

362-
// Create Tone buffer and player
363-
const toneBuffer = new Tone.ToneAudioBuffer(audioBuffer);
364-
const player = new Tone.Player(toneBuffer);
312+
// Decode using the Tone.js context (works in offline context)
313+
const audioCtx = Tone.getContext().rawContext;
314+
const audioBuffer = await audioCtx.decodeAudioData(arrayBuffer);
315+
const buffer = new Tone.ToneAudioBuffer(audioBuffer);
316+
317+
// Create player
318+
const player = new Tone.Player(buffer);
365319
player.connect(destination);
366-
nodesToDispose.push(player);
367320

368321
// Apply fades
369322
player.fadeIn = clip.fadeIn || 0;
@@ -372,7 +325,7 @@ async function scheduleAudioClipOffline(
372325
// Calculate trim/duration
373326
const trimStart = clip.trimStart || 0;
374327
const trimEnd = clip.trimEnd || 0;
375-
const sourceDuration = toneBuffer.duration;
328+
const sourceDuration = buffer.duration;
376329
const playDuration = Math.max(0, sourceDuration - trimStart - trimEnd);
377330

378331
if (playDuration > 0) {
@@ -387,37 +340,33 @@ async function scheduleAudioClipOffline(
387340
}
388341

389342
// ============================================
390-
// MIDI Clip Scheduling (Offline)
343+
// MIDI Clip Scheduling (for Tone.Offline)
391344
// ============================================
392345

393-
async function scheduleMidiClipOffline(
346+
async function scheduleMidiClipForOffline(
394347
clip: Clip,
395348
track: Track,
396349
destination: Tone.ToneAudioNode,
397350
startTime: number,
398351
project: Project,
399-
nodesToDispose: Tone.ToneAudioNode[]
352+
transport: typeof Tone.Transport
400353
): Promise<void> {
401354
if (!clip.notes?.length) return;
402355

403356
const synth = createSynthForTrack(track);
404357
synth.connect(destination);
405-
nodesToDispose.push(synth);
406358

407359
// Wait for synth to be ready (important for Sampler which loads async)
408360
await waitForSynthReady(synth);
409361

410-
const transport = Tone.getTransport();
411-
412-
// Check if synth is polyphonic (PolySynth and Sampler can handle multiple notes at same time)
362+
// Check if synth is polyphonic
413363
const isPolyphonic = synth instanceof Tone.PolySynth || synth instanceof Tone.Sampler;
414364

415365
// Group notes by start time to handle concurrent notes for monophonic synths
416366
const notesByTime = new Map<number, typeof clip.notes>();
417367
for (const note of clip.notes) {
418368
const noteOffsetSeconds = beatsToSeconds(note.startBeat, project.bpm);
419369
const absoluteTime = startTime + noteOffsetSeconds;
420-
// Round to avoid floating point issues
421370
const timeKey = Math.round(absoluteTime * 10000) / 10000;
422371

423372
if (!notesByTime.has(timeKey)) {
@@ -426,11 +375,10 @@ async function scheduleMidiClipOffline(
426375
notesByTime.get(timeKey)!.push(note);
427376
}
428377

429-
// Schedule notes, adding tiny offsets for monophonic synths with concurrent notes
378+
// Schedule notes using the provided transport
430379
for (const [timeKey, notes] of notesByTime) {
431380
notes.forEach((note, index) => {
432381
const noteDurationSeconds = beatsToSeconds(note.duration, project.bpm);
433-
// For monophonic synths, add 1ms offset for each concurrent note
434382
const offset = isPolyphonic ? 0 : index * 0.001;
435383
const scheduledTime = timeKey + offset;
436384

@@ -444,7 +392,6 @@ async function scheduleMidiClipOffline(
444392
}, scheduledTime);
445393
});
446394
}
447-
448395
}
449396

450397
// ============================================

0 commit comments

Comments (0)