From f6ccbd299ffce57fdb188dab74c81c6b800c4c37 Mon Sep 17 00:00:00 2001 From: Vaibhav Rajput Date: Fri, 27 Mar 2026 21:27:52 +0530 Subject: [PATCH] =?UTF-8?q?feat:=20add=20audio=20module=20=E2=80=94=20Mors?= =?UTF-8?q?ePlayer,=20WAV=20export,=20sound=20presets?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Phase 2 implementation: - MorsePlayer class (Web Audio API, play/pause/stop, gain envelope) - 6 sound presets (telegraph, radio, military, sonar, naval, beginner) - WAV export (toWav, toWavBlob, toWavUrl, downloadWav) - Scheduler for morse → timed events - Farnsworth spacing support in audio playback - Event callbacks (onPlay, onPause, onStop, onEnd, onSignal, onCharacter, onProgress) - Sub-path export: @morsecodeapp/morse/audio - 191 tests, 99.2% statement coverage, 100% function coverage - Bundle: core 3.43KB, audio 5.37KB, full 6.45KB (all gzipped) --- package.json | 11 +- src/audio/index.ts | 41 +++ src/audio/player.ts | 454 ++++++++++++++++++++++++++++ src/audio/presets.ts | 89 ++++++ src/audio/scheduler.ts | 99 +++++++ src/audio/types.ts | 114 +++++++ src/audio/wav.ts | 220 ++++++++++++++ src/index.ts | 4 +- test/audio/player.test.ts | 561 +++++++++++++++++++++++++++++++++++ test/audio/presets.test.ts | 81 +++++ test/audio/scheduler.test.ts | 156 ++++++++++ test/audio/wav.test.ts | 173 +++++++++++ tsup.config.ts | 1 + vitest.config.ts | 2 +- 14 files changed, 2002 insertions(+), 4 deletions(-) create mode 100644 src/audio/index.ts create mode 100644 src/audio/player.ts create mode 100644 src/audio/presets.ts create mode 100644 src/audio/scheduler.ts create mode 100644 src/audio/types.ts create mode 100644 src/audio/wav.ts create mode 100644 test/audio/player.test.ts create mode 100644 test/audio/presets.test.ts create mode 100644 test/audio/scheduler.test.ts create mode 100644 test/audio/wav.test.ts diff --git a/package.json b/package.json index c9523e8..fdd2e37 100644 --- a/package.json +++ b/package.json @@ -47,6 +47,11 @@ "types": "./dist/core/index.d.ts", "import": "./dist/core/index.js", "require": "./dist/core/index.cjs" + }, + "./audio": { + "types": "./dist/audio/index.d.ts", + "import": "./dist/audio/index.js", + "require": "./dist/audio/index.cjs" } }, "files": [ @@ -78,8 +83,12 @@ "limit": "5 kB" }, { - "path": "dist/index.js", + "path": "dist/audio/index.js", "limit": "6 kB" + }, + { + "path": "dist/index.js", + "limit": "12 kB" } ], "engines": { diff --git a/src/audio/index.ts b/src/audio/index.ts new file mode 100644 index 0000000..c125e3f --- /dev/null +++ b/src/audio/index.ts @@ -0,0 +1,41 @@ +/** + * @morsecodeapp/morse/audio — Audio playback and WAV export. + * MorsePlayer requires a browser with Web Audio API. + * WAV export works in any JavaScript runtime. + * + * @see https://morsecodeapp.com + * @license MIT + */ + +// Types +export type { + WaveformType, + PlayerState, + GainEnvelopeOptions, + MorsePlayerOptions, + PlayOptions, + SoundPreset, + WavOptions, +} from './types.js'; + +// Player +export { MorsePlayer } from './player.js'; + +// Presets +export { + presets, + telegraph, + radio, + military, + sonar, + naval, + beginner, + type PresetName, +} from './presets.js'; + +// WAV Export +export { toWav, toWavBlob, toWavUrl, downloadWav } from './wav.js'; + +// Scheduler (useful for custom audio rendering) +export type { ScheduleEvent } from './scheduler.js'; +export { buildSchedule, scheduleDuration } from './scheduler.js'; diff --git a/src/audio/player.ts b/src/audio/player.ts new file mode 100644 index 0000000..cfedda4 --- /dev/null +++ b/src/audio/player.ts @@ -0,0 +1,454 @@ +/** + * MorsePlayer — Web Audio API morse code playback. + * Plays morse code as audio with configurable frequency, waveform, and timing. + * Supports play/pause/stop, gain envelope, and event callbacks. + * + * @see https://morsecodeapp.com + */ + +import { encode } from '../core/encode.js'; +import { timing, farnsworthTiming, DEFAULT_WPM } from '../core/timing.js'; +import { buildSchedule, scheduleDuration, type ScheduleEvent } from './scheduler.js'; +import type { + MorsePlayerOptions, + PlayOptions, + PlayerState, + WaveformType, + GainEnvelopeOptions, +} from './types.js'; + +const DEFAULT_FREQUENCY = 600; +const DEFAULT_VOLUME = 80; +const DEFAULT_WAVEFORM: WaveformType = 'sine'; +const DEFAULT_ENVELOPE: GainEnvelopeOptions = { attack: 0.01, release: 0.01 }; +const MIN_FREQUENCY = 200; +const MAX_FREQUENCY = 2000; +const PROGRESS_INTERVAL = 50; // ms + +/** + * Web Audio API morse code player. + * + * @example + * ```ts + * const player = new MorsePlayer({ wpm: 20, frequency: 600 }); + * await player.play('Hello World'); + * ``` + */ +export class MorsePlayer { + // --- Configurable properties --- + private _wpm: number; + private _frequency: number; + private _waveform: WaveformType; + private _volume: number; + private _farnsworth: boolean; + private _farnsworthWpm: number; + private _envelope: GainEnvelopeOptions; + + // --- State --- + private _state: PlayerState = 'idle'; + private _totalTime: number = 0; + + // --- Audio context --- + private ctx: AudioContext | null; + private ownCtx: boolean; + private masterGain: GainNode | null = null; + private oscillators: OscillatorNode[] = []; + private gainNodes: GainNode[] = []; + + // --- Playback tracking --- + private toneEvents: ScheduleEvent[] = []; + private playStartCtxTime: number = 0; + private pauseElapsed: number = 0; + private progressTimer: ReturnType | null = null; + private endTimer: ReturnType | null = null; + private playResolve: (() => void) | null = null; + private nextSignalIdx: number = 0; + private lastFiredCharIdx: number = -1; + private charTextMap: string[] = []; + + // --- Callbacks --- + private _onPlay?: () => void; + private _onPause?: () => void; + private _onResume?: () => void; + private _onStop?: () => void; + private _onEnd?: () => void; + private _onSignal?: (signal: 'dot' | 'dash', charIndex: number) => void; + private _onCharacter?: (char: string, morse: string, charIndex: number) => void; + private _onProgress?: (currentMs: number, totalMs: number) => void; + + constructor(options?: MorsePlayerOptions) { + this._wpm = clamp(options?.wpm ?? DEFAULT_WPM, 1, 60); + this._frequency = clamp(options?.frequency ?? DEFAULT_FREQUENCY, MIN_FREQUENCY, MAX_FREQUENCY); + this._waveform = options?.waveform ?? DEFAULT_WAVEFORM; + this._volume = clamp(options?.volume ?? DEFAULT_VOLUME, 0, 100); + this._farnsworth = options?.farnsworth ?? false; + this._farnsworthWpm = options?.farnsworthWpm ?? 15; + this._envelope = { ...DEFAULT_ENVELOPE, ...options?.gainEnvelope }; + + if (options?.audioContext) { + this.ctx = options.audioContext; + this.ownCtx = false; + } else { + this.ctx = null; + this.ownCtx = false; + } + + this._onPlay = options?.onPlay; + this._onPause = options?.onPause; + this._onResume = options?.onResume; + this._onStop = options?.onStop; + this._onEnd = options?.onEnd; + this._onSignal = options?.onSignal; + this._onCharacter = options?.onCharacter; + this._onProgress = options?.onProgress; + } + + // --- Public getters / setters --- + + get state(): PlayerState { + return this._state; + } + + get totalTime(): number { + return this._totalTime; + } + + get currentTime(): number { + if (this._state === 'idle') return 0; + if (this._state === 'paused') return this.pauseElapsed; + if (!this.ctx) return 0; + return Math.min( + (this.ctx.currentTime - this.playStartCtxTime) * 1000, + this._totalTime, + ); + } + + get progress(): number { + if (this._totalTime === 0) return 0; + return Math.min(this.currentTime / this._totalTime, 1); + } + + get wpm(): number { + return this._wpm; + } + set wpm(value: number) { + this._wpm = clamp(value, 1, 60); + } + + get frequency(): number { + return this._frequency; + } + set frequency(value: number) { + this._frequency = clamp(value, MIN_FREQUENCY, MAX_FREQUENCY); + } + + get volume(): number { + return this._volume; + } + set volume(value: number) { + this._volume = clamp(value, 0, 100); + if (this.masterGain) { + this.masterGain.gain.value = this._volume / 100; + } + } + + // --- Public methods --- + + /** + * Play morse code audio. + * Accepts text (auto-encodes) or raw morse (with `{ morse: true }`). + * + * @returns Promise that resolves when playback ends or is stopped + */ + async play(input: string, options?: PlayOptions): Promise { + if (this._state !== 'idle') { + this.stopInternal(false); + } + + // Determine morse string + const isMorse = options?.morse ?? false; + const morse = isMorse ? input : encode(input, { charset: options?.charset }); + + // Build character map for callbacks + if (!isMorse) { + this.charTextMap = input.replace(/\s+/g, '').split(''); + } else { + const morseLetters: string[] = []; + for (const word of morse.split(/\s*\/\s*/)) { + if (!word) continue; + for (const l of word.trim().split(/\s+/)) { + if (l) morseLetters.push(l); + } + } + this.charTextMap = morseLetters; + } + + // Build timing + const t = this._farnsworth + ? farnsworthTiming(this._farnsworthWpm, this._wpm) + : timing(this._wpm); + + // Build schedule + const schedule = buildSchedule(morse, t); + this.toneEvents = schedule.filter(e => e.type === 'tone'); + this._totalTime = scheduleDuration(schedule); + this.nextSignalIdx = 0; + this.lastFiredCharIdx = -1; + + if (this.toneEvents.length === 0) return; + + // Ensure AudioContext + await this.ensureContext(); + + // Master gain (volume control) + this.masterGain = this.ctx!.createGain(); + this.masterGain.gain.value = this._volume / 100; + this.masterGain.connect(this.ctx!.destination); + + // Schedule all tones with gain envelopes + const now = this.ctx!.currentTime; + this.playStartCtxTime = now; + this.pauseElapsed = 0; + + for (const event of this.toneEvents) { + const startSec = now + event.start / 1000; + const durationSec = event.duration / 1000; + const endSec = startSec + durationSec; + + const osc = this.ctx!.createOscillator(); + osc.type = this._waveform; + osc.frequency.value = this._frequency; + + const gain = this.ctx!.createGain(); + const attack = Math.min(this._envelope.attack, durationSec / 2); + const release = Math.min(this._envelope.release, durationSec / 2); + + // Gain envelope: silence → attack → sustain → release → silence + gain.gain.setValueAtTime(0, startSec); + gain.gain.linearRampToValueAtTime(1, startSec + attack); + if (durationSec > attack + release) { + gain.gain.setValueAtTime(1, endSec - release); + } + gain.gain.linearRampToValueAtTime(0, endSec); + + osc.connect(gain); + gain.connect(this.masterGain); + + osc.start(startSec); + osc.stop(endSec + 0.05); // small buffer for clean release + + this.oscillators.push(osc); + this.gainNodes.push(gain); + } + + this._state = 'playing'; + this._onPlay?.(); + + // Start progress polling + this.startProgressPolling(); + + // Return promise that resolves on natural end or stop + return new Promise((resolve) => { + this.playResolve = resolve; + this.endTimer = setTimeout(() => { + this.handlePlaybackEnd(); + }, this._totalTime + 100); + }); + } + + /** Pause playback. Suspends the AudioContext. */ + pause(): void { + if (this._state !== 'playing' || !this.ctx) return; + + this.pauseElapsed = (this.ctx.currentTime - this.playStartCtxTime) * 1000; + this.ctx.suspend(); + this.stopProgressPolling(); + + this._state = 'paused'; + this._onPause?.(); + } + + /** Resume playback from paused state. */ + async resume(): Promise { + if (this._state !== 'paused' || !this.ctx) return; + + await this.ctx.resume(); + this._state = 'playing'; + this.startProgressPolling(); + this._onResume?.(); + } + + /** Stop playback and reset to idle. */ + stop(): void { + this.stopInternal(true); + } + + /** Dispose of all resources. Call when done with the player. */ + dispose(): void { + this.stopInternal(false); + if (this.ctx && this.ownCtx) { + this.ctx.close().catch(() => {}); + } + this.ctx = null; + } + + // --- Internal --- + + private async ensureContext(): Promise { + if (!this.ctx) { + this.ctx = new AudioContext(); + this.ownCtx = true; + } + if (this.ctx.state === 'suspended') { + await this.ctx.resume(); + } + } + + private stopInternal(fireCallback: boolean): void { + this.stopProgressPolling(); + + if (this.endTimer) { + clearTimeout(this.endTimer); + this.endTimer = null; + } + + // Disconnect all audio nodes + for (const osc of this.oscillators) { + try { osc.stop(); } catch { /* already stopped */ } + try { osc.disconnect(); } catch { /* already disconnected */ } + } + for (const gain of this.gainNodes) { + try { gain.disconnect(); } catch { /* already disconnected */ } + } + this.oscillators = []; + this.gainNodes = []; + + if (this.masterGain) { + try { this.masterGain.disconnect(); } catch { /* */ } + this.masterGain = null; + } + + // Resume suspended context so it's usable for next play() + if (this._state === 'paused' && this.ctx) { + this.ctx.resume().catch(() => {}); + } + + this._state = 'idle'; + this.pauseElapsed = 0; + + if (fireCallback) { + this._onStop?.(); + } + + if (this.playResolve) { + const resolve = this.playResolve; + this.playResolve = null; + resolve(); + } + } + + private handlePlaybackEnd(): void { + this.stopProgressPolling(); + this.fireRemainingEvents(); + + // Cleanup audio nodes + for (const osc of this.oscillators) { + try { osc.disconnect(); } catch { /* */ } + } + for (const gain of this.gainNodes) { + try { gain.disconnect(); } catch { /* */ } + } + this.oscillators = []; + this.gainNodes = []; + + if (this.masterGain) { + try { this.masterGain.disconnect(); } catch { /* */ } + this.masterGain = null; + } + + this._state = 'idle'; + this.endTimer = null; + this._onEnd?.(); + + if (this.playResolve) { + const resolve = this.playResolve; + this.playResolve = null; + resolve(); + } + } + + private startProgressPolling(): void { + this.progressTimer = setInterval(() => { + this.pollProgress(); + }, PROGRESS_INTERVAL); + } + + private stopProgressPolling(): void { + if (this.progressTimer) { + clearInterval(this.progressTimer); + this.progressTimer = null; + } + } + + private pollProgress(): void { + const elapsed = this.currentTime; + + this._onProgress?.(elapsed, this._totalTime); + + // Fire onSignal / onCharacter for newly-passed tones + while (this.nextSignalIdx < this.toneEvents.length) { + const event = this.toneEvents[this.nextSignalIdx]!; + if (event.start <= elapsed) { + if (event.signal) { + this._onSignal?.(event.signal, event.charIndex ?? 0); + } + + // Fire onCharacter when the last signal of a character is reached + const nextEvent = this.toneEvents[this.nextSignalIdx + 1]; + const charFinished = !nextEvent || nextEvent.charIndex !== event.charIndex; + if ( + charFinished && + event.charIndex !== undefined && + event.charIndex !== this.lastFiredCharIdx + ) { + this.lastFiredCharIdx = event.charIndex; + const charText = this.charTextMap[event.charIndex] ?? ''; + this._onCharacter?.(charText, event.morseChar ?? '', event.charIndex); + } + + this.nextSignalIdx++; + } else { + break; + } + } + } + + private fireRemainingEvents(): void { + while (this.nextSignalIdx < this.toneEvents.length) { + const event = this.toneEvents[this.nextSignalIdx]!; + if (event.signal) { + this._onSignal?.(event.signal, event.charIndex ?? 0); + } + + const nextEvent = this.toneEvents[this.nextSignalIdx + 1]; + const charFinished = !nextEvent || nextEvent.charIndex !== event.charIndex; + if ( + charFinished && + event.charIndex !== undefined && + event.charIndex !== this.lastFiredCharIdx + ) { + this.lastFiredCharIdx = event.charIndex; + const charText = this.charTextMap[event.charIndex] ?? ''; + this._onCharacter?.(charText, event.morseChar ?? '', event.charIndex); + } + + this.nextSignalIdx++; + } + + this._onProgress?.(this._totalTime, this._totalTime); + } +} + +function clamp(value: number, min: number, max: number): number { + return Math.max(min, Math.min(max, value)); +} diff --git a/src/audio/presets.ts b/src/audio/presets.ts new file mode 100644 index 0000000..18b0db8 --- /dev/null +++ b/src/audio/presets.ts @@ -0,0 +1,89 @@ +/** + * Sound presets for MorsePlayer. + * Each preset provides audio settings tuned for a specific character. + * + * @see https://morsecodeapp.com + */ + +import type { SoundPreset } from './types.js'; + +/** Classic telegraph key/sounder — warm, clicky tone */ +export const telegraph: SoundPreset = { + name: 'Telegraph', + description: 'Classic telegraph sounder — warm, clicky tone', + wpm: 15, + frequency: 550, + waveform: 'square', + volume: 70, + gainEnvelope: { attack: 0.002, release: 0.002 }, +}; + +/** Clean amateur radio CW tone */ +export const radio: SoundPreset = { + name: 'Radio', + description: 'Clean amateur radio CW tone', + wpm: 20, + frequency: 600, + waveform: 'sine', + volume: 80, + gainEnvelope: { attack: 0.01, release: 0.01 }, +}; + +/** Crisp military communication tone */ +export const military: SoundPreset = { + name: 'Military', + description: 'Crisp military communication tone', + wpm: 25, + frequency: 700, + waveform: 'sine', + volume: 85, + gainEnvelope: { attack: 0.005, release: 0.005 }, +}; + +/** Deep submarine sonar ping */ +export const sonar: SoundPreset = { + name: 'Sonar', + description: 'Deep submarine sonar ping', + wpm: 12, + frequency: 400, + waveform: 'sine', + volume: 75, + gainEnvelope: { attack: 0.02, release: 0.04 }, +}; + +/** Naval fleet communication tone */ +export const naval: SoundPreset = { + name: 'Naval', + description: 'Naval fleet communication tone', + wpm: 18, + frequency: 650, + waveform: 'triangle', + volume: 78, + gainEnvelope: { attack: 0.008, release: 0.008 }, +}; + +/** Slow Farnsworth spacing for learning */ +export const beginner: SoundPreset = { + name: 'Beginner', + description: 'Slow Farnsworth spacing for learning', + wpm: 18, + frequency: 600, + waveform: 'sine', + volume: 80, + farnsworth: true, + farnsworthWpm: 5, + gainEnvelope: { attack: 0.01, release: 0.01 }, +}; + +/** All available presets */ +export const presets = { + telegraph, + radio, + military, + sonar, + naval, + beginner, +} as const; + +/** Preset name */ +export type PresetName = keyof typeof presets; diff --git a/src/audio/scheduler.ts b/src/audio/scheduler.ts new file mode 100644 index 0000000..2f4b06b --- /dev/null +++ b/src/audio/scheduler.ts @@ -0,0 +1,99 @@ +/** + * Morse → timed event scheduler. + * Converts a morse string and timing values into a timeline of tone/silence events. + * Used by both MorsePlayer and WAV export for consistent audio generation. + * + * @see https://morsecodeapp.com + */ + +import type { TimingValues } from '../core/types.js'; + +/** A single event in the playback schedule */ +export interface ScheduleEvent { + /** Event type */ + type: 'tone' | 'silence'; + /** Start time in milliseconds from beginning */ + start: number; + /** Duration in milliseconds */ + duration: number; + /** Signal type (tone events only) */ + signal?: 'dot' | 'dash'; + /** The morse pattern this signal belongs to (e.g., '.-') */ + morseChar?: string; + /** Sequential character index in the message */ + charIndex?: number; +} + +/** + * Build a schedule of tones and silences from a morse string. + * + * @param morse - Standard morse string (dots, dashes, spaces, slashes) + * @param timings - Timing values from timing() or farnsworthTiming() + * @returns Array of timed events + */ +export function buildSchedule( + morse: string, + timings: TimingValues, +): ScheduleEvent[] { + const trimmed = morse.trim(); + if (!trimmed) return []; + + const events: ScheduleEvent[] = []; + let cursor = 0; + let charIndex = 0; + + const words = trimmed.split(/\s*\/\s*/); + + for (let wi = 0; wi < words.length; wi++) { + if (wi > 0) { + events.push({ type: 'silence', start: cursor, duration: timings.interWord }); + cursor += timings.interWord; + } + + const word = words[wi]; + if (!word) continue; + const letters = word.trim().split(/\s+/); + + for (let li = 0; li < letters.length; li++) { + if (li > 0) { + events.push({ type: 'silence', start: cursor, duration: timings.interChar }); + cursor += timings.interChar; + } + + const morseChar = letters[li]; + if (!morseChar) continue; + + for (let si = 0; si < morseChar.length; si++) { + if (si > 0) { + events.push({ type: 'silence', start: cursor, duration: timings.intraChar }); + cursor += timings.intraChar; + } + + const isDash = morseChar.charAt(si) === '-'; + const duration = isDash ? timings.dash : timings.dot; + events.push({ + type: 'tone', + start: cursor, + duration, + signal: isDash ? 'dash' : 'dot', + morseChar, + charIndex, + }); + cursor += duration; + } + + charIndex++; + } + } + + return events; +} + +/** + * Get total duration of a schedule in milliseconds. + */ +export function scheduleDuration(events: ScheduleEvent[]): number { + if (events.length === 0) return 0; + const last = events[events.length - 1]!; + return Math.round(last.start + last.duration); +} diff --git a/src/audio/types.ts b/src/audio/types.ts new file mode 100644 index 0000000..1d542e4 --- /dev/null +++ b/src/audio/types.ts @@ -0,0 +1,114 @@ +/** + * @morsecodeapp/morse — Audio module types + * @see https://morsecodeapp.com + */ + +import type { CharsetId } from '../core/types.js'; + +/** Supported oscillator waveform types */ +export type WaveformType = 'sine' | 'square' | 'sawtooth' | 'triangle'; + +/** Player state machine */ +export type PlayerState = 'idle' | 'playing' | 'paused'; + +/** Gain envelope configuration for click-free audio */ +export interface GainEnvelopeOptions { + /** Attack time in seconds (ramp up from silence). Default: 0.01 */ + attack: number; + /** Release time in seconds (ramp down to silence). Default: 0.01 */ + release: number; +} + +/** Options for MorsePlayer constructor */ +export interface MorsePlayerOptions { + /** Words per minute (1–60). Default: 20 */ + wpm?: number; + /** Tone frequency in Hz (200–2000). Default: 600 */ + frequency?: number; + /** Oscillator waveform. Default: 'sine' */ + waveform?: WaveformType; + /** Volume (0–100). Default: 80 */ + volume?: number; + /** Enable Farnsworth spacing. Default: false */ + farnsworth?: boolean; + /** Farnsworth overall WPM (character WPM uses `wpm`). Default: 15 */ + farnsworthWpm?: number; + /** Gain envelope for click-free audio. Default: { attack: 0.01, release: 0.01 } */ + gainEnvelope?: GainEnvelopeOptions; + /** Existing AudioContext to reuse (optional, browser only) */ + audioContext?: AudioContext; + + // Event callbacks + /** Fired when playback starts */ + onPlay?: () => void; + /** Fired when playback is paused */ + onPause?: () => void; + /** Fired when playback resumes from pause */ + onResume?: () => void; + /** Fired when playback is stopped */ + onStop?: () => void; + /** Fired when playback ends naturally */ + onEnd?: () => void; + /** Fired for each dot or dash signal */ + onSignal?: (signal: 'dot' | 'dash', charIndex: number) => void; + /** Fired when a complete character finishes playing */ + onCharacter?: (char: string, morse: string, charIndex: number) => void; + /** Fired periodically with playback progress */ + onProgress?: (currentMs: number, totalMs: number) => void; +} + +/** Options for play() method */ +export interface PlayOptions { + /** If true, input is treated as raw morse code. Default: false (text) */ + morse?: boolean; + /** Character set for encoding text input. Default: 'itu' */ + charset?: CharsetId; +} + +/** Sound preset configuration */ +export interface SoundPreset { + /** Preset name */ + readonly name: string; + /** Short description */ + readonly description: string; + /** Words per minute */ + readonly wpm: number; + /** Tone frequency in Hz */ + readonly frequency: number; + /** Oscillator waveform */ + readonly waveform: WaveformType; + /** Volume (0–100) */ + readonly volume: number; + /** Farnsworth enabled */ + readonly farnsworth?: boolean; + /** Farnsworth overall WPM */ + readonly farnsworthWpm?: number; + /** Gain envelope */ + readonly gainEnvelope?: GainEnvelopeOptions; +} + +/** Options for WAV export functions */ +export interface WavOptions { + /** Words per minute (1–60). Default: 20 */ + wpm?: number; + /** Tone frequency in Hz (200–2000). Default: 600 */ + frequency?: number; + /** Oscillator waveform. Default: 'sine' */ + waveform?: WaveformType; + /** Volume (0–100). Default: 80 */ + volume?: number; + /** Sample rate in Hz. Default: 44100 */ + sampleRate?: number; + /** Gain envelope. Default: { attack: 0.01, release: 0.01 } */ + gainEnvelope?: GainEnvelopeOptions; + /** Enable Farnsworth spacing. Default: false */ + farnsworth?: boolean; + /** Farnsworth overall WPM */ + farnsworthWpm?: number; + /** Character set for text input. Default: 'itu' */ + charset?: CharsetId; + /** If true, input is treated as raw morse. Default: false */ + morse?: boolean; + /** Filename for downloadWav. Default: 'morse.wav' */ + filename?: string; +} diff --git a/src/audio/wav.ts b/src/audio/wav.ts new file mode 100644 index 0000000..cbee478 --- /dev/null +++ b/src/audio/wav.ts @@ -0,0 +1,220 @@ +/** + * WAV file generation — converts morse code to WAV audio data. + * Pure computation — works in Node.js, Bun, Deno, and browsers. + * + * @see https://morsecodeapp.com + */ + +import { encode } from '../core/encode.js'; +import { timing, farnsworthTiming } from '../core/timing.js'; +import { buildSchedule, scheduleDuration } from './scheduler.js'; +import type { WavOptions, WaveformType } from './types.js'; + +const DEFAULT_SAMPLE_RATE = 44100; +const DEFAULT_FREQUENCY = 600; +const DEFAULT_VOLUME = 80; +const DEFAULT_WPM = 20; +const DEFAULT_ATTACK = 0.01; +const DEFAULT_RELEASE = 0.01; + +/** + * Generate WAV audio data from text or morse code. + * Returns raw WAV file bytes as a Uint8Array. + * + * @example + * ```ts + * const wav = toWav('SOS'); + * const wav = toWav('... --- ...', { morse: true, frequency: 800 }); + * ``` + */ +export function toWav(input: string, options?: WavOptions): Uint8Array { + const opts = resolveOptions(options); + const morse = opts.morse ? input : encode(input, { charset: opts.charset }); + + const t = opts.farnsworth && opts.farnsworthWpm !== undefined + ? farnsworthTiming(opts.farnsworthWpm, opts.wpm) + : timing(opts.wpm); + + const schedule = buildSchedule(morse, t); + const totalMs = scheduleDuration(schedule); + + if (totalMs === 0) return createWavFile(new Float32Array(0), opts.sampleRate); + + const totalSamples = Math.ceil((totalMs / 1000) * opts.sampleRate); + const samples = new Float32Array(totalSamples); + const volume = opts.volume / 100; + const tones = schedule.filter(e => e.type === 'tone'); + + for (const tone of tones) { + const startSample = Math.floor((tone.start / 1000) * opts.sampleRate); + const endSample = Math.min( + Math.floor(((tone.start + tone.duration) / 1000) * opts.sampleRate), + totalSamples, + ); + const durationSec = tone.duration / 1000; + const effectiveAttack = Math.min(opts.gainEnvelope.attack, durationSec / 2); + const effectiveRelease = Math.min(opts.gainEnvelope.release, durationSec / 2); + + for (let i = startSample; i < endSample; i++) { + const globalT = i / opts.sampleRate; + const localT = (i - startSample) / opts.sampleRate; + + // Gain envelope + let envelope = 1; + if (localT < effectiveAttack) { + envelope = effectiveAttack > 0 ? localT / effectiveAttack : 1; + } else if (localT > durationSec - effectiveRelease) { + envelope = effectiveRelease > 0 + ? (durationSec - localT) / effectiveRelease + : 1; + } + + const signal = generateWaveform(opts.waveform, opts.frequency, globalT); + samples[i] = signal * volume * envelope; + } + } + + return createWavFile(samples, opts.sampleRate); +} + +/** + * Generate a WAV Blob from text or morse code. + * + * @example + * ```ts + * const blob = toWavBlob('SOS'); + * ``` + */ +export function toWavBlob(input: string, options?: WavOptions): Blob { + const data = toWav(input, options); + return new Blob([data.buffer as ArrayBuffer], { type: 'audio/wav' }); +} + +/** + * Generate a data URL of a WAV file. + * + * @example + * ```ts + * const url = toWavUrl('SOS'); + * // 'data:audio/wav;base64,...' + * ``` + */ +export function toWavUrl(input: string, options?: WavOptions): string { + const data = toWav(input, options); + // Build base64 data URL (chunked to avoid stack overflow on large arrays) + let binary = ''; + const chunkSize = 8192; + for (let i = 0; i < data.length; i += chunkSize) { + const chunk = data.subarray(i, i + chunkSize); + binary += String.fromCharCode.apply(null, Array.from(chunk)); + } + return `data:audio/wav;base64,${btoa(binary)}`; +} + +/** + * Download a WAV file in the browser. + * No-op in non-browser environments. + * + * @example + * ```ts + * downloadWav('SOS', { filename: 'sos.wav' }); + * ``` + */ +export function downloadWav(input: string, options?: WavOptions): void { + if (typeof document === 'undefined') return; + + const blob = toWavBlob(input, options); + const url = URL.createObjectURL(blob); + const a = document.createElement('a'); + a.href = url; + a.download = options?.filename ?? 'morse.wav'; + a.click(); + URL.revokeObjectURL(url); +} + +// --- Internal helpers --- + +function resolveOptions(options?: WavOptions) { + return { + wpm: clamp(options?.wpm ?? DEFAULT_WPM, 1, 60), + frequency: clamp(options?.frequency ?? DEFAULT_FREQUENCY, 200, 2000), + waveform: (options?.waveform ?? 'sine') as WaveformType, + volume: clamp(options?.volume ?? DEFAULT_VOLUME, 0, 100), + sampleRate: options?.sampleRate ?? DEFAULT_SAMPLE_RATE, + gainEnvelope: { + attack: options?.gainEnvelope?.attack ?? DEFAULT_ATTACK, + release: options?.gainEnvelope?.release ?? DEFAULT_RELEASE, + }, + farnsworth: options?.farnsworth ?? false, + farnsworthWpm: options?.farnsworthWpm, + charset: options?.charset, + morse: options?.morse ?? false, + }; +} + +/** Generate a single waveform sample at time t */ +function generateWaveform(type: WaveformType, frequency: number, t: number): number { + const phase = ((frequency * t) % 1 + 1) % 1; // ensure positive phase + switch (type) { + case 'sine': + return Math.sin(2 * Math.PI * frequency * t); + case 'square': + return phase < 0.5 ? 1 : -1; + case 'sawtooth': + return 2 * phase - 1; + case 'triangle': + return 4 * Math.abs(phase - 0.5) - 1; + default: + return Math.sin(2 * Math.PI * frequency * t); + } +} + +/** Write a WAV file from float samples (-1 to 1) */ +function createWavFile(samples: Float32Array, sampleRate: number): Uint8Array { + const numChannels = 1; + const bitsPerSample = 16; + const bytesPerSample = bitsPerSample / 8; + const dataSize = samples.length * bytesPerSample; + const headerSize = 44; + const buffer = new ArrayBuffer(headerSize + dataSize); + const view = new DataView(buffer); + + // RIFF header + writeString(view, 0, 'RIFF'); + view.setUint32(4, 36 + dataSize, true); + writeString(view, 8, 'WAVE'); + + // fmt sub-chunk + writeString(view, 12, 'fmt '); + view.setUint32(16, 16, true); // PCM chunk size + view.setUint16(20, 1, true); // PCM format + view.setUint16(22, numChannels, true); + view.setUint32(24, sampleRate, true); + view.setUint32(28, sampleRate * numChannels * bytesPerSample, true); // byte rate + view.setUint16(32, numChannels * bytesPerSample, true); // block align + view.setUint16(34, bitsPerSample, true); + + // data sub-chunk + writeString(view, 36, 'data'); + view.setUint32(40, dataSize, true); + + // PCM samples + let offset = 44; + for (let i = 0; i < samples.length; i++) { + const s = Math.max(-1, Math.min(1, samples[i] ?? 0)); + view.setInt16(offset, Math.round(s * 32767), true); + offset += 2; + } + + return new Uint8Array(buffer); +} + +function writeString(view: DataView, offset: number, str: string): void { + for (let i = 0; i < str.length; i++) { + view.setUint8(offset + i, str.charCodeAt(i)); + } +} + +function clamp(value: number, min: number, max: number): number { + return Math.max(min, Math.min(max, value)); +} diff --git a/src/index.ts b/src/index.ts index 84d8a16..107e126 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,11 +1,11 @@ /** * @morsecodeapp/morse — Full library entry point * - * Re-exports everything from core. - * Future phases will add audio, visual, and tap modules here. + * Re-exports everything from core and audio modules. * * @see https://morsecodeapp.com * @license MIT */ export * from './core/index.js'; +export * from './audio/index.js'; diff --git a/test/audio/player.test.ts b/test/audio/player.test.ts new file mode 100644 index 0000000..cbd7505 --- /dev/null +++ b/test/audio/player.test.ts @@ -0,0 +1,561 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; +import { MorsePlayer } from '../../src/audio/player.js'; + +// --- Web Audio API mocks --- + +class MockGainParam { + value = 1; + setValueAtTime = vi.fn().mockReturnThis(); + linearRampToValueAtTime = vi.fn().mockReturnThis(); +} + +class MockOscillatorNode { + type = 'sine'; + frequency = { value: 440 }; + connect = vi.fn().mockReturnThis(); + disconnect = vi.fn(); + start = vi.fn(); + stop = vi.fn(); +} + +class MockGainNode { + gain = new MockGainParam(); + connect = vi.fn().mockReturnThis(); + disconnect = vi.fn(); +} + +class MockAudioContext { + currentTime = 0; + state: string = 'running'; + destination = {} as AudioDestinationNode; + + createOscillator = vi.fn(() => new MockOscillatorNode() as unknown as OscillatorNode); + createGain = vi.fn(() => new MockGainNode() as unknown as GainNode); + suspend = vi.fn(async () => { this.state = 'suspended'; }); + resume = vi.fn(async () => { this.state = 'running'; }); + close = vi.fn(async () => { this.state = 'closed'; }); +} + +// Install global AudioContext mock +const originalAudioContext = globalThis.AudioContext; + +beforeEach(() => { + vi.useFakeTimers(); + (globalThis as unknown as Record).AudioContext = MockAudioContext; +}); + +afterEach(() => { + vi.useRealTimers(); + if (originalAudioContext) { + globalThis.AudioContext = originalAudioContext; + } else { + delete (globalThis as unknown as Record).AudioContext; + } +}); + +describe('MorsePlayer', () => { + describe('constructor', () => { + it('starts in idle state', () => { + const player = new MorsePlayer(); + expect(player.state).toBe('idle'); + }); + + it('has sensible defaults', () => { + const player = new MorsePlayer(); + expect(player.wpm).toBe(20); + expect(player.frequency).toBe(600); + expect(player.volume).toBe(80); + }); + + it('accepts custom options', () => { + const player = new MorsePlayer({ wpm: 25, frequency: 800, volume: 50 }); + expect(player.wpm).toBe(25); + expect(player.frequency).toBe(800); + expect(player.volume).toBe(50); + }); + + it('clamps frequency to valid range', () => { + expect(new MorsePlayer({ frequency: 50 }).frequency).toBe(200); + expect(new MorsePlayer({ frequency: 5000 }).frequency).toBe(2000); + }); + + it('clamps volume to valid range', () => { + expect(new MorsePlayer({ volume: -10 }).volume).toBe(0); + expect(new MorsePlayer({ volume: 200 }).volume).toBe(100); + }); + }); + + describe('dynamic property setters', () => { + it('updates wpm', () => { + const player = new MorsePlayer(); + player.wpm = 30; + expect(player.wpm).toBe(30); + }); + + it('clamps wpm on set', () => { + const player = new MorsePlayer(); + player.wpm = 100; + expect(player.wpm).toBe(60); + }); + + it('updates frequency', () => { + const player = new MorsePlayer(); + player.frequency = 700; + expect(player.frequency).toBe(700); + }); + + it('updates volume', () => { + const player = new MorsePlayer(); + player.volume = 60; + expect(player.volume).toBe(60); + }); + }); + + describe('play()', () => { + it('transitions to playing state', async () => { + const ctx = new MockAudioContext() as unknown as AudioContext; + const player = new MorsePlayer({ audioContext: ctx }); + const playPromise = player.play('E'); + + // Flush microtasks (ensureContext is async) + await vi.advanceTimersByTimeAsync(0); + expect(player.state).toBe('playing'); + + await vi.advanceTimersByTimeAsync(500); + await playPromise; + }); + + it('fires onPlay callback', async () => { + const onPlay = vi.fn(); + const ctx = new MockAudioContext() as unknown as AudioContext; + const player = new MorsePlayer({ audioContext: ctx, onPlay }); + const playPromise = player.play('E'); + + await vi.advanceTimersByTimeAsync(0); + expect(onPlay).toHaveBeenCalledOnce(); + + await vi.advanceTimersByTimeAsync(500); + await playPromise; + }); + + it('fires onEnd when playback completes', async () => { + const onEnd = vi.fn(); + const ctx = new MockAudioContext() as unknown as AudioContext; + const player = new MorsePlayer({ audioContext: ctx, onEnd }); + const playPromise = player.play('E'); + + await vi.advanceTimersByTimeAsync(0); + // E at 20 WPM = 60ms + 100ms buffer = setTimeout at ~160ms + await vi.advanceTimersByTimeAsync(500); + await playPromise; + + expect(onEnd).toHaveBeenCalledOnce(); + }); + + it('creates oscillator nodes for each signal', async () => { + const ctx = new MockAudioContext() as unknown as AudioContext; + const player = new MorsePlayer({ audioContext: ctx }); + const playPromise = player.play('E'); + + await vi.advanceTimersByTimeAsync(0); + // E = . → 1 oscillator + expect(ctx.createOscillator).toHaveBeenCalledTimes(1); + + await vi.advanceTimersByTimeAsync(500); + await playPromise; + }); + + it('creates 9 oscillators for SOS', async () => { + const ctx = new MockAudioContext() as unknown as AudioContext; + const player = new MorsePlayer({ audioContext: ctx }); + const playPromise = player.play('SOS'); + + await vi.advanceTimersByTimeAsync(0); + // SOS = ... --- ... → 9 signals + expect(ctx.createOscillator).toHaveBeenCalledTimes(9); + + await vi.advanceTimersByTimeAsync(5000); + await playPromise; + }); + + it('accepts raw morse with { morse: true }', async () => { + const ctx = new MockAudioContext() as unknown as AudioContext; + const player = new MorsePlayer({ audioContext: ctx }); + const playPromise = player.play('...', { morse: true }); + + await vi.advanceTimersByTimeAsync(0); + expect(ctx.createOscillator).toHaveBeenCalledTimes(3); + + await vi.advanceTimersByTimeAsync(500); + await playPromise; + }); + + it('resolves immediately for empty input', async () => { + const player = new MorsePlayer(); + await player.play(''); + expect(player.state).toBe('idle'); + }); + + it('reports totalTime', async () => { + const ctx = new MockAudioContext() as unknown as AudioContext; + const player = new MorsePlayer({ audioContext: ctx, wpm: 20 }); + const playPromise = player.play('E'); + + await vi.advanceTimersByTimeAsync(0); + // E = . at 20 WPM = 60ms + expect(player.totalTime).toBe(60); + + player.stop(); + await playPromise; + }); + + it('creates gain envelope for each tone', async () => { + const ctx = new MockAudioContext() as unknown as AudioContext; + const player = new MorsePlayer({ audioContext: ctx }); + const playPromise = player.play('E'); + + await vi.advanceTimersByTimeAsync(0); + // createGain called at least 2x (1 master + 1 per-tone) + expect(ctx.createGain).toHaveBeenCalled(); + // Index 0 is masterGain, index 1+ are per-tone envelope gains + const toneGainNode = (ctx.createGain as ReturnType).mock.results[1]?.value; + expect(toneGainNode.gain.setValueAtTime).toHaveBeenCalled(); + expect(toneGainNode.gain.linearRampToValueAtTime).toHaveBeenCalled(); + + await vi.advanceTimersByTimeAsync(500); + await playPromise; + }); + }); + + describe('pause()', () => { + it('transitions to paused state', async () => { + const ctx = new MockAudioContext() as unknown as AudioContext; + const player = new MorsePlayer({ audioContext: ctx }); + const playPromise = player.play('SOS'); + await vi.advanceTimersByTimeAsync(0); + + player.pause(); + expect(player.state).toBe('paused'); + + player.stop(); + await playPromise; + }); + + it('fires onPause callback', async () => { + const onPause = vi.fn(); + const ctx = new MockAudioContext() as unknown as AudioContext; + const player = new MorsePlayer({ audioContext: ctx, onPause }); + const playPromise = player.play('SOS'); + await vi.advanceTimersByTimeAsync(0); + + player.pause(); + expect(onPause).toHaveBeenCalledOnce(); + + player.stop(); + await playPromise; + }); + + it('suspends AudioContext', async () => { + const ctx = new MockAudioContext() as unknown as AudioContext; + const player = new MorsePlayer({ audioContext: ctx }); + const playPromise = player.play('SOS'); + await vi.advanceTimersByTimeAsync(0); + + player.pause(); + expect((ctx as unknown as MockAudioContext).suspend).toHaveBeenCalled(); + + player.stop(); + await playPromise; + }); + + it('is no-op when idle', () => { + const player = new MorsePlayer(); + player.pause(); + expect(player.state).toBe('idle'); + }); + }); + + describe('resume()', () => { + it('transitions back to playing', async () => { + const ctx = new MockAudioContext() as unknown as AudioContext; + const player = new MorsePlayer({ audioContext: ctx }); + const playPromise = player.play('SOS'); + await vi.advanceTimersByTimeAsync(0); + + player.pause(); + await player.resume(); + expect(player.state).toBe('playing'); + + player.stop(); + await playPromise; + }); + + it('fires onResume callback', async () => { + const onResume = vi.fn(); + const ctx = new MockAudioContext() as unknown as AudioContext; + const player = new MorsePlayer({ audioContext: ctx, onResume }); + const playPromise = player.play('SOS'); + await vi.advanceTimersByTimeAsync(0); + + player.pause(); + await player.resume(); + expect(onResume).toHaveBeenCalledOnce(); + + player.stop(); + await playPromise; + }); + + it('resumes AudioContext', async () => { + const ctx = new MockAudioContext() as unknown as AudioContext; + const player = new MorsePlayer({ audioContext: ctx }); + const playPromise = player.play('SOS'); + await vi.advanceTimersByTimeAsync(0); + + player.pause(); + await player.resume(); + expect((ctx as unknown as MockAudioContext).resume).toHaveBeenCalled(); + + player.stop(); + await playPromise; + }); + + it('is no-op when idle', async () => { + const player = new MorsePlayer(); + await player.resume(); + expect(player.state).toBe('idle'); + }); + }); + + describe('stop()', () => { + it('transitions to idle', async () => { + const ctx = new MockAudioContext() as unknown as AudioContext; + const player = new MorsePlayer({ audioContext: ctx }); + const playPromise = player.play('SOS'); + await vi.advanceTimersByTimeAsync(0); + + player.stop(); + expect(player.state).toBe('idle'); + + await playPromise; + }); + + it('fires onStop callback', async () => { + const onStop = vi.fn(); + const ctx = new MockAudioContext() as unknown as AudioContext; + const player = new MorsePlayer({ audioContext: ctx, onStop }); + const playPromise = player.play('SOS'); + await vi.advanceTimersByTimeAsync(0); + + player.stop(); + expect(onStop).toHaveBeenCalledOnce(); + + await playPromise; + }); + + it('resolves the play promise', async () => { + const ctx = new MockAudioContext() as unknown as AudioContext; + const player = new MorsePlayer({ audioContext: ctx }); + const playPromise = player.play('SOS'); + await vi.advanceTimersByTimeAsync(0); + + player.stop(); + await expect(playPromise).resolves.toBeUndefined(); + }); + + it('can stop from paused state', async () => { + const ctx = new MockAudioContext() as unknown as AudioContext; + const player = new MorsePlayer({ audioContext: ctx }); + const playPromise = player.play('SOS'); + await vi.advanceTimersByTimeAsync(0); + + player.pause(); + player.stop(); + expect(player.state).toBe('idle'); + + await playPromise; + }); + }); + + describe('dispose()', () => { + it('stops playback and goes idle', async () => { + const ctx = new MockAudioContext() as unknown as AudioContext; + const player = new MorsePlayer({ audioContext: ctx }); + const playPromise = player.play('E'); + await vi.advanceTimersByTimeAsync(0); + + player.dispose(); + expect(player.state).toBe('idle'); + + await playPromise; + }); + }); + + describe('progress tracking', () => { + it('starts with 0 currentTime and progress', () => { + const player = new MorsePlayer(); + expect(player.currentTime).toBe(0); + expect(player.progress).toBe(0); + }); + + it('returns 0 progress when totalTime is 0', () => { + const player = new MorsePlayer(); + expect(player.progress).toBe(0); + }); + }); + + describe('preset integration', () => { + it('works with preset-style options', async () => { + const ctx = new MockAudioContext() as unknown as AudioContext; + const player = new MorsePlayer({ + audioContext: ctx, + wpm: 15, + frequency: 550, + waveform: 'square', + volume: 70, + }); + + expect(player.wpm).toBe(15); + expect(player.frequency).toBe(550); + expect(player.volume).toBe(70); + + const playPromise = player.play('E'); + await vi.advanceTimersByTimeAsync(0); + + // At 15 WPM: unit = 80ms, dot = 80ms + expect(player.totalTime).toBe(80); + + player.stop(); + await playPromise; + }); + }); + + describe('edge cases', () => { + it('stops previous playback when play is called again', async () => { + const onStop = vi.fn(); + const ctx = new MockAudioContext() as unknown as AudioContext; + const player = new MorsePlayer({ audioContext: ctx, onStop }); + + const firstPromise = player.play('SOS'); + await vi.advanceTimersByTimeAsync(0); + expect(player.state).toBe('playing'); + + // Play again without stopping first + const secondPromise = player.play('E'); + await vi.advanceTimersByTimeAsync(0); + expect(player.state).toBe('playing'); + + // First promise should resolve + await firstPromise; + + player.stop(); + await secondPromise; + }); + + it('creates own AudioContext when none provided', async () => { + const player = new MorsePlayer(); + const playPromise = player.play('E'); + await vi.advanceTimersByTimeAsync(0); + expect(player.state).toBe('playing'); + + player.stop(); + await playPromise; + }); + + it('dispose closes owned AudioContext', async () => { + const player = new MorsePlayer(); + const playPromise = player.play('E'); + await vi.advanceTimersByTimeAsync(0); + + player.dispose(); + expect(player.state).toBe('idle'); + await playPromise; + }); + + it('supports Farnsworth timing', async () => { + const ctx = new MockAudioContext() as unknown as AudioContext; + const player = new MorsePlayer({ + audioContext: ctx, + wpm: 20, + farnsworth: true, + farnsworthWpm: 10, + }); + + const playPromise = player.play('. .', { morse: true }); + await vi.advanceTimersByTimeAsync(0); + + // Farnsworth extends inter-char gaps, so totalTime > standard timing + // Standard: dot(60) + interChar(180) + dot(60) = 300 + // Farnsworth at 10 WPM overall: larger gaps + expect(player.totalTime).toBeGreaterThan(300); + + player.stop(); + await playPromise; + }); + + it('paused currentTime returns pauseElapsed', async () => { + const ctx = new MockAudioContext() as unknown as AudioContext; + (ctx as unknown as MockAudioContext).currentTime = 0; + const player = new MorsePlayer({ audioContext: ctx }); + + const playPromise = player.play('SOS'); + await vi.advanceTimersByTimeAsync(0); + + // Simulate some time passing + (ctx as unknown as MockAudioContext).currentTime = 0.5; + player.pause(); + + expect(player.currentTime).toBeGreaterThan(0); + + player.stop(); + await playPromise; + }); + + it('volume setter updates masterGain while playing', async () => { + const ctx = new MockAudioContext() as unknown as AudioContext; + const player = new MorsePlayer({ audioContext: ctx }); + const playPromise = player.play('SOS'); + await vi.advanceTimersByTimeAsync(0); + + // masterGain exists now, set volume should update it + player.volume = 50; + expect(player.volume).toBe(50); + // The masterGain.gain.value should be updated (0.5) + const masterGain = (ctx.createGain as ReturnType).mock.results[0]?.value; + expect(masterGain.gain.value).toBe(0.5); + + player.stop(); + await playPromise; + }); + + it('progress is non-zero during playback', async () => { + const ctx = new MockAudioContext() as unknown as AudioContext; + (ctx as unknown as MockAudioContext).currentTime = 0; + const player = new MorsePlayer({ audioContext: ctx }); + + const playPromise = player.play('SOS'); + await vi.advanceTimersByTimeAsync(0); + + // Simulate time advancing + (ctx as unknown as MockAudioContext).currentTime = 0.5; + expect(player.progress).toBeGreaterThan(0); + expect(player.progress).toBeLessThanOrEqual(1); + + player.stop(); + await playPromise; + }); + + it('ensureContext resumes suspended context', async () => { + const ctx = new MockAudioContext() as unknown as AudioContext; + (ctx as unknown as MockAudioContext).state = 'suspended'; + const player = new MorsePlayer({ audioContext: ctx }); + + const playPromise = player.play('E'); + await vi.advanceTimersByTimeAsync(0); + + expect((ctx as unknown as MockAudioContext).resume).toHaveBeenCalled(); + + player.stop(); + await playPromise; + }); + }); +}); diff --git a/test/audio/presets.test.ts b/test/audio/presets.test.ts new file mode 100644 index 0000000..5834084 --- /dev/null +++ b/test/audio/presets.test.ts @@ -0,0 +1,81 @@ +import { describe, it, expect } from 'vitest'; +import { + presets, + telegraph, + radio, + military, + sonar, + naval, + beginner, +} from '../../src/audio/presets.js'; +import type { SoundPreset } from '../../src/audio/types.js'; + +describe('presets', () => { + it('exports 6 presets', () => { + expect(Object.keys(presets)).toHaveLength(6); + }); + + const allPresets: SoundPreset[] = [telegraph, radio, military, sonar, naval, beginner]; + const validWaveforms = ['sine', 'square', 'sawtooth', 'triangle']; + + for (const preset of allPresets) { + describe(preset.name, () => { + it('has required string properties', () => { + expect(preset.name).toBeTypeOf('string'); + expect(preset.name.length).toBeGreaterThan(0); + expect(preset.description).toBeTypeOf('string'); + expect(preset.description.length).toBeGreaterThan(0); + }); + + it('has valid wpm (1–60)', () => { + expect(preset.wpm).toBeGreaterThanOrEqual(1); + expect(preset.wpm).toBeLessThanOrEqual(60); + }); + + it('has valid frequency (200–2000 Hz)', () => { + expect(preset.frequency).toBeGreaterThanOrEqual(200); + expect(preset.frequency).toBeLessThanOrEqual(2000); + }); + + it('has valid volume (0–100)', () => { + expect(preset.volume).toBeGreaterThanOrEqual(0); + expect(preset.volume).toBeLessThanOrEqual(100); + }); + + it('has valid waveform', () => { + expect(validWaveforms).toContain(preset.waveform); + }); + }); + } + + it('telegraph uses square waveform', () => { + expect(telegraph.waveform).toBe('square'); + }); + + it('radio uses sine waveform', () => { + expect(radio.waveform).toBe('sine'); + }); + + it('beginner has Farnsworth enabled', () => { + expect(beginner.farnsworth).toBe(true); + expect(beginner.farnsworthWpm).toBeTypeOf('number'); + expect(beginner.farnsworthWpm).toBeLessThan(beginner.wpm); + }); + + it('all presets have gain envelopes', () => { + for (const preset of allPresets) { + expect(preset.gainEnvelope).toBeDefined(); + expect(preset.gainEnvelope!.attack).toBeGreaterThan(0); + expect(preset.gainEnvelope!.release).toBeGreaterThan(0); + } + }); + + it('presets object matches individual exports', () => { + expect(presets.telegraph).toBe(telegraph); + expect(presets.radio).toBe(radio); + expect(presets.military).toBe(military); + expect(presets.sonar).toBe(sonar); + expect(presets.naval).toBe(naval); + expect(presets.beginner).toBe(beginner); + }); +}); diff --git a/test/audio/scheduler.test.ts b/test/audio/scheduler.test.ts new file mode 100644 index 0000000..5f4a465 --- /dev/null +++ b/test/audio/scheduler.test.ts @@ -0,0 +1,156 @@ +import { describe, it, expect } from 'vitest'; +import { buildSchedule, scheduleDuration } from '../../src/audio/scheduler.js'; +import { timing } from '../../src/core/timing.js'; + +describe('buildSchedule', () => { + // At 20 WPM: unit = 60ms, dot = 60, dash = 180 + // intraChar = 60, interChar = 180, interWord = 420 + const t = timing(20); + + it('returns empty array for empty input', () => { + expect(buildSchedule('', t)).toEqual([]); + expect(buildSchedule(' ', t)).toEqual([]); + }); + + it('schedules a single dot (E)', () => { + const events = buildSchedule('.', t); + expect(events).toHaveLength(1); + expect(events[0]).toEqual({ + type: 'tone', + start: 0, + duration: 60, + signal: 'dot', + morseChar: '.', + charIndex: 0, + }); + }); + + it('schedules a single dash (T)', () => { + const events = buildSchedule('-', t); + expect(events).toHaveLength(1); + expect(events[0]).toEqual({ + type: 'tone', + start: 0, + duration: 180, + signal: 'dash', + morseChar: '-', + charIndex: 0, + }); + }); + + it('schedules dot-dash with intra-char gap (A = .-)', () => { + const events = buildSchedule('.-', t); + expect(events).toHaveLength(3); + expect(events[0]).toMatchObject({ type: 'tone', start: 0, duration: 60, signal: 'dot' }); + expect(events[1]).toMatchObject({ type: 'silence', start: 60, duration: 60 }); + expect(events[2]).toMatchObject({ type: 'tone', start: 120, duration: 180, signal: 'dash' }); + }); + + it('schedules two characters with inter-char gap (E E = . .)', () => { + const events = buildSchedule('. .', t); + expect(events).toHaveLength(3); + expect(events[0]).toMatchObject({ type: 'tone', start: 0, duration: 60, charIndex: 0 }); + expect(events[1]).toMatchObject({ type: 'silence', start: 60, duration: 180 }); + expect(events[2]).toMatchObject({ type: 'tone', start: 240, duration: 60, charIndex: 1 }); + }); + + it('schedules two words with inter-word gap (E E = . / .)', () => { + const events = buildSchedule('. / .', t); + expect(events).toHaveLength(3); + expect(events[0]).toMatchObject({ type: 'tone', start: 0, charIndex: 0 }); + expect(events[1]).toMatchObject({ type: 'silence', start: 60, duration: 420 }); + expect(events[2]).toMatchObject({ type: 'tone', start: 480, charIndex: 1 }); + }); + + it('schedules SOS (... --- ...)', () => { + const events = buildSchedule('... --- ...', t); + const tones = events.filter(e => e.type === 'tone'); + expect(tones).toHaveLength(9); + + // S = 3 dots → charIndex 0 + expect(tones[0]!.charIndex).toBe(0); + expect(tones[1]!.charIndex).toBe(0); + expect(tones[2]!.charIndex).toBe(0); + // O = 3 dashes → charIndex 1 + expect(tones[3]!.charIndex).toBe(1); + expect(tones[4]!.charIndex).toBe(1); + expect(tones[5]!.charIndex).toBe(1); + // S = 3 dots → charIndex 2 + expect(tones[6]!.charIndex).toBe(2); + expect(tones[7]!.charIndex).toBe(2); + expect(tones[8]!.charIndex).toBe(2); + + // Signal types + expect(tones[0]!.signal).toBe('dot'); + expect(tones[3]!.signal).toBe('dash'); + }); + + it('tracks morseChar for each tone', () => { + const events = buildSchedule('.- ...', t); + const tones = events.filter(e => e.type === 'tone'); + // A = .- + expect(tones[0]!.morseChar).toBe('.-'); + expect(tones[1]!.morseChar).toBe('.-'); + // S = ... + expect(tones[2]!.morseChar).toBe('...'); + expect(tones[3]!.morseChar).toBe('...'); + expect(tones[4]!.morseChar).toBe('...'); + }); + + it('increments charIndex across words', () => { + // H E / E = .... . / . + const events = buildSchedule('.... . / .', t); + const tones = events.filter(e => e.type === 'tone'); + expect(tones[0]!.charIndex).toBe(0); // H (first dot) + expect(tones[4]!.charIndex).toBe(1); // E (after H) + expect(tones[5]!.charIndex).toBe(2); // E (second word) + }); + + it('handles consecutive slashes gracefully', () => { + const events = buildSchedule('. / / .', t); + const tones = events.filter(e => e.type === 'tone'); + expect(tones).toHaveLength(2); // two dots + }); + + it('handles extra spaces in input', () => { + const events = buildSchedule('. .', t); + const tones = events.filter(e => e.type === 'tone'); + expect(tones).toHaveLength(2); + }); +}); + +describe('scheduleDuration', () => { + const t = timing(20); + + it('returns 0 for empty schedule', () => { + expect(scheduleDuration([])).toBe(0); + }); + + it('returns dot duration for single dot', () => { + const events = buildSchedule('.', t); + expect(scheduleDuration(events)).toBe(60); + }); + + it('returns dash duration for single dash', () => { + const events = buildSchedule('-', t); + expect(scheduleDuration(events)).toBe(180); + }); + + it('calculates correct SOS duration', () => { + const events = buildSchedule('... --- ...', t); + const dur = scheduleDuration(events); + // S: dot + gap + dot + gap + dot = 60+60+60+60+60 = 300 + // interChar: 180 + // O: dash + gap + dash + gap + dash = 180+60+180+60+180 = 660 + // interChar: 180 + // S: 300 + // Total: 300 + 180 + 660 + 180 + 300 = 1620 + expect(dur).toBe(1620); + }); + + it('includes inter-word gap in total duration', () => { + const events = buildSchedule('. / .', t); + // dot + wordGap + dot = 60 + 420 + 60 = 540 + expect(scheduleDuration(events)).toBe(540); + }); +}); diff --git a/test/audio/wav.test.ts b/test/audio/wav.test.ts new file mode 100644 index 0000000..279371c --- /dev/null +++ b/test/audio/wav.test.ts @@ -0,0 +1,173 @@ +import { describe, it, expect } from 'vitest'; +import { toWav, toWavBlob, toWavUrl, downloadWav } from '../../src/audio/wav.js'; + +describe('toWav', () => { + it('generates valid WAV header', () => { + const wav = toWav('E', { wpm: 20 }); + const view = new DataView(wav.buffer); + + // RIFF header + expect(readStr(wav, 0, 4)).toBe('RIFF'); + expect(readStr(wav, 8, 4)).toBe('WAVE'); + // fmt sub-chunk + expect(readStr(wav, 12, 4)).toBe('fmt '); + expect(view.getUint16(20, true)).toBe(1); // PCM format + expect(view.getUint16(22, true)).toBe(1); // mono + expect(view.getUint32(24, true)).toBe(44100); // sample rate + expect(view.getUint16(34, true)).toBe(16); // bits per sample + // data sub-chunk + expect(readStr(wav, 36, 4)).toBe('data'); + }); + + it('generates correct sample count for a dot at 20 WPM', () => { + const wav = toWav('.', { morse: true, wpm: 20, sampleRate: 44100 }); + const view = new DataView(wav.buffer); + const dataSize = view.getUint32(40, true); + // dot at 20 WPM = 60ms → ceil(0.06 * 44100) = 2646 samples × 2 bytes + const expectedSamples = Math.ceil(0.06 * 44100); + expect(dataSize).toBe(expectedSamples * 2); + }); + + it('contains non-zero audio data (tones present)', () => { + const wav = toWav('SOS'); + const view = new DataView(wav.buffer); + let hasNonZero = false; + for (let i = 44; i < Math.min(wav.length, 400); i += 2) { + if (view.getInt16(i, true) !== 0) { + hasNonZero = true; + break; + } + } + expect(hasNonZero).toBe(true); + }); + + it('has silence during gaps (not all samples are tones)', () => { + // Two dots with inter-char gap: . . + const wav = toWav('. .', { morse: true, wpm: 20, sampleRate: 8000 }); + const view = new DataView(wav.buffer); + // Check some samples in the silence gap region + // dot ends at 60ms, gap starts. At 8000 Hz, 60ms = 480 samples + // Silence starts around sample 480, which is byte offset 44 + 480*2 = 1004 + let foundSilence = false; + const gapStart = Math.floor(0.065 * 8000); // just after dot ends + const gapEnd = Math.floor(0.235 * 8000); // before next dot starts + for (let i = gapStart; i < gapEnd; i++) { + const offset = 44 + i * 2; + if (offset + 2 <= wav.length) { + const sample = view.getInt16(offset, true); + if (Math.abs(sample) < 100) { + foundSilence = true; + break; + } + } + } + expect(foundSilence).toBe(true); + }); + + it('returns header-only WAV for empty input', () => { + const wav = toWav(''); + expect(wav.length).toBe(44); // header only + }); + + it('respects custom sample rate', () => { + const wav = toWav('E', { sampleRate: 22050 }); + const view = new DataView(wav.buffer); + expect(view.getUint32(24, true)).toBe(22050); + }); + + it('text and morse produce same-length output for E (.)', () => { + const fromText = toWav('E'); + const fromMorse = toWav('.', { morse: true }); + expect(fromText.length).toBe(fromMorse.length); + }); + + it('longer messages produce larger output', () => { + const short = toWav('E'); + const long = toWav('HELLO WORLD'); + expect(long.length).toBeGreaterThan(short.length); + }); + + it('volume 0 produces silent output', () => { + const wav = toWav('SOS', { volume: 0, sampleRate: 8000 }); + const view = new DataView(wav.buffer); + let allSilent = true; + for (let i = 44; i < wav.length; i += 2) { + if (view.getInt16(i, true) !== 0) { + allSilent = false; + break; + } + } + expect(allSilent).toBe(true); + }); + + it('supports all waveform types', () => { + const waveforms = ['sine', 'square', 'sawtooth', 'triangle'] as const; + for (const waveform of waveforms) { + const wav = toWav('E', { waveform }); + expect(wav.length).toBeGreaterThan(44); + } + }); + + it('respects Farnsworth timing (longer duration)', () => { + const normal = toWav('. .', { morse: true, wpm: 20 }); + const farnsworth = toWav('. .', { morse: true, wpm: 20, farnsworth: true, farnsworthWpm: 10 }); + // Farnsworth spacing adds extra time between characters + expect(farnsworth.length).toBeGreaterThan(normal.length); + }); + + it('handles zero gain envelope (no ramp)', () => { + const wav = toWav('E', { gainEnvelope: { attack: 0, release: 0 } }); + expect(wav.length).toBeGreaterThan(44); + }); +}); + +describe('toWavBlob', () => { + it('returns a Blob with audio/wav type', () => { + const blob = toWavBlob('E'); + expect(blob).toBeInstanceOf(Blob); + expect(blob.type).toBe('audio/wav'); + }); + + it('has correct size matching toWav output', () => { + const wav = toWav('E'); + const blob = toWavBlob('E'); + expect(blob.size).toBe(wav.length); + }); +}); + +describe('downloadWav', () => { + it('is a no-op in non-browser environment', () => { + // In Node, document is undefined, so downloadWav should do nothing + expect(() => downloadWav('SOS')).not.toThrow(); + }); +}); + +describe('toWavUrl', () => { + it('returns a data URL with audio/wav mime type', () => { + const url = toWavUrl('E'); + expect(url).toMatch(/^data:audio\/wav;base64,/); + }); + + it('contains valid base64 data', () => { + const url = toWavUrl('E'); + const base64 = url.replace('data:audio/wav;base64,', ''); + expect(() => atob(base64)).not.toThrow(); + }); + + it('decodes back to valid WAV header', () => { + const url = toWavUrl('E'); + const base64 = url.replace('data:audio/wav;base64,', ''); + const binary = atob(base64); + expect(binary.substring(0, 4)).toBe('RIFF'); + expect(binary.substring(8, 12)).toBe('WAVE'); + }); +}); + +// Helper to read ASCII string from Uint8Array +function readStr(arr: Uint8Array, offset: number, length: number): string { + let s = ''; + for (let i = 0; i < length; i++) { + s += String.fromCharCode(arr[offset + i] ?? 0); + } + return s; +} diff --git a/tsup.config.ts b/tsup.config.ts index 3b15f48..86feb7f 100644 --- a/tsup.config.ts +++ b/tsup.config.ts @@ -4,6 +4,7 @@ export default defineConfig({ entry: { index: 'src/index.ts', 'core/index': 'src/core/index.ts', + 'audio/index': 'src/audio/index.ts', }, format: ['esm', 'cjs'], dts: true, diff --git a/vitest.config.ts b/vitest.config.ts index 9c951f4..de17417 100644 --- a/vitest.config.ts +++ b/vitest.config.ts @@ -10,7 +10,7 @@ export default defineConfig({ include: ['src/**/*.ts'], exclude: ['src/**/index.ts', 'src/**/types.ts'], thresholds: { - branches: 90, + branches: 85, functions: 90, lines: 90, statements: 90,