LLM API Reference
This page contains all TypeScript interfaces extracted from source code. Designed for LLMs and coding agents — minimal prose, mostly types.
Source of truth: packages/browser/src/WaveformPlaylistContext.tsx, packages/browser/src/MediaElementPlaylistContext.tsx
Provider Props (WaveformPlaylistProvider)​
interface WaveformPlaylistProviderProps {
tracks: ClipTrack[];
children: ReactNode;
timescale?: boolean;
mono?: boolean;
waveHeight?: number; // Default: 80
samplesPerPixel?: number; // Default: 1024
zoomLevels?: number[];
automaticScroll?: boolean; // Default: false
theme?: Partial<WaveformPlaylistTheme>;
controls?: { show: boolean; width: number }; // Default: { show: false, width: 0 }
annotationList?: {
annotations?: AnnotationData[];
editable?: boolean;
isContinuousPlay?: boolean;
linkEndpoints?: boolean;
controls?: AnnotationAction[];
};
effects?: EffectsFunction;
onReady?: () => void;
onAnnotationsChange?: (annotations: AnnotationData[]) => void;
/** Called when engine clip operations (move/trim/split) update tracks */
onTracksChange?: (tracks: ClipTrack[]) => void;
barWidth?: number; // Default: 1
barGap?: number; // Default: 0
progressBarWidth?: number; // Default: barWidth + barGap
/** Defer engine build during progressive loading — tracks render but engine isn't built */
deferEngineRebuild?: boolean; // Default: false
/** SoundFont cache for sample-based MIDI playback */
soundFontCache?: SoundFontCache;
/** Disable automatic stop when cursor reaches end of longest track */
indefinitePlayback?: boolean; // Default: false
/** Desired AudioContext sample rate for pre-computed peaks matching */
sampleRate?: number;
}
Provider Props (MediaElementPlaylistProvider)​
interface FadeConfig {
duration: number; // Duration of the fade in seconds
type?: FadeType; // 'linear' | 'logarithmic' | 'exponential' | 'sCurve'
}
interface MediaElementTrackConfig {
source: string; // Audio source URL or Blob URL
waveformData: WaveformDataObject; // Pre-computed waveform data (required)
name?: string; // Track name for display
fadeIn?: FadeConfig; // Fade in (requires audioContext)
fadeOut?: FadeConfig; // Fade out (requires audioContext)
}
interface MediaElementPlaylistProviderProps {
track: MediaElementTrackConfig;
children: ReactNode;
audioContext?: AudioContext; // Enables Web Audio routing (fades, effects)
samplesPerPixel?: number; // Default: 1024
waveHeight?: number; // Default: 100
timescale?: boolean; // Default: false
playbackRate?: number; // Default: 1 (range: 0.5–2.0)
preservesPitch?: boolean; // Default: true (set false for external pitch processor)
automaticScroll?: boolean; // Default: false
theme?: Partial<WaveformPlaylistTheme>;
controls?: { show: boolean; width: number }; // Default: { show: false, width: 0 }
annotationList?: {
annotations?: any[];
isContinuousPlay?: boolean;
};
onAnnotationsChange?: (annotations: AnnotationData[]) => void;
onReady?: () => void;
barWidth?: number; // Default: 1
barGap?: number; // Default: 0
progressBarWidth?: number; // Default: barWidth + barGap
}
MediaElement Context Hooks​
useMediaElementAnimation()​
interface MediaElementAnimationContextValue {
isPlaying: boolean;
currentTime: number;
currentTimeRef: RefObject<number>;
}
useMediaElementState()​
interface MediaElementStateContextValue {
continuousPlay: boolean;
annotations: AnnotationData[];
activeAnnotationId: string | null;
playbackRate: number;
isAutomaticScroll: boolean;
}
useMediaElementControls()​
interface MediaElementControlsContextValue {
play: (startTime?: number) => void;
pause: () => void;
stop: () => void;
seekTo: (time: number) => void;
setPlaybackRate: (rate: number) => void;
setContinuousPlay: (enabled: boolean) => void;
setAnnotations: Dispatch<SetStateAction<AnnotationData[]>>;
setActiveAnnotationId: (id: string | null) => void;
setAutomaticScroll: (enabled: boolean) => void;
setScrollContainer: (element: HTMLDivElement | null) => void;
scrollContainerRef: RefObject<HTMLDivElement | null>;
}
useMediaElementData()​
interface MediaElementDataContextValue {
duration: number;
peaksDataArray: TrackClipPeaks[];
sampleRate: number;
waveHeight: number;
timeScaleHeight: number;
samplesPerPixel: number;
playoutRef: RefObject<MediaElementPlayout | null>; // .outputNode for effects bridge
controls: { show: boolean; width: number };
barWidth: number;
barGap: number;
progressBarWidth: number;
fadeIn?: FadeConfig;
fadeOut?: FadeConfig;
}
WaveformPlaylist Context Hooks​
usePlaybackAnimation()​
interface PlaybackAnimationContextValue {
isPlaying: boolean;
currentTime: number;
currentTimeRef: RefObject<number>;
playbackStartTimeRef: RefObject<number>;
audioStartPositionRef: RefObject<number>;
/** Returns current playback time from engine (auto-wraps at loop boundaries). */
getPlaybackTime: () => number;
}
usePlaylistState()​
interface PlaylistStateContextValue {
continuousPlay: boolean;
linkEndpoints: boolean;
annotationsEditable: boolean;
isAutomaticScroll: boolean;
isLoopEnabled: boolean;
annotations: AnnotationData[];
activeAnnotationId: string | null;
selectionStart: number;
selectionEnd: number;
selectedTrackId: string | null;
loopStart: number;
loopEnd: number;
/** Whether playback continues past the end of loaded audio */
indefinitePlayback: boolean;
/** Whether undo is available */
canUndo: boolean;
/** Whether redo is available */
canRedo: boolean;
}
usePlaylistControls()​
interface PlaylistControlsContextValue {
// Playback
play: (startTime?: number, playDuration?: number) => Promise<void>;
pause: () => void;
stop: () => void;
seekTo: (time: number) => void;
setCurrentTime: (time: number) => void;
// Track controls
setTrackMute: (trackIndex: number, muted: boolean) => void;
setTrackSolo: (trackIndex: number, soloed: boolean) => void;
setTrackVolume: (trackIndex: number, volume: number) => void;
setTrackPan: (trackIndex: number, pan: number) => void;
// Selection
setSelection: (start: number, end: number) => void;
setSelectedTrackId: (trackId: string | null) => void;
// Time format
setTimeFormat: (format: TimeFormat) => void;
formatTime: (seconds: number) => string;
// Zoom
zoomIn: () => void;
zoomOut: () => void;
// Master volume
setMasterVolume: (volume: number) => void;
// Scroll
setAutomaticScroll: (enabled: boolean) => void;
setScrollContainer: (element: HTMLDivElement | null) => void;
scrollContainerRef: RefObject<HTMLDivElement | null>;
// Annotation controls
setContinuousPlay: (enabled: boolean) => void;
setLinkEndpoints: (enabled: boolean) => void;
setAnnotationsEditable: (enabled: boolean) => void;
setAnnotations: Dispatch<SetStateAction<AnnotationData[]>>;
setActiveAnnotationId: (id: string | null) => void;
// Loop controls
setLoopEnabled: (enabled: boolean) => void;
setLoopRegion: (start: number, end: number) => void;
setLoopRegionFromSelection: () => void;
clearLoopRegion: () => void;
// Undo/redo
undo: () => void;
redo: () => void;
}
usePlaylistData()​
interface PlaylistDataContextValue {
duration: number;
audioBuffers: AudioBuffer[];
peaksDataArray: TrackClipPeaks[];
trackStates: TrackState[];
tracks: ClipTrack[];
sampleRate: number;
waveHeight: number;
timeScaleHeight: number;
minimumPlaylistHeight: number;
controls: { show: boolean; width: number };
playoutRef: RefObject<PlaylistEngine | null>; // PlaylistEngine from @waveform-playlist/engine
samplesPerPixel: number;
timeFormat: TimeFormat;
masterVolume: number;
canZoomIn: boolean;
canZoomOut: boolean;
barWidth: number;
barGap: number;
progressBarWidth: number;
isReady: boolean;
mono: boolean;
/** Ref toggled during boundary trim drags — when true, loadAudio skips engine rebuild */
isDraggingRef: MutableRefObject<boolean>;
onTracksChange: ((tracks: ClipTrack[]) => void) | undefined;
}
Data Types​
interface TrackState {
name: string;
muted: boolean;
soloed: boolean;
volume: number;
pan: number;
}
interface ClipPeaks {
clipId: string;
trackName: string;
peaks: PeakData;
startSample: number;
durationSamples: number;
fadeIn?: Fade;
fadeOut?: Fade;
}
type TrackClipPeaks = ClipPeaks[];
useAudioTracks​
function useAudioTracks(
configs: AudioTrackConfig[],
options?: UseAudioTracksOptions
): {
tracks: ClipTrack[];
loading: boolean;
error: string | null;
progress: number;
};
interface AudioTrackConfig {
src?: string;
audioBuffer?: AudioBuffer;
name?: string;
muted?: boolean;
soloed?: boolean;
volume?: number;
pan?: number;
color?: string;
effects?: TrackEffectsFunction;
startTime?: number;
duration?: number;
offset?: number;
fadeIn?: Fade;
fadeOut?: Fade;
waveformData?: WaveformDataObject;
}
interface UseAudioTracksOptions {
immediate?: boolean; // Default: false. When true, placeholders render instantly and audio fills in
/** @deprecated Use immediate */
progressive?: boolean; // Alias for immediate
}
useDynamicTracks​
type TrackSource =
| File // Drag-and-drop / file input
| Blob // Raw audio blob
| string // URL shorthand
| { src: string; name?: string }; // URL with optional name
interface TrackLoadError {
name: string; // Display name of the failed source
error: Error; // The underlying error
}
function useDynamicTracks(): UseDynamicTracksReturn;
interface UseDynamicTracksReturn {
tracks: ClipTrack[]; // Includes placeholders + loaded
addTracks: (sources: TrackSource[]) => void; // Add files or URLs at runtime
removeTrack: (trackId: string) => void; // Remove by id, aborts in-flight fetch
loadingCount: number; // Number currently decoding
isLoading: boolean; // loadingCount > 0
errors: TrackLoadError[]; // Failed loads (tracks auto-removed)
}
Imperative complement to useAudioTracks. Creates placeholder tracks (clips: []) immediately when addTracks() is called. Placeholders show track controls with empty waveform area while audio decodes in parallel. Each placeholder is atomically replaced with the loaded track (same id) on success, or removed on error with the failure recorded in errors.
Effects Hooks​
useDynamicEffects​
function useDynamicEffects(fftSize?: number): UseDynamicEffectsReturn;
interface UseDynamicEffectsReturn {
activeEffects: ActiveEffect[];
availableEffects: EffectDefinition[];
addEffect: (effectId: string) => void;
removeEffect: (instanceId: string) => void;
updateParameter: (instanceId: string, paramName: string, value: number | string | boolean) => void;
toggleBypass: (instanceId: string) => void;
reorderEffects: (fromIndex: number, toIndex: number) => void;
clearAllEffects: () => void;
masterEffects: EffectsFunction;
createOfflineEffectsFunction: () => EffectsFunction | undefined;
analyserRef: RefObject<any>;
}
interface ActiveEffect {
instanceId: string;
effectId: string;
definition: EffectDefinition;
params: Record<string, number | string | boolean>;
bypassed: boolean;
}
useTrackDynamicEffects​
function useTrackDynamicEffects(): UseTrackDynamicEffectsReturn;
interface UseTrackDynamicEffectsReturn {
trackEffectsState: Map<string, TrackActiveEffect[]>;
addEffectToTrack: (trackId: string, effectId: string) => void;
removeEffectFromTrack: (trackId: string, instanceId: string) => void;
updateTrackEffectParameter: (trackId: string, instanceId: string, paramName: string, value: number | string | boolean) => void;
toggleBypass: (trackId: string, instanceId: string) => void;
clearTrackEffects: (trackId: string) => void;
getTrackEffectsFunction: (trackId: string) => TrackEffectsFunction | undefined;
createOfflineTrackEffectsFunction: (trackId: string) => TrackEffectsFunction | undefined;
availableEffects: EffectDefinition[];
}
interface TrackActiveEffect {
instanceId: string;
effectId: string;
definition: EffectDefinition;
params: Record<string, number | string | boolean>;
bypassed: boolean;
}
Editing Hooks​
useClipDragHandlers​
function useClipDragHandlers(options: UseClipDragHandlersOptions): {
onDragStart: (event: DragStartCallback) => void;
onDragMove: (event: DragMoveCallback) => void;
onDragEnd: (event: DragEndCallback) => void;
};
interface UseClipDragHandlersOptions {
tracks: ClipTrack[];
onTracksChange: (tracks: ClipTrack[]) => void;
samplesPerPixel: number;
engineRef: RefObject<PlaylistEngine | null>;
/** Ref toggled during boundary trim drags. Obtain from usePlaylistData(). */
isDraggingRef: MutableRefObject<boolean>;
/** Optional snap function for boundary trims — snaps a sample position to nearest grid line */
snapSamplePosition?: (samplePos: number) => number;
}
Delegates move to engine.moveClip() and trim to engine.trimClip(). During trim drags, isDraggingRef prevents engine rebuild. Cancel is handled inside onDragEnd via event.canceled — reverts React state for boundary trims, resets isDraggingRef. Collision detection is handled separately by ClipCollisionModifier (passed to DragDropProvider's modifiers prop).
useDragSensors​
function useDragSensors(options?: DragSensorOptions): PluginDescriptor[];
interface DragSensorOptions {
touchOptimized?: boolean; // default false — all pointers use 1px distance activation
touchDelay?: number; // default 250ms (only when touchOptimized)
touchTolerance?: number; // default 5px (only when touchOptimized)
mouseDistance?: number; // default 1px
}
Returns configured PointerSensor descriptors for DragDropProvider's sensors prop. Default mode uses distance-based activation for all pointer types. Touch-optimized mode uses delay-based activation for touch and distance-based for mouse/pen.
ClipCollisionModifier​
class ClipCollisionModifier extends Modifier {
static configure(options: { tracks: ClipTrack[]; samplesPerPixel: number }): PluginDescriptor;
}
Collision detection modifier for clip moves — constrains horizontal drag to prevent overlapping clips. Passed to DragDropProvider's modifiers array alongside RestrictToHorizontalAxis.
SnapToGridModifier​
type SnapTo = 'bar' | 'beat' | 'off';
type SnapToGridOptions =
| { mode: 'beats'; snapTo: SnapTo; bpm: number; timeSignature: [number, number]; samplesPerPixel: number; sampleRate: number }
| { mode: 'timescale'; gridSamples: number; samplesPerPixel: number };
class SnapToGridModifier extends Modifier {
static configure(options: SnapToGridOptions): PluginDescriptor;
}
Snap-to-grid modifier for clip moves. 'beats' mode quantizes in PPQN tick space; 'timescale' mode quantizes by gridSamples. Snaps the clip's absolute timeline position to the grid (not the drag delta). Skips boundary trims (handled separately by useClipDragHandlers). Compose: snap first, then ClipCollisionModifier constrains the snapped position.
noDropAnimationPlugins​
const noDropAnimationPlugins: PluginDescriptor[];
Configures DragDropProvider's Feedback plugin with dropAnimation: null to prevent snap-back animation on clip drop. Pass to DragDropProvider's plugins prop. Only needed for clip moves (boundary trims use feedback: 'none' per-entity).
useClipSplitting​
function useClipSplitting(options: UseClipSplittingOptions): UseClipSplittingResult;
interface UseClipSplittingOptions {
tracks: ClipTrack[];
samplesPerPixel: number;
engineRef: RefObject<PlaylistEngine | null>;
}
interface UseClipSplittingResult {
splitClipAtPlayhead: () => boolean;
splitClipAt: (trackIndex: number, clipIndex: number, splitTime: number) => boolean;
}
Delegates to engine.splitClip() — engine handles clip creation, adapter sync, and statechange emission.
MIDI (@waveform-playlist/midi)​
Load and parse MIDI files into ClipTrack[] with midiNotes for piano roll visualization and SoundFont/PolySynth playback.
useMidiTracks​
function useMidiTracks(configs: MidiTrackConfig[], options: UseMidiTracksOptions): UseMidiTracksReturn;
interface MidiTrackConfig {
src?: string; // URL to .mid file
midiNotes?: MidiNoteData[]; // Pre-parsed notes (skip fetch+parse)
name?: string; // Track display name
muted?: boolean;
soloed?: boolean;
volume?: number; // Default: 1.0
pan?: number; // Default: 0
color?: string;
startTime?: number; // Clip position in seconds (default 0)
duration?: number; // Override clip duration in seconds
flatten?: boolean; // Merge all MIDI tracks into one (default false)
}
interface UseMidiTracksOptions {
sampleRate: number; // Required — pass AudioContext.sampleRate
}
interface UseMidiTracksReturn {
tracks: ClipTrack[]; // Loaded tracks with midiNotes on clips
loading: boolean;
error: string | null;
loadedCount: number;
totalCount: number;
}
One MidiTrackConfig with src can produce multiple ClipTrack objects (one per MIDI channel in the file). Configs with midiNotes produce exactly one track. All tracks are returned at once after loading completes.
parseMidiFile / parseMidiUrl​
function parseMidiFile(data: ArrayBuffer, options?: ParseMidiOptions): ParsedMidi;
function parseMidiUrl(url: string, options?: ParseMidiOptions, signal?: AbortSignal): Promise<ParsedMidi>;
interface ParseMidiOptions {
flatten?: boolean; // Merge all tracks into one
}
interface ParsedMidi {
tracks: ParsedMidiTrack[];
duration: number; // Total duration in seconds
name: string; // Song name from MIDI header
bpm: number; // First tempo (default 120)
timeSignature: [number, number]; // Default [4, 4]
}
interface ParsedMidiTrack {
name: string; // Track name
notes: MidiNoteData[]; // Notes in MidiNoteData format
duration: number; // Duration in seconds
channel: number; // MIDI channel (9 = percussion)
instrument: string; // GM instrument name
programNumber: number; // GM program number (0-127)
}
Pure functions — no React dependency. parseMidiFile takes an ArrayBuffer, parseMidiUrl fetches then parses. Notes are in seconds (tempo-adjusted by @tonejs/midi).
Engine (@waveform-playlist/engine)​
Framework-agnostic timeline engine. Used internally by the browser package provider.
import { PlaylistEngine } from '@waveform-playlist/engine';
import type { EngineState, PlayoutAdapter, EngineEvents, PlaylistEngineOptions } from '@waveform-playlist/engine';
interface PlaylistEngineOptions {
adapter?: PlayoutAdapter;
sampleRate?: number; // Default: 48000 (use AudioContext.sampleRate for hardware rate)
samplesPerPixel?: number; // Default: 1000
zoomLevels?: number[]; // Default: [256, 512, 1024, 2048, 4096, 8192]
}
interface EngineState {
tracks: ClipTrack[];
tracksVersion: number; // Monotonic counter, increments on track mutations only
duration: number;
currentTime: number;
isPlaying: boolean;
samplesPerPixel: number;
sampleRate: number;
selectedTrackId: string | null;
zoomIndex: number;
canZoomIn: boolean;
canZoomOut: boolean;
selectionStart: number; // Guaranteed: selectionStart <= selectionEnd
selectionEnd: number;
masterVolume: number; // 0.0–1.0
loopStart: number; // Guaranteed: loopStart <= loopEnd
loopEnd: number;
isLoopEnabled: boolean;
}
// Key methods on PlaylistEngine:
// Track management: setTracks(), addTrack(), removeTrack(), selectTrack()
// Clip editing: moveClip(), trimClip(), splitClip()
// Playback: play(), pause(), stop(), seek()
// State: setSelection(), setLoopRegion(), setLoopEnabled(), setMasterVolume()
// Zoom: zoomIn(), zoomOut(), setZoomLevel()
// Events: on(event, listener), off(event, listener)
// Lifecycle: dispose()
// Pure operations (also exported for use without PlaylistEngine):
import { constrainClipDrag, constrainBoundaryTrim, canSplitAt, splitClip, calculateSplitPoint } from '@waveform-playlist/engine';
import { calculateDuration, clampSeekPosition, findClosestZoomIndex } from '@waveform-playlist/engine';
Recording (@waveform-playlist/recording)​
useIntegratedRecording​
function useIntegratedRecording(
tracks: ClipTrack[],
setTracks: (tracks: ClipTrack[]) => void,
selectedTrackId: string | null,
options?: IntegratedRecordingOptions
): UseIntegratedRecordingReturn;
interface IntegratedRecordingOptions {
currentTime?: number;
audioConstraints?: MediaTrackConstraints;
channelCount?: number; // Default: 1 — fallback when the channel count cannot be auto-detected from the stream
samplesPerPixel?: number; // Default: 1024
bits?: 8 | 16; // Default: 16
}
interface UseIntegratedRecordingReturn {
isRecording: boolean;
isPaused: boolean;
duration: number;
level: number;
peakLevel: number;
error: Error | null;
stream: MediaStream | null;
devices: MicrophoneDevice[];
hasPermission: boolean;
selectedDevice: string | null;
startRecording: () => void;
stopRecording: () => void;
pauseRecording: () => void;
resumeRecording: () => void;
requestMicAccess: () => Promise<void>;
changeDevice: (deviceId: string) => Promise<void>;
recordingPeaks: (Int8Array | Int16Array)[];
}
useMicrophoneLevel​
function useMicrophoneLevel(
stream: MediaStream | null,
options?: {
channelCount?: number; // Default: 1
updateRate?: number; // Default: 60
}
): UseMicrophoneLevelReturn;
interface UseMicrophoneLevelReturn {
level: number; // 0-1 peak level (max across channels)
peakLevel: number; // 0-1 held peak with decay (max across channels)
levels: number[]; // Per-channel peak levels (0-1)
peakLevels: number[]; // Per-channel held peak levels with decay (0-1)
rmsLevels: number[]; // Per-channel RMS levels (0-1)
resetPeak: () => void; // Reset held peaks
}
useOutputMeter​
function useOutputMeter(options?: UseOutputMeterOptions): UseOutputMeterReturn;
interface UseOutputMeterOptions {
channelCount?: number; // Default: 2
updateRate?: number; // Default: 60
isPlaying?: boolean; // Reset levels when false
}
interface UseOutputMeterReturn {
levels: number[]; // Per-channel peak output levels (0-1)
peakLevels: number[]; // Per-channel held peak levels with decay (0-1)
rmsLevels: number[]; // Per-channel RMS output levels (0-1)
resetPeak: () => void; // Reset all peak hold indicators
}
SegmentedVUMeter​
interface ColorStop {
dB: number;
color: string;
}
interface SegmentedVUMeterProps {
levels: number[];
peakLevels?: number[];
channelLabels?: string[];
orientation?: 'vertical' | 'horizontal';
segmentCount?: number;
dBRange?: [number, number];
showScale?: boolean;
colorStops?: ColorStop[];
segmentWidth?: number;
segmentHeight?: number;
segmentGap?: number;
className?: string;
}
Export​
useExportWav​
function useExportWav(): UseExportWavReturn;
interface UseExportWavReturn {
exportWav: (tracks: ClipTrack[], trackStates: TrackState[], options?: ExportOptions) => Promise<ExportResult>;
isExporting: boolean;
progress: number;
error: string | null;
}
interface ExportOptions {
filename?: string;
mode?: 'master' | 'individual';
trackIndex?: number;
autoDownload?: boolean;
applyEffects?: boolean; // Default: true
effectsFunction?: EffectsFunction;
createOfflineTrackEffects?: (trackId: string) => TrackEffectsFunction | undefined;
onProgress?: (progress: number) => void;
bitDepth?: 16 | 32;
}
interface ExportResult {
audioBuffer: AudioBuffer;
blob: Blob;
duration: number;
}
Keyboard Shortcuts​
function useKeyboardShortcuts(options: UseKeyboardShortcutsOptions): void;
interface UseKeyboardShortcutsOptions {
shortcuts: KeyboardShortcut[];
enabled?: boolean;
}
interface KeyboardShortcut {
key: string;
ctrlKey?: boolean;
shiftKey?: boolean;
metaKey?: boolean;
altKey?: boolean;
action: () => void;
description?: string;
preventDefault?: boolean;
}
function usePlaybackShortcuts(options?: UsePlaybackShortcutsOptions): UsePlaybackShortcutsReturn;
// Default shortcuts: Space (play/pause), Escape (stop), 0 (rewind)
Waveform Component Props​
interface WaveformProps {
renderTrackControls?: (trackIndex: number) => ReactNode;
renderTick?: (label: string, pixelPosition: number) => ReactNode;
/** @deprecated Use renderTick */
renderTimestamp?: (timeMs: number, pixelPosition: number) => ReactNode;
renderPlayhead?: RenderPlayheadFunction;
renderAnnotationItem?: (props: RenderAnnotationItemProps) => ReactNode;
getAnnotationBoxLabel?: GetAnnotationBoxLabelFn;
annotationControls?: AnnotationAction[];
annotationListConfig?: AnnotationActionOptions;
annotationTextHeight?: number;
scrollActivePosition?: ScrollLogicalPosition;
scrollActiveContainer?: 'nearest' | 'all';
className?: string;
showClipHeaders?: boolean; // Default: false
interactiveClips?: boolean; // Default: false
showFades?: boolean;
touchOptimized?: boolean;
recordingState?: {
isRecording: boolean;
trackId: string;
startSample: number;
durationSamples: number;
peaks: (Int8Array | Int16Array)[]; // Per-channel live peaks
bits: 8 | 16; // Bit depth of peak values
};
}
Pre-built Components​
Buttons: PlayButton, PauseButton, StopButton, RewindButton, FastForwardButton,
SkipBackwardButton, SkipForwardButton, LoopButton, SetLoopRegionButton,
ZoomInButton, ZoomOutButton, ExportWavButton, DownloadAnnotationsButton
Controls: MasterVolumeControl, TimeFormatSelect, AudioPosition, SelectionTimeInputs
Checkboxes: AutomaticScrollCheckbox, ContinuousPlayCheckbox, LinkEndpointsCheckbox, EditableCheckbox
Playheads: Playhead, PlayheadWithMarker (from @waveform-playlist/ui-components)
Error Handling: PlaylistErrorBoundary (from @waveform-playlist/ui-components)
All button/control components connect to context automatically. No props required for basic usage. All accept className and style.
ClipInteractionProvider​
import { ClipInteractionProvider } from '@waveform-playlist/browser';
interface ClipInteractionProviderProps {
snap?: boolean; // Default: false — enable snap-to-grid (auto-detects beats vs timescale from context)
touchOptimized?: boolean; // Default: false — 250ms delay activation for touch input
children: React.ReactNode;
}
Declarative wrapper that encapsulates all clip drag/move/trim/snap/collision setup. Replaces manual DragDropProvider + useClipDragHandlers + useDragSensors + modifier configuration. When present, interactiveClips is auto-enabled on descendant Waveform components via ClipInteractionContext. When snap is enabled, reads BeatsAndBarsProvider context: if scaleMode="beats" and snapTo!="off", clips snap to beats/bars; otherwise falls back to timescale-based snapping.
useClipInteractionEnabled()​
import { useClipInteractionEnabled } from '@waveform-playlist/browser';
function useClipInteractionEnabled(): boolean;
Returns true when the component is inside a ClipInteractionProvider. Used internally by Waveform to auto-enable interactiveClips.
KeyboardShortcuts​
import { KeyboardShortcuts } from '@waveform-playlist/browser';
interface KeyboardShortcutsProps {
playback?: boolean; // Default: false — Space (play/pause), Escape (stop), 0 (rewind)
clipSplitting?: boolean; // Default: false — 's' key splits clip at playhead
annotations?: boolean; // Default: false — arrow nav, boundary editing, Enter to play
undo?: boolean; // Default: false — Cmd/Ctrl+Z (undo), Cmd/Ctrl+Shift+Z (redo)
additionalShortcuts?: KeyboardShortcut[]; // Appended to enabled defaults
}
Self-closing component that sets up keyboard shortcuts declaratively. Must be inside WaveformPlaylistProvider. Reads all required data (tracks, sampleRate, samplesPerPixel, annotations, etc.) from context internally. Replaces manual usePlaybackShortcuts + useClipSplitting + useAnnotationKeyboardControls wiring.
PlaylistErrorBoundary​
import { PlaylistErrorBoundary } from '@waveform-playlist/ui-components';
interface PlaylistErrorBoundaryProps {
children: React.ReactNode;
fallback?: React.ReactNode; // Custom fallback UI
}
Catches render errors in child components. Uses plain CSS (works without ThemeProvider).
Beats & Bars (@waveform-playlist/ui-components)​
type SnapTo = 'bar' | 'beat' | 'off';
interface BeatsAndBarsProviderProps {
bpm: number;
timeSignature: [number, number]; // [numerator, denominator]
snapTo: SnapTo;
children: React.ReactNode;
}
interface BeatsAndBarsContextValue {
bpm: number;
timeSignature: [number, number];
snapTo: SnapTo;
ticksPerBeat: number; // Derived from timeSignature + PPQN
ticksPerBar: number; // Derived from timeSignature + PPQN
}
Optional context — useBeatsAndBars() returns null when no provider is present. When present, SmartScale renders beats & bars ticks instead of temporal ticks.
// Zoom-dependent temporal scale thresholds
function getScaleInfo(samplesPerPixel: number): {
marker: number; // ms between major labeled ticks
bigStep: number; // ms between medium ticks
smallStep: number; // ms between minor ticks
};
Virtual Scrolling (@waveform-playlist/ui-components)​
Viewport-aware canvas rendering — only mounts canvas chunks visible in the scroll container plus a 1.5x overscan buffer.
// ScrollViewport — viewport state
interface ScrollViewport {
scrollLeft: number;
containerWidth: number;
visibleStart: number; // Left edge including overscan buffer
visibleEnd: number; // Right edge including overscan buffer
}
// ScrollViewportProvider — wraps scrollable container
interface ScrollViewportProviderProps {
containerRef: React.RefObject<HTMLElement | null>;
children: React.ReactNode;
}
const ScrollViewportProvider: React.FC<ScrollViewportProviderProps>;
// useScrollViewport — full viewport state (re-renders on every scroll update)
function useScrollViewport(): ScrollViewport | null;
// useScrollViewportSelector — fine-grained subscription (re-renders only when selector result changes)
function useScrollViewportSelector<T>(selector: (viewport: ScrollViewport | null) => T): T;
// useVisibleChunkIndices — which canvas chunks are visible
// originX converts local chunk coords to global viewport space (for clips not at position 0)
function useVisibleChunkIndices(totalWidth: number, chunkWidth: number, originX?: number): number[];
// ClipViewportOriginProvider — provides clip's pixel offset to descendant Channel/SpectrogramChannel
interface ClipViewportOriginProviderProps {
originX: number; // Clip's pixel-space left offset within the timeline
children: React.ReactNode;
}
const ClipViewportOriginProvider: React.FC<ClipViewportOriginProviderProps>;
// useClipViewportOrigin — read clip's pixel offset (defaults to 0 outside provider)
function useClipViewportOrigin(): number;
Used internally by Channel, SpectrogramChannel, and TimeScale. useScrollViewport returns null when no ScrollViewportProvider is present (backwards compatible).
Spectrogram (@waveform-playlist/spectrogram)​
Spectrogram is an optional package. Integrate via SpectrogramProvider:
import { SpectrogramProvider } from '@waveform-playlist/spectrogram';
<WaveformPlaylistProvider tracks={tracks}>
<SpectrogramProvider config={spectrogramConfig} colorMap="viridis">
<Waveform />
</SpectrogramProvider>
</WaveformPlaylistProvider>
interface SpectrogramProviderProps {
config?: SpectrogramConfig;
colorMap?: ColorMapValue;
/** Number of Web Workers for parallel FFT computation. Default: 2 (one per stereo channel). */
workerPoolSize?: number;
children: ReactNode;
}
// From @waveform-playlist/core
type FFTSize = 256 | 512 | 1024 | 2048 | 4096 | 8192;
type ColorMapEntry = [number, number, number] | [number, number, number, number];
type ColorMapName = 'viridis' | 'magma' | 'inferno' | 'grayscale' | 'igray' | 'roseus';
type ColorMapValue = ColorMapName | ColorMapEntry[];
type RenderMode = 'waveform' | 'spectrogram' | 'both';
interface SpectrogramConfig {
fftSize?: FFTSize; // Default: 2048
hopSize?: number; // Default: fftSize / 4
windowFunction?: 'hann' | 'hamming' | 'blackman' | 'rectangular' | 'bartlett' | 'blackman-harris';
alpha?: number; // Window function parameter (0-1)
frequencyScale?: 'linear' | 'logarithmic' | 'mel' | 'bark' | 'erb'; // Default: 'mel'
minFrequency?: number; // Default: 0
maxFrequency?: number; // Default: sampleRate / 2
gainDb?: number; // Default: 20
rangeDb?: number; // Default: 80
zeroPaddingFactor?: number; // Default: 2
labels?: boolean; // Default: false
labelsColor?: string;
labelsBackground?: string;
}
interface SpectrogramData {
fftSize: number;
windowSize: number;
frequencyBinCount: number;
sampleRate: number;
hopSize: number;
frameCount: number;
data: Float32Array; // frameCount * frequencyBinCount (row-major)
gainDb: number;
rangeDb: number;
}
interface TrackSpectrogramOverrides {
renderMode: RenderMode;
config?: SpectrogramConfig;
colorMap?: ColorMapValue;
}
// From @waveform-playlist/spectrogram
interface SpectrogramWorkerApi {
computeFFT(params: SpectrogramWorkerFFTParams, generation?: number): Promise<{ cacheKey: string }>;
renderChunks(params: SpectrogramWorkerRenderChunksParams, generation?: number): Promise<void>;
abortGeneration(generation: number): void;
registerCanvas(canvasId: string, canvas: OffscreenCanvas): void;
unregisterCanvas(canvasId: string): void;
registerAudioData(clipId: string, channelDataArrays: Float32Array[], sampleRate: number): void;
unregisterAudioData(clipId: string): void;
terminate(): void;
}
class SpectrogramAbortError extends Error {} // use an instanceof check to detect aborted FFT computations
// Key exports
export { SpectrogramProvider, SpectrogramAbortError } from '@waveform-playlist/spectrogram';
export { computeSpectrogram, computeSpectrogramMono, getColorMap, getFrequencyScale } from '@waveform-playlist/spectrogram';
export { createSpectrogramWorker, createSpectrogramWorkerPool } from '@waveform-playlist/spectrogram';
export { SpectrogramMenuItems, SpectrogramSettingsModal } from '@waveform-playlist/spectrogram';
// Integration context (from @waveform-playlist/browser)
export { useSpectrogramIntegration, SpectrogramIntegrationProvider } from '@waveform-playlist/browser';
export type { SpectrogramIntegration, SpectrogramWorkerApi } from '@waveform-playlist/browser';
Utilities​
// Waveform data (BBC audiowaveform)
loadWaveformData(src: string): Promise<WaveformData>;
waveformDataToPeaks(data: WaveformData, samplesPerPixel: number): PeakData;
loadPeaksFromWaveformData(src: string, samplesPerPixel: number): Promise<PeakData>;
getWaveformDataMetadata(data: WaveformData): { sampleRate: number; duration: number; channels: number };
// Effects
effectDefinitions: EffectDefinition[];
effectCategories: string[];
getEffectDefinition(id: string): EffectDefinition | undefined;
getEffectsByCategory(category: string): EffectDefinition[];
createEffectInstance(definition: EffectDefinition): EffectInstance;
createEffectChain(effects: EffectInstance[]): void;
// Keyboard
getShortcutLabel(shortcut: KeyboardShortcut): string;
// Returns e.g. "Cmd+Shift+S" on Mac, "Ctrl+Shift+S" on Windows