Svelte Audio UI
DocsParticles
0
Overview
  • Introduction
  • Get Started
Components
  • Audio Player
  • Audio Provider
  • Audio Queue
  • Audio Track
  • Audio Playback Speed
UI
  • Fader
  • Knob
  • Slider
  • Sortable List
  • XY Pad
Libs
  • Audio Store
  • HTML Audio
Resources
  • llms.txt
  • llms-full.txt

On This Page

InstallationImportCore APIThe htmlAudio SingletonLifecyclePlaybackVolumeStateEventsUtilitiesformatDurationhtmlAudio.isLiveTypesTrackExamplesBasic PlaybackVolume FadingListening to ProgressRelatedNotes

HTML Audio

Previous

The core HTMLAudio singleton for robust audio playback.


The htmlAudio singleton manages playback of HTML5 audio with automatic retry logic, event handling, and volume fading. Use it alongside the Audio Store for full player functionality.

Installation

pnpm dlx shadcn-svelte@latest add https://svelte-audio-ui.vercel.app/r/provider.json
npx shadcn-svelte@latest add https://svelte-audio-ui.vercel.app/r/provider.json
npx shadcn-svelte@latest add https://svelte-audio-ui.vercel.app/r/provider.json
bun x shadcn-svelte@latest add https://svelte-audio-ui.vercel.app/r/provider.json

Copy and paste the following code into your project.

/**
 * A single playable audio item. Only `url` is required; the remaining
 * fields are optional metadata for player UI components. The index
 * signature allows ad-hoc app-specific properties.
 */
export type Track = {
  // Unique identifier (optional).
  id?: string | number;
  // URL of the audio file or stream.
  url: string;
  // Track title.
  title?: string;
  // Artist name.
  artist?: string;
  // Album artwork URL.
  artwork?: string;
  // Additional image URLs.
  images?: string[];
  // Track duration in seconds.
  duration?: number;
  // Album name.
  album?: string;
  // Genre.
  genre?: string;
  // Whether this is a live stream.
  live?: boolean;
  // Any additional ad-hoc properties.
  [key: string]: unknown;
};

// Parameters for HtmlAudio.load().
type LoadParams = {
  // Source URL to load.
  url: string;
  // Initial seek position in seconds (ignored for live streams).
  startTime?: number;
  // Live streams get a longer load timeout and no seeking.
  isLiveStream?: boolean;
};

// Parameters for HtmlAudio.setVolume().
type SetVolumeParams = {
  // Target volume in [0, 1].
  volume: number;
  // Fade duration in ms; 0 (default) applies the volume immediately.
  fadeTime?: number;
};

// Parameters for HtmlAudio.fadeVolume().
type FadeVolumeParams = {
  // Element whose volume is animated.
  audio: HTMLAudioElement;
  // Target volume in [0, 1].
  targetVolume: number;
  // Fade duration in ms.
  duration: number;
};

/**
 * Singleton wrapper around a single HTMLAudioElement that adds:
 *  - loading with a readiness timeout (longer for live streams),
 *  - automatic reload on media "error" events (up to `maxRetries`),
 *  - timer-driven volume fades and mute with volume restore,
 *  - buffering/playback notifications re-emitted as custom events
 *    ("audioError", "bufferingStart", "bufferingEnd", "playbackStarted",
 *    "bufferUpdate") on an internal EventTarget.
 *
 * Every public method is server-safe: when `window` is unavailable the
 * method becomes a no-op or returns a neutral default instead of throwing.
 */
class HtmlAudio {
  /** Underlying media element; created lazily by init() on the client. */
  private audio: HTMLAudioElement | null = null;
  /** Guards init() so the element and listeners are only created once. */
  private isInitialized = false;
  /** In-flight promise from audio.play(); cleared once it settles. */
  private playPromise: Promise<void> | null = null;
  /** Last non-zero volume, restored with a fade when unmuting. */
  private lastVolume = 1;
  /** Timer driving an in-progress fade; null when no fade is active. */
  private fadeTimeout: ReturnType<typeof setTimeout> | null = null;
  /** Consecutive automatic reloads attempted after "error" events. */
  private retryAttempts = 0;
  private readonly maxRetries = 3;
  /** Internal bus for the custom events listed in the class doc. */
  private readonly eventTarget = new EventTarget();
  /** Load timeout for live streams, which can buffer much longer. */
  private readonly LOAD_TIMEOUT_LIVE = 60_000;
  /** Load timeout for regular (finite) sources. */
  private readonly LOAD_TIMEOUT_NORMAL = 30_000;
  /** Fade step interval in ms (~60 volume updates per second). */
  private readonly FADE_UPDATE_INTERVAL = 16;

  /**
   * Create the audio element and attach its listeners. Client-only and
   * idempotent: calls after the first are no-ops.
   */
  init(): void {
    if (this.isInitialized || !this.isClient()) return;
    this.isInitialized = true;
    // isClient() already passed above; the previous nested re-check
    // was dead code and has been removed.
    this.audio = new Audio();
    this.setupEventListeners();
  }

  /**
   * Wire the native media events we care about and translate them into
   * this class's custom events.
   */
  private setupEventListeners(): void {
    if (!this.isClient()) return;
    const audio = this.ensureAudio();
    if (!audio) return;

    // On error: schedule an automatic reload (bounded by maxRetries),
    // and always surface the error to listeners.
    audio.addEventListener("error", () => {
      if (this.retryAttempts < this.maxRetries) {
        this.retryAttempts++;
        setTimeout(() => this.reloadAudio(), 1000);
      }
      this.eventTarget.dispatchEvent(new CustomEvent("audioError"));
    });

    // Successful playback resets the retry counter.
    audio.addEventListener("playing", () => {
      this.retryAttempts = 0;
      this.eventTarget.dispatchEvent(new CustomEvent("bufferingEnd"));
      this.eventTarget.dispatchEvent(new CustomEvent("playbackStarted"));
    });

    audio.addEventListener("canplaythrough", () => {
      this.retryAttempts = 0;
      this.eventTarget.dispatchEvent(new CustomEvent("bufferingEnd"));
    });

    audio.addEventListener("waiting", () => {
      this.eventTarget.dispatchEvent(new CustomEvent("bufferingStart"));
    });

    // Report how far ahead of the playhead the browser has buffered.
    audio.addEventListener("progress", () => {
      const buffered = audio.buffered;
      const currentTime = audio.currentTime;
      let bufferedEnd = 0;
      if (buffered.length === 0) return;

      // Find the end of the buffered range containing the playhead,
      // scanning from the last range backwards.
      for (let i = buffered.length - 1; i >= 0; i--) {
        if (buffered.start(i) <= currentTime) {
          bufferedEnd = buffered.end(i);
          break;
        }
      }
      // Fall back to the first range when none contains the playhead.
      if (bufferedEnd === 0) bufferedEnd = buffered.end(0);
      if (bufferedEnd > 0) {
        this.eventTarget.dispatchEvent(
          new CustomEvent("bufferUpdate", { detail: { bufferedTime: bufferedEnd } })
        );
      }
    });
  }

  /**
   * Pause and release the current source, cancel any pending fade, and
   * drop the in-flight play promise. The element itself is kept so the
   * singleton remains usable.
   */
  cleanup(): void {
    if (this.audio) {
      this.audio.pause();
      this.audio.src = "";
      this.audio.load();
    }
    if (this.fadeTimeout) {
      clearTimeout(this.fadeTimeout);
      this.fadeTimeout = null;
    }
    this.playPromise = null;
    // Reset the retry counter so a stale count from the previous source
    // cannot exhaust retries for the next one.
    this.retryAttempts = 0;
  }

  /** Raw element access, or null on the server / before init(). */
  getAudioElement(): HTMLAudioElement | null {
    if (!this.isClient()) return null;
    return this.audio;
  }

  /** True when running in a browser (window + document available). */
  private isClient(): boolean {
    return typeof window !== "undefined" && !!window.document;
  }

  /** Return the element or throw if unavailable (server / not init'ed). */
  private ensureAudio(): HTMLAudioElement {
    if (!this.isClient()) throw new Error("Audio module not available on server side");
    if (!this.audio) throw new Error("Audio module not initialized");
    return this.audio;
  }

  /** Run `fn` only on the client; returns undefined on the server. */
  private ifClient<T>(fn: () => T): T | undefined {
    if (!this.isClient()) return;
    return fn();
  }

  /**
   * Load a source and wait until it is ready to play (or time out).
   * No-op on the server.
   */
  async load(params: LoadParams): Promise<void> {
    const { url, startTime = 0, isLiveStream = false } = params;
    const result = this.ifClient(() => this._load({ url, startTime, isLiveStream }));
    if (result) await result;
  }

  /**
   * Client-side load implementation: swap the source, then wait for one
   * of the readiness events, a media error, or the timeout.
   */
  private async _load(params: {
    url: string;
    startTime: number;
    isLiveStream: boolean;
  }): Promise<void> {
    const { url, startTime, isLiveStream } = params;
    const audio = this.ensureAudio();
    if (!audio) return;

    try {
      this.retryAttempts = 0;
      // Same source already loaded: just seek (never seek live streams).
      // NOTE(review): audio.src is the resolved absolute URL, so this
      // comparison may miss when callers pass relative URLs — confirm.
      if (audio.src === url) {
        if (audio.currentTime !== startTime && !isLiveStream) audio.currentTime = startTime;
        return;
      }

      audio.pause();
      audio.src = "";
      audio.src = url;
      audio.preload = "auto";

      const loadTimeout = isLiveStream ? this.LOAD_TIMEOUT_LIVE : this.LOAD_TIMEOUT_NORMAL;

      await new Promise<void>((resolve, reject) => {
        let timeoutId: ReturnType<typeof setTimeout> | null = null;
        let isResolved = false;

        // Remove every temporary listener and the timeout exactly once.
        const cleanup = () => {
          if (timeoutId) {
            clearTimeout(timeoutId);
            timeoutId = null;
          }
          audio.removeEventListener("loadedmetadata", handleLoadSuccess);
          audio.removeEventListener("canplay", handleLoadSuccess);
          audio.removeEventListener("canplaythrough", handleLoadSuccess);
          audio.removeEventListener("error", handleErrorLoading);
        };

        const handleTimeout = () => {
          if (isResolved) return;
          isResolved = true;
          cleanup();
          reject(new Error(`Audio load timeout (${loadTimeout / 1000}s)`));
        };

        const handleLoadSuccess = () => {
          if (isResolved) return;
          isResolved = true;
          cleanup();
          // Apply the requested start position once metadata is in.
          if (startTime > 0 && !isLiveStream) audio.currentTime = startTime;
          resolve();
        };

        const handleErrorLoading = () => {
          if (isResolved) return;
          isResolved = true;
          cleanup();
          const error = audio.error;
          reject(
            new Error(
              `Audio load failed: ${error?.message || `code ${error?.code ?? "unknown"}`}`
            )
          );
        };

        timeoutId = setTimeout(handleTimeout, loadTimeout);
        // Any of the three readiness events counts as success.
        audio.addEventListener("loadedmetadata", handleLoadSuccess);
        audio.addEventListener("canplay", handleLoadSuccess);
        audio.addEventListener("canplaythrough", handleLoadSuccess);
        audio.addEventListener("error", handleErrorLoading);
        audio.load();
      });
    } catch (error) {
      console.error("Audio load process error:", error);
      throw error;
    }
  }

  /**
   * Start or resume playback. Resolves when the browser allows playback;
   * rejects (e.g. autoplay policy) otherwise. No-op on the server.
   */
  async play(): Promise<void> {
    const result = this.ifClient(() => this._play());
    if (result) await result;
  }

  private async _play(): Promise<void> {
    if (!this.audio) throw new Error("Audio module not initialized");
    try {
      // Already playing: nothing to do.
      if (!this.audio.paused) return;
      this.playPromise = this.audio.play();
      await this.playPromise;
      this.playPromise = null;
    } catch (error) {
      this.playPromise = null;
      throw error;
    }
  }

  /**
   * Recovery path for media errors: reload the same source, then restore
   * the previous position and (if applicable) resume playback once
   * metadata is available again.
   */
  private reloadAudio(): void {
    if (!this.isClient()) return;
    const audio = this.ensureAudio();
    const currentTime = audio.currentTime;
    const wasPlaying = !audio.paused;
    const currentSrc = audio.src;

    // Fully reset the element, then point it back at the same source.
    audio.pause();
    audio.src = "";
    audio.load();
    audio.src = currentSrc;
    audio.preload = "auto";
    audio.load();

    const setTimeAndPlay = () => {
      if (audio.readyState >= audio.HAVE_METADATA) {
        audio.currentTime = currentTime;
        // Best-effort resume; swallow autoplay-policy rejections.
        if (wasPlaying) this.play().catch(() => {});
        audio.removeEventListener("loadedmetadata", setTimeAndPlay);
      }
    };
    audio.addEventListener("loadedmetadata", setTimeAndPlay);
  }

  /** Pause playback immediately. No-op on the server. */
  pause(): void {
    this.ifClient(() => {
      const audio = this.ensureAudio();
      audio.pause();
    });
  }

  /**
   * Set the volume, optionally fading over `fadeTime` ms. An instant set
   * cancels any fade in progress. Non-zero targets are remembered for
   * unmute restore.
   */
  setVolume(params: SetVolumeParams): void {
    const { volume, fadeTime = 0 } = params;
    this.ifClient(() => {
      const audio = this.ensureAudio();
      if (this.fadeTimeout) {
        clearTimeout(this.fadeTimeout);
        this.fadeTimeout = null;
      }
      if (fadeTime <= 0) {
        audio.volume = Math.max(0, Math.min(1, volume));
        if (volume > 0) this.lastVolume = volume;
        return;
      }
      this.fadeVolume({ audio, targetVolume: volume, duration: fadeTime });
    });
  }

  /**
   * Linearly animate audio.volume toward `targetVolume` over `duration`
   * ms, stepping every FADE_UPDATE_INTERVAL ms via setTimeout.
   */
  private fadeVolume(params: FadeVolumeParams): void {
    const { audio, targetVolume, duration } = params;
    if (!this.isClient()) return;
    // BUGFIX: cancel any in-flight fade first. setMuted() calls this
    // directly without clearing fadeTimeout, so previously a mute toggle
    // during a setVolume() fade left two timer loops fighting over the
    // volume (and the older loop's timer id was leaked/overwritten).
    if (this.fadeTimeout) {
      clearTimeout(this.fadeTimeout);
      this.fadeTimeout = null;
    }
    const startVolume = audio.volume;
    const endVolume = Math.max(0, Math.min(1, targetVolume));
    const startTime = performance.now();

    const updateVolume = () => {
      const elapsed = performance.now() - startTime;
      const progress = Math.min(1, elapsed / duration);
      audio.volume = startVolume + (endVolume - startVolume) * progress;
      if (progress < 1) {
        this.fadeTimeout = setTimeout(updateVolume, this.FADE_UPDATE_INTERVAL);
      } else {
        if (endVolume > 0) this.lastVolume = endVolume;
        this.fadeTimeout = null;
      }
    };
    updateVolume();
  }

  /** Current volume in [0, 1]; 0 on the server. */
  getVolume(): number {
    return (
      this.ifClient(() => {
        const a = this.ensureAudio();
        return a.volume;
      }) ?? 0
    );
  }

  /**
   * Mute or unmute with a short (200 ms) volume fade. The pre-mute
   * volume is remembered and restored on unmute.
   */
  setMuted(muted: boolean): void {
    this.ifClient(() => {
      const audio = this.ensureAudio();
      if (audio.muted === muted) return;
      if (muted) {
        if (audio.volume > 0) this.lastVolume = audio.volume;
        this.fadeVolume({ audio, targetVolume: 0, duration: 200 });
        audio.muted = true;
      } else {
        audio.muted = false;
        this.fadeVolume({ audio, targetVolume: this.lastVolume, duration: 200 });
      }
    });
  }

  /** Duration of the loaded source in seconds; 0 on the server. */
  getDuration(): number {
    return (
      this.ifClient(() => {
        const a = this.ensureAudio();
        return a.duration;
      }) ?? 0
    );
  }

  /** Current playback position in seconds; 0 on the server. */
  getCurrentTime(): number {
    return (
      this.ifClient(() => {
        const a = this.ensureAudio();
        return a.currentTime;
      }) ?? 0
    );
  }

  /**
   * Seek to `time`, clamped to [0, duration]. Ignored until metadata is
   * available (duration known and readyState >= HAVE_METADATA).
   */
  setCurrentTime(time: number): void {
    this.ifClient(() => {
      const audio = this.ensureAudio();
      const duration = audio.duration;
      if (Number.isNaN(duration)) return;
      // Single clamp — the original ternary's "in range" branch returned
      // the same value the clamp would, so it was redundant.
      const validTime = Math.max(0, Math.min(time, duration));
      if (audio.readyState >= audio.HAVE_METADATA) audio.currentTime = validTime;
    });
  }

  /** Subscribe to the custom events documented on the class. */
  addEventListener(
    type: string,
    listener: EventListenerOrEventListenerObject | null,
    options?: boolean | AddEventListenerOptions
  ): void {
    this.eventTarget.addEventListener(type, listener, options);
  }

  /** Unsubscribe a listener previously added with addEventListener(). */
  removeEventListener(
    type: string,
    callback: EventListenerOrEventListenerObject | null,
    options?: EventListenerOptions | boolean
  ): void {
    this.eventTarget.removeEventListener(type, callback, options);
  }

  /** Current source URL, or "" on the server. */
  getSource(): string {
    return (
      this.ifClient(() => {
        const a = this.ensureAudio();
        return a.src;
      }) ?? ""
    );
  }

  /** Whether playback is paused; true on the server. */
  isPaused(): boolean {
    return (
      this.ifClient(() => {
        const a = this.ensureAudio();
        return a.paused;
      }) ?? true
    );
  }

  /** Browser-buffered ranges, or null on the server / before init(). */
  getBufferedRanges(): TimeRanges | null {
    if (!this.isClient() || !this.audio) return null;
    return this.audio.buffered;
  }

  /** Set playback speed, clamped to [0.25, 2]. Ignored for live streams. */
  setPlaybackRate(rate: number): void {
    this.ifClient(() => {
      const audio = this.ensureAudio();
      if (this.isLive(audio.duration)) return;
      audio.playbackRate = Math.max(0.25, Math.min(2, rate));
    });
  }

  /** Current playback rate; 1 on the server. */
  getPlaybackRate(): number {
    return (
      this.ifClient(() => {
        const a = this.ensureAudio();
        return a.playbackRate;
      }) ?? 1
    );
  }

  /**
   * Whether a duration value indicates a live stream (NaN or +/-Infinity).
   * A duration of 0 means "metadata not loaded yet", not "live".
   */
  isLive(duration: number): boolean {
    if (duration === 0) return false;
    return (
      Number.isNaN(duration) ||
      duration === Number.POSITIVE_INFINITY ||
      duration === Number.NEGATIVE_INFINITY
    );
  }
}

export const htmlAudio = new HtmlAudio();

// Seconds per minute, used by formatDuration.
const MINUTE_IN_SECONDS = 60;

/**
 * Format a duration in seconds as an M:SS string (e.g. 125 -> "2:05").
 * Non-finite or negative input yields "0:00". Minutes are not wrapped
 * into hours, so 3661 seconds formats as "61:01".
 */
export function formatDuration(seconds: number): string {
  if (!Number.isFinite(seconds) || seconds < 0) return "0:00";
  const wholeSeconds = Math.floor(seconds);
  const mins = Math.floor(wholeSeconds / MINUTE_IN_SECONDS);
  const secs = wholeSeconds % MINUTE_IN_SECONDS;
  return `${mins}:${String(secs).padStart(2, "0")}`;
}

Import

Import the singleton and helpers from the audio library:

<script lang="ts">
  import {
    htmlAudio,
    formatDuration,
    type Track,
  } from "$lib/html-audio";
</script>

Core API

The htmlAudio Singleton

Manages the underlying HTMLAudioElement, playback state, retries, and custom events. Initialize on client start — the instance is built to be server-safe.

import { htmlAudio } from "$lib/html-audio";
 
// Initialize on the client
htmlAudio.init();
 
// Load and play
await htmlAudio.load({ url: "https://example.com/audio.mp3", startTime: 0 });
await htmlAudio.play();
Client initialization:

The htmlAudio singleton must be initialized on the client.

Call htmlAudio.init() inside onMount or an $effect so the underlying HTMLAudioElement is created only in the browser environment.

Lifecycle

Method Description
init() Initialize on the client. Safe to call multiple times.
cleanup() Reset and release the audio element (pause, clear src).

Playback

Method Description
load({ url, startTime?, isLiveStream? }) Load an audio source and wait for readiness. Pass isLiveStream: true for live streams (longer timeout, no seek). Returns Promise<void>.
play() Start or resume playback. Returns a promise that resolves when the browser allows playback.
pause() Pause playback immediately.
setCurrentTime(time) Seek to a position once metadata is available; the value is safely clamped to the range [0, duration].
setPlaybackRate(rate) Adjust playback speed (0.25 - 2). Disabled automatically if it's a live stream.

Browser autoplay restrictions:

The play() call returns a promise which may be rejected by browser autoplay policies if there was no recent user gesture. Make sure playback is initiated by a user interaction.

Live streams: For live streams (when isLiveStream is true), seeking is disabled and a longer load timeout (60s instead of 30s) is used to accommodate stream buffering.

Volume

Method Description
setVolume({ volume, fadeTime? }) Set or fade volume (0–1). If fadeTime > 0, animates smoothly.
getVolume() Return current volume (0–1).
setMuted(muted) Mute or unmute. Restores previous volume when unmuting using a smooth fade.

State

Method Description
getDuration() Return loaded source duration (seconds) or 0 if unavailable.
getCurrentTime() Return current playback position (seconds).
isPaused() Return boolean — is playback paused.
getBufferedRanges() Return underlying TimeRanges or null.
getSource() Return current source URL string.
getAudioElement() Return raw HTMLAudioElement or null on server.
getPlaybackRate() Return current playback rate.

Events

The library emits custom events via an internal EventTarget. You can listen to these without messing with the raw <audio> element events:

htmlAudio.addEventListener("bufferingStart", () =>
  console.log("Buffering...")
);
htmlAudio.addEventListener("bufferingEnd", () => console.log("Ready to play"));
htmlAudio.addEventListener("playbackStarted", () => console.log("Playing"));
htmlAudio.addEventListener("audioError", () => console.error("Error"));
htmlAudio.addEventListener("bufferUpdate", (e) => {
  if (e instanceof CustomEvent)
    console.log("Buffered:", e.detail.bufferedTime);
});

Utilities

formatDuration

Format seconds into an M:SS string — handy for displaying times in the UI. Handles invalid input gracefully.

import { formatDuration } from "$lib/html-audio";
 
console.log(formatDuration(125)); // "2:05"
console.log(formatDuration(3661)); // "61:01"

htmlAudio.isLive

Check if a duration value indicates a live stream.

import { htmlAudio } from "$lib/html-audio";
 
const duration = htmlAudio.getDuration();
 
if (htmlAudio.isLive(duration)) {
  // Disable scrubbers, hide fast-forward buttons, etc.
}

Live Stream Detection: The isLive() method checks whether a duration is NaN, Infinity, or -Infinity. A duration of 0 just means the metadata hasn't loaded yet.

Types

Track

A versatile representation of an audio track to be used across your components:

Prop Type Description
id string | number Unique identifier (optional).
url string URL of the audio file or stream.
title string Track title.
artist string Artist name.
artwork string Album artwork URL.
images string[] Array of image URLs.
duration number Track duration in seconds.
album string Album name.
genre string Genre.
live boolean Whether this is a live stream.
[key: string] unknown Additional ad-hoc properties.

Examples

Basic Playback

Just wire it up on mount.

<script lang="ts">
  import { htmlAudio } from "$lib/html-audio";
  import { onMount } from "svelte";
 
  onMount(() => {
    htmlAudio.init();
  });
 
  async function playTrack(url: string) {
    try {
      await htmlAudio.load({ url, startTime: 0 });
      await htmlAudio.play();
    } catch (error) {
      console.error("Playback failed:", error);
    }
  }
</script>
 
<button onclick={() => playTrack("/music.mp3")}>Play Track</button>

Volume Fading

Awesome polish feature: you can smoothly fade the volume instead of jarring the user.

import { htmlAudio } from "$lib/html-audio";
 
// Immediate jump
htmlAudio.setVolume({ volume: 0.5 });
 
// Smooth, butter-like fade over 1 second (1000ms)
htmlAudio.setVolume({ volume: 0.8, fadeTime: 1000 });
 
// Mute with memory (remembers previous volume when toggled back)
htmlAudio.setMuted(true);
htmlAudio.setMuted(false);

Listening to Progress

<script lang="ts">
  import { htmlAudio, formatDuration } from "$lib/html-audio";
  import { onMount, onDestroy } from "svelte";
 
  let time = $state(0);
 
  onMount(() => {
    const updateTime = () => {
      time = htmlAudio.getCurrentTime();
    };
 
    // Poll the time roughly every frame
    const interval = setInterval(updateTime, 100);
 
    return () => clearInterval(interval);
  });
</script>
 
<span>{formatDuration(time)}</span>

Related

  • Audio Store — A Svelte 5 global store wrapping this class to provide reactive state.
  • Audio Provider — Composable provider that orchestrates htmlAudio inside your layout.
  • Audio Player — Composable player UI components.

Notes

  • Singleton pattern: All methods access the exact same htmlAudio instance behind the scenes.
  • Server-safe: Most methods (getVolume(), etc) have built-in isClient() guards, so they won't blow up during SSR.
  • Resilience: It handles native HTML5 <audio> error events manually, retrying up to 3 times under the hood before giving up.
  • Polished Fades: Built-in volume fades are driven by timers (updates roughly every 16 ms) for smooth transitions. Just supply fadeTime.
Audio Store
Built by ddtamn. The source code is available on Github