Live Streaming and Media
Live streaming delivers real-time audio and video content over the internet. In this lesson, we'll explore technologies and techniques for implementing live media streaming in web applications.
Live Video Streaming Concepts
Live video streaming involves capturing, encoding, transmitting, and playing video content with minimal delay. Understanding the streaming pipeline is essential for building robust applications.
Streaming Pipeline:
- Capture: Obtain video/audio from camera/microphone
- Encode: Compress media using codecs (H.264, VP8, Opus)
- Package: Segment into chunks (HLS, DASH)
- Transmit: Send over network (HTTP, WebRTC, WebSocket)
- Decode: Decompress on client side
- Play: Render video/audio in browser
// Accessing user media (camera/microphone)
// Requests a 1080p/30fps camera track and a processed microphone track,
// shows a local preview, and returns the MediaStream. Rethrows on failure
// (e.g. permission denied) after logging.
async function startMediaCapture() {
  const constraints = {
    video: {
      width: { ideal: 1920 },
      height: { ideal: 1080 },
      frameRate: { ideal: 30 }
    },
    audio: {
      echoCancellation: true,
      noiseSuppression: true,
      sampleRate: 48000
    }
  };
  try {
    const stream = await navigator.mediaDevices.getUserMedia(constraints);
    // Display local preview
    document.getElementById('localVideo').srcObject = stream;
    return stream;
  } catch (error) {
    console.error('Failed to access media devices:', error);
    throw error;
  }
}
// Display available devices
// Enumerates media devices and returns cameras (videoinput) and
// microphones (audioinput) as two arrays.
async function listMediaDevices() {
  const devices = await navigator.mediaDevices.enumerateDevices();
  const ofKind = (kind) => devices.filter((device) => device.kind === kind);
  const cameras = ofKind('videoinput');
  const microphones = ofKind('audioinput');
  console.log('Cameras:', cameras);
  console.log('Microphones:', microphones);
  return { cameras, microphones };
}
// Switch camera
// Opens a new stream bound to exactly the given video input device
// (plus default audio) and returns it.
async function switchCamera(deviceId) {
  const constraints = {
    video: { deviceId: { exact: deviceId } },
    audio: true
  };
  return navigator.mediaDevices.getUserMedia(constraints);
}
HLS (HTTP Live Streaming)
HLS is a widely-supported streaming protocol that breaks video into small HTTP-downloadable segments.
// HLS streaming with hls.js library
import Hls from 'hls.js';
// Wraps hls.js playback of an HLS stream on a <video> element, with
// native-playback fallback for browsers (iOS Safari) that play HLS directly.
class HLSPlayer {
  /** @param {HTMLVideoElement} videoElement - element to render into */
  constructor(videoElement) {
    this.video = videoElement;
    this.hls = null;
  }

  /**
   * Load and start playing an HLS manifest.
   * @param {string} streamUrl - URL of the .m3u8 playlist
   */
  loadStream(streamUrl) {
    if (Hls.isSupported()) {
      const config = {
        enableWorker: true,
        lowLatencyMode: true,
        backBufferLength: 90
      };
      this.hls = new Hls(config);
      this.hls.loadSource(streamUrl);
      this.hls.attachMedia(this.video);
      this.hls.on(Hls.Events.MANIFEST_PARSED, () => {
        console.log('Stream ready to play');
        this.video.play();
      });
      this.hls.on(Hls.Events.ERROR, (event, data) => this.handleError(data));
      return;
    }
    if (this.video.canPlayType('application/vnd.apple.mpegurl')) {
      // Native HLS support (iOS Safari)
      this.video.src = streamUrl;
      this.video.play();
    }
  }

  // Recover from fatal network/media errors where possible; otherwise
  // tear the player down. Non-fatal errors are only logged.
  handleError(data) {
    console.error('HLS Error:', data);
    if (!data.fatal) {
      return;
    }
    if (data.type === Hls.ErrorTypes.NETWORK_ERROR) {
      console.log('Network error, trying to recover');
      this.hls.startLoad();
    } else if (data.type === Hls.ErrorTypes.MEDIA_ERROR) {
      console.log('Media error, trying to recover');
      this.hls.recoverMediaError();
    } else {
      console.error('Fatal error, destroying player');
      this.destroy();
    }
  }

  /** Release the hls.js instance, if any. */
  destroy() {
    if (this.hls === null) {
      return;
    }
    this.hls.destroy();
    this.hls = null;
  }

  /** @returns {object|null} snapshot of playback stats, or null if inactive */
  getStats() {
    if (!this.hls) {
      return null;
    }
    const { currentLevel, autoLevelEnabled, bandwidthEstimate, media, dropped } = this.hls;
    return {
      currentLevel,
      autoLevelEnabled,
      bandwidth: bandwidthEstimate,
      bufferLength: media?.buffered.length,
      dropped
    };
  }
}
// Usage
// Attach the HLS player to the page's <video id="video"> element and
// start playback of an example HLS manifest (.m3u8).
const video = document.getElementById('video');
const player = new HLSPlayer(video);
player.loadStream('https://example.com/stream.m3u8');
HLS Advantages: Works over standard HTTP, CDN-friendly, adaptive bitrate streaming, wide device support. Typical latency: 10-30 seconds.
DASH (Dynamic Adaptive Streaming over HTTP)
DASH is an adaptive bitrate streaming standard similar to HLS, offering more flexibility and codec-agnostic streaming.
// DASH streaming with dash.js
import dashjs from 'dashjs';
// Wraps dash.js playback of an MPEG-DASH stream on a <video> element.
class DASHPlayer {
  /** @param {HTMLVideoElement} videoElement - element to render into */
  constructor(videoElement) {
    this.video = videoElement;
    this.player = dashjs.MediaPlayer().create();
  }

  /**
   * Initialize playback of a DASH manifest with low-latency settings
   * and wire up event logging.
   * @param {string} streamUrl - URL of the .mpd manifest
   */
  loadStream(streamUrl) {
    // Third argument enables autoplay.
    this.player.initialize(this.video, streamUrl, true);
    this.player.updateSettings({
      streaming: {
        lowLatencyEnabled: true,
        liveDelay: 3, // Target latency in seconds
        bufferTimeDefault: 12,
        bufferTimeMax: 30
      }
    });
    this.registerEventHandlers();
  }

  // Log stream lifecycle, quality switches, and errors.
  registerEventHandlers() {
    this.player.on('streamInitialized', () => {
      console.log('Stream initialized');
      const tracks = this.player.getTracksFor('video');
      console.log('Available quality levels:', tracks);
    });
    this.player.on('qualityChangeRendered', (e) => {
      console.log('Quality changed:', e.newQuality);
    });
    this.player.on('error', (e) => {
      console.error('DASH Error:', e.error);
    });
  }

  /** Force a specific video quality level (disables ABR for that choice). */
  setQuality(qualityIndex) {
    this.player.setQualityFor('video', qualityIndex);
  }

  /** Re-enable automatic bitrate switching for video. */
  enableAutoQuality() {
    const settings = {
      streaming: {
        abr: {
          autoSwitchBitrate: { video: true }
        }
      }
    };
    this.player.updateSettings(settings);
  }

  /** @returns {object} current bitrate list, buffer level, dropped frames */
  getMetrics() {
    const currentBitrate = this.player.getBitrateInfoListFor('video');
    const bufferLevel = this.player.getBufferLength('video');
    const droppedFrames = this.player.getMetricsFor('video')?.DroppedFrames;
    return { currentBitrate, bufferLevel, droppedFrames };
  }

  /** Reset (tear down) the underlying dash.js player. */
  destroy() {
    this.player.reset();
  }
}
// Usage
// Attach the DASH player to the page's <video id="video"> element and
// start playback of an example DASH manifest (.mpd).
const video = document.getElementById('video');
const player = new DASHPlayer(video);
player.loadStream('https://example.com/stream.mpd');
Media Streaming with WebRTC
WebRTC provides ultra-low latency streaming (sub-second) ideal for real-time communication and interactive applications.
// WebRTC peer-to-peer streaming
class WebRTCStreamer {
  /**
   * @param {object} socket - Socket.io-style signaling socket used to
   *   exchange 'offer' / 'answer' / 'ice-candidate' messages.
   */
  constructor(socket) {
    this.socket = socket;
    this.peerConnection = null;
    this.localStream = null;
    // BUG FIX: remote ICE candidates can arrive over signaling before
    // setRemoteDescription has completed; addIceCandidate would reject.
    // Buffer such candidates here and flush after the description is set.
    this.pendingCandidates = [];
  }

  // Capture local media, create the peer connection, and send an offer
  // to the remote peer via the signaling socket.
  async startStreaming() {
    // Get local media
    this.localStream = await navigator.mediaDevices.getUserMedia({
      video: true,
      audio: true
    });
    // Create peer connection (STUN for discovery, TURN as relay fallback)
    this.peerConnection = new RTCPeerConnection({
      iceServers: [
        { urls: 'stun:stun.l.google.com:19302' },
        {
          urls: 'turn:turn.example.com:3478',
          username: 'user',
          credential: 'pass'
        }
      ]
    });
    // Send our media tracks to the peer
    this.localStream.getTracks().forEach(track => {
      this.peerConnection.addTrack(track, this.localStream);
    });
    // Relay locally gathered ICE candidates through signaling
    this.peerConnection.onicecandidate = (event) => {
      if (event.candidate) {
        this.socket.emit('ice-candidate', {
          candidate: event.candidate
        });
      }
    };
    // Create and send offer
    const offer = await this.peerConnection.createOffer();
    await this.peerConnection.setLocalDescription(offer);
    this.socket.emit('offer', {
      sdp: this.peerConnection.localDescription
    });
    // Handle answer, then flush any candidates that arrived early
    this.socket.on('answer', async (data) => {
      await this.peerConnection.setRemoteDescription(
        new RTCSessionDescription(data.sdp)
      );
      await this.flushPendingCandidates();
    });
    // Handle remote ICE candidates (buffered until remoteDescription is set)
    this.socket.on('ice-candidate', (data) => {
      this.addRemoteCandidate(data.candidate).catch((error) => {
        console.error('Failed to add ICE candidate:', error);
      });
    });
  }

  // Create a receive-only peer connection and answer an incoming offer.
  async receiveStream() {
    this.peerConnection = new RTCPeerConnection({
      iceServers: [
        { urls: 'stun:stun.l.google.com:19302' }
      ]
    });
    // Render the remote stream when its tracks arrive
    this.peerConnection.ontrack = (event) => {
      const remoteVideo = document.getElementById('remoteVideo');
      remoteVideo.srcObject = event.streams[0];
    };
    // Relay locally gathered ICE candidates through signaling
    this.peerConnection.onicecandidate = (event) => {
      if (event.candidate) {
        this.socket.emit('ice-candidate', {
          candidate: event.candidate
        });
      }
    };
    // Handle offer: set remote description, flush buffered candidates,
    // then answer.
    this.socket.on('offer', async (data) => {
      await this.peerConnection.setRemoteDescription(
        new RTCSessionDescription(data.sdp)
      );
      await this.flushPendingCandidates();
      const answer = await this.peerConnection.createAnswer();
      await this.peerConnection.setLocalDescription(answer);
      this.socket.emit('answer', {
        sdp: this.peerConnection.localDescription
      });
    });
    // Handle remote ICE candidates (buffered until remoteDescription is set)
    this.socket.on('ice-candidate', (data) => {
      this.addRemoteCandidate(data.candidate).catch((error) => {
        console.error('Failed to add ICE candidate:', error);
      });
    });
  }

  // Apply a remote candidate immediately if the remote description is
  // already set; otherwise buffer it for flushPendingCandidates().
  async addRemoteCandidate(candidate) {
    if (!this.peerConnection || !this.peerConnection.remoteDescription) {
      this.pendingCandidates.push(candidate);
      return;
    }
    await this.peerConnection.addIceCandidate(new RTCIceCandidate(candidate));
  }

  // Drain candidates that arrived before setRemoteDescription completed.
  async flushPendingCandidates() {
    while (this.pendingCandidates.length > 0) {
      const candidate = this.pendingCandidates.shift();
      await this.peerConnection.addIceCandidate(new RTCIceCandidate(candidate));
    }
  }

  // Release capture devices and tear down the connection.
  stopStreaming() {
    if (this.localStream) {
      this.localStream.getTracks().forEach(track => track.stop());
      // BUG FIX: drop references so devices/connections can be re-acquired
      this.localStream = null;
    }
    if (this.peerConnection) {
      this.peerConnection.close();
      this.peerConnection = null;
    }
    this.pendingCandidates = [];
  }
}
// Usage
const socket = io();
const streamer = new WebRTCStreamer(socket);
// Start broadcasting
document.getElementById('startBtn').addEventListener('click', () => {
  // BUG FIX: startStreaming is async — handle rejection (e.g. the user
  // denies camera access) instead of leaving a floating promise.
  streamer.startStreaming().catch((error) => {
    console.error('Failed to start streaming:', error);
  });
});
// Watch stream
document.getElementById('watchBtn').addEventListener('click', () => {
  streamer.receiveStream().catch((error) => {
    console.error('Failed to receive stream:', error);
  });
});
WebRTC Considerations: While WebRTC offers ultra-low latency (<500ms), it requires signaling server infrastructure, TURN servers for NAT traversal, and doesn't scale as well as HTTP-based streaming for large audiences.
Audio Streaming
Audio streaming follows similar principles but with lower bandwidth requirements and simpler processing.
// Real-time audio streaming with Web Audio API
// NOTE(review): ScriptProcessorNode is deprecated in favor of AudioWorklet;
// it is kept here because it needs no separate worklet file — consider migrating.
class AudioStreamer {
  /**
   * @param {object} socket - Socket.io-style socket used for the
   *   'audio-data' send/receive channel.
   */
  constructor(socket) {
    this.socket = socket;
    this.audioContext = new (window.AudioContext || window.webkitAudioContext)();
    this.mediaStream = null;
    this.processor = null;
    this.audioQueue = [];   // received chunks waiting to be decoded/played
    this.isPlaying = false; // true while the playback chain is draining the queue
  }

  // Capture microphone audio, convert each buffer to 16-bit PCM and emit it.
  async startStreaming() {
    // Get microphone access
    this.mediaStream = await navigator.mediaDevices.getUserMedia({
      audio: {
        echoCancellation: true,
        noiseSuppression: true,
        autoGainControl: true
      }
    });
    // Create audio processing pipeline
    const source = this.audioContext.createMediaStreamSource(this.mediaStream);
    this.processor = this.audioContext.createScriptProcessor(4096, 1, 1);
    this.processor.onaudioprocess = (e) => {
      const audioData = e.inputBuffer.getChannelData(0);
      // Convert to 16-bit PCM
      const pcmData = this.float32To16BitPCM(audioData);
      // Send to server
      this.socket.emit('audio-data', pcmData);
    };
    source.connect(this.processor);
    // ScriptProcessorNode only fires when connected to the destination.
    this.processor.connect(this.audioContext.destination);
  }

  /**
   * Convert a Float32 sample buffer (values clamped to [-1, 1]) into
   * little-endian signed 16-bit PCM.
   * @param {Float32Array} float32Array
   * @returns {ArrayBuffer}
   */
  float32To16BitPCM(float32Array) {
    const buffer = new ArrayBuffer(float32Array.length * 2);
    const view = new DataView(buffer);
    for (let i = 0; i < float32Array.length; i++) {
      const s = Math.max(-1, Math.min(1, float32Array[i]));
      view.setInt16(i * 2, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
    }
    return buffer;
  }

  // Queue incoming chunks and (re)start the playback chain when idle.
  async playReceivedAudio() {
    this.socket.on('audio-data', (arrayBuffer) => {
      this.audioQueue.push(arrayBuffer);
      // BUG FIX: the playing flag must live on the instance and be reset
      // when the queue drains; the original closure flag was set once and
      // never cleared, so playback stalled forever after the first idle gap.
      if (!this.isPlaying) {
        this.isPlaying = true;
        this.playNextChunk(this.audioQueue);
      }
    });
  }

  // Decode and play the next queued chunk, chaining via `onended`.
  // NOTE(review): decodeAudioData expects encoded audio (WAV/MP3/...); the
  // raw PCM produced by startStreaming would need a container or a manually
  // built AudioBuffer — confirm what format the server actually relays.
  playNextChunk(queue) {
    if (queue.length === 0) {
      // Allow the next incoming chunk to restart the playback chain.
      this.isPlaying = false;
      return;
    }
    const arrayBuffer = queue.shift();
    this.audioContext.decodeAudioData(
      arrayBuffer,
      (audioBuffer) => {
        const source = this.audioContext.createBufferSource();
        source.buffer = audioBuffer;
        source.connect(this.audioContext.destination);
        source.start();
        source.onended = () => {
          this.playNextChunk(queue);
        };
      },
      (error) => {
        // BUG FIX: skip undecodable chunks instead of stalling the queue.
        console.error('Failed to decode audio chunk:', error);
        this.playNextChunk(queue);
      }
    );
  }

  // Stop capture and tear down the processing chain.
  stop() {
    if (this.mediaStream) {
      this.mediaStream.getTracks().forEach(track => track.stop());
      this.mediaStream = null;
    }
    if (this.processor) {
      this.processor.disconnect();
      this.processor = null;
    }
  }
}
MediaSource API
The MediaSource API allows JavaScript to generate media streams for playback, enabling custom streaming solutions.
// Custom streaming with MediaSource API
class CustomStreamer {
  /** @param {HTMLVideoElement} videoElement - element to render into */
  constructor(videoElement) {
    this.video = videoElement;
    this.mediaSource = new MediaSource();
    this.sourceBuffer = null; // created when 'sourceopen' fires
    this.queue = [];          // segments waiting for the buffer to be ready/idle
    this.ended = false;       // endStream() has been requested
  }

  // Wire the MediaSource to the video element; create the SourceBuffer
  // once the source opens and start draining any early segments.
  initialize() {
    this.video.src = URL.createObjectURL(this.mediaSource);
    this.mediaSource.addEventListener('sourceopen', () => {
      const mimeCodec = 'video/mp4; codecs="avc1.42E01E, mp4a.40.2"';
      if (!MediaSource.isTypeSupported(mimeCodec)) {
        console.error('Codec not supported');
        return;
      }
      this.sourceBuffer = this.mediaSource.addSourceBuffer(mimeCodec);
      this.sourceBuffer.addEventListener('updateend', () => {
        this.flushQueue();
      });
      // BUG FIX: drain segments that arrived before 'sourceopen' fired
      // (appendSegment previously dereferenced a null sourceBuffer).
      this.flushQueue();
    });
  }

  // Append the next queued segment when the buffer exists and is idle;
  // once everything queued is appended and the stream was ended, finish it.
  flushQueue() {
    if (!this.sourceBuffer || this.sourceBuffer.updating) {
      return;
    }
    if (this.queue.length > 0) {
      this.sourceBuffer.appendBuffer(this.queue.shift());
      return;
    }
    // BUG FIX: defer endOfStream until all queued segments are flushed
    // (calling it with appends pending/updating throws InvalidStateError).
    if (this.ended && this.mediaSource.readyState === 'open') {
      this.mediaSource.endOfStream();
    }
  }

  /** Queue a media segment (ArrayBuffer/TypedArray) for playback. */
  appendSegment(arrayBuffer) {
    this.queue.push(arrayBuffer);
    this.flushQueue();
  }

  /** Signal that no more segments will arrive. */
  endStream() {
    this.ended = true;
    this.flushQueue();
  }
}
// Fetch and stream video
// Downloads `url` and feeds each received chunk into a CustomStreamer
// attached to the page's <video id="video"> element, ending the stream
// when the response body is exhausted.
async function streamVideo(url) {
  const videoElement = document.getElementById('video');
  const streamer = new CustomStreamer(videoElement);
  streamer.initialize();

  const response = await fetch(url);
  const reader = response.body.getReader();

  let result = await reader.read();
  while (!result.done) {
    streamer.appendSegment(result.value);
    result = await reader.read();
  }
  streamer.endStream();
}
Adaptive Bitrate Streaming
Adaptive bitrate streaming automatically adjusts video quality based on network conditions and device capabilities.
// Implementing adaptive bitrate logic
class AdaptiveBitrateController {
  /**
   * @param {object} player - Player facade exposing getLastSegmentSize(),
   *   getLastSegmentDownloadTime(), getBufferLevel() and setQuality().
   */
  constructor(player) {
    this.player = player;
    this.bandwidthHistory = [];
    this.qualityLevels = [
      { bitrate: 500000, width: 640, height: 360, label: '360p' },
      { bitrate: 1000000, width: 854, height: 480, label: '480p' },
      { bitrate: 2500000, width: 1280, height: 720, label: '720p' },
      { bitrate: 5000000, width: 1920, height: 1080, label: '1080p' }
    ];
    this.currentQuality = 1; // start at 480p
    this.intervalId = null;  // handle of the periodic adjustment loop
  }

  // Record a bandwidth sample from the last segment download and return
  // the smoothed (averaged) estimate in bits per second.
  measureBandwidth() {
    const downloadSize = this.player.getLastSegmentSize();
    const downloadTime = this.player.getLastSegmentDownloadTime();
    // BUG FIX: guard against zero/invalid download time producing
    // Infinity/NaN samples that poison the running average.
    if (downloadTime > 0) {
      this.bandwidthHistory.push((downloadSize * 8) / downloadTime);
      // Keep last 5 measurements
      if (this.bandwidthHistory.length > 5) {
        this.bandwidthHistory.shift();
      }
    }
    return this.getAverageBandwidth();
  }

  /** @returns {number} mean of recent bandwidth samples, 0 when empty */
  getAverageBandwidth() {
    // BUG FIX: an empty history previously yielded NaN (0 / 0).
    if (this.bandwidthHistory.length === 0) {
      return 0;
    }
    const sum = this.bandwidthHistory.reduce((a, b) => a + b, 0);
    return sum / this.bandwidthHistory.length;
  }

  // Pick the best quality index for current conditions. Pure decision —
  // state is updated only by adjustQuality().
  selectOptimalQuality() {
    const bandwidth = this.measureBandwidth();
    const bufferLevel = this.player.getBufferLevel();
    // Conservative approach: use 80% of bandwidth
    const targetBitrate = bandwidth * 0.8;
    // Highest level whose bitrate fits within the target
    let selectedQuality = 0;
    for (let i = 0; i < this.qualityLevels.length; i++) {
      if (this.qualityLevels[i].bitrate <= targetBitrate) {
        selectedQuality = i;
      }
    }
    // Don't switch up if buffer is low
    if (bufferLevel < 5 && selectedQuality > this.currentQuality) {
      return this.currentQuality;
    }
    // Switch down immediately if buffer is critical
    if (bufferLevel < 2 && this.currentQuality > 0) {
      return this.currentQuality - 1;
    }
    return selectedQuality;
  }

  // Apply a quality change when the optimal level differs from current.
  adjustQuality() {
    const optimalQuality = this.selectOptimalQuality();
    // BUG FIX: selectOptimalQuality previously overwrote currentQuality
    // before this comparison, so setQuality() was never called on the
    // normal switching path.
    if (optimalQuality !== this.currentQuality) {
      console.log(
        `Switching quality: ${this.qualityLevels[this.currentQuality].label} -> ${this.qualityLevels[optimalQuality].label}`
      );
      this.player.setQuality(optimalQuality);
      this.currentQuality = optimalQuality;
    }
  }

  // Begin periodic adaptation (idempotent — won't stack timers).
  start() {
    if (this.intervalId !== null) {
      return;
    }
    this.intervalId = setInterval(() => {
      this.adjustQuality();
    }, 5000); // Check every 5 seconds
  }

  // Stop the adaptation loop (new; start() previously leaked its timer).
  stop() {
    if (this.intervalId !== null) {
      clearInterval(this.intervalId);
      this.intervalId = null;
    }
  }
}
// Usage
// NOTE(review): `player` here must expose getLastSegmentSize /
// getLastSegmentDownloadTime / getBufferLevel / setQuality (the interface
// the controller calls above); the HLS/DASH player classes in this lesson
// do not provide those methods directly — an adapter is presumably needed.
const controller = new AdaptiveBitrateController(player);
controller.start();
Streaming Comparison:
- HLS: 10-30s latency, excellent scalability, Apple ecosystem
- DASH: 10-30s latency, codec-agnostic, industry standard
- Low-Latency HLS/DASH: 3-5s latency, newer low-latency extensions of the HLS and DASH protocols
- WebRTC: <500ms latency, peer-to-peer, limited scale
- WebSocket: <2s latency, custom implementation, moderate scale
Practice Exercise:
- Build an HLS video player with quality selection and playback controls
- Implement a WebRTC video chat application with screen sharing
- Create a live audio streaming application with real-time effects
- Build an adaptive bitrate controller that switches quality based on network conditions
- Implement a custom video streaming solution using MediaSource API and WebSockets