WebSockets & Real-Time Apps
Building Video/Audio Calls
Building production-ready video and audio calling applications requires handling media streams, user interface controls, connection states, and graceful error handling. Let's build a complete video calling application.
Video Call Setup with WebRTC
Complete HTML structure for a video call application:
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Video Call App</title>
<style>
/* Global reset: predictable box sizing for the call layout. */
* {
margin: 0;
padding: 0;
box-sizing: border-box;
}
body {
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', sans-serif;
background: #1a1a1a;
color: white;
}
.container {
max-width: 1400px;
margin: 0 auto;
padding: 20px;
}
/* Call stage: remote video fills it, the local preview overlays it. */
.video-container {
position: relative;
width: 100%;
height: 80vh;
background: #000;
border-radius: 12px;
overflow: hidden;
}
#remoteVideo {
width: 100%;
height: 100%;
object-fit: cover; /* crop rather than letterbox */
}
/* Self-view: small preview box pinned bottom-right over the stage. */
#localVideo {
position: absolute;
bottom: 20px;
right: 20px;
width: 250px;
height: 180px;
border-radius: 8px;
border: 2px solid white;
object-fit: cover;
}
.controls {
display: flex;
justify-content: center;
gap: 15px;
margin-top: 20px;
}
button {
padding: 15px 30px;
border: none;
border-radius: 8px;
font-size: 16px;
cursor: pointer;
transition: all 0.3s;
}
.btn-primary {
background: #2563eb;
color: white;
}
.btn-danger {
background: #dc2626;
color: white;
}
.btn-secondary {
background: #6b7280;
color: white;
}
button:hover {
transform: translateY(-2px);
box-shadow: 0 4px 12px rgba(0,0,0,0.3);
}
button:disabled {
opacity: 0.5;
cursor: not-allowed;
}
/* Status banner; the modifier classes below are applied from
   video-call.js via updateStatus(message, className). */
.status {
text-align: center;
padding: 15px;
background: #374151;
border-radius: 8px;
margin-bottom: 20px;
}
.status.connected {
background: #059669;
}
.status.connecting {
background: #d97706;
}
.status.error {
background: #dc2626;
}
</style>
</head>
<body>
<div class="container">
<!-- Connection status banner, updated from video-call.js -->
<div class="status" id="status">Ready to connect</div>
<div class="video-container">
<!-- playsinline keeps mobile browsers from forcing fullscreen playback -->
<video id="remoteVideo" autoplay playsinline></video>
<!-- muted: never play the user's own microphone back (prevents echo) -->
<video id="localVideo" autoplay playsinline muted></video>
</div>
<!-- Call controls; all but "Start Call" begin disabled until a call starts -->
<div class="controls">
<button id="startCallBtn" class="btn-primary">Start Call</button>
<button id="endCallBtn" class="btn-danger" disabled>End Call</button>
<button id="muteBtn" class="btn-secondary" disabled>Mute</button>
<button id="videoBtn" class="btn-secondary" disabled>Video Off</button>
<button id="screenShareBtn" class="btn-secondary" disabled>Share Screen</button>
</div>
</div>
<script src="video-call.js"></script>
</body>
</html>
Complete Video Call Implementation
// video-call.js

/**
 * One-to-one WebRTC video call client.
 *
 * Signaling runs over a WebSocket ('ws://localhost:8080') that relays
 * offer/answer/ICE messages between peers; media flows peer-to-peer
 * through an RTCPeerConnection. The class also drives the call UI
 * (status banner + control buttons).
 */
class VideoCallApp {
  constructor() {
    this.signalingServer = new WebSocket('ws://localhost:8080');
    this.peerConnection = null;
    this.localStream = null;
    this.remoteStream = null;
    this.clientId = null;     // assigned by the server via the 'registered' message
    this.remotePeerId = null; // id of the peer we are calling / answering

    // Media state
    this.isAudioMuted = false;
    this.isVideoOff = false;
    this.isScreenSharing = false;
    this.screenTrack = null;  // active screen-capture track, kept so we can stop it

    // DOM elements
    this.localVideo = document.getElementById('localVideo');
    this.remoteVideo = document.getElementById('remoteVideo');
    this.statusDiv = document.getElementById('status');

    this.setupSignaling();
    this.setupUI();
  }

  /** Wire up WebSocket lifecycle and message handlers for signaling. */
  setupSignaling() {
    this.signalingServer.onopen = () => {
      console.log('Connected to signaling server');
      this.updateStatus('Connected to server', 'connected');
    };
    this.signalingServer.onmessage = async (event) => {
      const data = JSON.parse(event.data);
      await this.handleSignalingMessage(data);
    };
    this.signalingServer.onerror = (error) => {
      console.error('Signaling error:', error);
      this.updateStatus('Connection error', 'error');
    };
    this.signalingServer.onclose = () => {
      this.updateStatus('Disconnected from server', 'error');
    };
  }

  /**
   * Dispatch one decoded signaling message.
   * Known types: 'registered', 'offer', 'answer', 'ice-candidate'.
   */
  async handleSignalingMessage(data) {
    switch (data.type) {
      case 'registered':
        this.clientId = data.clientId;
        console.log('My client ID:', this.clientId);
        break;
      case 'offer':
        // Remember who is calling so our answer/candidates route back.
        this.remotePeerId = data.from;
        await this.handleOffer(data.sdp);
        break;
      case 'answer':
        await this.handleAnswer(data.sdp);
        break;
      case 'ice-candidate':
        await this.handleIceCandidate(data.candidate);
        break;
      default:
        console.warn('Unknown signaling message type:', data.type);
    }
  }

  /** Attach click handlers to the call control buttons. */
  setupUI() {
    document.getElementById('startCallBtn').onclick = () => this.startCall();
    document.getElementById('endCallBtn').onclick = () => this.endCall();
    document.getElementById('muteBtn').onclick = () => this.toggleAudio();
    document.getElementById('videoBtn').onclick = () => this.toggleVideo();
    document.getElementById('screenShareBtn').onclick = () => this.toggleScreenShare();
  }

  /**
   * Caller side: capture local media, build the peer connection, and
   * send an offer to the peer id the user enters.
   */
  async startCall() {
    try {
      this.updateStatus('Starting call...', 'connecting');
      // Get local media
      this.localStream = await navigator.mediaDevices.getUserMedia({
        video: {
          width: { ideal: 1280 },
          height: { ideal: 720 }
        },
        audio: {
          echoCancellation: true,
          noiseSuppression: true,
          autoGainControl: true
        }
      });
      this.localVideo.srcObject = this.localStream;
      // Create peer connection
      this.createPeerConnection();
      // Add local tracks to connection
      this.localStream.getTracks().forEach(track => {
        this.peerConnection.addTrack(track, this.localStream);
      });
      // Create and send offer
      const offer = await this.peerConnection.createOffer();
      await this.peerConnection.setLocalDescription(offer);
      // For demo, we'll need to get remote peer ID from user
      const remotePeerId = prompt('Enter remote peer ID:');
      if (!remotePeerId) {
        // FIX: a bare `return` here used to leave the camera/mic running
        // and the half-built connection open. Tear everything down.
        this.endCall();
        return;
      }
      this.remotePeerId = remotePeerId;
      this.sendSignaling({
        type: 'offer',
        sdp: offer,
        target: remotePeerId
      });
      this.enableCallControls();
      this.updateStatus('Calling...', 'connecting');
    } catch (error) {
      console.error('Error starting call:', error);
      this.updateStatus('Failed to start call: ' + error.message, 'error');
    }
  }

  /** Build the RTCPeerConnection and attach track/ICE/state handlers. */
  createPeerConnection() {
    const configuration = {
      iceServers: [
        { urls: 'stun:stun.l.google.com:19302' },
        { urls: 'stun:stun1.l.google.com:19302' }
      ]
    };
    this.peerConnection = new RTCPeerConnection(configuration);
    // Handle remote stream
    this.peerConnection.ontrack = (event) => {
      console.log('Received remote track');
      if (!this.remoteVideo.srcObject) {
        this.remoteStream = event.streams[0]; // keep a reference for later use
        this.remoteVideo.srcObject = event.streams[0];
        this.updateStatus('Connected', 'connected');
      }
    };
    // Trickle ICE: forward each candidate to the remote peer as it appears.
    this.peerConnection.onicecandidate = (event) => {
      if (event.candidate) {
        this.sendSignaling({
          type: 'ice-candidate',
          candidate: event.candidate,
          target: this.remotePeerId
        });
      }
    };
    // Handle connection state changes
    this.peerConnection.onconnectionstatechange = () => {
      console.log('Connection state:', this.peerConnection.connectionState);
      switch (this.peerConnection.connectionState) {
        case 'connected':
          this.updateStatus('Call connected', 'connected');
          break;
        case 'disconnected':
          this.updateStatus('Call disconnected', 'error');
          break;
        case 'failed':
          this.updateStatus('Connection failed', 'error');
          this.endCall();
          break;
      }
    };
  }

  /**
   * Callee side: capture local media, apply the remote offer, and
   * reply with an answer.
   * @param {RTCSessionDescriptionInit} offer
   */
  async handleOffer(offer) {
    try {
      // Get local media first
      this.localStream = await navigator.mediaDevices.getUserMedia({
        video: true,
        audio: true
      });
      this.localVideo.srcObject = this.localStream;
      // Create peer connection
      this.createPeerConnection();
      // Add local tracks
      this.localStream.getTracks().forEach(track => {
        this.peerConnection.addTrack(track, this.localStream);
      });
      // Set remote description
      await this.peerConnection.setRemoteDescription(
        new RTCSessionDescription(offer)
      );
      // Create answer
      const answer = await this.peerConnection.createAnswer();
      await this.peerConnection.setLocalDescription(answer);
      // Send answer
      this.sendSignaling({
        type: 'answer',
        sdp: answer,
        target: this.remotePeerId
      });
      this.enableCallControls();
      this.updateStatus('Call connected', 'connected');
    } catch (error) {
      console.error('Error handling offer:', error);
      this.updateStatus('Failed to answer call', 'error');
    }
  }

  /** Apply the remote answer to complete offer/answer negotiation. */
  async handleAnswer(answer) {
    try {
      await this.peerConnection.setRemoteDescription(
        new RTCSessionDescription(answer)
      );
      console.log('Remote description set from answer');
    } catch (error) {
      console.error('Error handling answer:', error);
    }
  }

  /** Add a remote ICE candidate; failures are logged, not fatal. */
  async handleIceCandidate(candidate) {
    try {
      await this.peerConnection.addIceCandidate(
        new RTCIceCandidate(candidate)
      );
    } catch (error) {
      console.error('Error adding ICE candidate:', error);
    }
  }

  /** Mute/unmute by toggling the audio track's `enabled` flag. */
  toggleAudio() {
    if (!this.localStream) return;
    const audioTrack = this.localStream.getAudioTracks()[0];
    if (audioTrack) {
      audioTrack.enabled = !audioTrack.enabled;
      this.isAudioMuted = !audioTrack.enabled;
      const btn = document.getElementById('muteBtn');
      btn.textContent = this.isAudioMuted ? 'Unmute' : 'Mute';
    }
  }

  /** Turn the camera feed on/off without renegotiating. */
  toggleVideo() {
    if (!this.localStream) return;
    const videoTrack = this.localStream.getVideoTracks()[0];
    if (videoTrack) {
      videoTrack.enabled = !videoTrack.enabled;
      this.isVideoOff = !videoTrack.enabled;
      const btn = document.getElementById('videoBtn');
      btn.textContent = this.isVideoOff ? 'Video On' : 'Video Off';
    }
  }

  /**
   * Switch the outgoing video between camera and screen capture using
   * RTCRtpSender.replaceTrack() (no renegotiation needed).
   */
  async toggleScreenShare() {
    try {
      if (this.isScreenSharing) {
        // Stop screen sharing, return to camera
        const videoTrack = this.localStream.getVideoTracks()[0];
        const sender = this.peerConnection.getSenders()
          .find(s => s.track && s.track.kind === 'video');
        if (sender) {
          await sender.replaceTrack(videoTrack);
        }
        // FIX: stop the capture track, otherwise the browser keeps
        // capturing the screen and its "sharing" indicator stays on.
        if (this.screenTrack) {
          this.screenTrack.onended = null; // avoid re-entering this method
          this.screenTrack.stop();
          this.screenTrack = null;
        }
        this.isScreenSharing = false;
        document.getElementById('screenShareBtn').textContent = 'Share Screen';
      } else {
        // Start screen sharing
        const screenStream = await navigator.mediaDevices.getDisplayMedia({
          video: true
        });
        const screenTrack = screenStream.getVideoTracks()[0];
        const sender = this.peerConnection.getSenders()
          .find(s => s.track && s.track.kind === 'video');
        if (sender) {
          await sender.replaceTrack(screenTrack);
        }
        this.screenTrack = screenTrack;
        // The browser's own "Stop sharing" button fires 'ended'.
        screenTrack.onended = () => {
          this.toggleScreenShare();
        };
        this.isScreenSharing = true;
        document.getElementById('screenShareBtn').textContent = 'Stop Sharing';
      }
    } catch (error) {
      console.error('Error toggling screen share:', error);
      alert('Screen sharing failed: ' + error.message);
    }
  }

  /** Release all media, close the connection, and reset UI/state. */
  endCall() {
    // Stop all tracks
    if (this.localStream) {
      this.localStream.getTracks().forEach(track => track.stop());
    }
    // FIX: also stop any active screen capture (it lives on its own stream,
    // so stopping localStream's tracks does not release it).
    if (this.screenTrack) {
      this.screenTrack.onended = null;
      this.screenTrack.stop();
      this.screenTrack = null;
    }
    // Close peer connection
    if (this.peerConnection) {
      this.peerConnection.close();
      this.peerConnection = null;
    }
    // Clear video elements
    this.localVideo.srcObject = null;
    this.remoteVideo.srcObject = null;
    // Reset state
    this.localStream = null;
    this.remoteStream = null;
    this.remotePeerId = null;
    this.isAudioMuted = false;
    this.isVideoOff = false;
    this.isScreenSharing = false;
    this.disableCallControls();
    this.updateStatus('Call ended', '');
  }

  /** Enable in-call buttons; disable "Start Call". */
  enableCallControls() {
    document.getElementById('startCallBtn').disabled = true;
    document.getElementById('endCallBtn').disabled = false;
    document.getElementById('muteBtn').disabled = false;
    document.getElementById('videoBtn').disabled = false;
    document.getElementById('screenShareBtn').disabled = false;
  }

  /** Disable in-call buttons; re-enable "Start Call". */
  disableCallControls() {
    document.getElementById('startCallBtn').disabled = false;
    document.getElementById('endCallBtn').disabled = true;
    document.getElementById('muteBtn').disabled = true;
    document.getElementById('videoBtn').disabled = true;
    document.getElementById('screenShareBtn').disabled = true;
  }

  /** Update the status banner text and its CSS state class. */
  updateStatus(message, className = '') {
    this.statusDiv.textContent = message;
    this.statusDiv.className = 'status ' + className;
  }

  /**
   * JSON-encode and send a message to the signaling server.
   * FIX: guard on readyState — send() on a CONNECTING/CLOSED socket throws.
   */
  sendSignaling(message) {
    if (this.signalingServer.readyState === WebSocket.OPEN) {
      this.signalingServer.send(JSON.stringify(message));
    } else {
      console.warn('Signaling socket not open; message dropped:', message.type);
    }
  }
}
// Bootstrap: construct the app as soon as this script executes.
const app = new VideoCallApp();
Audio-Only Calls
For audio-only calls, request an audio track and omit video entirely:
// Audio-only call
// `video: false` means no camera permission prompt and no video track
// in the resulting stream; only the microphone is requested.
this.localStream = await navigator.mediaDevices.getUserMedia({
video: false,
audio: {
echoCancellation: true,
noiseSuppression: true,
autoGainControl: true,
sampleRate: 48000 // a hint — the browser may choose a different rate
}
});
// Hide video elements or show audio-only UI
document.querySelector('.video-container').classList.add('audio-only');
Screen Sharing
Screen sharing uses the getDisplayMedia API:
/**
 * Capture the user's screen and swap it into the outgoing video sender.
 * Resolves with the capture stream; rethrows if capture is denied/fails.
 */
async function startScreenShare() {
  try {
    const captureStream = await navigator.mediaDevices.getDisplayMedia({
      video: {
        cursor: 'always',          // show the cursor in the shared video
        displaySurface: 'monitor'  // prefer capturing a whole monitor
      },
      audio: false                 // screen audio is optional; skipped here
    });
    const [captureTrack] = captureStream.getVideoTracks();
    // Swap the outgoing camera track for the screen track in place.
    const videoSender = peerConnection
      .getSenders()
      .find((sender) => sender.track && sender.track.kind === 'video');
    if (videoSender) {
      await videoSender.replaceTrack(captureTrack);
    }
    // The browser fires 'ended' when the user clicks its "Stop sharing" UI.
    captureTrack.onended = () => {
      console.log('Screen sharing stopped');
      stopScreenShare();
    };
    return captureStream;
  } catch (error) {
    console.error('Error starting screen share:', error);
    throw error;
  }
}
/**
 * Swap the camera track back into the outgoing video sender.
 * FIX: also stop the screen-capture track — without this the browser
 * keeps capturing and its "sharing" indicator never turns off.
 */
async function stopScreenShare() {
  // Get original camera track
  const cameraTrack = localStream.getVideoTracks()[0];
  const sender = peerConnection.getSenders()
    .find(s => s.track && s.track.kind === 'video');
  if (!sender) return;
  // Stop the outgoing screen track (never the camera itself) so the
  // capture session actually ends.
  if (sender.track && sender.track !== cameraTrack) {
    sender.track.stop();
  }
  // Replace screen track with camera track
  await sender.replaceTrack(cameraTrack);
}
Multiple Participants
For multiple participants, create a separate peer connection for each participant:
/**
 * Mesh-topology multi-party call: one RTCPeerConnection per remote
 * participant. Expects `configuration`, `this.localStream`, and
 * `sendToParticipant` to be provided by the surrounding application.
 */
class MultiPartyCall {
  constructor() {
    this.peerConnections = new Map(); // participantId => RTCPeerConnection
    this.remoteStreams = new Map();   // participantId => MediaStream
  }

  /**
   * Create a dedicated peer connection for a new participant and wire
   * up its stream/ICE handlers.
   */
  addParticipant(participantId) {
    const peerConnection = new RTCPeerConnection(configuration);
    // Send our local media to this participant.
    this.localStream.getTracks().forEach(track => {
      peerConnection.addTrack(track, this.localStream);
    });
    // Handle remote stream
    peerConnection.ontrack = (event) => {
      this.remoteStreams.set(participantId, event.streams[0]);
      this.updateVideoGrid();
    };
    // Relay our ICE candidates through signaling to this participant only.
    peerConnection.onicecandidate = (event) => {
      if (event.candidate) {
        this.sendToParticipant(participantId, {
          type: 'ice-candidate',
          candidate: event.candidate
        });
      }
    };
    this.peerConnections.set(participantId, peerConnection);
  }

  /** Tear down a participant's connection and remove their video tile. */
  removeParticipant(participantId) {
    const peerConnection = this.peerConnections.get(participantId);
    if (peerConnection) {
      peerConnection.close();
      this.peerConnections.delete(participantId);
    }
    this.remoteStreams.delete(participantId);
    this.updateVideoGrid();
  }

  /** Rebuild the grid: one <video> element per remote stream. */
  updateVideoGrid() {
    const container = document.getElementById('videoGrid');
    container.innerHTML = '';
    this.remoteStreams.forEach((stream, participantId) => {
      const video = document.createElement('video');
      video.srcObject = stream;
      video.autoplay = true;
      // BUG FIX: the DOM property is camelCase `playsInline`;
      // `video.playsinline = true` only set an unused expando property.
      video.playsInline = true;
      video.id = `participant-${participantId}`;
      container.appendChild(video);
    });
  }
}
Note: For production multi-party calls with many participants, consider using a Selective Forwarding Unit (SFU) like Janus or Mediasoup instead of mesh topology.
Call UI Patterns
Common UI patterns for video calls:
// 1. Picture-in-Picture mode
async function enablePiP() {
  // Feature-detect first: not all browsers expose Picture-in-Picture.
  if (!document.pictureInPictureEnabled) return;
  try {
    await localVideo.requestPictureInPicture();
  } catch (error) {
    console.error('PiP error:', error);
  }
}
// 2. Full-screen mode
function toggleFullScreen() {
  const stage = document.querySelector('.video-container');
  const inFullScreen = document.fullscreenElement !== null;
  if (inFullScreen) {
    document.exitFullscreen();
  } else {
    stage.requestFullscreen();
  }
}
// 3. Video layout switching
function switchLayout(layout) {
  const container = document.querySelector('.video-container');
  // Ignore unknown layout names, like the original switch's default.
  if (layout !== 'grid' && layout !== 'speaker') return;
  const isGrid = layout === 'grid';
  container.classList.toggle('grid-layout', isGrid);
  container.classList.toggle('speaker-layout', !isGrid);
}
// 4. Connection quality indicator
function updateConnectionQuality(stats) {
  const indicator = document.getElementById('quality-indicator');
  const level = calculateQuality(stats);
  // CSS hooks: quality-good, quality-fair, quality-poor
  indicator.className = `quality-${level}`;
}
/**
 * Classify call quality from inbound RTP counters.
 * FIX: loss rate is lost / (lost + received) — received alone excludes
 * the lost packets from the denominator — and 0/0 previously produced
 * NaN, which silently classified an idle connection as 'poor'.
 * @param {{packetsLost: number, packetsReceived: number}} stats
 * @returns {'good'|'fair'|'poor'}
 */
function calculateQuality(stats) {
  const total = stats.packetsLost + stats.packetsReceived;
  if (total === 0) return 'good'; // no traffic yet — nothing to judge
  const packetLoss = stats.packetsLost / total;
  if (packetLoss < 0.02) return 'good';
  if (packetLoss < 0.05) return 'fair';
  return 'poor';
}
Error Handling and Recovery
/**
 * Error-handling and recovery helpers for a call. Used mixin-style:
 * expects the host to provide `peerConnection`, `sendSignaling`,
 * `remotePeerId`, and `endCall`.
 */
class CallErrorHandler {
  /**
   * Translate getUserMedia failures into user-friendly messages.
   * @param {DOMException} error - rejection from navigator.mediaDevices
   */
  async handleMediaError(error) {
    console.error('Media error:', error);
    if (error.name === 'NotAllowedError') {
      alert('Camera/microphone permission denied. Please allow access.');
    } else if (error.name === 'NotFoundError') {
      alert('No camera or microphone found.');
    } else if (error.name === 'NotReadableError') {
      alert('Camera/microphone is already in use by another application.');
    } else {
      alert('Failed to access media devices: ' + error.message);
    }
  }

  /**
   * Attempt an ICE restart after a connection failure; give up and end
   * the call if the restart offer cannot be created or applied.
   */
  async handleConnectionFailure() {
    console.log('Connection failed, attempting ICE restart...');
    try {
      const offer = await this.peerConnection.createOffer({
        iceRestart: true
      });
      await this.peerConnection.setLocalDescription(offer);
      this.sendSignaling({
        type: 'offer',
        sdp: offer,
        target: this.remotePeerId
      });
    } catch (error) {
      console.error('ICE restart failed:', error);
      alert('Connection lost. Please restart the call.');
      this.endCall();
    }
  }

  /**
   * Poll inbound RTP stats every 5 seconds.
   * FIX: the original leaked its interval — it was never cleared and
   * repeated calls stacked timers. The id is now kept on the instance,
   * re-entry is a no-op, and polling stops once the connection is gone.
   * The getStats() rejection is also handled instead of left floating.
   */
  monitorConnection() {
    if (this.statsIntervalId) return; // already monitoring
    this.statsIntervalId = setInterval(() => {
      if (!this.peerConnection) {
        clearInterval(this.statsIntervalId);
        this.statsIntervalId = null;
        return;
      }
      this.peerConnection.getStats()
        .then(stats => {
          stats.forEach(report => {
            if (report.type === 'inbound-rtp') {
              console.log('Packets received:', report.packetsReceived);
              console.log('Packets lost:', report.packetsLost);
            }
          });
        })
        .catch(err => console.error('getStats failed:', err));
    }, 5000);
  }
}
Production Considerations:
- Always implement automatic reconnection on connection failures
- Monitor connection quality and notify users of poor connections
- Handle device changes (e.g., headphones plugged in)
- Implement proper cleanup on page unload
- Test on different networks (WiFi, 4G, 5G)
Exercise: Build a Complete Video Chat App
Create a Production-Ready Video Chat Application:
- Implement the complete video call app from this lesson
- Add a lobby/waiting room before joining calls
- Implement call recording functionality
- Add virtual backgrounds using Canvas API
- Show connection quality indicators
- Implement automatic reconnection on failures
- Add chat messaging alongside video
- Support for 3+ participants in a grid layout
- Bonus: Add noise cancellation and background blur
Summary
- Video calls require proper media setup, peer connections, and UI controls
- Always handle getUserMedia errors gracefully with user-friendly messages
- Implement mute, video toggle, and screen sharing features
- Use replaceTrack() to switch between camera and screen sharing
- For multiple participants, create separate peer connections
- Monitor connection quality and implement automatic recovery
- Consider using SFU for scalable multi-party calls
- Test thoroughly on different devices and network conditions
- Implement proper cleanup when calls end or users leave