JavaScript APIs

JavaScript Media Streams API: Camera and Microphone Access

Master the Media Streams API to access the camera and microphone. Learn media constraints, stream manipulation, recording, and how to build media-rich applications.

By JavaScript Document Team
media-streams, web-apis, camera, microphone, webrtc

The Media Streams API provides access to audio and video input devices like cameras and microphones, enabling rich multimedia experiences in web applications. It's the foundation for video chat, audio recording, and real-time media processing.

Understanding Media Streams API

The Media Streams API centers around the MediaStream interface, which represents a stream of media content consisting of audio and video tracks.
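
As a quick illustration, here is a hedged sketch that inspects the tracks of an existing stream (the describeStream helper name is an assumption, not part of the API):

// A MediaStream is a container of MediaStreamTrack objects
function describeStream(stream) {
  stream.getTracks().forEach((track) => {
    // track.kind is 'audio' or 'video'; readyState is 'live' or 'ended'
    console.log(`${track.kind} track "${track.label}": ${track.readyState}`);
  });

  console.log('Video tracks:', stream.getVideoTracks().length);
  console.log('Audio tracks:', stream.getAudioTracks().length);
}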

Basic Media Access

// Get user media - basic usage
async function getBasicMedia() {
  try {
    const stream = await navigator.mediaDevices.getUserMedia({
      video: true,
      audio: true,
    });

    // Display in video element (the element needs the autoplay attribute,
    // or call video.play() after assigning the stream)
    const video = document.getElementById('preview');
    video.srcObject = stream;

    return stream;
  } catch (error) {
    console.error('Error accessing media devices:', error);
    handleMediaError(error);
  }
}

// Handle different error types
function handleMediaError(error) {
  switch (error.name) {
    case 'NotFoundError':
      alert('No camera or microphone found.');
      break;
    case 'NotAllowedError':
      alert('Permission denied. Please allow access to camera and microphone.');
      break;
    case 'NotReadableError':
      alert('Hardware error. Camera or microphone is already in use.');
      break;
    case 'OverconstrainedError':
      alert('Constraints cannot be satisfied by available devices.');
      break;
    case 'SecurityError':
      alert('Media access is not allowed in this context.');
      break;
    default:
      alert('Error accessing media devices: ' + error.message);
  }
}

// Check if media devices are available
async function checkMediaDevices() {
  try {
    const devices = await navigator.mediaDevices.enumerateDevices();
    const hasCamera = devices.some((device) => device.kind === 'videoinput');
    const hasMicrophone = devices.some(
      (device) => device.kind === 'audioinput'
    );

    return { hasCamera, hasMicrophone };
  } catch (error) {
    console.error('Error enumerating devices:', error);
    return { hasCamera: false, hasMicrophone: false };
  }
}
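
A typical next step is to gate the UI on what hardware actually exists; a minimal sketch (the button IDs are assumptions):

// Hypothetical usage: disable capture buttons when hardware is missing
async function initCaptureUI() {
  const { hasCamera, hasMicrophone } = await checkMediaDevices();

  const videoButton = document.getElementById('startVideo'); // assumed ID
  const audioButton = document.getElementById('startAudio'); // assumed ID

  if (videoButton) videoButton.disabled = !hasCamera;
  if (audioButton) audioButton.disabled = !hasMicrophone;
}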

Advanced Media Constraints

// Detailed media constraints
const advancedConstraints = {
  video: {
    width: { min: 640, ideal: 1280, max: 1920 },
    height: { min: 480, ideal: 720, max: 1080 },
    frameRate: { min: 15, ideal: 30, max: 60 },
    aspectRatio: { ideal: 16 / 9 },
    facingMode: 'user', // 'user' for front, 'environment' for back
    resizeMode: 'crop-and-scale', // or 'none'
  },
  audio: {
    echoCancellation: { ideal: true },
    noiseSuppression: { ideal: true },
    autoGainControl: { ideal: true },
    sampleRate: { ideal: 44100 },
    sampleSize: { ideal: 16 },
    channelCount: { ideal: 2 },
    volume: { ideal: 1.0 },
    latency: { ideal: 0.01 },
  },
};

// Apply constraints to existing stream
async function applyConstraints(stream, constraints) {
  const videoTrack = stream.getVideoTracks()[0];
  const audioTrack = stream.getAudioTracks()[0];

  try {
    if (videoTrack && constraints.video) {
      await videoTrack.applyConstraints(constraints.video);
    }

    if (audioTrack && constraints.audio) {
      await audioTrack.applyConstraints(constraints.audio);
    }

    console.log('Constraints applied successfully');
  } catch (error) {
    console.error('Failed to apply constraints:', error);
  }
}

// Get supported constraints
function getSupportedConstraints() {
  const supported = navigator.mediaDevices.getSupportedConstraints();
  console.log('Supported constraints:', supported);
  return supported;
}

// Get current track settings
function getTrackSettings(track) {
  const settings = track.getSettings();
  const capabilities = track.getCapabilities();
  const constraints = track.getConstraints();

  return {
    current: settings,
    possible: capabilities,
    requested: constraints,
  };
}
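
These helpers pair naturally: request a stream, then compare what was requested with what the device actually delivered. A hedged usage sketch (note that getCapabilities() is not implemented in every browser):

// Sketch: request a stream, then compare requested vs. actual settings
async function inspectVideoTrack() {
  const stream = await navigator.mediaDevices.getUserMedia(advancedConstraints);
  const track = stream.getVideoTracks()[0];

  const { current, possible, requested } = getTrackSettings(track);
  console.log('Actual resolution:', current.width, 'x', current.height);
  console.log('Requested constraints:', requested);
  console.log('Device capabilities:', possible);

  stream.getTracks().forEach((t) => t.stop());
}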

Practical Applications

Media Device Manager

class MediaDeviceManager {
  constructor() {
    this.devices = {
      video: [],
      audio: [],
      audioOutput: [],
    };

    this.currentStream = null;
    this.selectedDevices = {
      video: null,
      audio: null,
    };

    this.init();
  }

  async init() {
    // Request permissions first to get device labels
    try {
      const stream = await navigator.mediaDevices.getUserMedia({
        video: true,
        audio: true,
      });

      // Stop tracks immediately - we just needed permissions
      stream.getTracks().forEach((track) => track.stop());
    } catch (error) {
      console.error('Failed to get initial permissions:', error);
    }

    // Enumerate devices
    await this.updateDeviceList();

    // Listen for device changes
    navigator.mediaDevices.ondevicechange = () => this.updateDeviceList();
  }

  async updateDeviceList() {
    const devices = await navigator.mediaDevices.enumerateDevices();

    this.devices.video = devices.filter((d) => d.kind === 'videoinput');
    this.devices.audio = devices.filter((d) => d.kind === 'audioinput');
    this.devices.audioOutput = devices.filter((d) => d.kind === 'audiooutput');

    this.updateUI();
  }

  updateUI() {
    // Update video input select
    this.updateSelect('videoSelect', this.devices.video);

    // Update audio input select
    this.updateSelect('audioSelect', this.devices.audio);

    // Update audio output select (if supported)
    if (this.devices.audioOutput.length > 0) {
      this.updateSelect('audioOutputSelect', this.devices.audioOutput);
    }
  }

  updateSelect(selectId, devices) {
    const select = document.getElementById(selectId);
    if (!select) return;

    select.innerHTML = '';

    devices.forEach((device) => {
      const option = document.createElement('option');
      option.value = device.deviceId;
      option.textContent =
        device.label || `${device.kind} ${device.deviceId.slice(0, 8)}`;
      select.appendChild(option);
    });

    // Restore selection
    const deviceType = selectId.includes('video') ? 'video' : 'audio';
    if (this.selectedDevices[deviceType]) {
      select.value = this.selectedDevices[deviceType];
    }
  }

  async switchCamera(deviceId) {
    this.selectedDevices.video = deviceId;

    if (this.currentStream) {
      // Stop current video track
      const videoTrack = this.currentStream.getVideoTracks()[0];
      if (videoTrack) {
        videoTrack.stop();
      }

      // Get new video track
      try {
        const newStream = await navigator.mediaDevices.getUserMedia({
          video: { deviceId: { exact: deviceId } },
          audio: false,
        });

        const newVideoTrack = newStream.getVideoTracks()[0];

        // Replace track in stream
        this.currentStream.removeTrack(videoTrack);
        this.currentStream.addTrack(newVideoTrack);

        // Update any video element attached to this stream. Note that
        // srcObject is a property, not an attribute, so it cannot be
        // matched with an attribute selector like 'video[srcObject]'.
        document.querySelectorAll('video').forEach((video) => {
          if (video.srcObject === this.currentStream) {
            video.srcObject = this.currentStream;
          }
        });
      } catch (error) {
        console.error('Failed to switch camera:', error);
      }
    }
  }

  async switchMicrophone(deviceId) {
    this.selectedDevices.audio = deviceId;

    if (this.currentStream) {
      // Similar to switchCamera but for audio
      const audioTrack = this.currentStream.getAudioTracks()[0];
      if (audioTrack) {
        audioTrack.stop();
      }

      try {
        const newStream = await navigator.mediaDevices.getUserMedia({
          video: false,
          audio: { deviceId: { exact: deviceId } },
        });

        const newAudioTrack = newStream.getAudioTracks()[0];

        this.currentStream.removeTrack(audioTrack);
        this.currentStream.addTrack(newAudioTrack);
      } catch (error) {
        console.error('Failed to switch microphone:', error);
      }
    }
  }

  async getStream(constraints = {}) {
    // Stop current stream if exists
    if (this.currentStream) {
      this.stopStream();
    }

    // Build constraints with selected devices
    const finalConstraints = {
      video:
        constraints.video !== false
          ? {
              ...(typeof constraints.video === 'object'
                ? constraints.video
                : {}),
              ...(this.selectedDevices.video
                ? {
                    deviceId: { exact: this.selectedDevices.video },
                  }
                : {}),
            }
          : false,
      audio:
        constraints.audio !== false
          ? {
              ...(typeof constraints.audio === 'object'
                ? constraints.audio
                : {}),
              ...(this.selectedDevices.audio
                ? {
                    deviceId: { exact: this.selectedDevices.audio },
                  }
                : {}),
            }
          : false,
    };

    try {
      this.currentStream =
        await navigator.mediaDevices.getUserMedia(finalConstraints);
      return this.currentStream;
    } catch (error) {
      console.error('Failed to get stream:', error);
      throw error;
    }
  }

  stopStream() {
    if (this.currentStream) {
      this.currentStream.getTracks().forEach((track) => track.stop());
      this.currentStream = null;
    }
  }

  async setAudioOutput(deviceId) {
    const audio = document.querySelector('audio');
    if (audio && audio.setSinkId) {
      try {
        await audio.setSinkId(deviceId);
        console.log('Audio output device changed');
      } catch (error) {
        console.error('Failed to change audio output:', error);
      }
    }
  }
}
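
A minimal wiring sketch, assuming the select IDs the class already expects (videoSelect, audioSelect) plus a hypothetical startButton and preview element:

// Hypothetical page wiring for MediaDeviceManager
const deviceManager = new MediaDeviceManager();

document.getElementById('videoSelect').addEventListener('change', (event) => {
  deviceManager.switchCamera(event.target.value);
});

document.getElementById('audioSelect').addEventListener('change', (event) => {
  deviceManager.switchMicrophone(event.target.value);
});

document.getElementById('startButton').addEventListener('click', async () => {
  const stream = await deviceManager.getStream({ video: true, audio: true });
  document.getElementById('preview').srcObject = stream; // assumed element
});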

Audio Visualizer

class AudioVisualizer {
  constructor(stream) {
    this.stream = stream;
    this.audioContext = new (window.AudioContext ||
      window.webkitAudioContext)();
    this.analyser = this.audioContext.createAnalyser();
    this.canvas = document.getElementById('visualizer');
    this.canvasContext = this.canvas.getContext('2d');

    this.init();
  }

  init() {
    // Configure analyser
    this.analyser.fftSize = 2048;
    this.analyser.smoothingTimeConstant = 0.8;

    // Connect audio stream
    const source = this.audioContext.createMediaStreamSource(this.stream);
    source.connect(this.analyser);

    // Start visualization
    this.visualize();
  }

  visualize() {
    const bufferLength = this.analyser.frequencyBinCount;
    const dataArray = new Uint8Array(bufferLength);

    const draw = () => {
      // Keep a handle on the animation frame so destroy() can cancel it
      this.animationId = requestAnimationFrame(draw);

      // Get frequency data
      this.analyser.getByteFrequencyData(dataArray);

      // Clear canvas
      this.canvasContext.fillStyle = 'rgb(0, 0, 0)';
      this.canvasContext.fillRect(0, 0, this.canvas.width, this.canvas.height);

      // Draw bars
      const barWidth = (this.canvas.width / bufferLength) * 2.5;
      let barHeight;
      let x = 0;

      for (let i = 0; i < bufferLength; i++) {
        barHeight = dataArray[i];

        // Color based on frequency
        const r = barHeight + 25 * (i / bufferLength);
        const g = 250 * (i / bufferLength);
        const b = 50;

        this.canvasContext.fillStyle = `rgb(${r},${g},${b})`;
        this.canvasContext.fillRect(
          x,
          this.canvas.height - barHeight / 2,
          barWidth,
          barHeight / 2
        );

        x += barWidth + 1;
      }
    };

    draw();
  }

  getVolumeLevel() {
    const dataArray = new Uint8Array(this.analyser.frequencyBinCount);
    this.analyser.getByteFrequencyData(dataArray);

    // Calculate average volume
    const average = dataArray.reduce((a, b) => a + b) / dataArray.length;
    return average / 255; // Normalize to 0-1
  }

  destroy() {
    cancelAnimationFrame(this.animationId);
    this.audioContext.close();
  }
}

// Waveform visualizer
class WaveformVisualizer {
  constructor(stream) {
    this.stream = stream;
    this.audioContext = new AudioContext();
    this.analyser = this.audioContext.createAnalyser();
    this.canvas = document.getElementById('waveform');
    this.canvasContext = this.canvas.getContext('2d');

    this.init();
  }

  init() {
    this.analyser.fftSize = 2048;

    const source = this.audioContext.createMediaStreamSource(this.stream);
    source.connect(this.analyser);

    this.drawWaveform();
  }

  drawWaveform() {
    const bufferLength = this.analyser.fftSize;
    const dataArray = new Float32Array(bufferLength);

    const draw = () => {
      requestAnimationFrame(draw);

      // Get waveform data
      this.analyser.getFloatTimeDomainData(dataArray);

      // Clear canvas
      this.canvasContext.fillStyle = 'rgb(200, 200, 200)';
      this.canvasContext.fillRect(0, 0, this.canvas.width, this.canvas.height);

      // Draw waveform
      this.canvasContext.lineWidth = 2;
      this.canvasContext.strokeStyle = 'rgb(0, 0, 0)';
      this.canvasContext.beginPath();

      const sliceWidth = this.canvas.width / bufferLength;
      let x = 0;

      for (let i = 0; i < bufferLength; i++) {
        const v = dataArray[i];
        const y = ((v + 1) / 2) * this.canvas.height;

        if (i === 0) {
          this.canvasContext.moveTo(x, y);
        } else {
          this.canvasContext.lineTo(x, y);
        }

        x += sliceWidth;
      }

      this.canvasContext.stroke();
    };

    draw();
  }
}
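
Either visualizer can be started the same way. Because autoplay policies may create the AudioContext in a suspended state, starting from a click handler and resuming explicitly is safest; a sketch (the button ID is an assumption):

// Hypothetical wiring: visualize the microphone after a user gesture
document
  .getElementById('startVisualizer')
  .addEventListener('click', async () => {
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    const visualizer = new AudioVisualizer(stream);

    // Autoplay policies can leave the context suspended until a gesture
    if (visualizer.audioContext.state === 'suspended') {
      await visualizer.audioContext.resume();
    }
  });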

Media Recorder

class MediaRecorderManager {
  constructor() {
    this.mediaRecorder = null;
    this.recordedChunks = [];
    this.stream = null;
    this.mimeType = null;
  }

  getSupportedMimeTypes() {
    const types = [
      'video/webm;codecs=vp9,opus',
      'video/webm;codecs=vp8,opus',
      'video/webm',
      'video/mp4',
      'audio/webm;codecs=opus',
      'audio/webm',
      'audio/ogg;codecs=opus',
    ];

    return types.filter((type) => MediaRecorder.isTypeSupported(type));
  }

  async startRecording(stream, options = {}) {
    this.stream = stream;
    this.recordedChunks = [];

    // Determine MIME type (fall back to the first supported one)
    const mimeTypes = this.getSupportedMimeTypes();
    this.mimeType = options.mimeType || mimeTypes[0];

    if (!this.mimeType || !MediaRecorder.isTypeSupported(this.mimeType)) {
      throw new Error(`MIME type ${this.mimeType} is not supported`);
    }

    // Create MediaRecorder
    const recorderOptions = {
      mimeType: this.mimeType,
      videoBitsPerSecond: options.videoBitsPerSecond || 2500000,
      audioBitsPerSecond: options.audioBitsPerSecond || 128000,
    };

    this.mediaRecorder = new MediaRecorder(stream, recorderOptions);

    // Handle data
    this.mediaRecorder.ondataavailable = (event) => {
      if (event.data.size > 0) {
        this.recordedChunks.push(event.data);
        this.onProgress(event.data);
      }
    };

    // Handle stop
    this.mediaRecorder.onstop = () => {
      this.onRecordingComplete();
    };

    // Handle errors
    this.mediaRecorder.onerror = (event) => {
      console.error('MediaRecorder error:', event.error);
      this.onError(event.error);
    };

    // Start recording
    const timeslice = options.timeslice || 1000; // Request data every second
    this.mediaRecorder.start(timeslice);

    this.onStart();
  }

  pauseRecording() {
    if (this.mediaRecorder && this.mediaRecorder.state === 'recording') {
      this.mediaRecorder.pause();
      this.onPause();
    }
  }

  resumeRecording() {
    if (this.mediaRecorder && this.mediaRecorder.state === 'paused') {
      this.mediaRecorder.resume();
      this.onResume();
    }
  }

  stopRecording() {
    if (this.mediaRecorder && this.mediaRecorder.state !== 'inactive') {
      this.mediaRecorder.stop();

      // Stop all tracks
      if (this.stream) {
        this.stream.getTracks().forEach((track) => track.stop());
      }
    }
  }

  getBlob() {
    return new Blob(this.recordedChunks, { type: this.mimeType });
  }

  async saveRecording(filename) {
    const blob = this.getBlob();
    const url = URL.createObjectURL(blob);

    const a = document.createElement('a');
    a.href = url;
    a.download = filename || `recording-${Date.now()}.webm`;
    a.click();

    URL.revokeObjectURL(url);
  }

  // Event handlers (to be overridden)
  onStart() {
    console.log('Recording started');
  }

  onPause() {
    console.log('Recording paused');
  }

  onResume() {
    console.log('Recording resumed');
  }

  onProgress(chunk) {
    const size = this.recordedChunks.reduce((acc, c) => acc + c.size, 0);
    console.log(`Recording size: ${(size / 1024 / 1024).toFixed(2)} MB`);
  }

  onRecordingComplete() {
    console.log('Recording completed');
    const blob = this.getBlob();
    console.log(`Final size: ${(blob.size / 1024 / 1024).toFixed(2)} MB`);
  }

  onError(error) {
    console.error('Recording error:', error);
  }
}
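
A short usage sketch for the base recorder (the five-second cutoff and file name are arbitrary):

// Hypothetical usage: record the camera for five seconds, then save
async function recordFiveSeconds() {
  const recorder = new MediaRecorderManager();

  // Save the file once recording finishes
  recorder.onRecordingComplete = () => recorder.saveRecording('demo.webm');

  const stream = await navigator.mediaDevices.getUserMedia({
    video: true,
    audio: true,
  });

  await recorder.startRecording(stream);
  setTimeout(() => recorder.stopRecording(), 5000); // arbitrary cutoff
}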

// Advanced recorder with preview
class VideoRecorder extends MediaRecorderManager {
  constructor(previewElement) {
    super();
    this.preview = previewElement;
    this.startTime = null;
    this.duration = 0;
  }

  async startRecording(constraints = {}) {
    try {
      // Get stream
      const stream = await navigator.mediaDevices.getUserMedia({
        // Use ?? so an explicit `false` is respected (|| would override it)
        video: constraints.video ?? true,
        audio: constraints.audio ?? true,
      });

      // Show preview
      this.preview.srcObject = stream;

      // Start recording
      await super.startRecording(stream, constraints);

      this.startTime = Date.now();
      this.updateTimer();
    } catch (error) {
      console.error('Failed to start recording:', error);
      throw error;
    }
  }

  updateTimer() {
    if (this.mediaRecorder && this.mediaRecorder.state === 'recording') {
      this.duration = Math.floor((Date.now() - this.startTime) / 1000);

      const minutes = Math.floor(this.duration / 60);
      const seconds = this.duration % 60;

      const timerElement = document.getElementById('recordingTimer');
      if (timerElement) {
        timerElement.textContent = `${minutes.toString().padStart(2, '0')}:${seconds.toString().padStart(2, '0')}`;
      }

      requestAnimationFrame(() => this.updateTimer());
    }
  }

  onRecordingComplete() {
    super.onRecordingComplete();

    // Create playback element
    const blob = this.getBlob();
    const url = URL.createObjectURL(blob);

    const playback = document.getElementById('playback');
    if (playback) {
      playback.src = url;
      playback.controls = true;
    }
  }
}

Screen Recording

class ScreenRecorder {
  constructor() {
    this.recorder = new MediaRecorderManager();
    this.systemAudio = false;
    this.microphone = false;
  }

  async startScreenRecording(options = {}) {
    try {
      // Get screen stream
      const screenStream = await navigator.mediaDevices.getDisplayMedia({
        video: {
          displaySurface: options.displaySurface || 'monitor',
          logicalSurface: true,
          cursor: options.cursor || 'always',
          width: { max: 1920 },
          height: { max: 1080 },
          frameRate: { max: 30 },
        },
        audio: this.systemAudio, // System audio if supported
      });

      // Combine with microphone if requested
      let finalStream = screenStream;

      if (this.microphone) {
        const micStream = await navigator.mediaDevices.getUserMedia({
          audio: {
            echoCancellation: true,
            noiseSuppression: true,
          },
        });

        finalStream = this.combineStreams(screenStream, micStream);
      }

      // Monitor screen share ending
      screenStream.getVideoTracks()[0].onended = () => {
        this.stopRecording();
      };

      // Start recording
      await this.recorder.startRecording(finalStream, {
        mimeType: 'video/webm;codecs=vp9,opus',
        videoBitsPerSecond: 5000000, // 5 Mbps for screen recording
      });

      this.showRecordingUI();
    } catch (error) {
      console.error('Failed to start screen recording:', error);
      throw error;
    }
  }

  combineStreams(screenStream, micStream) {
    const audioContext = new AudioContext();
    const audioDestination = audioContext.createMediaStreamDestination();

    // Add microphone audio
    if (micStream.getAudioTracks().length > 0) {
      const micSource = audioContext.createMediaStreamSource(micStream);
      const micGain = audioContext.createGain();
      micGain.gain.value = 1.0;
      micSource.connect(micGain).connect(audioDestination);
    }

    // Add system audio if available
    if (screenStream.getAudioTracks().length > 0) {
      const screenSource = audioContext.createMediaStreamSource(screenStream);
      const screenGain = audioContext.createGain();
      screenGain.gain.value = 0.7; // Lower system audio slightly
      screenSource.connect(screenGain).connect(audioDestination);
    }

    // Combine video from screen and audio from destination
    const combinedStream = new MediaStream([
      ...screenStream.getVideoTracks(),
      ...audioDestination.stream.getAudioTracks(),
    ]);

    return combinedStream;
  }

  stopRecording() {
    this.recorder.stopRecording();
    this.hideRecordingUI();
  }

  showRecordingUI() {
    // Create recording indicator
    const indicator = document.createElement('div');
    indicator.id = 'screen-recording-indicator';
    indicator.innerHTML = `
      <div class="recording-dot"></div>
      <span>Recording Screen</span>
      <button onclick="screenRecorder.stopRecording()">Stop</button>
    `;
    indicator.style.cssText = `
      position: fixed;
      top: 20px;
      right: 20px;
      background: rgba(255, 0, 0, 0.9);
      color: white;
      padding: 10px 20px;
      border-radius: 20px;
      display: flex;
      align-items: center;
      gap: 10px;
      z-index: 10000;
    `;

    document.body.appendChild(indicator);
  }

  hideRecordingUI() {
    const indicator = document.getElementById('screen-recording-indicator');
    if (indicator) {
      indicator.remove();
    }
  }
}
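
Because the indicator's Stop button calls screenRecorder.stopRecording() via an inline handler, the instance must be reachable globally; a minimal sketch (the shareScreen button ID is an assumption):

// Hypothetical wiring: expose the instance globally so the indicator's
// inline onclick handler can reach it
const screenRecorder = new ScreenRecorder();
screenRecorder.microphone = true; // also mix in the microphone
window.screenRecorder = screenRecorder;

document.getElementById('shareScreen').addEventListener('click', () => {
  screenRecorder.startScreenRecording({ cursor: 'always' });
});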

Picture-in-Picture

class PictureInPicture {
  constructor(videoElement) {
    this.video = videoElement;
    this.pipWindow = null;

    this.init();
  }

  init() {
    // Check if PiP is supported
    if (!('pictureInPictureEnabled' in document)) {
      console.warn('Picture-in-Picture is not supported');
      return;
    }

    // Add event listeners
    this.video.addEventListener('enterpictureinpicture', (event) => {
      this.pipWindow = event.pictureInPictureWindow;
      this.onEnterPiP();
    });

    this.video.addEventListener('leavepictureinpicture', () => {
      this.pipWindow = null;
      this.onLeavePiP();
    });
  }

  async toggle() {
    try {
      if (this.video !== document.pictureInPictureElement) {
        await this.enter();
      } else {
        await this.exit();
      }
    } catch (error) {
      console.error('Failed to toggle PiP:', error);
    }
  }

  async enter() {
    try {
      await this.video.requestPictureInPicture();
    } catch (error) {
      console.error('Failed to enter PiP:', error);
      throw error;
    }
  }

  async exit() {
    try {
      await document.exitPictureInPicture();
    } catch (error) {
      console.error('Failed to exit PiP:', error);
      throw error;
    }
  }

  onEnterPiP() {
    console.log('Entered Picture-in-Picture');

    // Update UI
    const button = document.getElementById('pipButton');
    if (button) {
      button.textContent = 'Exit PiP';
    }

    // Monitor PiP window size
    if (this.pipWindow) {
      console.log(
        `PiP window size: ${this.pipWindow.width}x${this.pipWindow.height}`
      );

      this.pipWindow.addEventListener('resize', () => {
        console.log(
          `PiP resized: ${this.pipWindow.width}x${this.pipWindow.height}`
        );
      });
    }
  }

  onLeavePiP() {
    console.log('Left Picture-in-Picture');

    // Update UI
    const button = document.getElementById('pipButton');
    if (button) {
      button.textContent = 'Enter PiP';
    }
  }

  isPictureInPicture() {
    return this.video === document.pictureInPictureElement;
  }
}
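
Wiring the toggle to the pipButton element the class already updates is enough for most cases; a minimal sketch (the preview ID is an assumption):

// Hypothetical wiring for the PiP toggle
const video = document.getElementById('preview'); // assumed element
const pip = new PictureInPicture(video);

document.getElementById('pipButton').addEventListener('click', () => {
  pip.toggle();
});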

Best Practices

  1. Always check for API support
function checkMediaSupport() {
  const support = {
    getUserMedia: !!(
      navigator.mediaDevices && navigator.mediaDevices.getUserMedia
    ),
    mediaRecorder: typeof MediaRecorder !== 'undefined',
    pictureInPicture: 'pictureInPictureEnabled' in document,
    displayMedia: !!(
      navigator.mediaDevices && navigator.mediaDevices.getDisplayMedia
    ),
  };

  return support;
}
  2. Handle permissions properly
async function requestMediaPermissions() {
  try {
    // Check current permissions
    // Note: the 'camera' and 'microphone' permission names are not
    // supported in every browser (e.g. Firefox), hence the try/catch
    const cameraPermission = await navigator.permissions.query({
      name: 'camera',
    });
    const micPermission = await navigator.permissions.query({
      name: 'microphone',
    });

    console.log('Camera permission:', cameraPermission.state);
    console.log('Microphone permission:', micPermission.state);

    // Listen for permission changes
    cameraPermission.onchange = () => {
      console.log('Camera permission changed:', cameraPermission.state);
    };

    // Request access if needed
    if (
      cameraPermission.state === 'prompt' ||
      micPermission.state === 'prompt'
    ) {
      const stream = await navigator.mediaDevices.getUserMedia({
        video: true,
        audio: true,
      });

      // Stop tracks after getting permission
      stream.getTracks().forEach((track) => track.stop());
    }
  } catch (error) {
    console.error('Permission error:', error);
  }
}
  3. Clean up resources
class MediaCleanup {
  constructor() {
    this.activeStreams = new Set();
  }

  trackStream(stream) {
    this.activeStreams.add(stream);

    // Auto-cleanup when all tracks end
    const tracks = stream.getTracks();
    let activeTracks = tracks.length;

    tracks.forEach((track) => {
      track.onended = () => {
        activeTracks--;
        if (activeTracks === 0) {
          this.activeStreams.delete(stream);
        }
      };
    });
  }

  stopAllStreams() {
    this.activeStreams.forEach((stream) => {
      stream.getTracks().forEach((track) => track.stop());
    });
    this.activeStreams.clear();
  }

  cleanup() {
    this.stopAllStreams();

    // Clear video elements
    document.querySelectorAll('video').forEach((video) => {
      video.srcObject = null;
    });
  }
}

// Use on page unload
const mediaCleanup = new MediaCleanup();

window.addEventListener('beforeunload', () => {
  mediaCleanup.cleanup();
});
  4. Optimize media constraints for performance (a usage sketch follows this list)
function getOptimalConstraints(scenario) {
  const constraints = {
    videocall: {
      video: {
        width: { ideal: 1280 },
        height: { ideal: 720 },
        frameRate: { ideal: 30 },
      },
      audio: {
        echoCancellation: true,
        noiseSuppression: true,
        autoGainControl: true,
      },
    },
    recording: {
      video: {
        width: { ideal: 1920 },
        height: { ideal: 1080 },
        frameRate: { ideal: 30 },
      },
      audio: {
        sampleRate: 44100,
        channelCount: 2,
        echoCancellation: false,
      },
    },
    streaming: {
      video: {
        width: { min: 640, ideal: 1280, max: 1920 },
        height: { min: 360, ideal: 720, max: 1080 },
        frameRate: { min: 24, ideal: 30, max: 60 },
      },
      audio: {
        sampleRate: 48000,
        channelCount: 2,
        echoCancellation: true,
      },
    },
  };

  return constraints[scenario] || constraints.videocall;
}
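
As referenced above, a short usage sketch for the scenario presets (the preview element is an assumption):

// Hypothetical usage: pick a preset for the task at hand
async function startVideoCall() {
  const constraints = getOptimalConstraints('videocall');
  const stream = await navigator.mediaDevices.getUserMedia(constraints);
  document.getElementById('preview').srcObject = stream; // assumed element
}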

Conclusion

The Media Streams API provides powerful capabilities for accessing and manipulating audio and video in web applications. From simple camera access to complex recording and streaming scenarios, it enables rich multimedia experiences. By understanding constraints, device management, and stream manipulation, you can build sophisticated applications that leverage the full potential of users' media devices. Remember to always handle permissions gracefully, provide fallbacks, and clean up resources properly for the best user experience.