can't play video in subsequent clients

When I connect to the mediasoup server backend, consumer video plays only in the first client that connects. The first client plays all of the videos, including its own and the other clients', whereas each subsequent client that connects to the backend successfully produces its stream but doesn't play any of the subscribed videos.

Need more information. In the meantime, try the following:

  1. Confirm whether you are receiving an incoming media stream — you can check in chrome://webrtc-internals.
  2. Autoplay of an HTML5 video tag is restricted unless the user has clicked on the document (HTML page) or the muted attribute is used, like:
<video autoplay muted></video>

From chrome://webrtc-internals followings points were noted

  1. There was both inbound and outbound traffic in the first client
  2. There was only outbound traffic from the subsequent clients, but no inbound traffic, despite being subscribed to the tracks.
    Also, I have not used the muted attribute.
    I'm just puzzled that the first client that connects to the server performs as expected as both consumer and producer, whereas the subsequent clients that connect to the server only have a functional producer, not a functional consumer…

please share client side code…

Basically i’m trying to implement the codes from the repo here in next and consumers array is being managed with redux
this is next page code

const VideoSession = () => {
  const consumers = useSelector((state) => state.call.consumers);
  useEffect(() => {
    call.main(uuidv4()).then(call.joinRoom());
  }, []);

  return (
    <div>
      {/* <Button onClick={call.joinRoom}>Join</Button> */}
      <Button onClick={call.sendCameraStreams}>Camera</Button>
      <Button>Screen</Button>
      <Button>Stop</Button>
      <Button>Leave</Button>
      {consumers.map((consumer, key) => {
        return <Media key={key} consumer={consumer}/>;
      })}
    </div>
  );
};

// Small outlined control button used for the call actions above
// (styled-components; the template body is plain CSS).
const Button = styled.button`
  border-width: 1px;
  border-color: grey;
  border-radius: 5px;
  padding: 0 4px;
  margin: 2px;
`;

This is client side mediasoup code

// Namespaced debug loggers. NOTE(review): `warn` is never used in the
// visible code, and a `log` logger is referenced throughout but not defined
// here — presumably imported/defined elsewhere in this module; confirm.
const warn = debugModule('demo-app:WARN');
const err = debugModule('demo-app:ERROR');

// Module-level call state. NOTE(review): these are exported *mutable* `let`
// bindings — importers see live values that change as the call progresses.
export let myPeerId; // our unique peer id, set by main()
export let device, // mediasoup-client Device for this browser
  joined, // true once joinRoom() has completed
  localCam, // MediaStream from getUserMedia (camera + mic)
  localScreen, // MediaStream for screen share (unused in visible code)
  recvTransport, // single receive transport, created lazily
  sendTransport, // single send transport, created lazily
  camVideoProducer,
  camAudioProducer,
  screenVideoProducer,
  screenAudioProducer,
  currentActiveSpeaker = {}, // last active-speaker info from "sync"
  lastPollSyncData = {}, // peers object from the previous "sync" poll
  consumers = [], // all live mediasoup consumers (mirrored into redux)
  pollingInterval; // setInterval handle for the 1s sync poll

// Shortcut to the redux store's dispatch, used to mirror state for the UI.
const dispatch = store.dispatch;

/**
 * Initialize the client: remember our peer id (locally and in redux),
 * create the mediasoup Device, and register a best-effort "leave" for
 * when the tab closes.
 *
 * @param {string} id - unique peer id for this client (e.g. a uuid).
 */
export async function main(id) {
  myPeerId = id;
  dispatch(callActions.setMyPeerId({ myPeerId }));

  try {
    device = new mediasoup.Device();
  } catch (e) {
    if (e.name === "UnsupportedError") {
      console.error("browser not supported for video calls");
    } else {
      console.error(e);
    }
    // BUG FIX: previously only the UnsupportedError branch returned; any
    // other constructor failure fell through with `device` undefined, and
    // joinRoom() later crashed on `device.loaded`. Bail out in both cases.
    return;
  }

  // sendBeacon survives page unload, so the server learns we left.
  window.addEventListener("unload", () => sig("leave", {}, true));
}

/**
 * Join the signaling server's room and start the 1-second sync poll.
 * Safe to call repeatedly — only the first call does anything.
 */
export async function joinRoom() {
  if (joined) {
    return;
  }

  log("join room");

  try {
    // Register with the server; load the device with the router's RTP
    // capabilities the first time around.
    const { routerRtpCapabilities } = await sig("join-as-new-peer");
    if (!device.loaded) {
      await device.load({ routerRtpCapabilities });
    }
    joined = true;
  } catch (e) {
    console.error(e);
    return;
  }

  // Poll the server once per second to discover peers/tracks; the first
  // polling error stops the loop for good.
  const poll = async () => {
    const { error } = await pollAndUpdate();
    if (error) {
      clearInterval(pollingInterval);
      err(error);
    }
  };
  pollingInterval = setInterval(poll, 1000);
}

/**
 * One sync tick: fetch room state from the server, subscribe to newly
 * advertised tracks, and tear down consumers whose peer or track vanished.
 *
 * @returns {Promise<{error?: any}>} `{ error }` when the sync request
 *   failed (caller stops polling), otherwise `{}`.
 */
async function pollAndUpdate() {
  const { peers, activeSpeaker, error } = await sig("sync");
  if (error) {
    return { error };
  }

  currentActiveSpeaker = activeSpeaker;
  updateActiveSpeaker();

  // When the peer list changes, (re-)request a subscription for every
  // advertised track; subscribeToTrack() ignores tracks we already consume.
  // BUG FIX: each call is now awaited. The old fire-and-forget version let
  // several subscribeToTrack() calls run concurrently, and they raced on the
  // lazy creation of recvTransport — multiple recv transports could be
  // created with only one completing signaling, a likely cause of the
  // reported "produces fine but never receives media" symptom.
  const thisPeersList = utils.sortPeers(peers);
  const lastPeersList = utils.sortPeers(lastPollSyncData);
  if (!deepEqual(thisPeersList, lastPeersList)) {
    for (const peer of thisPeersList) {
      for (const mediaTag of Object.keys(peer.media)) {
        await subscribeToTrack(peer.id, mediaTag);
      }
    }
  }

  // If a peer has gone away, close all consumers we held for that peer.
  // Snapshot the matches first: closeConsumer() reassigns the module-level
  // `consumers` array while we iterate.
  for (const id in lastPollSyncData) {
    if (!peers[id]) {
      log(`peer ${id} has exited`);
      const gone = consumers.filter((c) => c.appData.peerId === id);
      for (const consumer of gone) {
        await closeConsumer(consumer);
      }
    }
  }

  // If a peer stopped sending media we are consuming, close that consumer.
  // BUG FIX: was an async forEach callback whose promises were dropped.
  for (const consumer of [...consumers]) {
    const { peerId, mediaTag } = consumer.appData;
    if (peers[peerId] && !peers[peerId].media[mediaTag]) {
      log(`peer ${peerId} has stopped transmitting ${mediaTag}`);
      await closeConsumer(consumer);
    }
  }

  lastPollSyncData = peers;
  return {}; // empty object means "no error"
}

/**
 * Acquire the local camera + microphone stream into `localCam`.
 * A no-op when a stream has already been acquired; getUserMedia failures
 * are logged and swallowed (callers must check `localCam` themselves).
 */
export async function startCamera() {
  if (localCam) {
    return;
  }
  log("start camera");
  const constraints = { video: true, audio: true };
  try {
    localCam = await navigator.mediaDevices.getUserMedia(constraints);
  } catch (e) {
    console.error("start camera error", e);
  }
}

/**
 * Join the room (if needed), open the camera, and produce its video and
 * audio tracks over the (lazily created) send transport.
 */
export async function sendCameraStreams() {
  await joinRoom();
  await startCamera();

  // BUG FIX: startCamera() swallows getUserMedia failures; without a local
  // stream, `localCam.getVideoTracks()` below would throw. Bail out cleanly.
  if (!localCam) {
    err("no local camera stream; cannot produce");
    return;
  }

  if (!sendTransport) {
    sendTransport = await createTransport("send");
  }

  // Camera video, simulcast-encoded. The server-side producer is created in
  // the transport's "produce" event handler (see createTransport()).
  camVideoProducer = await sendTransport.produce({
    track: localCam.getVideoTracks()[0],
    encodings: camEncodings(),
    appData: { mediaTag: "cam-video" },
  });
  if (getCamPausedState()) {
    try {
      await camVideoProducer.pause();
    } catch (e) {
      console.error(e);
    }
  }

  // Camera audio.
  camAudioProducer = await sendTransport.produce({
    track: localCam.getAudioTracks()[0],
    appData: { mediaTag: "cam-audio" },
  });
  if (getMicPausedState()) {
    try {
      // BUG FIX: pause() was not awaited here (unlike the video path), so a
      // rejection became an unhandled promise rejection.
      await camAudioProducer.pause();
    } catch (e) {
      console.error(e);
    }
  }
}

// Placeholder: the active-speaker UI highlight is not implemented in this
// port. pollAndUpdate() still calls it on every sync tick.
function updateActiveSpeaker() {}

// Create a mediasoup send or receive transport and wire up its signaling
// events. The server side of the transport is created by the
// "create-transport" request; ICE/DTLS connection parameters are exchanged
// lazily when the transport first connects.
//
// @param {string} direction - "send" or "recv".
// @returns the connected-on-demand mediasoup-client transport.
async function createTransport(direction) {
  log(`create ${direction} transport`);
  let transport,
    { transportOptions } = await sig("create-transport", { direction });
  log("transport options", transportOptions);

  if (direction === "recv") {
    transport = await device.createRecvTransport(transportOptions);
  } else if (direction === "send") {
    transport = await device.createSendTransport(transportOptions);
  } else {
    throw new Error(`bad transport 'direction': ${direction}`);
  }
  // Fired by mediasoup-client the first time the transport needs to
  // establish its DTLS connection; we forward the parameters to the server.
  transport.on("connect", async ({ dtlsParameters }, callback, errback) => {
    log("transport connect event", direction);
    let { error } = await sig("connect-transport", {
      transportId: transportOptions.id,
      dtlsParameters,
    });
    if (error) {
      err("error connecting transport", direction, error);
      errback();
      return;
    }
    callback();
  });

  if (direction === "send") {
    // Fired when produce() is called on this transport: ask the server to
    // create the matching server-side producer and return its id.
    transport.on("produce", async ({ kind, rtpParameters, appData }, callback, errback) => {
      log("transport produce event", appData.mediaTag);
      // Tell the server whether to start the producer paused, based on the
      // current UI pause state for this media tag.
      let paused = false;
      if (appData.mediaTag === "cam-video") {
        paused = getCamPausedState();
      } else if (appData.mediaTag === "cam-audio") {
        paused = getMicPausedState();
      }
      let { error, id } = await sig("send-track", {
        transportId: transportOptions.id,
        kind,
        rtpParameters,
        paused,
        appData,
      });
      if (error) {
        err("error setting up server-side producer", error);
        errback();
        return;
      }
      callback({ id });
    });
  }
  // Any terminal connection state tears the whole call down.
  // NOTE(review): "disconnected" can be transient on flaky networks; leaving
  // the room on it may be overly aggressive — confirm intended behavior.
  transport.on("connectionstatechange", async (state) => {
    log(`transport ${transport.id} connectionstatechange ${state}`);
    if (state === "closed" || state === "failed" || state === "disconnected") {
      log("transport closed ... leaving the room and resetting");
      leaveRoom();
    }
  });

  return transport;
}

/**
 * Subscribe to (consume) one remote track identified by (peerId, mediaTag).
 *
 * Lazily creates the single receive transport on first use. NOTE: calls to
 * this function must be awaited/serialized by the caller — two concurrent
 * first calls could otherwise both see `recvTransport` unset and create two
 * transports, only one of which completes signaling.
 *
 * @param {string} peerId - id of the peer producing the track.
 * @param {string} mediaTag - e.g. "cam-video" / "cam-audio".
 */
export async function subscribeToTrack(peerId, mediaTag) {
  log("subscribe to track", peerId, mediaTag);

  // Never consume our own microphone (it would echo locally).
  if (peerId === myPeerId && mediaTag === "cam-audio") return;

  // BUG FIX: check for an existing consumer BEFORE lazily creating the
  // transport, so poll-driven duplicate calls do no transport work at all.
  let consumer = utils.findConsumerForTrack(peerId, mediaTag);
  if (consumer) {
    err("already have consumer for track", peerId, mediaTag);
    return;
  }

  if (!recvTransport) {
    recvTransport = await createTransport("recv");
  }

  let consumerParameters = await sig("recv-track", {
    mediaTag,
    mediaPeerId: peerId,
    rtpCapabilities: device.rtpCapabilities,
  });
  log("consumer parameters", consumerParameters);

  // BUG FIX: a failed recv-track request returns { error }; spreading that
  // into consume() used to throw far away from the real cause. Fail fast.
  if (!consumerParameters || consumerParameters.error) {
    err("recv-track signaling failed", peerId, mediaTag,
      consumerParameters && consumerParameters.error);
    return;
  }

  consumer = await recvTransport.consume({
    ...consumerParameters,
    appData: { peerId, mediaTag },
  });
  log("created new consumer", consumer.id);

  // Wait for the transport to finish connecting before resuming.
  // NOTE(review): presumably the server creates consumers paused and the
  // resume below tells it to start sending — confirm against the server code.
  while (recvTransport.connectionState !== "connected") {
    log("  transport connstate", recvTransport.connectionState);
    await utils.sleep(100);
  }
  await resumeConsumer(consumer);

  // Track the consumer locally and publish the list to redux for the UI.
  consumers.push(consumer);
  dispatch(callActions.setConsumers({ consumers }));
}

/**
 * Pause a consumer on the server and then locally.
 * A no-op when passed a falsy consumer; signaling errors are logged.
 */
export async function pauseConsumer(consumer) {
  if (!consumer) {
    return;
  }
  const { peerId, mediaTag } = consumer.appData;
  log("pause consumer", peerId, mediaTag);
  try {
    // Server side first, then mirror the state locally.
    await sig("pause-consumer", { consumerId: consumer.id });
    await consumer.pause();
  } catch (e) {
    console.error(e);
  }
}

/**
 * Resume a consumer on the server and then locally.
 * A no-op when passed a falsy consumer; signaling errors are logged.
 */
export async function resumeConsumer(consumer) {
  if (!consumer) {
    return;
  }
  const { peerId, mediaTag } = consumer.appData;
  log("resume consumer", peerId, mediaTag);
  try {
    // Server side first so media starts flowing, then the local consumer.
    await sig("resume-consumer", { consumerId: consumer.id });
    await consumer.resume();
  } catch (e) {
    console.error(e);
  }
}

/**
 * Pause a producer on the server and then locally.
 * A no-op when passed a falsy producer (e.g. camera was never started).
 */
export async function pauseProducer(producer) {
  if (!producer) {
    return;
  }
  log("pause producer", producer.appData.mediaTag);
  try {
    // Server side first, then the local producer.
    await sig("pause-producer", { producerId: producer.id });
    await producer.pause();
  } catch (e) {
    console.error(e);
  }
}

/**
 * Resume a producer on the server and then locally.
 * A no-op when passed a falsy producer.
 */
export async function resumeProducer(producer) {
  if (!producer) {
    return;
  }
  log("resume producer", producer.appData.mediaTag);
  try {
    // Server side first, then the local producer.
    await sig("resume-producer", { producerId: producer.id });
    await producer.resume();
  } catch (e) {
    console.error(e);
  }
}

// --- pause-state hooks ------------------------------------------------------
// Placeholders for UI state (mute/pause controls). This port has no pause UI
// yet, so every track always reports "not paused".

export function getCamPausedState() {
  return false;
}

export function getMicPausedState() {
  return false;
}

export function getScreenPausedState() {
  return false;
}

export function getScreenAudioPausedState() {
  return false;
}

// --- pause toggles ----------------------------------------------------------
// Each toggle reads the corresponding UI pause state and mirrors it onto the
// matching producer (pause when the UI says paused, resume otherwise).

export async function changeCamPaused() {
  (getCamPausedState() ? pauseProducer : resumeProducer)(camVideoProducer);
}

export async function changeMicPaused() {
  (getMicPausedState() ? pauseProducer : resumeProducer)(camAudioProducer);
}

export async function changeScreenPaused() {
  (getScreenPausedState() ? pauseProducer : resumeProducer)(screenVideoProducer);
}

export async function changeScreenAudioPaused() {
  (getScreenAudioPausedState() ? pauseProducer : resumeProducer)(screenAudioProducer);
}

/**
 * Close one consumer: tear down the server side, close locally, drop it
 * from the module list / redux store, and remove its media elements.
 * Tolerates being called with a falsy consumer.
 */
async function closeConsumer(consumer) {
  if (!consumer) {
    return;
  }
  const { peerId, mediaTag } = consumer.appData;
  log("closing consumer", peerId, mediaTag);
  try {
    // Server first, then the local consumer.
    await sig("close-consumer", { consumerId: consumer.id });
    await consumer.close();

    // Replace (not mutate) the list so redux sees a fresh array.
    consumers = consumers.filter((c) => c !== consumer);
    dispatch(callActions.setConsumers({ consumers }));
    removeVideoAudio(consumer);
  } catch (e) {
    console.error(e);
  }
}

/**
 * Leave the room: stop polling, notify the server, close both transports
 * (which closes every attached producer/consumer), and reset all call
 * state so a fresh join starts clean. A no-op when not joined.
 */
export async function leaveRoom() {
  if (!joined) {
    return;
  }

  log("leave room");

  // Stop syncing with the server before tearing anything down.
  clearInterval(pollingInterval);

  const { error } = await sig("leave");
  if (error) {
    err(error);
  }

  try {
    if (recvTransport) {
      await recvTransport.close();
    }
    if (sendTransport) {
      await sendTransport.close();
    }
  } catch (e) {
    console.error(e);
  }

  // Reset every piece of module-level call state.
  recvTransport = null;
  sendTransport = null;
  camVideoProducer = null;
  camAudioProducer = null;
  screenVideoProducer = null;
  screenAudioProducer = null;
  localCam = null;
  localScreen = null;
  lastPollSyncData = {};
  consumers = [];
  joined = false;
  dispatch(callActions.setConsumers({ consumers: [] }));
}

// Two-layer simulcast for the camera track: a low layer (quarter resolution,
// ~96 kbps) and a high layer (full resolution, ~680 kbps).
const CAM_VIDEO_SIMULCAST_ENCODINGS = [
  { maxBitrate: 96000, scaleResolutionDownBy: 4 },
  { maxBitrate: 680000, scaleResolutionDownBy: 1 },
];

// Encodings handed to sendTransport.produce() for the camera video track.
function camEncodings() {
  return CAM_VIDEO_SIMULCAST_ENCODINGS;
}
/**
 * POST a signaling message to the server and return its parsed JSON reply.
 *
 * @param {string} endpoint - signaling endpoint name (appended to the base URI).
 * @param {object} [data] - payload; our peerId is always merged in.
 * @param {boolean} [beacon] - when true, fire-and-forget via sendBeacon
 *   (used from the page `unload` handler, where fetch may be cancelled).
 * @returns {Promise<object|null>} parsed response, `{ error }` on failure,
 *   or `null` for beacon sends (no response is available).
 */
async function sig(endpoint, data, beacon) {
  try {
    const headers = { "Content-Type": "application/json" };
    const body = JSON.stringify({ ...data, peerId: myPeerId });

    if (beacon) {
      // BUG FIX: sendBeacon with a plain string posts as text/plain, which
      // JSON body parsers typically reject — so the "leave" on unload never
      // registered. Wrapping the payload in a typed Blob sends
      // Content-Type: application/json. NOTE(review): cross-origin beacons
      // with non-safelisted content types can be blocked by CORS; confirm
      // the signaling server is same-origin or handles the preflight.
      navigator.sendBeacon(
        uris.baseURI + "/signaling/" + endpoint,
        new Blob([body], { type: "application/json" })
      );
      return null;
    }

    const response = await fetch(uris.baseURI + "/signaling/" + endpoint, {
      method: "POST",
      body,
      headers,
    });
    return await response.json();
  } catch (e) {
    console.error(e);
    return { error: e };
  }
}

The detail here isn't very helpful, but I can suggest that this is likely caused by the stream not being consumed again for each new viewer. You cannot just pass off the same transport ID and connections that were generated the first time.

Other than that, not really sure what you’re doing.

Try an official demo if you struggle further.

Noted, thanks for the advice. What are the things to keep in mind for consuming a stream? Since the first client can consume the stream but the rest cannot, I'm failing to understand what I'm missing.

Your broadcaster will have a transport ID, just consume that again and again for each viewer that wants to view the broadcaster.

So for 1 broadcaster producing audio/video, each viewer will consume twice to get both the audio and the video signal. If there is an additional viewer, they repeat this and consume the audio and video again. So that's 4 consumes. :slight_smile:

Fairly straight forward.

If it’s easier, try keeping track of your publishers/viewers in separate maps (in your chat/signalling server) and you should have a better time forwarding the signals or clearing them effectively. :slight_smile:
Happy new year!