Trying to record an RTP stream with FFmpeg, using Firefox as the browser.

I am trying to record an RTP stream using FFmpeg, but I cannot make it work with Firefox. It looks like no RTP packets are sent when I use Firefox; Chrome works fine.

The client-side code is a Vue.js component which gets the RTP capabilities over the socket and creates the producers after the server has created the transport:

<template>

			<v-row no-gutters justify="center" ref="videoContainer" class="mt-2">
				<div class="video-recorder_wrapper">
					<video v-if="recordingMode ==='sapio'"
					       class="video-camera rounded" ref="video" style="height: 300px">
					</video>
					<div v-if="isRecording" class="pa-1" style="position: absolute; bottom:5px; left:5px; margin:3px;">
						<v-chip   color="white"
						          text-color="red"
						          small
						>
							<div  class="pulse-circle mr-1" ></div>
							recording
						</v-chip>
					</div>
				</div>
			</v-row>
			<v-row v-if="Q.options.showControl" justify="center" no-gutters class="my-2">
				<v-btn @click="stopRecording"
				       v-if="isRecording"
				       color="primary"
				       outlined
				       rounded
				>
					<v-icon>mdi-stop</v-icon>
					stop
				</v-btn>
				<v-btn @click="startRecording"
				       v-if="!isRecording"
				       :loading="loading"
				       color="red"
				       outlined
				       rounded
				>
					<v-icon>mdi-record</v-icon>
					record
				</v-btn>
			</v-row>

</template>

<script>


const { connect , createLocalTracks } = require('twilio-video');
const SocketClient = require("socket.io-client");
const SocketPromise = require("socket.io-promise").default;
const MediasoupClient = require("mediasoup-client");

export default {
	data() {
		return {
			errors: [],
			isReady: false,
			isRecording: false,
			loading: false,
			sapio: {
				token: null,
				connectionId: 0
			},
			server: {
				host: 'https://rtc.test',
				ws: '/server',
				socket: null,
			},
			peer: {},
		}
	},
	mounted() {
		this.init();
	},
	methods: {
		async init() {
			await this.startCamera();

			if (this.takeId) {
				await this.recordBySapioServer();
			}
		},
		startCamera() {
			return new Promise( (resolve, reject) => {
				if (window.videoMediaStreamObject) {
					this.setVideoElementStream(window.videoMediaStreamObject);
					resolve();
				} else {
					// Get user media as required
					navigator.mediaDevices.getUserMedia({
						audio: true,
						video: true,
					}).then((stream) => {
						this.setVideoElementStream(stream);
						resolve();
					}).catch((err) => {
						// a try/catch around a promise does not catch its rejection,
						// so handle failures in .catch() instead
						console.error(err);
						reject(err);
					});
				}
			})
		},
		setVideoElementStream(stream) {
			this.localStream = stream;
			this.$refs.video.srcObject = stream;
			this.$refs.video.muted = true;
			this.$refs.video.play().then((video) => {
				this.isStreaming = true;
				this.height = this.$refs.video.videoHeight;
				this.width = this.$refs.video.videoWidth;
			});
		},
		// first thing we need is connecting to websocket
		connectToSocket() {
			const serverUrl = this.server.host;
			console.log("Connect with sapio rtc server:", serverUrl);

			const socket = SocketClient(serverUrl, {
				path:  this.server.ws,
				transports: ["websocket"],
			});
			this.socket = socket;

			socket.on("connect", () => {
				console.log("WebSocket connected");
				// we ask for rtp-capabilities from server to send to us
				socket.emit('send-rtp-capabilities');
			});

			socket.on("error", (err) => {
				this.loading = true;
				console.error("WebSocket error:", err);
			});

			socket.on("router-rtp-capabilities", async (msg) => {
				const { routerRtpCapabilities, sessionId, externalId } = msg;
				console.log('[rtpCapabilities:%o]', routerRtpCapabilities);
				this.routerRtpCapabilities = routerRtpCapabilities;

				try {
					const device = new MediasoupClient.Device();
					// Load the mediasoup device with the router rtp capabilities gotten from the server
					await device.load({ routerRtpCapabilities });

					this.peer.sessionId = sessionId;
					this.peer.externalId = externalId;
					this.peer.device = device;

					this.createTransport();
				} catch (error) {
					console.error('failed to init device [error:%o]', error);
					socket.disconnect();
				}
			});

			socket.on("create-transport", async (msg) => {
				console.log('handleCreateTransportRequest() [data:%o]', msg);

				try {
					// Create the local mediasoup send transport
					this.peer.sendTransport = await this.peer.device.createSendTransport(msg);
					console.log('send transport created [id:%s]', this.peer.sendTransport.id);

					// Set the transport listeners and get the users media stream
					this.handleSendTransportListeners();
					this.setTracks();
					this.loading = false;
				} catch (error) {
					console.error('failed to create transport [error:%o]', error);
					socket.disconnect();
				}
			});

			socket.on("connect-transport", async (msg) => {
				console.log('handleTransportConnectRequest()');
				try {
					const action = this.connectTransport;

					if (!action) {
						throw new Error('transport-connect action was not found');
					}

					await action(msg);
				} catch (error) {
					console.error('failed [error:%o]', error);
				}
			});

			socket.on("produce", async (msg) => {
				console.log('handleProduceRequest()');
				try {
					if (!this.produce) {
						throw new Error('produce action was not found');
					}
					await this.produce(msg);
				} catch (error) {
					console.error('failed [error:%o]', error);
				}
			});

			socket.on("recording", async (msg) => {
				this.isRecording = true;
			});

			socket.on("recording-error", async (msg) => {
				this.isRecording = false;
				console.error(msg);
			});

			socket.on("recording-closed", async (msg) => {
				this.isRecording = false;
				console.warn(msg)
			});

		},
		createTransport() {
			console.log('createTransport()');

			if (!this.peer || !this.peer.device.loaded) {
				throw new Error('Peer or device is not initialized');
			}

			// First we must create the mediasoup transport on the server side
			this.socket.emit('create-transport',{
				sessionId: this.peer.sessionId
			});
		},
		handleSendTransportListeners() {
			this.peer.sendTransport.on('connect', this.handleTransportConnectEvent);
			this.peer.sendTransport.on('produce', this.handleTransportProduceEvent);
			this.peer.sendTransport.on('connectionstatechange', connectionState => {
				console.log('send transport connection state change [state:%s]', connectionState);
			});
		},
		handleTransportConnectEvent({ dtlsParameters }, callback, errback) {
			console.log('handleTransportConnectEvent()');
			try {
				this.connectTransport = (msg) => {
					console.log('connect-transport action');
					callback();
					this.connectTransport = null;
				};

				this.socket.emit('connect-transport',{
					sessionId: this.peer.sessionId,
					transportId: this.peer.sendTransport.id,
					dtlsParameters
				});

			} catch (error) {
				console.error('handleTransportConnectEvent() failed [error:%o]', error);
				errback(error);
			}
		},
		handleTransportProduceEvent({ kind, rtpParameters }, callback, errback)  {
			console.log('handleTransportProduceEvent()');
			try {
				this.produce = jsonMessage => {
					console.log('handleTransportProduceEvent callback [data:%o]', jsonMessage);
					callback({ id: jsonMessage.id });
					this.produce = null;
				};

				this.socket.emit('produce', {
					sessionId: this.peer.sessionId,
					transportId: this.peer.sendTransport.id,
					kind,
					rtpParameters
				});
			} catch (error) {
				console.error('handleTransportProduceEvent() failed [error:%o]', error);
				errback(error);
			}
		},
		async recordBySapioServer() {
			this.loading = true;
			this.connectToSocket();
		},
		async setTracks() {
			// Start mediasoup-client's WebRTC producers
			const audioTrack = this.localStream.getAudioTracks()[0];
			this.peer.audioProducer = await this.peer.sendTransport.produce({
				track: audioTrack,
				codecOptions :
					{
						opusStereo : 1,
						opusDtx    : 1
					}
			});


			let encodings;
			let codec;
			const codecOptions = {videoGoogleStartBitrate : 1000};

			codec = this.peer.device.rtpCapabilities.codecs.find((c) => c.kind.toLowerCase() === 'video');
			if (codec.mimeType.toLowerCase() === 'video/vp9') {
				encodings = [{ scalabilityMode: 'S3T3_KEY' }];
			} else {
				encodings = [
					{ scaleResolutionDownBy: 4, maxBitrate: 500000 },
					{ scaleResolutionDownBy: 2, maxBitrate: 1000000 },
					{ scaleResolutionDownBy: 1, maxBitrate: 5000000 }
				];
			}
			const videoTrack = this.localStream.getVideoTracks()[0];
			this.peer.videoProducer = await this.peer.sendTransport.produce({
				track: videoTrack,
				encodings,
				codecOptions,
				codec
			});

		},
		startRecording() {
			this.Q.answer.recordingId = this.peer.externalId;
			this.socket.emit("start-record", {
				sessionId: this.peer.sessionId
			});
		},
		stopRecording() {
			this.socket.emit("stop-record" , {
				sessionId: this.peer.sessionId
			});
		},
	},

}
</script>

<style scoped>
.video-recorder_wrapper {
	position: relative;
	display: flex;
}
.video-camera {
	margin: 0;
	height: auto;
	width: auto;
	max-height: 350px;
	max-width: 100%;
	border-radius: 3px;
}
@media screen and (max-width: 600px) {
	.video-camera {
		width: calc(100% - 20px);
		max-height: 600px;
	}
}
</style>

Server side:
There is a lot of server-side code making up the signalling server; I am only showing the parts most relevant to recording.

First I create the workers, then the router, and then the transports, using the following functions:

const mediasoup = require('mediasoup');

const config = require('./config');

console.log('mediasoup loaded [version:%s]', mediasoup.version);

let workers = [];
let nextWorkerIndex = 0;

// Start the mediasoup workers
module.exports.initializeWorkers = async () => {
  const { logLevel, logTags, rtcMinPort, rtcMaxPort } = config.worker;

  console.log('initializeWorkers() creating %d mediasoup workers', config.numWorkers);

  for (let i = 0; i < config.numWorkers; ++i) {
    const worker = await mediasoup.createWorker({
      logLevel, logTags, rtcMinPort, rtcMaxPort
    });

    worker.once('died', () => {
      console.error('worker::died worker has died exiting in 2 seconds... [pid:%d]', worker.pid);
      setTimeout(() => process.exit(1), 2000);
    });

    workers.push(worker);
  }
};

module.exports.createRouter = async () => {
  const worker = getNextWorker();

  console.log('createRouter() creating new router [worker.pid:%d]', worker.pid);

  console.log(`config.router.mediaCodecs:${JSON.stringify(config.router.mediaCodecs)}`)

  return await worker.createRouter({ mediaCodecs: config.router.mediaCodecs });
};

module.exports.createTransport = async (transportType, router, options) => {
  console.log('createTransport() [type:%s. options:%o]', transportType, options);

  switch (transportType) {
    case 'webRtc':
      return await router.createWebRtcTransport(config.webRtcTransport);
    case 'plain':
      return await router.createPlainRtpTransport(config.plainRtpTransport);
  }
};

const getNextWorker = () => {
  const worker = workers[nextWorkerIndex];

  if (++nextWorkerIndex === workers.length) {
    nextWorkerIndex = 0;
  }

  return worker;
};
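
For context, these helpers get wired up roughly like this on the signalling side (a sketch; the module path and handler names here are placeholders, not the actual server code):

// Rough wiring of the helpers above (module path and handler names are assumptions).
const mediasoupHelpers = require('./mediasoup-helpers');

let router;

// On server startup
async function startMediasoup() {
  await mediasoupHelpers.initializeWorkers();
  router = await mediasoupHelpers.createRouter();
}

// Handler for the client's 'create-transport' event
async function handleCreateTransport(peer) {
  const transport = await mediasoupHelpers.createTransport('webRtc', router);
  peer.sendTransport = transport;

  // These are the fields device.createSendTransport() expects on the client
  return {
    id: transport.id,
    iceParameters: transport.iceParameters,
    iceCandidates: transport.iceCandidates,
    dtlsParameters: transport.dtlsParameters
  };
}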

Then, when the client side has created its transports, I start recording using the following class. This works for Chrome but not for Firefox.

// Class to handle child process used for running FFmpeg

const child_process = require('child_process');
const { EventEmitter } = require('events');
const fs = require('fs');
const { createSdpText } = require('./sdp');
const { convertStringToStream } = require('./utils');
const shelljs = require('shelljs');
const Recording = require('../models/Recording');

const ffmpegPath = require('@ffmpeg-installer/ffmpeg').path;
const ffmpeg = require('fluent-ffmpeg');
ffmpeg.setFfmpegPath(ffmpegPath);

module.exports = class FFmpeg {
  constructor (args) {
    this._rtpParameters = args;
    this._process = undefined;
    this._observer = new EventEmitter();
    this._peer = args.peer;

    this._sdpString = createSdpText(this._rtpParameters);
    this._sdpStream = convertStringToStream(this._sdpString);
    // create dir
    const dir = process.env.RECORDING_PATH ?? 'storage/recordings';
    if (!fs.existsSync(dir)) shelljs.mkdir('-p', dir);
  
    // create file path
    this._path = `${dir}/${args.peer.sessionId}.webm`
    let loop = 0;
    while(fs.existsSync(this._path)) {
      this._path = `${dir}/${args.peer.sessionId}-${++loop}.webm`
    }

    this._createProcess();
  }


  async _createProcess () {
    // get connection model
    this._recordingModel = await Recording.findOne({sessionIds: { $in: [this._peer.sessionId] }})
    this._recordingModel.files.push(this._path);
    this._recordingModel.save();

    const sdpString = this._sdpString;
    const sdpStream = this._sdpStream;

    // this.recordUsingFluentFFmpeg();
    // return;

    
    console.log('createProcess() [sdpString:%s]', sdpString);

    this._process = child_process.spawn('ffmpeg', this._commandArgs);

    this._peer.socket.emit('recording');

    if (this._process.stderr) {
      this._process.stderr.setEncoding('utf-8');

      this._process.stderr.on('data', data =>
        console.log('ffmpeg::process::data [data:%o]', data)
      );

    }

    if (this._process.stdout) {
      this._process.stdout.setEncoding('utf-8');

      this._process.stdout.on('data', data => 
        console.log('ffmpeg::process::data [data:%o]', data)
      );
    }

    this._process.on('message', message =>
      console.log('ffmpeg::process::message [message:%o]', message)
    );

    this._process.on('error', error => {
      this._peer.socket.emit('recording-error');
      console.error('ffmpeg::process::error [error:%o]', error)
    });

    this._process.once('close', () => {
      this._peer.socket.emit('recording-closed');
      console.log('ffmpeg::process::close');
      this._observer.emit('process-close');
    });

    sdpStream.on('error', error =>
      console.error('sdpStream::error [error:%o]', error)
    );

    // Pipe sdp stream to the ffmpeg process
    sdpStream.resume();
    sdpStream.pipe(this._process.stdin);
  }

  kill () {
    console.log('kill() [pid:%d]', this._process.pid);
    this._process.kill('SIGINT');
  }

  get _commandArgs () {
    let commandArgs = [
      '-loglevel',
      'debug',
      '-protocol_whitelist',
      'pipe,udp,rtp',
      '-fflags',
      '+genpts',
      '-f',
      'sdp',
      '-i',
      'pipe:0'
    ];

    commandArgs = commandArgs.concat(this._videoArgs);
    commandArgs = commandArgs.concat(this._audioArgs);
    
    commandArgs = commandArgs.concat([
      '-f',
      'webm',
      '-flags',
      '+global_header',
      '-y',
      this._path
    ]);

    console.log('commandArgs:%o', commandArgs);

    return commandArgs;
  }

  get _videoArgs () {
    return [
      '-map',
      '0:v:0',
      '-c:v',
      'copy'
    ];
  }

  get _audioArgs () {
    return [
      '-map',
      '0:a:0',
      '-strict', // libvorbis is experimental
      '-2',
      '-c:a',
      'copy'
    ];
  }

  recordUsingFluentFFmpeg() {
    console.log(this._rtpParameters.video.rtpParameters.codec);

    let proc  = ffmpeg(this._sdpStream)
    .inputOptions([
      '-protocol_whitelist','pipe,udp,rtp',
      '-f','sdp',
    ])
    .format('webm')
    .output('test.webm')
    .size('720x?')
    .on('start', ()=>{
      console.log('Start recording')
    })
    .on('end', ()=>{
      console.log('Stop recording')
    });

    proc.run();
    this._process =  proc;
  }
}
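
createSdpText() and convertStringToStream() are small helpers that are not shown above. A rough sketch of what they look like, matching the SDP that shows up in the logs below (the exact shape of the rtpParameters argument is an assumption):

// Hypothetical versions of ./sdp and ./utils, not the actual files.
const { Readable } = require('stream');

// Build an SDP that tells FFmpeg where to listen for the plain RTP streams.
const createSdpText = (rtpParameters) => {
  const { video, audio } = rtpParameters;
  const videoCodec = video.rtpParameters.codecs[0];
  const audioCodec = audio.rtpParameters.codecs[0];

  return `v=0
o=- 0 0 IN IP4 127.0.0.1
s=FFmpeg
c=IN IP4 127.0.0.1
t=0 0
m=video ${video.remoteRtpPort} RTP/AVP ${videoCodec.payloadType}
a=rtpmap:${videoCodec.payloadType} ${videoCodec.mimeType.replace('video/', '')}/${videoCodec.clockRate}
a=sendonly
m=audio ${audio.remoteRtpPort} RTP/AVP ${audioCodec.payloadType}
a=rtpmap:${audioCodec.payloadType} ${audioCodec.mimeType.replace('audio/', '')}/${audioCodec.clockRate}/${audioCodec.channels}
a=sendonly
`;
};

// Wrap the SDP string in a readable stream so it can be piped to ffmpeg's stdin.
const convertStringToStream = (stringToConvert) => {
  const stream = new Readable({ read() {} });
  stream.push(stringToConvert);
  stream.push(null);
  return stream;
};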

Are there any errors from ffmpeg? If RTP packets aren’t flowing, you should at least get a timeout error from ffmpeg.

Yes, FFmpeg closes with an error:

ffmpeg::process::data [data:'[mp4 @ 0x55ef198365c0] dimensions not set\n' +
  'Could not write header for output file #0 (incorrect codec parameters ?): Invalid argument\n' +
  'Error initializing output stream 0:1 -- \n' +
  'Stream mapping:\n' +
  '  Stream #0:0 -> #0:0 (copy)\n' +
  '  Stream #0:1 -> #0:1 (copy)\n' +
  '    Last message repeated 1 times\n' +
  '[AVIOContext @ 0x55ef19839bc0] Statistics: 0 seeks, 0 writeouts\n' +
  '[AVIOContext @ 0x55ef197c0380] Statistics: 210 bytes read, 0 seeks\n']
ffmpeg::process::close

And mediasoup debug mode doesn't show any RTP flow.
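
One way to double-check whether Firefox's RTP reaches mediasoup at all is to poll the producer stats; a minimal sketch, assuming videoProducer is the server-side producer for the Firefox video track:

// Sketch: log the video producer's stats every couple of seconds; a packetCount
// staying at 0 would confirm that no RTP arrives from the browser.
setInterval(async () => {
  const stats = await videoProducer.getStats();
  for (const stat of stats) {
    console.log('producer stats [type:%s, packetCount:%s, byteCount:%s]',
      stat.type, stat.packetCount, stat.byteCount);
  }
}, 2000);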

Have a look at the console.log output:

// sdp string
[sdpString:v=0
  o=- 0 0 IN IP4 127.0.0.1
  s=FFmpeg
  c=IN IP4 127.0.0.1
  t=0 0
  m=video 25549 RTP/AVP 101 
  a=rtpmap:101 VP8/90000
  a=sendonly
  m=audio 26934 RTP/AVP 100 
  a=rtpmap:100 opus/48000/2
  a=sendonly
  ]

// ffmpeg args
commandArgs:[
  '-loglevel',
  'debug',
  '-protocol_whitelist',
  'pipe,udp,rtp',
  '-fflags',
  '+genpts',
  '-f',
  'sdp',
  '-i',
  'pipe:0',
  '-map',
  '0:v:0',
  '-c:v',
  'copy',
  '-map',
  '0:a:0',
  '-strict',
  '-2',
  '-c:a',
  'copy',
  '-f',
  'webm',
  '-flags',
  '+global_header',
  '-y',
  'storage/recordings/26e63cb3-4f81-499e-941a-c0bb7f7f52ce.webm',
  [length]: 26
]
// ffmpeg log
ffmpeg::process::data [data:'ffmpeg version n4.4']
ffmpeg::process::data [data:' Copyright (c) 2000-2021 the FFmpeg developers']
ffmpeg::process::data [data:'\n']
ffmpeg::process::data [data:'  built with gcc 11.1.0 (GCC)\n']
ffmpeg::process::data [data:'  configuration: --prefix=/usr --disable-debug --disable-static --disable-stripping --enable-amf --enable-avisynth --enable-cuda-llvm --enable-lto --enable-fontconfig --enable-gmp --enable-gnutls --enable-gpl --enable-ladspa --enable-libaom --enable-libass --enable-libbluray --enable-libdav1d --enable-libdrm --enable-libfreetype --enable-libfribidi --enable-libgsm --enable-libiec61883 --enable-libjack --enable-libmfx --enable-libmodplug --enable-libmp3lame --enable-libopencore_amrnb --enable-libopencore_amrwb --enable-libopenjpeg --enable-libopus --enable-libpulse --enable-librav1e --enable-librsvg --enable-libsoxr --enable-libspeex --enable-libsrt --enable-libssh --enable-libsvtav1 --enable-libtheora --enable-libv4l2 --enable-libvidstab --enable-libvmaf --enable-libvorbis --enable-libvpx --enable-libwebp --enable-libx264 --enable-libx265 --enable-libxcb --enable-libxml2 --enable-libxvid --enable-libzimg --enable-nvdec --enable-nvenc --enable-shared --enable-version3\n']
ffmpeg::process::data [data:'  libavutil      56. 70.100 / 56. 70.100\n' +
  '  libavcodec     58.134.100 / 58.134.100\n' +
  '  libavformat    58. 76.100 / 58. 76.100\n' +
  '  libavdevice    58. 13.100 / 58. 13.100\n' +
  '  libavfilter     7.110.100 /  7.110.100\n' +
  '  libswscale      5.  9.100 /  5.  9.100\n' +
  '  libswresample   3.  9.100 /  3.  9.100\n' +
  '  libpostproc    55.  9.100 / 55.  9.100\n' +
  'Splitting the commandline.\n' +
  "Reading option '-loglevel' ... matched as option 'loglevel' (set logging level) with argument 'debug'.\n" +
  "Reading option '-protocol_whitelist' ..."]
ffmpeg::process::data [data:" matched as AVOption 'protocol_whitelist' with argument 'pipe,udp,rtp'.\n" +
  "Reading option '-fflags' ..."]
ffmpeg::process::data [data:" matched as AVOption 'fflags' with argument '+genpts'.\n" +
  "Reading option '-f' ... matched as option 'f' (force format) with argument 'sdp'.\n" +
  "Reading option '-i' ... matched as input url with argument 'pipe:0'.\n" +
  "Reading option '-map' ... matched as option 'map' (set input stream mapping) with argument '0:v:0'.\n" +
  "Reading option '-c:v' ... matched as option 'c' (codec name) with argument 'copy'.\n" +
  "Reading option '-map' ... matched as option 'map' (set input stream mapping) with argument '0:a:0'.\n" +
  "Reading option '-strict' ...Routing option strict to both codec and muxer layer\n" +
  " matched as AVOption 'strict' with argument '-2'.\n" +
  "Reading option '-c:a' ... matched as option 'c' (codec name) with argument 'copy'.\n" +
  "Reading option '-f' ... matched as option 'f' (force format) with argument 'webm'.\n" +
  "Reading option '-flags' ... matched as AVOption 'flags' with argument '+global_header'.\n" +
  "Reading option '-y' ... matched as option 'y' (overwrite output files) with argument '1'.\n" +
  "Reading option 'storage/recordings/26e63cb3-4f81-499e-941a-c0bb7f7f52ce.webm' ... matched as output url.\n" +
  'Finished splitting the commandline.\n' +
  'Parsing a group of options: global .\n' +
  'Applying option loglevel (set logging level) with argument debug.\n' +
  'Applying option y (overwrite output files) with argument 1.\n' +
  'Successfully parsed a group of options.\n' +
  'Parsing a group of options: input url pipe:0.\n' +
  'Applying option f (force format) with argument sdp.\n' +
  'Successfully parsed a group of options.\n' +
  'Opening an input file: pipe:0.\n' +
  "[sdp @ 0x55604dc58400] Opening 'pipe:0' for reading\n" +
  '[sdp @ 0x55604dc58400] video codec set to: vp8\n' +
  '[sdp @ 0x55604dc58400] audio codec set to: opus\n' +
  '[sdp @ 0x55604dc58400] audio samplerate set to: 48000\n' +
  '[sdp @ 0x55604dc58400] audio channels set to: 2\n' +
  '[udp @ 0x55604dc6c500] end receive buffer size reported is 425984\n' +
  '[udp @ 0x55604dc6c7c0] end receive buffer size reported is 425984\n' +
  '[sdp @ 0x55604dc58400] setting jitter buffer size to 500\n' +
  '[udp @ 0x55604dc6d900] end receive buffer size reported is 425984\n' +
  '[udp @ 0x55604dc6d2c0] end receive buffer size reported is 425984\n' +
  '[sdp @ 0x55604dc58400] setting jitter buffer size to 500\n']
ffmpeg::process::data [data:'[sdp @ 0x55604dc58400] Before avformat_find_stream_info() pos: 210 bytes read:210 seeks:0 nb_streams:2\n']
  **mediasoup:Consumer resume() +1s**
  **mediasoup:Channel request() [method:consumer.resume, id:12] +1s**
  **mediasoup:Channel request succeeded [method:consumer.resume, id:12] +0ms**
  **mediasoup:Consumer resume() +1ms**
  **mediasoup:Channel request() [method:consumer.resume, id:13] +0ms**
  **mediasoup:Channel request succeeded [method:consumer.resume, id:13] +0ms**
ffmpeg::process::data [data:'[sdp @ 0x55604dc58400] Could not find codec parameters for stream 0 (Video: vp8, 1 reference frame, yuv420p): unspecified size\n' +
  "Consider increasing the value for the 'analyzeduration' (0) and 'probesize' (5000000) options\n"]
ffmpeg::process::data [data:'[sdp @ 0x55604dc58400] After avformat_find_stream_info() pos: 210 bytes read:210 seeks:0 frames:0\n' +
  "Input #0, sdp, from 'pipe:0':\n" +
  '  Metadata:\n' +
  '    title           : FFmpeg\n' +
  '  Duration: N/A, bitrate: N/A\n' +
  '  Stream #0:0, 0, 1/90000: Video: vp8, 1 reference frame, yuv420p, 90k tbr, 90k tbn, 90k tbc\n' +
  '  Stream #0:1, 0, 1/48000: Audio: opus, 48000 Hz, stereo, fltp\n' +
  'Successfully opened the file.\n' +
  'Parsing a group of options: output url storage/recordings/26e63cb3-4f81-499e-941a-c0bb7f7f52ce.webm.\n' +
  'Applying option map (set input stream mapping) with argument 0:v:0.\n' +
  'Applying option c:v (codec name) with argument copy.\n' +
  'Applying option map (set input stream mapping) with argument 0:a:0.\n' +
  'Applying option c:a (codec name) with argument copy.\n' +
  'Applying option f (force format) with argument webm.\n' +
  'Successfully parsed a group of options.\n' +
  'Opening an output file: storage/recordings/26e63cb3-4f81-499e-941a-c0bb7f7f52ce.webm.\n' +
  "[file @ 0x55604dce5bc0] Setting default whitelist 'file,crypto,data'\n"]
ffmpeg::process::data [data:'Successfully opened the file.\n' +
  '[webm @ 0x55604dce0fc0] dimensions not set\n' +
  'Could not write header for output file #0 (incorrect codec parameters ?): Invalid argument\n' +
  'Error initializing output stream 0:1 -- \n' +
  'Stream mapping:\n' +
  '  Stream #0:0 -> #0:0 (copy)\n' +
  '  Stream #0:1 -> #0:1 (copy)\n' +
  '    Last message repeated 1 times\n' +
  '[AVIOContext @ 0x55604dc6dcc0] Statistics: 0 seeks, 0 writeouts\n' +
  '[AVIOContext @ 0x55604dc69380] Statistics: 210 bytes read, 0 seeks\n']
ffmpeg::process::close

I created a Stack Overflow question:
https://stackoverflow.com/questions/69249594/record-mediasoup-rtp-stream-using-ffmpeg-for-firefox

It might give more information.

I thought this might be a codec issue, so I decided to record only audio: if there were a codec issue on Firefox, it would be eliminated by not creating a video producer.
This was successful on Chrome but not on Firefox.
I suspect there is a bug with the plain RTP transports that are created for the Firefox stream.
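
For that audio-only test, the FFmpeg side reduces to roughly the following (a sketch; same SDP-over-stdin approach as in the class above, just without the video mapping):

// Sketch: audio-only variant of the _commandArgs getter in the class above.
// The SDP then only contains the audio m= section, so '-map 0:v:0' is dropped.
get _commandArgs () {
  return [
    '-loglevel', 'debug',
    '-protocol_whitelist', 'pipe,udp,rtp',
    '-fflags', '+genpts',
    '-f', 'sdp',
    '-i', 'pipe:0',
    '-map', '0:a:0',
    '-c:a', 'copy',
    '-f', 'webm',
    '-flags', '+global_header',
    '-y', this._path
  ];
}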

Why don’t you do it server side?

Hmm, now that I think of it, I wouldn't know how to record a room. Maybe the room recorder can live in the recorder's socket; I would have to do some research into how to record the audio and video itself, because it's all in different formats (H264, VP8).

I have the WORST idea… use puppeteer

I am actually doing it server side. I am trying to record the plainTransport created for the stream coming from Firefox.

Oh, I thought you were using WASM ffmpeg. Just use Puppeteer (it's Chrome).

Can Puppeteer record the webcam or microphone?

What do you mean by "Chrome OK, Firefox fail"? Both support VP8 encoding.

I've successfully done client-side ffmpeg RTP recording with a mediasoup plainTransport (using connect() or comedia: true).

Since the logs here show a codec error, maybe you can try changing the producer-side codec to H264?
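
A rough sketch of what that producer-side change would look like, assuming the device and router capabilities actually include an H264 codec:

// Sketch: pick H264 from the device capabilities and pass it to produce()
// (only meaningful if the router's mediaCodecs include video/H264).
const h264Codec = this.peer.device.rtpCapabilities.codecs
  .find((c) => c.mimeType.toLowerCase() === 'video/h264');

this.peer.videoProducer = await this.peer.sendTransport.produce({
  track: videoTrack,
  codec: h264Codec,
  codecOptions: { videoGoogleStartBitrate: 1000 }
});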

I choose the codec with the following code:

let firstCodec = this.peer.device.rtpCapabilities.codecs.find((c) => c.kind.toLowerCase() === 'video');
codec = this.peer.device.rtpCapabilities.codecs.find((c) => c.mimeType.toLowerCase() === 'video/vp8') ?? firstCodec;

The resulting SDP is:

[sdpString:v=0
  o=- 0 0 IN IP4 127.0.0.1
  s=FFmpeg
  c=IN IP4 127.0.0.1
  t=0 0
  m=video 25549 RTP/AVP 101 
  a=rtpmap:101 VP8/90000
  a=sendonly
  m=audio 26934 RTP/AVP 100 
  a=rtpmap:100 opus/48000/2
  a=sendonly
  ]

I have tested changing the producer-side codec, but with no success.

I am using connect() on the transport:

  await rtpTransport.connect({
    ip: '127.0.0.1',
    port: remoteRtpPort,
    rtcpPort: remoteRtcpPort
  });

My config for the plain transport:

  plainRtpTransport: {
    listenIp: { ip: process.env.LISTEN_IP ?? '0.0.0.0', announcedIp: process.env.ANOUNCED_IP ?? undefined }, // TODO: change announcedIp to your external IP or domain name
    rtcpMux: true,
    comedia: false
  }
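
For completeness, the recording flow around that connect() call looks roughly like this (a sketch with placeholder names such as router, videoProducer and remoteRtpPort; not the actual server code):

// Sketch: plain transport -> connect() to where FFmpeg listens -> consume the
// browser's producer onto it (paused) -> spawn FFmpeg -> resume the consumer.
const rtpTransport = await router.createPlainRtpTransport(config.plainRtpTransport);

await rtpTransport.connect({
  ip: '127.0.0.1',         // must match the c= line of the generated SDP
  port: remoteRtpPort,     // must match the m= line port of the generated SDP
  rtcpPort: remoteRtcpPort // only needed when rtcpMux is false
});

const rtpConsumer = await rtpTransport.consume({
  producerId: videoProducer.id,
  rtpCapabilities: router.rtpCapabilities,
  paused: true
});

// later, once the FFmpeg process has been spawned and has read the SDP:
await rtpConsumer.resume();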

Found my mistake!

I had set listenIp to 127.0.0.1 and announcedIp to my local network IP (10.0.0.6), while I should have set the listen IP itself to my local network IP (10.0.0.6).
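
With that change, the plain transport config on my machine ends up roughly like this (a sketch; 10.0.0.6 is this machine's LAN IP and normally comes from LISTEN_IP):

// Sketch of the corrected plain transport config for my local setup.
plainRtpTransport: {
  listenIp: { ip: process.env.LISTEN_IP ?? '10.0.0.6', announcedIp: undefined },
  rtcpMux: true,
  comedia: false
}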

I am working on my local machine. On my local machine 0.0.0.0 didn't work, so I changed it to 127.0.0.1, and since the documentation says anything other than 0.0.0.0 must have an announced IP, I set that to my private IP. But clearly that was wrong; I just had to set my private IP as the listen IP to make it work.