[Pkg-voip-commits] [janus] 27/163: Added simulcast support to RTP live mountpoints in the Streaming plugin

Jonas Smedegaard dr at jones.dk
Sat Oct 28 01:22:05 UTC 2017


This is an automated email from the git hooks/post-receive script.

js pushed a commit to annotated tag debian/0.2.5-1
in repository janus.

commit 96de4ef3f382850728f8cb5b11f32ccba3510387
Author: Lorenzo Miniero <lminiero at gmail.com>
Date:   Tue Jul 11 18:42:46 2017 +0200

    Added simulcast support to RTP live mountpoints in the Streaming plugin
---
 conf/janus.plugin.streaming.cfg.sample.in |   3 +
 html/streamingtest.html                   |   8 +-
 html/streamingtest.js                     | 185 ++++++++++++
 plugins/janus_streaming.c                 | 464 ++++++++++++++++++++++++++----
 4 files changed, 601 insertions(+), 59 deletions(-)
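
A note on what this enables: a live "rtp" mountpoint can now receive up to three
video substreams on separate local ports (videoport, videoport2, videoport3) and
relay one of them per viewer, switchable at runtime. A minimal sketch of a
simulcast-enabled mountpoint using the new options, assuming the usual rtp
mountpoint fields (the mountpoint name, ports and payload type below are only
illustrative):

    [simulcast-test]
    type = rtp
    description = Simulcast test mountpoint
    audio = no
    video = yes
    videoport = 5004
    videoport2 = 5005
    videoport3 = 5006
    videopt = 100
    videortpmap = VP8/90000
    videosimulcast = yes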

diff --git a/conf/janus.plugin.streaming.cfg.sample.in b/conf/janus.plugin.streaming.cfg.sample.in
index acfea2e..b703166 100644
--- a/conf/janus.plugin.streaming.cfg.sample.in
+++ b/conf/janus.plugin.streaming.cfg.sample.in
@@ -32,6 +32,9 @@
 ; videortpmap = RTP map of the video codec (e.g., VP8/90000)
 ; videobufferkf = yes|no (whether the plugin should store the latest
 ;		keyframe and send it immediately for new viewers, EXPERIMENTAL)
+; videosimulcast = yes|no (do|don't enable video simulcasting)
+; videoport2 = second local port for receiving video frames (only for rtp, and simulcasting)
+; videoport3 = third local port for receiving video frames (only for rtp, and simulcasting)
 ; dataport = local port for receiving data messages to relay
 ; dataiface = network interface or IP address to bind to, if any (binds to all otherwise)
 ; databuffermsg = yes|no (whether the plugin should store the latest
diff --git a/html/streamingtest.html b/html/streamingtest.html
index 21d137c..264e445 100644
--- a/html/streamingtest.html
+++ b/html/streamingtest.html
@@ -10,6 +10,7 @@
 <script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/3.0.2/js/bootstrap.min.js"></script>
 <script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/bootbox.js/4.1.0/bootbox.min.js"></script>
 <script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/spin.js/2.3.2/spin.min.js"></script>
+<script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/toastr.js/2.1.3/toastr.min.js"></script>
 <script type="text/javascript" src="janus.js" ></script>
 <script type="text/javascript" src="streamingtest.js"></script>
 <script>
@@ -24,6 +25,7 @@
 <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/bootswatch/3.3.7/cerulean/bootstrap.min.css" type="text/css"/>
 <link rel="stylesheet" href="css/demo.css" type="text/css"/>
 <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.6.2/css/font-awesome.min.css" type="text/css"/>
+<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/toastr.js/2.1.3/toastr.css"/>
 </head>
 <body>
 
@@ -87,7 +89,11 @@
 					<div class="col-md-6">
 						<div class="panel panel-default">
 							<div class="panel-heading">
-								<h3 class="panel-title">Stream <span class="label label-info hide" id="status"></h3>
+								<h3 class="panel-title">Stream
+									<span class="label label-info hide" id="status"></span>
+									<span class="label label-primary hide" id="curres"></span>
+									<span class="label label-info hide" id="curbitrate"></span>
+								</h3>
 							</div>
 							<div class="panel-body" id="stream"></div>
 						</div>
diff --git a/html/streamingtest.js b/html/streamingtest.js
index 582616c..65905da 100644
--- a/html/streamingtest.js
+++ b/html/streamingtest.js
@@ -53,8 +53,11 @@ var streaming = null;
 var opaqueId = "streamingtest-"+Janus.randomString(12);
 
 var started = false;
+var bitrateTimer = null;
 var spinner = null;
 
+var simulcastStarted = false;
+
 var selectedStream = null;
 
 
@@ -92,6 +95,7 @@ $(document).ready(function() {
 									$('#start').removeAttr('disabled').html("Stop")
 										.click(function() {
 											$(this).attr('disabled', true);
+											clearInterval(bitrateTimer);
 											janus.destroy();
 											$('#streamslist').attr('disabled', true);
 											$('#watch').attr('disabled', true).unbind('click');
@@ -115,6 +119,18 @@ $(document).ready(function() {
 												$('#status').removeClass('hide').text("Started").show();
 											else if(status === 'stopped')
 												stopStream();
+										} else if(msg["streaming"] === "event") {
+											// Is simulcast in place?
+											var substream = result["substream"];
+											var temporal = result["temporal"];
+											if((substream !== null && substream !== undefined) || (temporal !== null && temporal !== undefined)) {
+												if(!simulcastStarted) {
+													simulcastStarted = true;
+													addSimulcastButtons();
+												}
+												// We just received notice that there's been a switch, update the buttons
+												updateSimulcastButtons(substream, temporal);
+											}
 										}
 									} else if(msg["error"] !== undefined && msg["error"] !== null) {
 										bootbox.alert(msg["error"]);
@@ -155,13 +171,54 @@ $(document).ready(function() {
 										if(spinner !== null && spinner !== undefined)
 											spinner.stop();
 										spinner = null;
+										var videoTracks = stream.getVideoTracks();
+										if(videoTracks === null || videoTracks === undefined || videoTracks.length === 0)
+											return;
+										var width = this.videoWidth;
+										var height = this.videoHeight;
+										$('#curres').removeClass('hide').text(width+'x'+height).show();
+										if(adapter.browserDetails.browser === "firefox") {
+											// Firefox Stable has a bug: width and height are not immediately available after a playing
+											setTimeout(function() {
+												var width = $("#remotevideo").get(0).videoWidth;
+												var height = $("#remotevideo").get(0).videoHeight;
+												$('#curres').removeClass('hide').text(width+'x'+height).show();
+											}, 2000);
+										}
 									});
+									var videoTracks = stream.getVideoTracks();
+									if(videoTracks && videoTracks.length &&
+											(adapter.browserDetails.browser === "chrome" ||
+												adapter.browserDetails.browser === "firefox" ||
+												adapter.browserDetails.browser === "safari")) {
+										$('#curbitrate').removeClass('hide').show();
+										bitrateTimer = setInterval(function() {
+											// Display updated bitrate, if supported
+											var bitrate = streaming.getBitrate();
+											//~ Janus.debug("Current bitrate is " + streaming.getBitrate());
+											$('#curbitrate').text(bitrate);
+											// Check if the resolution changed too
+											var width = $("#remotevideo").get(0).videoWidth;
+											var height = $("#remotevideo").get(0).videoHeight;
+											if(width > 0 && height > 0)
+												$('#curres').removeClass('hide').text(width+'x'+height).show();
+										}, 1000);
+									}
 									Janus.attachMediaStream($('#remotevideo').get(0), stream);
 								},
 								oncleanup: function() {
 									Janus.log(" ::: Got a cleanup notification :::");
 									$('#waitingvideo').remove();
 									$('#remotevideo').remove();
+									$('#bitrate').attr('disabled', true);
+									$('#bitrateset').html('Bandwidth<span class="caret"></span>');
+									$('#curbitrate').hide();
+									if(bitrateTimer !== null && bitrateTimer !== undefined)
+										clearInterval(bitrateTimer);
+									bitrateTimer = null;
+									$('#curres').hide();
+									$('#simulcast').remove();
+									simulcastStarted = false;
 								}
 							});
 					},
@@ -243,4 +300,132 @@ function stopStream() {
 	$('#streamslist').removeAttr('disabled');
 	$('#watch').html("Watch or Listen").removeAttr('disabled').click(startStream);
 	$('#status').empty().hide();
+	$('#bitrate').attr('disabled', true);
+	$('#bitrateset').html('Bandwidth<span class="caret"></span>');
+	$('#curbitrate').hide();
+	if(bitrateTimer !== null && bitrateTimer !== undefined)
+		clearInterval(bitrateTimer);
+	bitrateTimer = null;
+	$('#curres').empty().hide();
+	$('#simulcast').remove();
+	simulcastStarted = false;
+}
+
+// Helpers to create Simulcast-related UI, if enabled
+function addSimulcastButtons() {
+	$('#curres').parent().append(
+		'<div id="simulcast" class="btn-group-vertical btn-group-vertical-xs pull-right">' +
+		'	<div class="row">' +
+		'		<div class="btn-group btn-group-xs" style="width: 100%">' +
+		'			<button id="sl-2" type="button" class="btn btn-primary" data-toggle="tooltip" title="Switch to higher quality" style="width: 33%">SL 2</button>' +
+		'			<button id="sl-1" type="button" class="btn btn-primary" data-toggle="tooltip" title="Switch to normal quality" style="width: 33%">SL 1</button>' +
+		'			<button id="sl-0" type="button" class="btn btn-primary" data-toggle="tooltip" title="Switch to lower quality" style="width: 34%">SL 0</button>' +
+		'		</div>' +
+		'	</div>' +
+		'	<div class="row">' +
+		'		<div class="btn-group btn-group-xs" style="width: 100%">' +
+		'			<button id="tl-2" type="button" class="btn btn-primary" data-toggle="tooltip" title="Cap to temporal layer 2" style="width: 34%">TL 2</button>' +
+		'			<button id="tl-1" type="button" class="btn btn-primary" data-toggle="tooltip" title="Cap to temporal layer 1" style="width: 33%">TL 1</button>' +
+		'			<button id="tl-0" type="button" class="btn btn-primary" data-toggle="tooltip" title="Cap to temporal layer 0" style="width: 33%">TL 0</button>' +
+		'		</div>' +
+		'	</div>' +
+		'</div>');
+	// Enable the VP8 simulcast selection buttons
+	$('#sl-0').removeClass('btn-primary btn-success').addClass('btn-primary')
+		.unbind('click').click(function() {
+			toastr.info("Switching simulcast substream, wait for it... (lower quality)", null, {timeOut: 2000});
+			if(!$('#sl-2').hasClass('btn-success'))
+				$('#sl-2').removeClass('btn-primary btn-info').addClass('btn-primary');
+			if(!$('#sl-1').hasClass('btn-success'))
+				$('#sl-1').removeClass('btn-primary btn-info').addClass('btn-primary');
+			$('#sl-0').removeClass('btn-primary btn-info btn-success').addClass('btn-info');
+			streaming.send({message: { request: "configure", substream: 0 }});
+		});
+	$('#sl-1').removeClass('btn-primary btn-success').addClass('btn-primary')
+		.unbind('click').click(function() {
+			toastr.info("Switching simulcast substream, wait for it... (normal quality)", null, {timeOut: 2000});
+			if(!$('#sl-2').hasClass('btn-success'))
+				$('#sl-2').removeClass('btn-primary btn-info').addClass('btn-primary');
+			$('#sl-1').removeClass('btn-primary btn-info btn-success').addClass('btn-info');
+			if(!$('#sl-0').hasClass('btn-success'))
+				$('#sl-0').removeClass('btn-primary btn-info').addClass('btn-primary');
+			streaming.send({message: { request: "configure", substream: 1 }});
+		});
+	$('#sl-2').removeClass('btn-primary btn-success').addClass('btn-primary')
+		.unbind('click').click(function() {
+			toastr.info("Switching simulcast substream, wait for it... (higher quality)", null, {timeOut: 2000});
+			$('#sl-2').removeClass('btn-primary btn-info btn-success').addClass('btn-info');
+			if(!$('#sl-1').hasClass('btn-success'))
+				$('#sl-1').removeClass('btn-primary btn-info').addClass('btn-primary');
+			if(!$('#sl-0').hasClass('btn-success'))
+				$('#sl-0').removeClass('btn-primary btn-info').addClass('btn-primary');
+			streaming.send({message: { request: "configure", substream: 2 }});
+		});
+	$('#tl-0').removeClass('btn-primary btn-success').addClass('btn-primary')
+		.unbind('click').click(function() {
+			toastr.info("Capping simulcast temporal layer, wait for it... (lowest FPS)", null, {timeOut: 2000});
+			if(!$('#tl-2').hasClass('btn-success'))
+				$('#tl-2').removeClass('btn-primary btn-info').addClass('btn-primary');
+			if(!$('#tl-1').hasClass('btn-success'))
+				$('#tl-1').removeClass('btn-primary btn-info').addClass('btn-primary');
+			$('#tl-0').removeClass('btn-primary btn-info btn-success').addClass('btn-info');
+			streaming.send({message: { request: "configure", temporal: 0 }});
+		});
+	$('#tl-1').removeClass('btn-primary btn-success').addClass('btn-primary')
+		.unbind('click').click(function() {
+			toastr.info("Capping simulcast temporal layer, wait for it... (medium FPS)", null, {timeOut: 2000});
+			if(!$('#tl-2').hasClass('btn-success'))
+				$('#tl-2').removeClass('btn-primary btn-info').addClass('btn-primary');
+			$('#tl-1').removeClass('btn-primary btn-info').addClass('btn-info');
+			if(!$('#tl-0').hasClass('btn-success'))
+				$('#tl-0').removeClass('btn-primary btn-info').addClass('btn-primary');
+			streaming.send({message: { request: "configure", temporal: 1 }});
+		});
+	$('#tl-2').removeClass('btn-primary btn-success').addClass('btn-primary')
+		.unbind('click').click(function() {
+			toastr.info("Capping simulcast temporal layer, wait for it... (highest FPS)", null, {timeOut: 2000});
+			$('#tl-2').removeClass('btn-primary btn-info btn-success').addClass('btn-info');
+			if(!$('#tl-1').hasClass('btn-success'))
+				$('#tl-1').removeClass('btn-primary btn-info').addClass('btn-primary');
+			if(!$('#tl-0').hasClass('btn-success'))
+				$('#tl-0').removeClass('btn-primary btn-info').addClass('btn-primary');
+			streaming.send({message: { request: "configure", temporal: 2 }});
+		});
+}
+
+function updateSimulcastButtons(substream, temporal) {
+	// Check the substream
+	if(substream === 0) {
+		toastr.success("Switched simulcast substream! (lower quality)", null, {timeOut: 2000});
+		$('#sl-2').removeClass('btn-primary btn-success').addClass('btn-primary');
+		$('#sl-1').removeClass('btn-primary btn-success').addClass('btn-primary');
+		$('#sl-0').removeClass('btn-primary btn-info btn-success').addClass('btn-success');
+	} else if(substream === 1) {
+		toastr.success("Switched simulcast substream! (normal quality)", null, {timeOut: 2000});
+		$('#sl-2').removeClass('btn-primary btn-success').addClass('btn-primary');
+		$('#sl-1').removeClass('btn-primary btn-info btn-success').addClass('btn-success');
+		$('#sl-0').removeClass('btn-primary btn-success').addClass('btn-primary');
+	} else if(substream === 2) {
+		toastr.success("Switched simulcast substream! (higher quality)", null, {timeOut: 2000});
+		$('#sl-2').removeClass('btn-primary btn-info btn-success').addClass('btn-success');
+		$('#sl-1').removeClass('btn-primary btn-success').addClass('btn-primary');
+		$('#sl-0').removeClass('btn-primary btn-success').addClass('btn-primary');
+	}
+	// Check the temporal layer
+	if(temporal === 0) {
+		toastr.success("Capped simulcast temporal layer! (lowest FPS)", null, {timeOut: 2000});
+		$('#tl-2').removeClass('btn-primary btn-success').addClass('btn-primary');
+		$('#tl-1').removeClass('btn-primary btn-success').addClass('btn-primary');
+		$('#tl-0').removeClass('btn-primary btn-info btn-success').addClass('btn-success');
+	} else if(temporal === 1) {
+		toastr.success("Capped simulcast temporal layer! (medium FPS)", null, {timeOut: 2000});
+		$('#tl-2').removeClass('btn-primary btn-success').addClass('btn-primary');
+		$('#tl-1').removeClass('btn-primary btn-info btn-success').addClass('btn-success');
+		$('#tl-0').removeClass('btn-primary btn-success').addClass('btn-primary');
+	} else if(temporal === 2) {
+		toastr.success("Capped simulcast temporal layer! (highest FPS)", null, {timeOut: 2000});
+		$('#tl-2').removeClass('btn-primary btn-info btn-success').addClass('btn-success');
+		$('#tl-1').removeClass('btn-primary btn-success').addClass('btn-primary');
+		$('#tl-0').removeClass('btn-primary btn-success').addClass('btn-primary');
+	}
 }
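
Behind these buttons, the demo just sends a "configure" request on the plugin
handle; the plugin replies asynchronously with a "streaming": "event" message
carrying the substream or temporal layer actually being relayed, which is what
updateSimulcastButtons() reacts to in the onmessage callback above. A minimal
sketch outside the demo UI (the values are illustrative):

    // Relay the middle substream and cap delivery at temporal layer 1
    streaming.send({ message: { request: "configure", substream: 1, temporal: 1 }});
    // Confirmations later arrive in onmessage as events such as:
    //   { "streaming": "event", "result": { "substream": 1 } }
    //   { "streaming": "event", "result": { "temporal": 1 } }
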
diff --git a/plugins/janus_streaming.c b/plugins/janus_streaming.c
index fafb578..696f76d 100644
--- a/plugins/janus_streaming.c
+++ b/plugins/janus_streaming.c
@@ -79,6 +79,9 @@ videortpmap = RTP map of the video codec (e.g., VP8/90000)
 videofmtp = Codec specific parameters, if any
 videobufferkf = yes|no (whether the plugin should store the latest
 	keyframe and send it immediately for new viewers, EXPERIMENTAL)
+videosimulcast = yes|no (do|don't enable video simulcasting)
+videoport2 = second local port for receiving video frames (only for rtp, and simulcasting)
+videoport3 = third local port for receiving video frames (only for rtp, and simulcasting)
 dataport = local port for receiving data messages to relay
 dataiface = network interface or IP address to bind to, if any (binds to all otherwise)
 databuffermsg = yes|no (whether the plugin should store the latest
@@ -156,8 +159,8 @@ rtspiface = network interface IP address or device name to listen on when receiv
 
 
 /* Plugin information */
-#define JANUS_STREAMING_VERSION			7
-#define JANUS_STREAMING_VERSION_STRING	"0.0.7"
+#define JANUS_STREAMING_VERSION			8
+#define JANUS_STREAMING_VERSION_STRING	"0.0.8"
 #define JANUS_STREAMING_DESCRIPTION		"This is a streaming plugin for Janus, allowing WebRTC peers to watch/listen to pre-recorded files or media generated by gstreamer."
 #define JANUS_STREAMING_NAME			"JANUS Streaming plugin"
 #define JANUS_STREAMING_AUTHOR			"Meetecho s.r.l."
@@ -285,7 +288,10 @@ static struct janus_json_parameter rtp_video_parameters[] = {
 	{"videortpmap", JSON_STRING, JANUS_JSON_PARAM_REQUIRED},
 	{"videofmtp", JSON_STRING, 0},
 	{"videobufferkf", JANUS_JSON_BOOL, 0},
-	{"videoiface", JSON_STRING, 0}
+	{"videoiface", JSON_STRING, 0},
+	{"videosimulcast", JANUS_JSON_BOOL, 0},
+	{"videoport2", JSON_INTEGER, JANUS_JSON_PARAM_POSITIVE},
+	{"videoport3", JSON_INTEGER, JANUS_JSON_PARAM_POSITIVE},
 };
 static struct janus_json_parameter rtp_data_parameters[] = {
 	{"dataport", JSON_INTEGER, JANUS_JSON_PARAM_REQUIRED | JANUS_JSON_PARAM_POSITIVE},
@@ -310,6 +316,10 @@ static struct janus_json_parameter recording_stop_parameters[] = {
 	{"video", JANUS_JSON_BOOL, 0},
 	{"data", JANUS_JSON_BOOL, 0}
 };
+static struct janus_json_parameter simulcast_parameters[] = {
+	{"substream", JSON_INTEGER, JANUS_JSON_PARAM_POSITIVE},
+	{"temporal", JSON_INTEGER, JANUS_JSON_PARAM_POSITIVE}
+};
 
 /* Static configuration instance */
 static janus_config *config = NULL;
@@ -360,7 +370,7 @@ typedef struct janus_streaming_buffer {
 typedef struct janus_streaming_rtp_source {
 	gint audio_port;
 	in_addr_t audio_mcast;
-	gint video_port;
+	gint video_port[3];
 	in_addr_t video_mcast;
 	gint data_port;
 	janus_recorder *arc;	/* The Janus recorder instance for this streams's audio, if enabled */
@@ -368,8 +378,9 @@ typedef struct janus_streaming_rtp_source {
 	janus_recorder *drc;	/* The Janus recorder instance for this streams's data, if enabled */
 	janus_mutex rec_mutex;	/* Mutex to protect the recorders from race conditions */
 	int audio_fd;
-	int video_fd;
+	int video_fd[3];
 	int data_fd;
+	gboolean simulcast;
 	gint64 last_received_audio;
 	gint64 last_received_video;
 	gint64 last_received_data;
@@ -449,6 +460,7 @@ janus_streaming_mountpoint *janus_streaming_create_rtp_source(
 		uint64_t id, char *name, char *desc,
 		gboolean doaudio, char* amcast, const janus_network_address *aiface, uint16_t aport, uint8_t acodec, char *artpmap, char *afmtp,
 		gboolean dovideo, char* vmcast, const janus_network_address *viface, uint16_t vport, uint8_t vcodec, char *vrtpmap, char *vfmtp, gboolean bufferkf,
+			gboolean simulcast, uint16_t vport2, uint16_t vport3,
 		gboolean dodata, const janus_network_address *diface, uint16_t dport, gboolean buffermsg);
 /* Helper to create a file/ondemand live source */
 janus_streaming_mountpoint *janus_streaming_create_file_source(
@@ -495,6 +507,12 @@ typedef struct janus_streaming_session {
 	gboolean started;
 	gboolean paused;
 	janus_rtp_switching_context context;
+	int substream;			/* Which simulcast substream we should forward, in case the mountpoint is simulcasting */
+	int substream_target;	/* As above, but to handle transitions (e.g., wait for keyframe) */
+	int templayer;			/* Which simulcast temporal layer we should forward, in case the mountpoint is simulcasting */
+	int templayer_target;	/* As above, but to handle transitions (e.g., wait for keyframe) */
+	gint64 last_relayed;	/* When we relayed the last packet (used to detect when substreams become unavailable) */
+	janus_vp8_simulcast_context simulcast_context;
 	gboolean stopping;
 	volatile gint hangingup;
 	gint64 destroyed;	/* Time at which this session was marked as destroyed */
@@ -510,6 +528,8 @@ typedef struct janus_streaming_rtp_relay_packet {
 	gboolean is_rtp;	/* This may be a data packet and not RTP */
 	gboolean is_video;
 	gboolean is_keyframe;
+	gboolean simulcast;
+	int codec, substream;
 	uint32_t timestamp;
 	uint16_t seq_number;
 } janus_streaming_rtp_relay_packet;
@@ -676,6 +696,9 @@ int janus_streaming_init(janus_callbacks *callback, const char *config_path) {
 				janus_config_item *vrtpmap = janus_config_get_item(cat, "videortpmap");
 				janus_config_item *vfmtp = janus_config_get_item(cat, "videofmtp");
 				janus_config_item *vkf = janus_config_get_item(cat, "videobufferkf");
+				janus_config_item *vsc = janus_config_get_item(cat, "videosimulcast");
+				janus_config_item *vport2 = janus_config_get_item(cat, "videoport2");
+				janus_config_item *vport3 = janus_config_get_item(cat, "videoport3");
 				janus_config_item *dport = janus_config_get_item(cat, "dataport");
 				janus_config_item *dbm = janus_config_get_item(cat, "databuffermsg");
 				gboolean is_private = priv && priv->value && janus_is_true(priv->value);
@@ -683,6 +706,12 @@ int janus_streaming_init(janus_callbacks *callback, const char *config_path) {
 				gboolean dovideo = video && video->value && janus_is_true(video->value);
 				gboolean dodata = data && data->value && janus_is_true(data->value);
 				gboolean bufferkf = video && vkf && vkf->value && janus_is_true(vkf->value);
+				gboolean simulcast = video && vsc && vsc->value && janus_is_true(vsc->value);
+				if(simulcast && bufferkf) {
+					/* FIXME We'll need to take care of this */
+					JANUS_LOG(LOG_WARN, "Simulcasting enabled, so disabling buffering of keyframes\n");
+					bufferkf = FALSE;
+				}
 				gboolean buffermsg = data && dbm && dbm->value && janus_is_true(dbm->value);
 				if(!doaudio && !dovideo && !dodata) {
 					JANUS_LOG(LOG_ERR, "Can't add 'rtp' stream '%s', no audio, video or data have to be streamed...\n", cat->name);
@@ -790,6 +819,9 @@ int janus_streaming_init(janus_callbacks *callback, const char *config_path) {
 						vrtpmap ? (char *)vrtpmap->value : NULL,
 						vfmtp ? (char *)vfmtp->value : NULL,
 						bufferkf,
+						simulcast,
+						(vport2 && vport2->value) ? atoi(vport2->value) : 0,
+						(vport3 && vport3->value) ? atoi(vport3->value) : 0,
 						dodata,
 						dodata && diface && diface->value ? &data_iface : NULL,
 						(dport && dport->value) ? atoi(dport->value) : 0,
@@ -1279,7 +1311,7 @@ struct janus_plugin_result *janus_streaming_handle_message(janus_plugin_session
 				gint64 now = janus_get_monotonic_time();
 				if(source->audio_fd != -1)
 					json_object_set_new(ml, "audio_age_ms", json_integer((now - source->last_received_audio) / 1000));
-				if(source->video_fd != -1)
+				if(source->video_fd[0] != -1 || source->video_fd[1] != -1 || source->video_fd[2] != -1)
 					json_object_set_new(ml, "video_age_ms", json_integer((now - source->last_received_video) / 1000));
 			}
 			json_array_append_new(list, ml);
@@ -1318,7 +1350,7 @@ struct janus_plugin_result *janus_streaming_handle_message(janus_plugin_session
 			gint64 now = janus_get_monotonic_time();
 			if(source->audio_fd != -1)
 				json_object_set_new(ml, "audio_age_ms", json_integer((now - source->last_received_audio) / 1000));
-			if(source->video_fd != -1)
+			if(source->video_fd[0] != -1 || source->video_fd[1] != -1 || source->video_fd[2] != -1)
 				json_object_set_new(ml, "video_age_ms", json_integer((now - source->last_received_video) / 1000));
 			if(source->data_fd != -1)
 				json_object_set_new(ml, "data_age_ms", json_integer((now - source->last_received_data) / 1000));
@@ -1434,10 +1466,10 @@ struct janus_plugin_result *janus_streaming_handle_message(janus_plugin_session
 					janus_network_address_nullify(&audio_iface);
 				}
 			}
-			uint16_t vport = 0;
+			uint16_t vport = 0, vport2 = 0, vport3 = 0;
 			uint8_t vcodec = 0;
 			char *vrtpmap = NULL, *vfmtp = NULL, *vmcast = NULL;
-			gboolean bufferkf = FALSE;
+			gboolean bufferkf = FALSE, simulcast = FALSE;
 			if(dovideo) {
 				JANUS_VALIDATE_JSON_OBJECT(root, rtp_video_parameters,
 					error_code, error_cause, TRUE,
@@ -1456,6 +1488,17 @@ struct janus_plugin_result *janus_streaming_handle_message(janus_plugin_session
 				vfmtp = (char *)json_string_value(videofmtp);
 				json_t *vkf = json_object_get(root, "videobufferkf");
 				bufferkf = vkf ? json_is_true(vkf) : FALSE;
+				json_t *vsc = json_object_get(root, "videosimulcast");
+				simulcast = vsc ? json_is_true(vsc) : FALSE;
+				if(simulcast && bufferkf) {
+					/* FIXME We'll need to take care of this */
+					JANUS_LOG(LOG_WARN, "Simulcasting enabled, so disabling buffering of keyframes\n");
+					bufferkf = FALSE;
+				}
+				json_t *videoport2 = json_object_get(root, "videoport2");
+				vport2 = json_integer_value(videoport2);
+				json_t *videoport3 = json_object_get(root, "videoport3");
+				vport3 = json_integer_value(videoport3);
 				json_t *viface = json_object_get(root, "videoiface");
 				if(viface) {
 					const char *miface = (const char *)json_string_value(viface);
@@ -1522,6 +1565,7 @@ struct janus_plugin_result *janus_streaming_handle_message(janus_plugin_session
 					desc ? (char *)json_string_value(desc) : NULL,
 					doaudio, amcast, &audio_iface, aport, acodec, artpmap, afmtp,
 					dovideo, vmcast, &video_iface, vport, vcodec, vrtpmap, vfmtp, bufferkf,
+					simulcast, vport2, vport3,
 					dodata, &data_iface, dport, buffermsg);
 			if(mp == NULL) {
 				JANUS_LOG(LOG_ERR, "Error creating 'rtp' stream...\n");
@@ -1765,7 +1809,7 @@ struct janus_plugin_result *janus_streaming_handle_message(janus_plugin_session
 				}
 				janus_config_add_item(config, mp->name, "video", mp->codecs.video_pt >= 0? "yes" : "no");
 				if(mp->codecs.video_pt >= 0) {
-					g_snprintf(value, BUFSIZ, "%d", source->video_port);
+					g_snprintf(value, BUFSIZ, "%d", source->video_port[0]);
 					janus_config_add_item(config, mp->name, "videoport", value);
 					json_t *videomcast = json_object_get(root, "videomcast");
 					if(videomcast)
@@ -1777,6 +1821,17 @@ struct janus_plugin_result *janus_streaming_handle_message(janus_plugin_session
 						janus_config_add_item(config, mp->name, "videofmtp", mp->codecs.video_fmtp);
 					if(source->keyframe.enabled)
 						janus_config_add_item(config, mp->name, "videobufferkf", "yes");
+					if(source->simulcast) {
+						janus_config_add_item(config, mp->name, "videosimulcast", "yes");
+						if(source->video_port[1]) {
+							g_snprintf(value, BUFSIZ, "%d", source->video_port[1]);
+							janus_config_add_item(config, mp->name, "videoport2", value);
+						}
+						if(source->video_port[2]) {
+							g_snprintf(value, BUFSIZ, "%d", source->video_port[2]);
+							janus_config_add_item(config, mp->name, "videoport3", value);
+						}
+					}
 				}
 				janus_config_add_item(config, mp->name, "data", mp->data ? "yes" : "no");
 				if(source->data_port > -1) {
@@ -1820,8 +1875,14 @@ struct janus_plugin_result *janus_streaming_handle_message(janus_plugin_session
 			if (source->audio_fd != -1) {
 				json_object_set_new(ml, "audio_port", json_integer(janus_streaming_get_fd_port(source->audio_fd)));
 			}
-			if (source->video_fd != -1) {
-				json_object_set_new(ml, "video_port", json_integer(janus_streaming_get_fd_port(source->video_fd)));
+			if (source->video_fd[0] != -1) {
+				json_object_set_new(ml, "video_port", json_integer(janus_streaming_get_fd_port(source->video_fd[0])));
+			}
+			if (source->video_fd[1] != -1) {
+				json_object_set_new(ml, "video_port_2", json_integer(janus_streaming_get_fd_port(source->video_fd[1])));
+			}
+			if (source->video_fd[2] != -1) {
+				json_object_set_new(ml, "video_port_3", json_integer(janus_streaming_get_fd_port(source->video_fd[2])));
 			}
 			if (source->data_fd != -1) {
 				json_object_set_new(ml, "data_port", json_integer(janus_streaming_get_fd_port(source->data_fd)));
@@ -2192,7 +2253,7 @@ struct janus_plugin_result *janus_streaming_handle_message(janus_plugin_session
 		goto plugin_response;
 	} else if(!strcasecmp(request_text, "watch") || !strcasecmp(request_text, "start")
 			|| !strcasecmp(request_text, "pause") || !strcasecmp(request_text, "stop")
-			|| !strcasecmp(request_text, "switch")) {
+			|| !strcasecmp(request_text, "configure") || !strcasecmp(request_text, "switch")) {
 		/* These messages are handled asynchronously */
 		janus_streaming_message *msg = g_malloc0(sizeof(janus_streaming_message));
 		msg->handle = handle;
@@ -2322,6 +2383,12 @@ void janus_streaming_hangup_media(janus_plugin_session *handle) {
 		return;
 	if(g_atomic_int_add(&session->hangingup, 1))
 		return;
+	session->substream = -1;
+	session->substream_target = 0;
+	session->templayer = -1;
+	session->templayer_target = 0;
+	session->last_relayed = 0;
+	janus_vp8_simulcast_context_reset(&session->simulcast_context);
 	/* FIXME Simulate a "stop" coming from the browser */
 	janus_streaming_message *msg = g_malloc0(sizeof(janus_streaming_message));
 	msg->handle = handle;
@@ -2424,6 +2491,34 @@ static void *janus_streaming_handler(void *data) {
 					g_snprintf(error_cause, 512, "Got error %d (%s) trying to launch the on-demand thread", error->code, error->message ? error->message : "??");
 					goto error;
 				}
+			} else if(mp->streaming_source == janus_streaming_source_rtp) {
+				janus_streaming_rtp_source *source = (janus_streaming_rtp_source *)mp->source;
+				if(source && source->simulcast) {
+					JANUS_VALIDATE_JSON_OBJECT(root, simulcast_parameters,
+						error_code, error_cause, TRUE,
+						JANUS_STREAMING_ERROR_MISSING_ELEMENT, JANUS_STREAMING_ERROR_INVALID_ELEMENT);
+					if(error_code != 0)
+						goto error;
+					/* This mountpoint is simulcasting, let's aim high by default */
+					session->substream = -1;
+					session->substream_target = 2;
+					session->templayer = -1;
+					session->templayer_target = 2;
+					janus_vp8_simulcast_context_reset(&session->simulcast_context);
+					/* Unless the request contains a target */
+					json_t *substream = json_object_get(root, "substream");
+					if(substream) {
+						session->substream_target = json_integer_value(substream);
+						JANUS_LOG(LOG_VERB, "Setting video substream to let through (simulcast): %d (was %d)\n",
+							session->substream_target, session->substream);
+					}
+					json_t *temporal = json_object_get(root, "temporal");
+					if(temporal) {
+						session->templayer_target = json_integer_value(temporal);
+						JANUS_LOG(LOG_VERB, "Setting video temporal layer to let through (simulcast): %d (was %d)\n",
+							session->templayer_target, session->templayer);
+					}
+				}
 			}
 			/* TODO Check if user is already watching a stream, if the video is active, etc. */
 			janus_mutex_lock(&mp->mutex);
@@ -2547,6 +2642,61 @@ static void *janus_streaming_handler(void *data) {
 					json_object_set_new(info, "id", json_integer(session->mountpoint->id));
 				gateway->notify_event(&janus_streaming_plugin, session->handle, info);
 			}
+		} else if(!strcasecmp(request_text, "configure")) {
+			janus_streaming_mountpoint *mp = session->mountpoint;
+			if(mp == NULL) {
+				JANUS_LOG(LOG_VERB, "Can't configure: not on a mountpoint\n");
+				error_code = JANUS_STREAMING_ERROR_NO_SUCH_MOUNTPOINT;
+				g_snprintf(error_cause, 512, "Can't configure: not on a mountpoint");
+				goto error;
+			}
+			if(mp->streaming_source == janus_streaming_source_rtp) {
+				janus_streaming_rtp_source *source = (janus_streaming_rtp_source *)mp->source;
+				if(source && source->simulcast) {
+					/* This mountpoint is simulcasting, let's aim high by default */
+					session->substream = -1;
+					session->substream_target = 2;
+					session->templayer = -1;
+					session->templayer_target = 2;
+					janus_vp8_simulcast_context_reset(&session->simulcast_context);
+					/* Unless the request contains a target */
+					json_t *substream = json_object_get(root, "substream");
+					if(substream) {
+						session->substream_target = json_integer_value(substream);
+						JANUS_LOG(LOG_VERB, "Setting video substream to let through (simulcast): %d (was %d)\n",
+							session->substream_target, session->substream);
+						if(session->substream_target == session->substream) {
+							/* No need to do anything, we're already getting the right substream, so notify the viewer */
+							json_t *event = json_object();
+							json_object_set_new(event, "streaming", json_string("event"));
+							json_t *result = json_object();
+							json_object_set_new(result, "substream", json_integer(session->substream));
+							json_object_set_new(event, "result", result);
+							gateway->push_event(session->handle, &janus_streaming_plugin, NULL, event, NULL);
+							json_decref(event);
+						}
+					}
+					json_t *temporal = json_object_get(root, "temporal");
+					if(temporal) {
+						session->templayer_target = json_integer_value(temporal);
+						JANUS_LOG(LOG_VERB, "Setting video temporal layer to let through (simulcast): %d (was %d)\n",
+							session->templayer_target, session->templayer);
+						if(session->templayer_target == session->templayer) {
+							/* No need to do anything, we're already getting the right temporal layer, so notify the viewer */
+							json_t *event = json_object();
+							json_object_set_new(event, "streaming", json_string("event"));
+							json_t *result = json_object();
+							json_object_set_new(result, "temporal", json_integer(session->templayer));
+							json_object_set_new(event, "result", result);
+							gateway->push_event(session->handle, &janus_streaming_plugin, NULL, event, NULL);
+							json_decref(event);
+						}
+					}
+				}
+			}
+			/* Done */
+			result = json_object();
+			json_object_set_new(result, "event", json_string("configured"));
 		} else if(!strcasecmp(request_text, "switch")) {
 			/* This listener wants to switch to a different mountpoint
 			 * NOTE: this only works for live RTP streams as of now: you
@@ -2624,6 +2774,12 @@ static void *janus_streaming_handler(void *data) {
 			session->stopping = TRUE;
 			session->started = FALSE;
 			session->paused = FALSE;
+			session->substream = -1;
+			session->substream_target = 0;
+			session->templayer = -1;
+			session->templayer_target = 0;
+			session->last_relayed = 0;
+			janus_vp8_simulcast_context_reset(&session->simulcast_context);
 			result = json_object();
 			json_object_set_new(result, "status", json_string("stopping"));
 			janus_streaming_mountpoint *mp = session->mountpoint;
@@ -2786,8 +2942,14 @@ static void janus_streaming_rtp_source_free(janus_streaming_rtp_source *source)
 	if(source->audio_fd > 0) {
 		close(source->audio_fd);
 	}
-	if(source->video_fd > 0) {
-		close(source->video_fd);
+	if(source->video_fd[0] > 0) {
+		close(source->video_fd[0]);
+	}
+	if(source->video_fd[1] > 0) {
+		close(source->video_fd[1]);
+	}
+	if(source->video_fd[2] > 0) {
+		close(source->video_fd[2]);
 	}
 	if(source->data_fd > 0) {
 		close(source->data_fd);
@@ -2887,6 +3049,7 @@ janus_streaming_mountpoint *janus_streaming_create_rtp_source(
 		uint64_t id, char *name, char *desc,
 		gboolean doaudio, char *amcast, const janus_network_address *aiface, uint16_t aport, uint8_t acodec, char *artpmap, char *afmtp,
 		gboolean dovideo, char *vmcast, const janus_network_address *viface, uint16_t vport, uint8_t vcodec, char *vrtpmap, char *vfmtp, gboolean bufferkf,
+			gboolean simulcast, uint16_t vport2, uint16_t vport3,
 		gboolean dodata, const janus_network_address *diface, uint16_t dport, gboolean buffermsg) {
 	janus_mutex_lock(&mountpoints_mutex);
 	if(id == 0) {
@@ -2935,17 +3098,47 @@ janus_streaming_mountpoint *janus_streaming_create_rtp_source(
 			return NULL;
 		}
 	}
-	int video_fd = -1;
+	int video_fd[3] = {-1, -1, -1};
 	if(dovideo) {
-		video_fd = janus_streaming_create_fd(vport, vmcast ? inet_addr(vmcast) : INADDR_ANY, viface,
+		video_fd[0] = janus_streaming_create_fd(vport, vmcast ? inet_addr(vmcast) : INADDR_ANY, viface,
 			"Video", "video", name ? name : tempname);
-		if(video_fd < 0) {
+		if(video_fd[0] < 0) {
 			JANUS_LOG(LOG_ERR, "Can't bind to port %d for video...\n", vport);
 			if(audio_fd > 0)
 				close(audio_fd);
 			janus_mutex_unlock(&mountpoints_mutex);
 			return NULL;
 		}
+		if(simulcast) {
+			if(vport2 > 0) {
+				video_fd[1] = janus_streaming_create_fd(vport2, vmcast ? inet_addr(vmcast) : INADDR_ANY, viface,
+					"Video", "video", name ? name : tempname);
+				if(video_fd[1] < 0) {
+					JANUS_LOG(LOG_ERR, "Can't bind to port %d for video (2nd port)...\n", vport2);
+					if(audio_fd > 0)
+						close(audio_fd);
+					if(video_fd[0] > 0)
+						close(video_fd[0]);
+					janus_mutex_unlock(&mountpoints_mutex);
+					return NULL;
+				}
+			}
+			if(vport3 > 0) {
+				video_fd[2] = janus_streaming_create_fd(vport3, vmcast ? inet_addr(vmcast) : INADDR_ANY, viface,
+					"Video", "video", name ? name : tempname);
+				if(video_fd[2] < 0) {
+					JANUS_LOG(LOG_ERR, "Can't bind to port %d for video (3rd port)...\n", vport3);
+					if(audio_fd > 0)
+						close(audio_fd);
+					if(video_fd[0] > 0)
+						close(video_fd[0]);
+					if(video_fd[1] > 0)
+						close(video_fd[1]);
+					janus_mutex_unlock(&mountpoints_mutex);
+					return NULL;
+				}
+			}
+		}
 	}
 	int data_fd = -1;
 	if(dodata) {
@@ -2956,8 +3149,12 @@ janus_streaming_mountpoint *janus_streaming_create_rtp_source(
 			JANUS_LOG(LOG_ERR, "Can't bind to port %d for data...\n", dport);
 			if(audio_fd > 0)
 				close(audio_fd);
-			if(video_fd > 0)
-				close(video_fd);
+			if(video_fd[0] > 0)
+				close(video_fd[0]);
+			if(video_fd[1] > 0)
+				close(video_fd[1]);
+			if(video_fd[2] > 0)
+				close(video_fd[2]);
 			janus_mutex_unlock(&mountpoints_mutex);
 			return NULL;
 		}
@@ -2991,7 +3188,10 @@ janus_streaming_mountpoint *janus_streaming_create_rtp_source(
 	live_rtp_source->audio_iface = doaudio && !janus_network_address_is_null(aiface) ? *aiface : nil;
 	live_rtp_source->audio_port = doaudio ? aport : -1;
 	live_rtp_source->video_mcast = dovideo ? (vmcast ? inet_addr(vmcast) : INADDR_ANY) : INADDR_ANY;
-	live_rtp_source->video_port = dovideo ? vport : -1;
+	live_rtp_source->video_port[0] = dovideo ? vport : -1;
+	live_rtp_source->simulcast = dovideo && simulcast;
+	live_rtp_source->video_port[1] = live_rtp_source->simulcast ? vport2 : -1;
+	live_rtp_source->video_port[2] = live_rtp_source->simulcast ? vport3 : -1;
 	live_rtp_source->video_iface = dovideo && !janus_network_address_is_null(viface) ? *viface : nil;
 	live_rtp_source->data_port = dodata ? dport : -1;
 	live_rtp_source->data_iface = dodata && !janus_network_address_is_null(diface) ? *diface : nil;
@@ -3000,7 +3200,9 @@ janus_streaming_mountpoint *janus_streaming_create_rtp_source(
 	live_rtp_source->drc = NULL;
 	janus_mutex_init(&live_rtp_source->rec_mutex);
 	live_rtp_source->audio_fd = audio_fd;
-	live_rtp_source->video_fd = video_fd;
+	live_rtp_source->video_fd[0] = video_fd[0];
+	live_rtp_source->video_fd[1] = video_fd[1];
+	live_rtp_source->video_fd[2] = video_fd[2];
 	live_rtp_source->data_fd = data_fd;
 	live_rtp_source->last_received_audio = janus_get_monotonic_time();
 	live_rtp_source->last_received_video = janus_get_monotonic_time();
@@ -3394,7 +3596,7 @@ static int janus_streaming_rtsp_connect_to_server(janus_streaming_mountpoint *mp
 	mp->codecs.video_fmtp = dovideo ? g_strdup(vfmtp) : NULL;
 	source->audio_fd = audio_fds.fd;
 	source->audio_rtcp_fd = audio_fds.rtcp_fd;
-	source->video_fd = video_fds.fd;
+	source->video_fd[0] = video_fds.fd;
 	source->video_rtcp_fd = video_fds.rtcp_fd;
 	source->curl = curl;
 	source->curldata = curldata;
@@ -3491,7 +3693,9 @@ janus_streaming_mountpoint *janus_streaming_create_rtsp_source(
 	live_rtsp_source->audio_fd = -1;
 	live_rtsp_source->audio_rtcp_fd = -1;
 	live_rtsp_source->audio_iface = iface ? *iface : nil;
-	live_rtsp_source->video_fd = -1;
+	live_rtsp_source->video_fd[0] = -1;
+	live_rtsp_source->video_fd[1] = -1;
+	live_rtsp_source->video_fd[2] = -1;
 	live_rtsp_source->video_rtcp_fd = -1;
 	live_rtsp_source->video_iface = iface ? *iface : nil;
 	live_rtsp_source->data_fd = -1;
@@ -3808,19 +4012,19 @@ static void *janus_streaming_relay_thread(void *data) {
 		return NULL;
 	}
 	int audio_fd = source->audio_fd;
-	int video_fd = source->video_fd;
+	int video_fd[3] = {source->video_fd[0], source->video_fd[1], source->video_fd[2]};
 	int data_fd = source->data_fd;
 	char *name = g_strdup(mountpoint->name ? mountpoint->name : "??");
 	/* Needed to fix seq and ts */
 	uint32_t a_last_ssrc = 0, a_last_ts = 0, a_base_ts = 0, a_base_ts_prev = 0,
-			v_last_ssrc = 0, v_last_ts = 0, v_base_ts = 0, v_base_ts_prev = 0;
+			v_last_ssrc[3] = {0, 0, 0}, v_last_ts[3] = {0, 0, 0}, v_base_ts[3] = {0, 0, 0}, v_base_ts_prev[3] = {0, 0, 0};
 	uint16_t a_last_seq = 0, a_base_seq = 0, a_base_seq_prev = 0,
-			v_last_seq = 0, v_base_seq = 0, v_base_seq_prev = 0;
+			v_last_seq[3] = {0, 0, 0}, v_base_seq[3] = {0, 0, 0}, v_base_seq_prev[3] = {0, 0, 0};
 	/* File descriptors */
 	socklen_t addrlen;
 	struct sockaddr_in remote;
 	int resfd = 0, bytes = 0;
-	struct pollfd fds[3];
+	struct pollfd fds[5];
 	char buffer[1500];
 	memset(buffer, 0, 1500);
 #ifdef HAVE_LIBCURL
@@ -3849,7 +4053,9 @@ static void *janus_streaming_relay_thread(void *data) {
 				JANUS_LOG(LOG_WARN, "[%s] %"SCNi64"s passed with no media, trying to reconnect the RTSP stream\n",
 					name, (now - source->reconnect_timer)/G_USEC_PER_SEC);
 				audio_fd = -1;
-				video_fd = -1;
+				video_fd[0] = -1;
+				video_fd[1] = -1;
+				video_fd[2] = -1;
 				source->reconnect_timer = now;
 				source->reconnecting = TRUE;
 				/* Let's clean up the source first */
@@ -3863,10 +4069,18 @@ static void *janus_streaming_relay_thread(void *data) {
 					close(source->audio_fd);
 				}
 				source->audio_fd = -1;
-				if(source->video_fd > 0) {
-					close(source->video_fd);
+				if(source->video_fd[0] > 0) {
+					close(source->video_fd[0]);
+				}
+				source->video_fd[0] = -1;
+				if(source->video_fd[1] > 0) {
+					close(source->video_fd[1]);
 				}
-				source->video_fd = -1;
+				source->video_fd[1] = -1;
+				if(source->video_fd[2] > 0) {
+					close(source->video_fd[2]);
+				}
+				source->video_fd[2] = -1;
 				if(source->data_fd > 0) {
 					close(source->data_fd);
 				}
@@ -3892,7 +4106,7 @@ static void *janus_streaming_relay_thread(void *data) {
 						/* Everything should be back to normal, let's update the file descriptors */
 						JANUS_LOG(LOG_WARN, "[%s] Reconnected to the RTSP server, streaming again\n", name);
 						audio_fd = source->audio_fd;
-						video_fd = source->video_fd;
+						video_fd[0] = source->video_fd[0];
 						ka_timeout = (source->ka_timeout*G_USEC_PER_SEC)/2;
 					}
 				}
@@ -3901,7 +4115,7 @@ static void *janus_streaming_relay_thread(void *data) {
 				continue;
 			}
 		}
-		if(audio_fd < 0 && video_fd < 0 && data_fd < 0) {
+		if(audio_fd < 0 && video_fd[0] < 0 && video_fd[1] < 0 && video_fd[2] < 0 && data_fd < 0) {
 			/* No socket, we may be in the process of reconnecting, or waiting to reconnect */
 			g_usleep(5000000);
 			continue;
@@ -3936,8 +4150,20 @@ static void *janus_streaming_relay_thread(void *data) {
 			fds[num].revents = 0;
 			num++;
 		}
-		if(video_fd != -1) {
-			fds[num].fd = video_fd;
+		if(video_fd[0] != -1) {
+			fds[num].fd = video_fd[0];
+			fds[num].events = POLLIN;
+			fds[num].revents = 0;
+			num++;
+		}
+		if(video_fd[1] != -1) {
+			fds[num].fd = video_fd[1];
+			fds[num].events = POLLIN;
+			fds[num].revents = 0;
+			num++;
+		}
+		if(video_fd[2] != -1) {
+			fds[num].fd = video_fd[2];
 			fds[num].events = POLLIN;
 			fds[num].revents = 0;
 			num++;
@@ -4017,8 +4243,17 @@ static void *janus_streaming_relay_thread(void *data) {
 					g_list_foreach(mountpoint->listeners, janus_streaming_relay_rtp_packet, &packet);
 					janus_mutex_unlock(&mountpoint->mutex);
 					continue;
-				} else if(video_fd != -1 && fds[i].fd == video_fd) {
+				} else if((video_fd[0] != -1 && fds[i].fd == video_fd[0]) ||
+						(video_fd[1] != -1 && fds[i].fd == video_fd[1]) ||
+						(video_fd[2] != -1 && fds[i].fd == video_fd[2])) {
 					/* Got something video (RTP) */
+					int index = -1;
+					if(fds[i].fd == video_fd[0])
+						index = 0;
+					else if(fds[i].fd == video_fd[1])
+						index = 1;
+					else if(fds[i].fd == video_fd[2])
+						index = 2;
 					if(mountpoint->active == FALSE)
 						mountpoint->active = TRUE;
 					source->last_received_video = janus_get_monotonic_time();
@@ -4026,7 +4261,7 @@ static void *janus_streaming_relay_thread(void *data) {
 					source->reconnect_timer = janus_get_monotonic_time();
 #endif
 					addrlen = sizeof(remote);
-					bytes = recvfrom(video_fd, buffer, 1500, 0, (struct sockaddr*)&remote, &addrlen);
+					bytes = recvfrom(fds[i].fd, buffer, 1500, 0, (struct sockaddr*)&remote, &addrlen);
 					//~ JANUS_LOG(LOG_VERB, "************************\nGot %d bytes on the video channel...\n", bytes);
 					rtp_header *rtp = (rtp_header *)buffer;
 					/* First of all, let's check if this is (part of) a keyframe that we may need to save it for future reference */
@@ -4123,24 +4358,28 @@ static void *janus_streaming_relay_thread(void *data) {
 					packet.is_rtp = TRUE;
 					packet.is_video = TRUE;
 					packet.is_keyframe = FALSE;
+					packet.simulcast = source->simulcast;
+					packet.substream = index;
+					packet.codec = mountpoint->codecs.video_codec;
 					/* Do we have a new stream? */
-					if(ntohl(packet.data->ssrc) != v_last_ssrc) {
-						v_last_ssrc = ntohl(packet.data->ssrc);
-						JANUS_LOG(LOG_INFO, "[%s] New video stream! (ssrc=%u)\n", name, v_last_ssrc);
-						v_base_ts_prev = v_last_ts;
-						v_base_ts = ntohl(packet.data->timestamp);
-						v_base_seq_prev = v_last_seq;
-						v_base_seq = ntohs(packet.data->seq_number);
+					if(ntohl(packet.data->ssrc) != v_last_ssrc[index]) {
+						v_last_ssrc[index] = ntohl(packet.data->ssrc);
+						JANUS_LOG(LOG_INFO, "[%s] New video stream! (ssrc=%u, index %d)\n", name, v_last_ssrc[index], index);
+						v_base_ts_prev[index] = v_last_ts[index];
+						v_base_ts[index] = ntohl(packet.data->timestamp);
+						v_base_seq_prev[index] = v_last_seq[index];
+						v_base_seq[index] = ntohs(packet.data->seq_number);
 					}
-					v_last_ts = (ntohl(packet.data->timestamp)-v_base_ts)+v_base_ts_prev+4500;	/* FIXME We're assuming 15fps here... */
-					packet.data->timestamp = htonl(v_last_ts);
-					v_last_seq = (ntohs(packet.data->seq_number)-v_base_seq)+v_base_seq_prev+1;
-					packet.data->seq_number = htons(v_last_seq);
+					v_last_ts[index] = (ntohl(packet.data->timestamp)-v_base_ts[index])+v_base_ts_prev[index]+4500;	/* FIXME We're assuming 15fps here... */
+					packet.data->timestamp = htonl(v_last_ts[index]);
+					v_last_seq[index] = (ntohs(packet.data->seq_number)-v_base_seq[index])+v_base_seq_prev[index]+1;
+					packet.data->seq_number = htons(v_last_seq[index]);
 					//~ JANUS_LOG(LOG_VERB, " ... updated RTP packet (ssrc=%u, pt=%u, seq=%u, ts=%u)...\n",
 						//~ ntohl(rtp->ssrc), rtp->type, ntohs(rtp->seq_number), ntohl(rtp->timestamp));
 					packet.data->type = mountpoint->codecs.video_pt;
-					/* Is there a recorder? */
-					janus_recorder_save_frame(source->vrc, buffer, bytes);
+					/* Is there a recorder? (FIXME notice we only record the first substream, if simulcasting) */
+					if(index == 0)
+						janus_recorder_save_frame(source->vrc, buffer, bytes);
 					/* Backup the actual timestamp and sequence number set by the restreamer, in case switching is involved */
 					packet.timestamp = ntohl(packet.data->timestamp);
 					packet.seq_number = ntohs(packet.data->seq_number);
@@ -4242,13 +4481,122 @@ static void janus_streaming_relay_rtp_packet(gpointer data, gpointer user_data)
 	if(packet->is_rtp) {
 		/* Make sure there hasn't been a publisher switch by checking the SSRC */
 		if(packet->is_video) {
-			/* Fix sequence number and timestamp (switching may be involved) */
-			janus_rtp_header_update(packet->data, &session->context, TRUE, 4500);
-			if(gateway != NULL)
-				gateway->relay_rtp(session->handle, packet->is_video, (char *)packet->data, packet->length);
-			/* Restore the timestamp and sequence number to what the publisher set them to */
-			packet->data->timestamp = htonl(packet->timestamp);
-			packet->data->seq_number = htons(packet->seq_number);
+			if(packet->simulcast) {
+				/* Handle simulcast: don't relay if it's not the substream we wanted to handle */
+				int plen = 0;
+				char *payload = janus_rtp_payload((char *)packet->data, packet->length, &plen);
+				if(payload == NULL)
+					return;
+				gboolean switched = FALSE;
+				if(session->substream != session->substream_target) {
+					/* There has been a change: let's wait for a keyframe on the target */
+					int step = (session->substream < 1 && session->substream_target == 2);
+					if(packet->substream == session->substream_target || (step && packet->substream == step)) {
+						//~ if(janus_vp8_is_keyframe(payload, plen)) {
+							JANUS_LOG(LOG_WARN, "Received keyframe on substream %d, switching (was %d)\n",
+								packet->substream, session->substream);
+							session->substream = packet->substream;
+							switched = TRUE;
+							/* Notify the viewer */
+							json_t *event = json_object();
+							json_object_set_new(event, "streaming", json_string("event"));
+							json_t *result = json_object();
+							json_object_set_new(result, "substream", json_integer(session->substream));
+							json_object_set_new(event, "result", result);
+							gateway->push_event(session->handle, &janus_streaming_plugin, NULL, event, NULL);
+							json_decref(event);
+						//~ } else {
+							//~ JANUS_LOG(LOG_WARN, "Not a keyframe on SSRC %"SCNu32" yet, waiting before switching\n", ssrc);
+						//~ }
+					}
+				}
+				/* If we haven't received our desired substream yet, let's drop temporarily */
+				if(session->last_relayed == 0) {
+					/* Let's start slow */
+					session->last_relayed = janus_get_monotonic_time();
+				} else {
+					/* Check if 250ms went by with no packet relayed */
+					gint64 now = janus_get_monotonic_time();
+					if(now-session->last_relayed >= 250000) {
+						session->last_relayed = now;
+						int substream = session->substream-1;
+						if(substream < 0)
+							substream = 0;
+						if(session->substream != substream) {
+							JANUS_LOG(LOG_WARN, "No packet received on substream %d for a while, falling back to %d\n",
+								session->substream, substream);
+							session->substream = substream;
+							/* Notify the viewer */
+							json_t *event = json_object();
+							json_object_set_new(event, "streaming", json_string("event"));
+							json_t *result = json_object();
+							json_object_set_new(result, "substream", json_integer(session->substream));
+							json_object_set_new(event, "result", result);
+							gateway->push_event(session->handle, &janus_streaming_plugin, NULL, event, NULL);
+							json_decref(event);
+						}
+					}
+				}
+				if(packet->substream != session->substream) {
+					JANUS_LOG(LOG_HUGE, "Dropping packet (it's from substream %d, but we're only relaying substream %d now)\n",
+						packet->substream, session->substream);
+					return;
+				}
+				session->last_relayed = janus_get_monotonic_time();
+				char vp8pd[6];
+				if(packet->codec == JANUS_STREAMING_VP8) {
+					/* Check if there's any temporal scalability to take into account */
+					uint16_t picid = 0;
+					uint8_t tlzi = 0;
+					uint8_t tid = 0;
+					uint8_t ybit = 0;
+					uint8_t keyidx = 0;
+					if(janus_vp8_parse_descriptor(payload, plen, &picid, &tlzi, &tid, &ybit, &keyidx) == 0) {
+						//~ JANUS_LOG(LOG_WARN, "%"SCNu16", %u, %u, %u, %u\n", picid, tlzi, tid, ybit, keyidx);
+						if(session->templayer != session->templayer_target) {
+							/* FIXME We should be smarter in deciding when to switch */
+							session->templayer = session->templayer_target;
+								/* Notify the viewer */
+								json_t *event = json_object();
+								json_object_set_new(event, "streaming", json_string("event"));
+								json_t *result = json_object();
+								json_object_set_new(result, "temporal", json_integer(session->templayer));
+								json_object_set_new(event, "result", result);
+								gateway->push_event(session->handle, &janus_streaming_plugin, NULL, event, NULL);
+								json_decref(event);
+						}
+						if(tid > session->templayer) {
+							JANUS_LOG(LOG_HUGE, "Dropping packet (it's temporal layer %d, but we're capping at %d)\n",
+								tid, session->templayer);
+							/* We increase the base sequence number, or there will be gaps when delivering later */
+							session->context.v_base_seq++;
+							return;
+						}
+					}
+					/* If we got here, update the RTP header and send the packet */
+					janus_rtp_header_update(packet->data, &session->context, TRUE, 4500);
+					memcpy(vp8pd, payload, sizeof(vp8pd));
+					janus_vp8_simulcast_descriptor_update(payload, plen, &session->simulcast_context, switched);
+				}
+				/* Send the packet */
+				if(gateway != NULL)
+					gateway->relay_rtp(session->handle, packet->is_video, (char *)packet->data, packet->length);
+				/* Restore the timestamp and sequence number to what the publisher set them to */
+				packet->data->timestamp = htonl(packet->timestamp);
+				packet->data->seq_number = htons(packet->seq_number);
+				if(packet->codec == JANUS_STREAMING_VP8) {
+					/* Restore the original payload descriptor as well, as it will be needed by the next viewer */
+					memcpy(payload, vp8pd, sizeof(vp8pd));
+				}
+			} else {
+				/* Fix sequence number and timestamp (switching may be involved) */
+				janus_rtp_header_update(packet->data, &session->context, TRUE, 4500);
+				if(gateway != NULL)
+					gateway->relay_rtp(session->handle, packet->is_video, (char *)packet->data, packet->length);
+				/* Restore the timestamp and sequence number to what the publisher set them to */
+				packet->data->timestamp = htonl(packet->timestamp);
+				packet->data->seq_number = htons(packet->seq_number);
+			}
 		} else {
 			/* Fix sequence number and timestamp (switching may be involved) */
 			janus_rtp_header_update(packet->data, &session->context, FALSE, 960);
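
Note that the same three options are also accepted when hot-adding a mountpoint
via the "create" request, matching the new entries in rtp_video_parameters
above. A hypothetical sketch from a janus.js plugin handle (the id, ports and
codec details are illustrative):

    streaming.send({ message: {
        request: "create",
        type: "rtp",
        id: 99,
        description: "Simulcast test mountpoint",
        audio: false,
        video: true,
        videoport: 5004,
        videoport2: 5005,
        videoport3: 5006,
        videopt: 100,
        videortpmap: "VP8/90000",
        videosimulcast: true
    }});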
