132 changes: 102 additions & 30 deletions plugins/src/webrtcsink/imp.rs
@@ -27,6 +27,9 @@ static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
)
});

+ const CUDA_MEMORY_FEATURE: &str = "memory:CUDAMemory";
+ const GL_MEMORY_FEATURE: &str = "memory:GLMemory";
+
const RTP_TWCC_URI: &str =
"http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01";

@@ -242,22 +245,45 @@ impl Default for State {
}
}

+ fn make_converter_for_video_caps(caps: &gst::Caps) -> Result<gst::Element, Error> {
+ assert!(caps.is_fixed());
+
+ for feature in caps.features(0) {
+ if feature.contains(CUDA_MEMORY_FEATURE) {
+ return Ok(gst::parse_bin_from_description(
+ "cudaupload ! cudaconvert ! cudascale ! videorate drop-only=true",
+ true,
+ )?
+ .upcast());
+ } else if feature.contains(GL_MEMORY_FEATURE) {
+ return Ok(gst::parse_bin_from_description(
+ "glupload ! glcolorconvert ! glcolorscale ! videorate drop-only=true skip-to-first=true",
+ true,
+ )?
+ .upcast());
+ }
+ }
+
+ Ok(gst::parse_bin_from_description(
+ "videoconvert ! videoscale ! videorate drop-only=true skip-to-first=true",
+ true,
+ )?
+ .upcast())
+ }

/// Bit of an awkward function, but the goal here is to keep
/// most of the encoding code for consumers in line with
/// the codec discovery code, and this gets the job done.
fn setup_encoding(
pipeline: &gst::Pipeline,
src: &gst::Element,
+ input_caps: &gst::Caps,
codec: &Codec,
ssrc: Option<u32>,
twcc: bool,
) -> Result<(gst::Element, gst::Element, gst::Element), Error> {
let conv = match codec.is_video {
- true => gst::parse_bin_from_description(
- "videoconvert ! videoscale ! videorate drop-only=true",
- true,
- )?
- .upcast(),
+ true => make_converter_for_video_caps(input_caps)?.upcast(),
false => gst::parse_bin_from_description("audioresample ! audioconvert", true)?.upcast(),
};

@@ -302,19 +328,21 @@ fn setup_encoding(
.with_context(|| "Linking encoding elements")?;
}

- // Quirk: nvh264enc can perform conversion from RGB formats, but
- // doesn't advertise / negotiate colorimetry correctly, leading
- // to incorrect color display in Chrome (but interestingly not in
- // Firefox). In any case, restrict to exclude RGB formats altogether,
- // and let videoconvert do the conversion properly if needed.
- let conv_caps = if codec.encoder.name() == "nvh264enc" {
- gst::Caps::builder("video/x-raw")
- .field("format", &gst::List::new(&[&"NV12", &"YV12", &"I420"]))
- .field("pixel-aspect-ratio", gst::Fraction::new(1, 1))
- .build()
- } else if codec.is_video {
- gst::Caps::builder("video/x-raw")
- .field("pixel-aspect-ratio", gst::Fraction::new(1, 1))
+ let conv_caps = if codec.is_video {
+ let mut structure_builder = gst::Structure::builder("video/x-raw")
+ .field("pixel-aspect-ratio", gst::Fraction::new(1, 1));
+
+ if codec.encoder.name() == "nvh264enc" {
+ // Quirk: nvh264enc can perform conversion from RGB formats, but
+ // doesn't advertise / negotiate colorimetry correctly, leading
+ // to incorrect color display in Chrome (but interestingly not in
+ // Firefox). In any case, restrict to exclude RGB formats altogether,
+ // and let videoconvert do the conversion properly if needed.
+ structure_builder = structure_builder.field("format", &gst::List::new(&[&"NV12", &"YV12", &"I420"]));
+ }
+
+ gst::Caps::builder_full_with_any_features()
+ .structure(structure_builder.build())
.build()
} else {
gst::Caps::builder("audio/x-raw").build()
@@ -519,7 +547,9 @@ impl VideoEncoder {
self.mitigation_mode = WebRTCSinkMitigationMode::NONE;
}

- let caps = gst::Caps::builder_full().structure(s).build();
+ let caps = gst::Caps::builder_full_with_any_features()
+ .structure(s)
+ .build();

gst_log!(
CAT,
@@ -982,8 +1012,14 @@ impl Consumer {
let pay_filter = make_element("capsfilter", None)?;
self.pipeline.add(&pay_filter).unwrap();

- let (enc, raw_filter, pay) =
- setup_encoding(&self.pipeline, &appsrc, codec, Some(webrtc_pad.ssrc), false)?;
+ let (enc, raw_filter, pay) = setup_encoding(
+ &self.pipeline,
+ &appsrc,
+ &webrtc_pad.in_caps,
+ codec,
+ Some(webrtc_pad.ssrc),
+ false,
+ )?;
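// The in_caps passed above appear to be the fixed caps of the stream feeding this
// pad, so make_converter_for_video_caps() can pick a converter that matches the
// stream's memory type (system memory, GLMemory or CUDAMemory).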

// At this point, the peer has provided its answer, and we want to
// let the payloader / encoder perform negotiation according to that.
@@ -1803,7 +1839,17 @@ impl WebRTCSink {
* very well equipped to deal with this at the moment */
if let Some(media) = sdp.media(media_idx) {
if media.attribute_val("inactive").is_some() {
- gst_warning!(CAT, "consumer {} refused media {}", peer_id, media_idx);
+ let media_str = sdp
+ .media(webrtc_pad.media_idx)
+ .and_then(|media| media.as_text().ok());
+
+ gst_warning!(
+ CAT,
+ "consumer {} refused media {}: {:?}",
+ peer_id,
+ media_idx,
+ media_str
+ );
state.remove_consumer(element, peer_id, true);

return Err(WebRTCSinkError::ConsumerRefusedMedia {
@@ -1865,17 +1911,24 @@
) -> Result<gst::Structure, Error> {
let pipe = PipelineWrapper(gst::Pipeline::new(None));

- let src = match codec.is_video {
- true => make_element("videotestsrc", None)?,
- false => make_element("audiotestsrc", None)?,
+ let src = if codec.is_video {
+ make_element("videotestsrc", None)?
+ } else {
+ make_element("audiotestsrc", None)?
};
- let capsfilter = make_element("capsfilter", None)?;
+ let mut elements = Vec::new();
+ elements.push(src.clone());
+
+ elements.push(make_converter_for_video_caps(caps)?);

- pipe.0.add_many(&[&src, &capsfilter]).unwrap();
- src.link(&capsfilter)
+ let capsfilter = make_element("capsfilter", None)?;
+ elements.push(capsfilter.clone());
+ let elements_slice = &elements.iter().collect::<Vec<_>>();
+ pipe.0.add_many(elements_slice).unwrap();
+ gst::Element::link_many(elements_slice)
.with_context(|| format!("Running discovery pipeline for caps {}", caps))?;

- let (_, _, pay) = setup_encoding(&pipe.0, &capsfilter, codec, None, true)?;
+ let (_, _, pay) = setup_encoding(&pipe.0, &capsfilter, &caps, codec, None, true)?;

let sink = make_element("fakesink", None)?;

@@ -1897,11 +1950,20 @@
while let Some(msg) = stream.next().await {
match msg.view() {
gst::MessageView::Error(err) => {
+ pipe.0.debug_to_dot_file_with_ts(
+ gst::DebugGraphDetails::all(),
+ format!("webrtcsink-discovery-error"),
+ );
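// Note: debug_to_dot_file_with_ts() only writes a .dot file when the
// GST_DEBUG_DUMP_DOT_DIR environment variable points at a writable directory;
// otherwise the call is a no-op.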
return Err(err.error().into());
}
gst::MessageView::Eos(_) => {
let caps = pay.static_pad("src").unwrap().current_caps().unwrap();

+ pipe.0.debug_to_dot_file_with_ts(
+ gst::DebugGraphDetails::all(),
+ format!("webrtcsink-discovery-done"),
+ );
+
if let Some(s) = caps.structure(0) {
let mut s = s.to_owned();
s.remove_fields(&[
@@ -2375,7 +2437,17 @@ impl ElementImpl for WebRTCSink {

fn pad_templates() -> &'static [gst::PadTemplate] {
static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
let caps = gst::Caps::builder("video/x-raw").build();
let caps = gst::Caps::builder_full()
.structure(gst::Structure::builder("video/x-raw").build())
.structure_with_features(
gst::Structure::builder("video/x-raw").build(),
gst::CapsFeatures::new(&[CUDA_MEMORY_FEATURE]),
)
.structure_with_features(
gst::Structure::builder("video/x-raw").build(),
gst::CapsFeatures::new(&[GL_MEMORY_FEATURE]),
)
.build();
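// The template caps built above are equivalent to the caps string
// "video/x-raw; video/x-raw(memory:CUDAMemory); video/x-raw(memory:GLMemory)",
// i.e. the video pads now also accept buffers that already live in CUDA or GL
// memory, which make_converter_for_video_caps() then scales/converts without a
// round trip through system memory. (Parsing that string, e.g. with
// "...".parse::<gst::Caps>(), should yield the same caps; shown here only for
// illustration.)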
let video_pad_template = gst::PadTemplate::new(
"video_%u",
gst::PadDirection::Sink,