diff --git a/src/stream/pipeline/file_pipeline.rs b/src/stream/pipeline/file_pipeline.rs index b1c3058b..bfe270e1 100644 --- a/src/stream/pipeline/file_pipeline.rs +++ b/src/stream/pipeline/file_pipeline.rs @@ -53,59 +53,40 @@ impl FilePipeline { let video_tee_name = format!("{PIPELINE_VIDEO_TEE_NAME}-{pipeline_id}"); let rtp_tee_name = format!("{PIPELINE_RTP_TEE_NAME}-{pipeline_id}"); - let rtp_payloader = match &configuration.encode { - VideoEncodeType::H265 => "rtph265pay".to_string(), - VideoEncodeType::H264 => "rtph264pay".to_string(), - VideoEncodeType::Mjpg => "rtpjpegpay".to_string(), - VideoEncodeType::Yuyv => "rtpvrawpay".to_string(), - // Well, lets try to encode and see how it goes! - other => { - warn!("Format {other:?} nor supported, going to use rtpjpegpay instead."); - "rtpjpegpay".to_string() - } + let width = configuration.width; + let height = configuration.height; + let encode = &configuration.encode; + + if width % 2 != 0 || height % 2 != 0 { + return Err(anyhow!( + "Width and height must be multiples of 2, but got width: {width}, and height: {height}" + )); }; - // Fakes (videotestsrc) are only "video/x-raw" or "video/x-bayer", - // and to be able to encode it, we need to define an available - // format for both its src the next element's sink pad. - // We are choosing "UYVY" because it is compatible with the - // application-rtp template capabilities. - // For more information: https://gstreamer.freedesktop.org/documentation/additional/design/mediatype-video-raw.html?gi-language=c#formats - /* - let description = format!( - concat!( - // Because application-rtp templates doesn't accept "YUY2", we - // need to transcode it. We are arbitrarily chosing the closest - // format available ("UYVY"). - " multifilesrc location=\"{source}\" loop=true", - " ! decodebin3", - // " ! video/x-raw,format=I420", - //" ! capsfilter name={filter_name} caps={encode},width={width},height={height},framerate={interval_denominator}/{interval_numerator}", - " ! 
tee name={video_tee_name} allow-not-linked=true", - " ! {rtp_payloader} pt=96", - " ! tee name={rtp_tee_name} allow-not-linked=true", - ), - source = video_source.source.clone().into_os_string().into_string().unwrap(), - // encode = video_source.configuration.encode.clone().to_codec(), - // width = configuration.width, - // height = configuration.height, - // interval_denominator = configuration.frame_interval.denominator, - // interval_numerator = configuration.frame_interval.numerator, - // filter_name = filter_name, - video_tee_name = video_tee_name, - rtp_payloader = rtp_payloader, - rtp_tee_name = rtp_tee_name, - );*/ + if !encode.to_string().contains("image") { + return Err(anyhow!( + "Format {encode:?} not supported, only images are supported at the moment." + )); + }; let description = format!( concat!( - " filesrc location={source}", - " ! qtdemux ! video/x-h264 ! queue", + "multifilesrc location={source}", + " ! decodebin", + " ! videoconvert", + " ! imagefreeze", + " ! videobox", + " ! video/x-raw,format=I420,width={width},height={height},framerate=30/1", + " ! x264enc", + " ! capsfilter name={filter_name} caps=video/x-h264,width={width},height={height},framerate=30/1", " ! tee name={video_tee_name} allow-not-linked=true", " ! rtph264pay config-interval=1 pt=96", " ! tee name={rtp_tee_name} allow-not-linked=true", ), source = video_source.source.clone().into_os_string().into_string().unwrap(), + filter_name = filter_name, + width = width, + height = height, video_tee_name = video_tee_name, rtp_tee_name = rtp_tee_name, ); @@ -117,6 +98,72 @@ impl FilePipeline { .downcast::() .expect("Couldn't downcast pipeline"); + if false { + let rtp_payloader = match &configuration.encode { + VideoEncodeType::H265 => "rtph265pay".to_string(), + VideoEncodeType::H264 => "rtph264pay".to_string(), + VideoEncodeType::Mjpg => "rtpjpegpay".to_string(), + VideoEncodeType::Yuyv => "rtpvrawpay".to_string(), + // Well, lets try to encode and see how it goes! 
+ other => { + warn!("Format {other:?} nor supported, going to use rtpjpegpay instead."); + "rtpjpegpay".to_string() + } + }; + + // Fakes (videotestsrc) are only "video/x-raw" or "video/x-bayer", + // and to be able to encode it, we need to define an available + // format for both its src the next element's sink pad. + // We are choosing "UYVY" because it is compatible with the + // application-rtp template capabilities. + // For more information: https://gstreamer.freedesktop.org/documentation/additional/design/mediatype-video-raw.html?gi-language=c#formats + /* + let description = format!( + concat!( + // Because application-rtp templates doesn't accept "YUY2", we + // need to transcode it. We are arbitrarily chosing the closest + // format available ("UYVY"). + " multifilesrc location=\"{source}\" loop=true", + " ! decodebin3", + // " ! video/x-raw,format=I420", + //" ! capsfilter name={filter_name} caps={encode},width={width},height={height},framerate={interval_denominator}/{interval_numerator}", + " ! tee name={video_tee_name} allow-not-linked=true", + " ! {rtp_payloader} pt=96", + " ! tee name={rtp_tee_name} allow-not-linked=true", + ), + source = video_source.source.clone().into_os_string().into_string().unwrap(), + // encode = video_source.configuration.encode.clone().to_codec(), + // width = configuration.width, + // height = configuration.height, + // interval_denominator = configuration.frame_interval.denominator, + // interval_numerator = configuration.frame_interval.numerator, + // filter_name = filter_name, + video_tee_name = video_tee_name, + rtp_payloader = rtp_payloader, + rtp_tee_name = rtp_tee_name, + );*/ + + let description = format!( + concat!( + " filesrc location={source}", + " ! qtdemux ! video/x-h264 ! queue", + " ! tee name={video_tee_name} allow-not-linked=true", + " ! rtph264pay config-interval=1 pt=96", + " ! 
tee name={rtp_tee_name} allow-not-linked=true", + ), + source = video_source.source.clone().into_os_string().into_string().unwrap(), + video_tee_name = video_tee_name, + rtp_tee_name = rtp_tee_name, + ); + + debug!("Running pipeline: {description:#?}"); + let pipeline = gst::parse::launch(&description)?; + + let pipeline = pipeline + .downcast::() + .expect("Couldn't downcast pipeline"); + } + Ok(pipeline) } } diff --git a/src/stream/sink/image_sink.rs b/src/stream/sink/image_sink.rs index 1265af2e..30fb5a1d 100644 --- a/src/stream/sink/image_sink.rs +++ b/src/stream/sink/image_sink.rs @@ -333,6 +333,14 @@ impl ImageSink { // Depending of the sources' format we need different elements to transform it into a raw format let mut _transcoding_elements: Vec = Default::default(); + + let encoding = if encoding.to_string().contains("image") { + info!("The source appears to be an image, by default we transform image to h264 streams."); + VideoEncodeType::H264 + } else { + encoding + }; + match encoding { VideoEncodeType::H264 => { // For h264, we need to filter-out unwanted non-key frames here, before decoding it. @@ -353,7 +361,9 @@ impl ImageSink { _transcoding_elements.push(decoder); } VideoEncodeType::Yuyv => {} - _ => return Err(anyhow!("Unsupported video encoding for ImageSink: {encoding:?}. The supported are: H264, MJPG and YUYV")), + other => { + return Err(anyhow!("Unsupported video encoding for ImageSink: {other:?}. 
The supported are: H264, MJPG and YUYV")); + }, }; let videoconvert = gst::ElementFactory::make("videoconvert").build()?; diff --git a/src/stream/webrtc/signalling_server.rs b/src/stream/webrtc/signalling_server.rs index 186f6106..31e6f90d 100644 --- a/src/stream/webrtc/signalling_server.rs +++ b/src/stream/webrtc/signalling_server.rs @@ -329,11 +329,13 @@ impl StreamManagementInterface for SignallingServer { Ok(streams .iter() .filter_map(|stream| { + dbg!(stream); let (height, width, encode, interval) = match &stream.video_and_stream.stream_information.configuration { crate::stream::types::CaptureConfiguration::Video(configuration) => { // Filter out non-H264 local streams - if configuration.encode != crate::video::types::VideoEncodeType::H264 { + dbg!(configuration.encode.clone()); + if configuration.encode != crate::video::types::VideoEncodeType::H264 && !configuration.encode.to_string().contains("image") { trace!("Stream {:?} will not be listed in available streams because it's encoding isn't H264 (it's {:?} instead)", stream.video_and_stream.name, configuration.encode); return None; } @@ -341,11 +343,7 @@ impl StreamManagementInterface for SignallingServer { Some(configuration.height), Some(configuration.width), Some(format!("{:#?}", configuration.encode)), - Some( - (configuration.frame_interval.numerator as f32 - / configuration.frame_interval.denominator as f32) - .to_string(), - ), + Some("30".to_string()), ) } crate::stream::types::CaptureConfiguration::Redirect(_) => { @@ -369,6 +367,8 @@ impl StreamManagementInterface for SignallingServer { .to_string(), ); + dbg!(source.clone()); + let name = stream.video_and_stream.name.clone(); let id = stream.id; diff --git a/src/video/types.rs b/src/video/types.rs index 44b4917e..339281f6 100644 --- a/src/video/types.rs +++ b/src/video/types.rs @@ -1,13 +1,14 @@ -use std::fmt; +use std::{str::FromStr, fmt::Display}; use super::video_source::VideoSource; use super::video_source_file::VideoSourceFile; use 
super::video_source_gst::VideoSourceGst; use super::video_source_local::VideoSourceLocal; use super::video_source_redirect::VideoSourceRedirect; +use anyhow::{anyhow, Result}; use gst; use paperclip::actix::Apiv2Schema; -use serde::{de::{self, Visitor}, Deserialize, Deserializer, Serialize}; +use serde::{Deserialize, Serialize}; #[derive(Apiv2Schema, Clone, Debug, PartialEq, Deserialize, Serialize)] pub enum VideoSourceType { @@ -17,7 +18,7 @@ pub enum VideoSourceType { Redirect(VideoSourceRedirect), } -#[derive(Apiv2Schema, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize)] +#[derive(Apiv2Schema, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize)] #[serde(rename_all = "UPPERCASE")] pub enum VideoEncodeType { Unknown(String), @@ -116,29 +117,53 @@ impl VideoSourceType { } } -impl VideoEncodeType { - //TODO: use trait fromstr, check others places - pub fn from_str(fourcc: &str) -> VideoEncodeType { - let fourcc = fourcc.to_uppercase(); +impl FromStr for VideoEncodeType { + type Err = (); + + fn from_str(fourcc: &str) -> Result { + let fourcc = fourcc.to_lowercase(); match fourcc.as_str() { - "H264" => VideoEncodeType::H264, - "MJPG" => VideoEncodeType::Mjpg, - "YUYV" => VideoEncodeType::Yuyv, - _ => VideoEncodeType::Unknown(fourcc), + "h264" => Ok(VideoEncodeType::H264), + "h265" => Ok(VideoEncodeType::H265), + "mjpg" => Ok(VideoEncodeType::Mjpg), + "yuyv" => Ok(VideoEncodeType::Yuyv), + _ => Ok(VideoEncodeType::Unknown(fourcc)), } } +} - pub fn to_codec(self) -> String { - match self { - VideoEncodeType::H264 => "video/x-h264", - VideoEncodeType::H265 => "video/x-h265", - // TODO: We need to handle the mpeg version one day, but not today - VideoEncodeType::Mjpg => "video/mpeg", - VideoEncodeType::Yuyv => "video/x-raw,format=I420", - VideoEncodeType::Unknown(codec) => { - return codec; - } - }.to_string() +impl Display for VideoEncodeType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + if let Ok(codec) = 
self.to_codec() { + return write!(f, "{codec}"); + } + + let string = match self { + VideoEncodeType::H264 => "h264".to_string(), + VideoEncodeType::H265 => "h265".to_string(), + VideoEncodeType::Mjpg => "mjpg".to_string(), + VideoEncodeType::Yuyv => "yuyv".to_string(), + VideoEncodeType::Unknown(s) => s.clone().to_lowercase(), + }; + + write!(f, "{string}") + } +} + +impl VideoEncodeType { + pub fn to_codec(&self) -> Result { + if let VideoEncodeType::Unknown(codec) = self { + Err(anyhow!("Unsupported codec type: {codec}")) + } else { + Ok(match self { + VideoEncodeType::H264 => "video/x-h264", + VideoEncodeType::H265 => "video/x-h265", + // TODO: We need to handle the mpeg version one day, but not today + VideoEncodeType::Mjpg => "video/mpeg", + VideoEncodeType::Yuyv => "video/x-raw,format=I420", + _ => unreachable!(), + }.to_string()) + } } pub fn from_codec(codec: &str) -> VideoEncodeType { @@ -162,40 +187,6 @@ impl Default for ControlType { } } -struct VideoEncodeTypeVisitor; - -impl<'de> Visitor<'de> for VideoEncodeTypeVisitor { - type Value = VideoEncodeType; - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("a string representing a video encoding type") - } - - fn visit_str(self, value: &str) -> Result - where - E: de::Error, - { - let variant = match value.to_uppercase().as_str() { - "H265" => VideoEncodeType::H265, - "H264" => VideoEncodeType::H264, - "MJPG" => VideoEncodeType::Mjpg, - "YUYV" => VideoEncodeType::Yuyv, - _ => VideoEncodeType::Unknown(value.to_owned()), - }; - Ok(variant) - } -} - -// Implementing the Deserialize trait for VideoEncodeType -impl<'de> Deserialize<'de> for VideoEncodeType { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - deserializer.deserialize_str(VideoEncodeTypeVisitor) - } -} - pub static STANDARD_SIZES: &[(u32, u32); 16] = &[ (7680, 4320), (7200, 3060), diff --git a/src/video/video_source_file.rs b/src/video/video_source_file.rs index 
78cf48aa..661bf1f9 100644 --- a/src/video/video_source_file.rs +++ b/src/video/video_source_file.rs @@ -197,7 +197,10 @@ impl VideoSourceAvailable for VideoSourceFile { encode: VideoEncodeType::from_codec(codec), height: video_info.height() as u32, width: video_info.width() as u32, - frame_interval: FrameInterval::from(video_info.framerate()), + frame_interval: FrameInterval { + numerator: 1, + denominator: 1, + }, }, })); break; diff --git a/src/video/video_source_local.rs b/src/video/video_source_local.rs index 44a4bb0c..7062e495 100644 --- a/src/video/video_source_local.rs +++ b/src/video/video_source_local.rs @@ -1,5 +1,6 @@ use std::cmp::max; use std::collections::HashMap; +use std::str::FromStr; use std::sync::{Arc, Mutex}; use crate::stream::types::VideoCaptureConfiguration; @@ -361,7 +362,7 @@ fn get_device_formats(device_path: &str, typ: &VideoSourceLocalType) -> Vec
{ formats.push(Format { - encode: VideoEncodeType::from_str(encode_str), + encode: VideoEncodeType::from_str(encode_str).unwrap(), sizes, }); }