diff --git a/NetworkToolkit.cpp b/NetworkToolkit.cpp
index 1b8c76f..d31953a 100644
--- a/NetworkToolkit.cpp
+++ b/NetworkToolkit.cpp
@@ -114,17 +114,6 @@ const std::vector< std::pair<std::string, std::string> > NetworkToolkit::stream_
     {"vaapih264enc", "video/x-raw, format=NV12, framerate=30/1 ! queue max-size-buffers=10 ! vaapih264enc rate-control=cqp init-qp=26 ! video/x-h264, profile=(string)main ! rtph264pay aggregate-mode=1 ! udpsink name=sink"}
 };
 
-
-const char* NetworkToolkit::broadcast_protocol_label[NetworkToolkit::BROADCAST_DEFAULT] = {
-    "SRT"
-};
-
-const std::vector<std::string> NetworkToolkit::broadcast_pipeline {
-    "x264enc tune=zerolatency ! video/x-h264, profile=high ! mpegtsmux ! srtsink uri=srt://:XXXX/ name=sink",
-};
-//"video/x-raw, format=I420, framerate=30/1 ! queue max-size-buffers=3 ! jpegenc idct-method=float ! rtpjpegpay ! rtpstreampay ! tcpserversink name=sink",
-//"video/x-raw, format=I420, framerate=30/1 ! queue max-size-buffers=3 ! x264enc tune=\"zerolatency\" threads=2 ! rtph264pay ! rtpstreampay ! tcpserversink name=sink",
-
 bool initialized_ = false;
 std::vector<std::string> ipstrings_;
 std::vector<unsigned long> iplongs_;
diff --git a/NetworkToolkit.h b/NetworkToolkit.h
index 0e8f0bd..1991c1a 100644
--- a/NetworkToolkit.h
+++ b/NetworkToolkit.h
@@ -66,13 +66,13 @@ struct StreamConfig {
     }
 };
 
-typedef enum {
-    BROADCAST_SRT = 0,
-    BROADCAST_DEFAULT
-} BroadcastProtocol;
+//typedef enum {
+//    BROADCAST_SRT = 0,
+//    BROADCAST_DEFAULT
+//} BroadcastProtocol;
 
-extern const char* broadcast_protocol_label[BROADCAST_DEFAULT];
-extern const std::vector<std::string> broadcast_pipeline;
+//extern const char* broadcast_protocol_label[BROADCAST_DEFAULT];
+//extern const std::vector<std::string> broadcast_pipeline;
 
 std::string hostname();
 std::vector<std::string> host_ips();
diff --git a/PatternSource.cpp b/PatternSource.cpp
index b7087f4..11a044a 100644
--- a/PatternSource.cpp
+++ b/PatternSource.cpp
@@ -111,19 +111,19 @@ void Pattern::open( uint pattern, glm::ivec2 res )
     // if there is a XXXX parameter to enter
     std::string::size_type xxxx = gstreamer_pattern.find("XXXX");
     if (xxxx != std::string::npos)
-        gstreamer_pattern = gstreamer_pattern.replace(xxxx, 4, std::to_string(res.x));
+        gstreamer_pattern.replace(xxxx, 4, std::to_string(res.x));
     // if there is a YYYY parameter to enter
     std::string::size_type yyyy = gstreamer_pattern.find("YYYY");
     if (yyyy != std::string::npos)
-        gstreamer_pattern = gstreamer_pattern.replace(yyyy, 4, std::to_string(res.y));
+        gstreamer_pattern.replace(yyyy, 4, std::to_string(res.y));
     // if there is a XXX parameter to enter
     std::string::size_type xxx = gstreamer_pattern.find("XXX");
     if (xxx != std::string::npos)
-        gstreamer_pattern = gstreamer_pattern.replace(xxx, 3, std::to_string(res.x/10));
+        gstreamer_pattern.replace(xxx, 3, std::to_string(res.x/10));
     // if there is a YYY parameter to enter
     std::string::size_type yyy = gstreamer_pattern.find("YYY");
     if (yyy != std::string::npos)
-        gstreamer_pattern = gstreamer_pattern.replace(yyy, 3, std::to_string(res.y/10));
+        gstreamer_pattern.replace(yyy, 3, std::to_string(res.y/10));
 
     // remember if the pattern is to be updated once or animated
     single_frame_ = !Pattern::patterns_[type_].animated;
diff --git a/UserInterfaceManager.cpp b/UserInterfaceManager.cpp
index 698b4a6..7a8d788 100644
--- a/UserInterfaceManager.cpp
+++ b/UserInterfaceManager.cpp
@@ -3780,21 +3780,24 @@ void OutputPreview::Render()
             }
         }
 #endif
-        // Broadcasting menu
-        ImGui::PushStyleColor(ImGuiCol_Text, ImVec4(IMGUI_COLOR_BROADCAST, 0.8f));
-        // Stop broadcast menu (broadcaster already exists)
-        if (video_broadcaster_) {
-            if ( ImGui::MenuItem( ICON_FA_SQUARE " Stop Broadcast") )
-                video_broadcaster_->stop();
-        }
-        // start broadcast (broadcaster does not exists)
-        else {
-            if ( ImGui::MenuItem( ICON_FA_GLOBE " Broadcast") ) {
-                video_broadcaster_ = new VideoBroadcast;
-                FrameGrabbing::manager().add(video_broadcaster_);
+
+        if (VideoBroadcast::available()) {
+            // Broadcasting menu
+            ImGui::PushStyleColor(ImGuiCol_Text, ImVec4(IMGUI_COLOR_BROADCAST, 0.8f));
+            // Stop broadcast menu (broadcaster already exists)
+            if (video_broadcaster_) {
+                if ( ImGui::MenuItem( ICON_FA_SQUARE " Stop Broadcast") )
+                    video_broadcaster_->stop();
             }
+            // start broadcast (broadcaster does not exists)
+            else {
+                if ( ImGui::MenuItem( ICON_FA_GLOBE " Broadcast") ) {
+                    video_broadcaster_ = new VideoBroadcast(Settings::application.broadcast_port);
+                    FrameGrabbing::manager().add(video_broadcaster_);
+                }
+            }
+            ImGui::PopStyleColor(1);
         }
-        ImGui::PopStyleColor(1);
 
         // Stream sharing menu
         ImGui::PushStyleColor(ImGuiCol_Text, ImVec4(IMGUI_COLOR_STREAM, 0.9f));
diff --git a/VideoBroadcast.cpp b/VideoBroadcast.cpp
index acef087..f6e3f00 100644
--- a/VideoBroadcast.cpp
+++ b/VideoBroadcast.cpp
@@ -18,8 +18,52 @@
 #define BROADCAST_DEBUG
 #endif
 
+std::string VideoBroadcast::srt_sink_;
+std::string VideoBroadcast::h264_encoder_;
 
-VideoBroadcast::VideoBroadcast(NetworkToolkit::BroadcastProtocol proto, int port): FrameGrabber(), protocol_(proto), port_(port), stopped_(false)
+std::vector< std::pair<std::string, std::string> > pipeline_sink_ {
+    {"srtsink", "srtsink uri=srt://:XXXX/ name=sink"},
+    {"srtserversink", "srtserversink uri=srt://:XXXX/ name=sink"}
+};
+
+std::vector< std::pair<std::string, std::string> > pipeline_encoder_ {
+    {"nvh264enc", "nvh264enc rc-mode=1 zerolatency=true ! video/x-h264, profile=high ! mpegtsmux ! "},
+    {"vaapih264enc", "vaapih264enc rate-control=cqp init-qp=26 ! video/x-h264, profile=high ! mpegtsmux ! "},
+    {"x264enc", "x264enc tune=zerolatency ! video/x-h264, profile=high ! mpegtsmux ! "}
+};
+
+bool VideoBroadcast::available()
+{
+    // test for installation on first run
+    static bool _tested = false;
+    if (!_tested) {
+        srt_sink_.clear();
+        for (auto config = pipeline_sink_.cbegin();
+             config != pipeline_sink_.cend() && srt_sink_.empty(); ++config) {
+            if ( GstToolkit::has_feature(config->first) ) {
+                srt_sink_ = config->second;
+            }
+        }
+
+        h264_encoder_.clear();
+        for (auto config = pipeline_encoder_.cbegin();
+             config != pipeline_encoder_.cend() && h264_encoder_.empty(); ++config) {
+            if ( GstToolkit::has_feature(config->first) ) {
+                h264_encoder_ = config->second;
+                if (config->first != pipeline_encoder_.back().first)
+                    Log::Info("VideoBroadcast using hardware accelerated encoder (%s)", config->first.c_str());
+            }
+        }
+
+        // perform test only once
+        _tested = true;
+    }
+
+    // video broadcast is installed if both srt and h264 are available
+    return (!srt_sink_.empty() && !h264_encoder_.empty());
+}
+
+VideoBroadcast::VideoBroadcast(int port): FrameGrabber(), port_(port), stopped_(false)
 {
     frame_duration_ = gst_util_uint64_scale_int (1, GST_SECOND, BROADCAST_FPS);  // fixed 30 FPS
 
@@ -29,25 +73,24 @@ std::string VideoBroadcast::init(GstCaps *caps)
 {
     // ignore
     if (caps == nullptr)
-        return std::string("Invalid caps");
+        return std::string("Video Broadcast : Invalid caps");
+
+    if (!VideoBroadcast::available())
+        return std::string("Video Broadcast : Not available (missing SRT or H264)");
 
     // create a gstreamer pipeline
     std::string description = "appsrc name=src ! videoconvert ! queue ! ";
 
-    // choose pipeline for protocol
-    if (protocol_ == NetworkToolkit::BROADCAST_DEFAULT)
-        protocol_ = NetworkToolkit::BROADCAST_SRT;
-    description += NetworkToolkit::broadcast_pipeline[protocol_];
+    // complement pipeline with encoder and sink
+    description += VideoBroadcast::h264_encoder_;
+    description += VideoBroadcast::srt_sink_;
 
-    // setup streaming pipeline
-    if (protocol_ == NetworkToolkit::BROADCAST_SRT) {
-        // change the pipeline to include the broadcast port
-        std::string::size_type xxxx = description.find("XXXX");
-        if (xxxx != std::string::npos)
-            description = description.replace(xxxx, 4, std::to_string(port_));
-        else
-            return std::string("Video Broadcast : Failed to configure broadcast port.");
-    }
+    // change the placeholder to include the broadcast port
+    std::string::size_type xxxx = description.find("XXXX");
+    if (xxxx != std::string::npos)
+        description.replace(xxxx, 4, std::to_string(port_));
+    else
+        return std::string("Video Broadcast : Failed to configure broadcast port.");
 
     // parse pipeline descriptor
     GError *error = NULL;
@@ -58,14 +101,11 @@ std::string VideoBroadcast::init(GstCaps *caps)
         return msg;
     }
 
-    // setup streaming sink
-    if (protocol_ == NetworkToolkit::BROADCAST_SRT) {
-        g_object_set (G_OBJECT (gst_bin_get_by_name (GST_BIN (pipeline_), "sink")),
-                      "latency", 500,
-                      NULL);
-    }
-    // TODO Configure options
+    // setup SRT streaming sink properties
+    g_object_set (G_OBJECT (gst_bin_get_by_name (GST_BIN (pipeline_), "sink")),
+                  "latency", 500,
+                  NULL);
 
 
     // setup custom app source
     src_ = GST_APP_SRC( gst_bin_get_by_name (GST_BIN (pipeline_), "src") );
@@ -117,7 +157,7 @@ std::string VideoBroadcast::init(GstCaps *caps)
 
     // all good
     initialized_ = true;
 
-    return std::string("Video Broadcast started.");
+    return std::string("Video Broadcast started SRT on port ") + std::to_string(port_);
 }
 
@@ -127,7 +167,7 @@ void VideoBroadcast::terminate()
 
     // send EOS
     gst_app_src_end_of_stream (src_);
 
-    Log::Notify("Broadcast terminated after %s s.",
+    Log::Notify("Video Broadcast terminated after %s s.",
                 GstToolkit::time_to_string(duration_).c_str());
 }
@@ -147,12 +187,10 @@ std::string VideoBroadcast::info() const
 
     if (!initialized_)
         ret << "Starting";
-    else if (active_) {
-        ret << NetworkToolkit::broadcast_protocol_label[protocol_];
-        ret << " ( Port " << port_ << " )";
-    }
+    else if (active_)
+        ret << "Streaming SRT on Port " << port_;
     else
-        ret << "Terminated";
+        ret << "Terminated";
 
     return ret.str();
 }
diff --git a/VideoBroadcast.h b/VideoBroadcast.h
index 397d928..8cc6980 100644
--- a/VideoBroadcast.h
+++ b/VideoBroadcast.h
@@ -10,9 +10,11 @@
 class VideoBroadcast : public FrameGrabber
 {
 public:
-    VideoBroadcast(NetworkToolkit::BroadcastProtocol p = NetworkToolkit::BROADCAST_DEFAULT, int port = 8888);
+    VideoBroadcast(int port = 8888);
     virtual ~VideoBroadcast() {}
 
+    static bool available();
+
     void stop() override;
     std::string info() const override;
 
@@ -21,9 +23,12 @@ private:
     void terminate() override;
 
     // connection information
-    NetworkToolkit::BroadcastProtocol protocol_;
     int port_;
     std::atomic<bool> stopped_;
+
+    // pipeline elements
+    static std::string srt_sink_;
+    static std::string h264_encoder_;
 };
 
 #endif // VIDEOBROADCAST_H