Mirror of https://github.com/brunoherbelin/vimix.git (synced 2025-12-11 18:34:58 +01:00)
Video Streamer with H264 hardware acceleration
Simplified option in the user menu: use H264 for lower bandwidth, otherwise use JPEG. Always use RAW on localhost.
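In code terms, the policy above is a three-way choice. Below is a minimal sketch using the NetworkToolkit names that appear in the diff; chooseProtocol() is a hypothetical helper written for illustration, while in the commit the logic lives directly in Streaming::addStream() in Streamer.cpp:

#include <string>
#include "NetworkToolkit.h"
#include "Settings.h"

// Hypothetical helper mirroring the addStream() change: JPEG by default,
// RAW for localhost, H264 when the low-bandwidth option is enabled.
static NetworkToolkit::Protocol chooseProtocol(const std::string &client_address)
{
    NetworkToolkit::Protocol p = NetworkToolkit::UDP_JPEG;
    if ( NetworkToolkit::is_host_ip(client_address) )
        p = NetworkToolkit::UDP_RAW;
    else if (Settings::application.stream_low_bandwidth)
        p = NetworkToolkit::UDP_H264;
    return p;
}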
@@ -88,7 +88,7 @@
 * */

const char* NetworkToolkit::protocol_name[NetworkToolkit::DEFAULT] = {
    "RGB Stream",
    "RAW Images",
    "JPEG Stream",
    "H264 Stream",
    "JPEG Broadcast",
@@ -99,8 +99,8 @@ const char* NetworkToolkit::protocol_name[NetworkToolkit::DEFAULT] = {

const std::vector<std::string> NetworkToolkit::protocol_send_pipeline {
    "video/x-raw, format=RGB, framerate=30/1 ! queue max-size-buffers=3 ! rtpvrawpay ! application/x-rtp,sampling=RGB ! udpsink name=sink",
    "video/x-raw, format=I420, framerate=30/1 ! queue max-size-buffers=10 ! jpegenc quality=95 ! rtpjpegpay ! udpsink name=sink",
    "video/x-raw, format=I420, framerate=30/1 ! queue max-size-buffers=10 ! x264enc tune=\"zerolatency\" threads=2 ! rtph264pay ! udpsink name=sink",
    "video/x-raw, format=I420, framerate=30/1 ! queue max-size-buffers=10 ! jpegenc quality=85 ! rtpjpegpay ! udpsink name=sink",
    "video/x-raw, format=I420, framerate=30/1 ! queue max-size-buffers=10 ! x264enc tune=\"zerolatency\" pass=4 quantizer=22 speed-preset=2 ! rtph264pay ! udpsink name=sink",
    "video/x-raw, format=I420, framerate=30/1 ! queue max-size-buffers=3 ! jpegenc idct-method=float ! rtpjpegpay ! rtpstreampay ! tcpserversink name=sink",
    "video/x-raw, format=I420, framerate=30/1 ! queue max-size-buffers=3 ! x264enc tune=\"zerolatency\" threads=2 ! rtph264pay ! rtpstreampay ! tcpserversink name=sink",
    "video/x-raw, format=RGB, framerate=30/1 ! queue max-size-buffers=10 ! shmsink buffer-time=100000 wait-for-connection=true name=sink"
@@ -116,6 +116,12 @@ const std::vector<std::string> NetworkToolkit::protocol_receive_pipeline {
    "shmsrc socket-path=XXXX ! video/x-raw, format=RGB, framerate=30/1 ! queue max-size-buffers=10",
};

const std::vector< std::pair<std::string, std::string> > NetworkToolkit::protocol_h264_send_pipeline {
    {"vtenc_h264_hw", "video/x-raw, format=I420 ! vtenc_h264_hw realtime=1 allow-frame-reordering=0 ! rtph264pay ! udpsink name=sink"},
    {"nvh264enc", "video/x-raw, format=RGBA ! nvh264enc rc-mode=1 zerolatency=true ! video/x-h264, profile=(string)main ! rtph264pay ! udpsink name=sink"},
    {"vaapih264enc", "video/x-raw, format=NV12 ! vaapih264enc rate-control=cqp init-qp=26 ! video/x-h264, profile=(string)main ! rtph264pay ! udpsink name=sink"}
};

bool initialized_ = false;
std::vector<std::string> ipstrings_;
std::vector<unsigned long> iplongs_;
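Whether one of the three hardware encoders above is usable is decided at runtime. A minimal standalone sketch of such a registry lookup is shown next; it assumes gst_init() has already been called, and encoder_available() is a hypothetical helper written here for illustration (the GstToolkit::has_feature() call used later in Streamer.cpp presumably performs a similar check):

#include <gst/gst.h>

// Hypothetical helper: returns true if the named element (e.g. "vaapih264enc")
// is registered in the local GStreamer plugin registry.
static bool encoder_available(const char *name)
{
    GstElementFactory *factory = gst_element_factory_find(name);
    if (factory == nullptr)
        return false;
    gst_object_unref(factory);
    return true;
}

// Usage: encoder_available("nvh264enc") would select the NVENC pipeline on NVIDIA GPUs.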
@@ -67,6 +67,7 @@ struct StreamConfig {

    extern const char* protocol_name[DEFAULT];
    extern const std::vector<std::string> protocol_send_pipeline;
    extern const std::vector< std::pair<std::string, std::string> > protocol_h264_send_pipeline;
    extern const std::vector<std::string> protocol_receive_pipeline;

    std::string hostname();
@@ -110,7 +110,7 @@ void Settings::Save(uint64_t runtime)
    applicationNode->SetAttribute("show_tooptips", application.show_tooptips);
    applicationNode->SetAttribute("accept_connections", application.accept_connections);
    applicationNode->SetAttribute("pannel_history_mode", application.pannel_current_session_mode);
    applicationNode->SetAttribute("stream_protocol", application.stream_protocol);
    applicationNode->SetAttribute("stream_low_bandwidth", application.stream_low_bandwidth);
    pRoot->InsertEndChild(applicationNode);

    // Widgets
@@ -355,7 +355,7 @@ void Settings::Load()
    applicationNode->QueryBoolAttribute("show_tooptips", &application.show_tooptips);
    applicationNode->QueryBoolAttribute("accept_connections", &application.accept_connections);
    applicationNode->QueryIntAttribute("pannel_history_mode", &application.pannel_current_session_mode);
    applicationNode->QueryIntAttribute("stream_protocol", &application.stream_protocol);
    applicationNode->QueryBoolAttribute("stream_low_bandwidth", &application.stream_low_bandwidth);
}

// Widgets
@@ -242,7 +242,7 @@ struct Application

    // settings exporters
    RecordConfig record;
    int stream_protocol;
    bool stream_low_bandwidth;

    // settings new source
    SourceConfig source;
@@ -280,7 +280,7 @@ struct Application
        current_view = 1;
        current_workspace= 1;
        brush = glm::vec3(0.5f, 0.1f, 0.f);
        stream_protocol = 0;
        stream_low_bandwidth = false;
        windows = std::vector<WindowConfig>(3);
        windows[0].name = APP_TITLE;
        windows[0].w = 1600;
@@ -157,7 +157,7 @@ StreamInfo StreamDiscoverer(const std::string &description, guint w, guint h)
    // wait for the callback_stream_discoverer to return, no more than 4 sec
    std::mutex mtx;
    std::unique_lock<std::mutex> lck(mtx);
    if ( info.discovered.wait_for(lck,std::chrono::seconds(TIMEOUT)) == std::cv_status::timeout)
    if ( info.discovered.wait_for(lck,std::chrono::seconds(TIMEOUT*2)) == std::cv_status::timeout)
        info.message = "Time out";
}
Streamer.cpp (30 changed lines)
@@ -267,10 +267,14 @@ void Streaming::addStream(const std::string &sender, int reply_to, const std::st
    conf.width = FrameGrabbing::manager().width();
    conf.height = FrameGrabbing::manager().height();

    // set protocol according to settings
    // without indication, the JPEG stream is default
    conf.protocol = NetworkToolkit::UDP_JPEG;
    // on localhost sharing, use RAW
    if ( NetworkToolkit::is_host_ip(conf.client_address) )
        conf.protocol = NetworkToolkit::UDP_RAW;
    if (Settings::application.stream_protocol >= 0 && Settings::application.stream_protocol < NetworkToolkit::DEFAULT)
        conf.protocol = (NetworkToolkit::Protocol) Settings::application.stream_protocol;
    // for non-localhost, if low bandwidth is requested, use H264 codec
    else if (Settings::application.stream_low_bandwidth)
        conf.protocol = NetworkToolkit::UDP_H264;

    // TODO : ideal would be Shared Memory, but does not work with linux snap package...
    // // offer SHM stream if same IP that our host IP (i.e. on the same machine)
@@ -329,12 +333,28 @@ std::string VideoStreamer::init(GstCaps *caps)
        ") are incompatible with stream (" + std::to_string(config_.width) + " x " + std::to_string(config_.height) + ")";
    }

    // create a gstreamer pipeline
    std::string description = "appsrc name=src ! videoconvert ! ";

    // prevent eroneous protocol values
    if (config_.protocol < 0 || config_.protocol >= NetworkToolkit::DEFAULT)
        config_.protocol = NetworkToolkit::UDP_RAW;

    // create a gstreamer pipeline
    std::string description = "appsrc name=src ! videoconvert ! ";
    // special case H264: can be Hardware accelerated
    bool found_harware_acceleration = false;
    if (config_.protocol == NetworkToolkit::UDP_H264 && Settings::application.render.gpu_decoding) {
        for (auto config = NetworkToolkit::protocol_h264_send_pipeline.cbegin();
             config != NetworkToolkit::protocol_h264_send_pipeline.cend(); ++config) {
            if ( GstToolkit::has_feature(config->first) ) {
                description += config->second;
                found_harware_acceleration = true;
                Log::Info("Video Streamer : Hardware accelerated encoder (%s)", config->first.c_str());
                break;
            }
        }
    }
    // general case: use defined protocols
    if (!found_harware_acceleration)
        description += NetworkToolkit::protocol_send_pipeline[config_.protocol];

    // parse pipeline descriptor
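For illustration, when GstToolkit::has_feature() reports that vaapih264enc is present, the description string assembled by the loop above is the concatenation of the appsrc prefix and the matching protocol_h264_send_pipeline entry, i.e. something like:

appsrc name=src ! videoconvert ! video/x-raw, format=NV12 ! vaapih264enc rate-control=cqp init-qp=26 ! video/x-h264, profile=(string)main ! rtph264pay ! udpsink name=sink

Without a hardware encoder, the same code falls back to the software x264enc entry of protocol_send_pipeline for UDP_H264.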
@@ -91,7 +91,7 @@ TextEditor editor;

#include "UserInterfaceManager.h"
#define PLOT_ARRAY_SIZE 180
#define LABEL_AUTO_MEDIA_PLAYER ICON_FA_ARROW_RIGHT " Dynamic selection"
#define LABEL_AUTO_MEDIA_PLAYER ICON_FA_CARET_SQUARE_RIGHT " Dynamic selection"
#define LABEL_STORE_SELECTION " Store selection"
#define LABEL_EDIT_FADING ICON_FA_RANDOM " Fade in & out"
@@ -1450,7 +1450,7 @@ void UserInterface::RenderPreview()
    if (Settings::application.accept_connections)
    {
        ImGui::SetNextItemWidth(IMGUI_RIGHT_ALIGN);
        ImGui::Combo("Format", &Settings::application.stream_protocol, NetworkToolkit::protocol_name, 3);
        ImGui::MenuItem( "Lower bandwidth (H264)", NULL, &Settings::application.stream_low_bandwidth);

        static char dummy_str[512];
        sprintf(dummy_str, "%s", Connection::manager().info().name.c_str());
@@ -5498,7 +5498,7 @@ void Navigator::RenderMainPannelSettings()
    if (output) {
        guint64 nb = 0;
        nb = VideoRecorder::buffering_preset_value[Settings::application.record.buffering_mode] / (output->width() * output->height() * 4);
        char buf[256]; sprintf(buf, "Buffer can contain %ld frames (%dx%d), %.1f sec", nb, output->width(), output->height(),
        char buf[256]; sprintf(buf, "Buffer can contain %ld frames (%dx%d), %.1f sec", (unsigned long)nb, output->width(), output->height(),
            (float)nb / (float) VideoRecorder::framerate_preset_value[Settings::application.record.framerate_mode] );
        ImGuiToolkit::Indication(buf, ICON_FA_MEMORY);
        ImGui::SameLine(0);