Slight change in terminology to distinguish streaming from broadcasting

VideoStream is reserved for point-to-point video streaming (between vimix instances), while VideoBroadcast is for sending out to many potential clients.
Bruno Herbelin
2022-01-22 00:23:59 +01:00
parent 35ec0c9bcf
commit 2b3696aab1
6 changed files with 45 additions and 43 deletions
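To illustrate the split (not part of this commit): a minimal sketch, assuming the project header NetworkToolkit.h is on the include path, of how the renamed lookup tables introduced below are meant to be indexed — the stream tables by a StreamProtocol value, the broadcast tables by a BroadcastProtocol value.

    // Illustrative sketch only -- not part of this commit.
    // Stream tables are indexed by StreamProtocol, broadcast tables by BroadcastProtocol.
    #include <iostream>
    #include "NetworkToolkit.h"   // project header declaring the tables used below

    int main()
    {
        // point-to-point streaming between vimix instances (VideoStream)
        std::cout << NetworkToolkit::stream_protocol_label[NetworkToolkit::UDP_H264] << ": "
                  << NetworkToolkit::stream_send_pipeline[NetworkToolkit::UDP_H264] << std::endl;

        // one-to-many broadcasting (VideoBroadcast)
        std::cout << NetworkToolkit::broadcast_protocol_label[NetworkToolkit::BROADCAST_SRT] << ": "
                  << NetworkToolkit::broadcast_pipeline[NetworkToolkit::BROADCAST_SRT] << std::endl;
        return 0;
    }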

View File

@@ -268,12 +268,12 @@ void InfoVisitor::visit (NetworkSource& s)
     std::ostringstream oss;
     if (brief_) {
         oss << ns->resolution().x << " x " << ns->resolution().y << ", ";
-        oss << NetworkToolkit::protocol_name[ns->protocol()] << std::endl;
+        oss << NetworkToolkit::stream_protocol_label[ns->protocol()] << std::endl;
         oss << "IP " << ns->serverAddress();
     }
     else {
         oss << s.connection() << std::endl;
-        oss << NetworkToolkit::protocol_name[ns->protocol()];
+        oss << NetworkToolkit::stream_protocol_label[ns->protocol()];
         oss << ", IP " << ns->serverAddress() << std::endl;
         oss << ns->resolution().x << " x " << ns->resolution().y << " ";
     }

View File

@@ -60,7 +60,7 @@ void NetworkStream::ResponseListener::ProcessMessage( const osc::ReceivedMessage
         // someone is offering a stream
         osc::ReceivedMessage::const_iterator arg = m.ArgumentsBegin();
         conf.port = (arg++)->AsInt32();
-        conf.protocol = (NetworkToolkit::Protocol) (arg++)->AsInt32();
+        conf.protocol = (NetworkToolkit::StreamProtocol) (arg++)->AsInt32();
         conf.width = (arg++)->AsInt32();
         conf.height = (arg++)->AsInt32();
@@ -261,7 +261,7 @@ void NetworkStream::update()
     // general case : create pipeline and open
     if (!failed_) {
         // build the pipeline depending on stream info
-        std::string pipelinestring = NetworkToolkit::protocol_receive_pipeline[config_.protocol];
+        std::string pipelinestring = NetworkToolkit::stream_receive_pipeline[config_.protocol];
         // find placeholder for PORT or SHH socket
         size_t xxxx = pipelinestring.find("XXXX");

View File

@@ -19,7 +19,7 @@ public:
     void update() override;
     glm::ivec2 resolution() const;
-    inline NetworkToolkit::Protocol protocol() const { return config_.protocol; }
+    inline NetworkToolkit::StreamProtocol protocol() const { return config_.protocol; }
     std::string clientAddress() const;
     std::string serverAddress() const;

View File

@@ -87,41 +87,44 @@
  *
  * */
-const char* NetworkToolkit::protocol_name[NetworkToolkit::DEFAULT] = {
+const char* NetworkToolkit::stream_protocol_label[NetworkToolkit::DEFAULT] = {
     "RAW Images",
     "JPEG Stream",
     "H264 Stream",
-    "JPEG Broadcast",
-    "H264 Broadcast",
     "RGB Shared Memory"
 };
-const std::vector<std::string> NetworkToolkit::protocol_send_pipeline {
-    "video/x-raw, format=RGB, framerate=30/1 ! queue max-size-buffers=3 ! rtpvrawpay ! application/x-rtp,sampling=RGB ! udpsink name=sink",
-    "video/x-raw, format=I420, framerate=30/1 ! queue max-size-buffers=10 ! jpegenc quality=85 ! rtpjpegpay ! udpsink name=sink",
-    "video/x-raw, format=I420, framerate=30/1 ! queue max-size-buffers=10 ! x264enc tune=\"zerolatency\" pass=4 quantizer=22 speed-preset=2 ! rtph264pay aggregate-mode=1 ! udpsink name=sink",
-    "video/x-raw, format=I420, framerate=30/1 ! queue max-size-buffers=3 ! jpegenc idct-method=float ! rtpjpegpay ! rtpstreampay ! tcpserversink name=sink",
-    "video/x-raw, format=I420, framerate=30/1 ! queue max-size-buffers=3 ! x264enc tune=\"zerolatency\" threads=2 ! rtph264pay ! rtpstreampay ! tcpserversink name=sink",
+const std::vector<std::string> NetworkToolkit::stream_send_pipeline {
+    "video/x-raw, format=RGB, framerate=30/1 ! queue max-size-buffers=10 ! rtpvrawpay ! application/x-rtp,sampling=RGB ! udpsink name=sink",
+    "video/x-raw, format=NV12, framerate=30/1 ! queue max-size-buffers=10 ! jpegenc ! rtpjpegpay ! udpsink name=sink",
+    "video/x-raw, format=NV12, framerate=30/1 ! queue max-size-buffers=10 ! x264enc tune=zerolatency pass=4 quantizer=22 speed-preset=2 ! rtph264pay aggregate-mode=1 ! udpsink name=sink",
     "video/x-raw, format=RGB, framerate=30/1 ! queue max-size-buffers=10 ! shmsink buffer-time=100000 wait-for-connection=true name=sink"
 };
-const std::vector<std::string> NetworkToolkit::protocol_receive_pipeline {
-    "udpsrc buffer-size=200000 port=XXXX caps=\"application/x-rtp,media=(string)video,encoding-name=(string)RAW,sampling=(string)RGB,width=(string)WWWW,height=(string)HHHH\" ! rtpvrawdepay ! queue max-size-buffers=10",
-    "udpsrc buffer-size=200000 port=XXXX ! application/x-rtp,encoding-name=JPEG ! rtpjpegdepay ! queue max-size-buffers=10 ! jpegdec",
-    "udpsrc buffer-size=200000 port=XXXX ! application/x-rtp,encoding-name=H264 ! rtph264depay ! queue max-size-buffers=10 ! avdec_h264",
-    "tcpclientsrc timeout=1 port=XXXX ! queue max-size-buffers=30 ! application/x-rtp-stream,media=video,encoding-name=JPEG ! rtpstreamdepay ! rtpjpegdepay ! jpegdec",
-    "tcpclientsrc timeout=1 port=XXXX ! queue max-size-buffers=30 ! application/x-rtp-stream,media=video,encoding-name=H264 ! rtpstreamdepay ! rtph264depay ! avdec_h264",
+const std::vector<std::string> NetworkToolkit::stream_receive_pipeline {
+    "udpsrc port=XXXX caps=\"application/x-rtp,media=(string)video,encoding-name=(string)RAW,sampling=(string)RGB,width=(string)WWWW,height=(string)HHHH\" ! rtpvrawdepay ! queue max-size-buffers=10",
+    "udpsrc port=XXXX caps=\"application/x-rtp,media=(string)video,encoding-name=(string)JPEG\" ! rtpjpegdepay ! queue max-size-buffers=10 ! jpegdec",
+    "udpsrc port=XXXX caps=\"application/x-rtp,media=(string)video,encoding-name=(string)H264\" ! rtph264depay ! queue max-size-buffers=10 ! avdec_h264",
     "shmsrc socket-path=XXXX ! video/x-raw, format=RGB, framerate=30/1 ! queue max-size-buffers=10",
 };
-const std::vector< std::pair<std::string, std::string> > NetworkToolkit::protocol_h264_send_pipeline {
+const std::vector< std::pair<std::string, std::string> > NetworkToolkit::stream_h264_send_pipeline {
 //    {"vtenc_h264_hw", "video/x-raw, format=I420, framerate=30/1 ! queue max-size-buffers=10 ! vtenc_h264_hw realtime=1 allow-frame-reordering=0 ! rtph264pay aggregate-mode=1 ! udpsink name=sink"},
     {"nvh264enc", "video/x-raw, format=RGBA, framerate=30/1 ! queue max-size-buffers=10 ! nvh264enc rc-mode=1 zerolatency=true ! video/x-h264, profile=(string)main ! rtph264pay aggregate-mode=1 ! udpsink name=sink"},
     {"vaapih264enc", "video/x-raw, format=NV12, framerate=30/1 ! queue max-size-buffers=10 ! vaapih264enc rate-control=cqp init-qp=26 ! video/x-h264, profile=(string)main ! rtph264pay aggregate-mode=1 ! udpsink name=sink"}
 };
+const char* NetworkToolkit::broadcast_protocol_label[NetworkToolkit::BROADCAST_DEFAULT] = {
+    "SRT"
+};
+const std::vector<std::string> NetworkToolkit::broadcast_pipeline {
+    "videoconvert ! x264enc tune=zerolatency ! video/x-h264, profile=high ! mpegtsmux ! srtsink uri=srt://:XXXX/",
+};
+//"video/x-raw, format=I420, framerate=30/1 ! queue max-size-buffers=3 ! jpegenc idct-method=float ! rtpjpegpay ! rtpstreampay ! tcpserversink name=sink",
+//"video/x-raw, format=I420, framerate=30/1 ! queue max-size-buffers=3 ! x264enc tune=\"zerolatency\" threads=2 ! rtph264pay ! rtpstreampay ! tcpserversink name=sink",
 bool initialized_ = false;
 std::vector<std::string> ipstrings_;
 std::vector<unsigned long> iplongs_;
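Since the new broadcast_pipeline ends in an SRT listener (srtsink uri=srt://:XXXX/, with XXXX replaced by the configured port), any SRT-capable client can tune in. Below is a minimal receiver sketch, assuming a GStreamer install with the SRT plugin and using a placeholder address of 127.0.0.1:7070; the decode chain is illustrative and not taken from this commit.

    // Illustrative SRT receiver sketch -- not part of this commit.
    // Assumes GStreamer with the SRT plugin; host and port are placeholders.
    #include <gst/gst.h>

    int main(int argc, char *argv[])
    {
        gst_init(&argc, &argv);

        GError *error = NULL;
        GstElement *pipeline = gst_parse_launch(
            "srtsrc uri=srt://127.0.0.1:7070 ! tsdemux ! h264parse ! avdec_h264 "
            "! videoconvert ! autovideosink", &error);
        if (pipeline == NULL) {
            g_printerr("Could not build receiver pipeline: %s\n", error->message);
            g_clear_error(&error);
            return 1;
        }

        gst_element_set_state(pipeline, GST_STATE_PLAYING);

        // Block until an error or end-of-stream is posted on the bus.
        GstBus *bus = gst_element_get_bus(pipeline);
        GstMessage *msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,
                (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
        if (msg != NULL)
            gst_message_unref(msg);
        gst_object_unref(bus);

        gst_element_set_state(pipeline, GST_STATE_NULL);
        gst_object_unref(pipeline);
        return 0;
    }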

View File

@@ -26,16 +26,18 @@ typedef enum {
     UDP_RAW = 0,
     UDP_JPEG,
     UDP_H264,
-    TCP_JPEG,
-    TCP_H264,
     SHM_RAW,
     DEFAULT
-} Protocol;
+} StreamProtocol;
+
+extern const char* stream_protocol_label[DEFAULT];
+extern const std::vector<std::string> stream_send_pipeline;
+extern const std::vector< std::pair<std::string, std::string> > stream_h264_send_pipeline;
+extern const std::vector<std::string> stream_receive_pipeline;
 
 struct StreamConfig {
-    Protocol protocol;
+    StreamProtocol protocol;
     std::string client_name;
     std::string client_address;
     int port;
@@ -51,8 +53,7 @@ struct StreamConfig {
         height = 0;
     }
-    inline StreamConfig& operator = (const StreamConfig& o)
-    {
+    inline StreamConfig& operator = (const StreamConfig& o) {
         if (this != &o) {
             this->client_name = o.client_name;
             this->client_address = o.client_address;
@@ -65,10 +66,13 @@ struct StreamConfig {
     }
 };
 
-extern const char* protocol_name[DEFAULT];
-extern const std::vector<std::string> protocol_send_pipeline;
-extern const std::vector< std::pair<std::string, std::string> > protocol_h264_send_pipeline;
-extern const std::vector<std::string> protocol_receive_pipeline;
+typedef enum {
+    BROADCAST_SRT = 0,
+    BROADCAST_DEFAULT
+} BroadcastProtocol;
+
+extern const char* broadcast_protocol_label[BROADCAST_DEFAULT];
+extern const std::vector<std::string> broadcast_pipeline;
 
 std::string hostname();
 std::vector<std::string> host_ips();

View File

@@ -25,10 +25,6 @@
 // Desktop OpenGL function loader
 #include <glad/glad.h>
-// standalone image loader
-#include <stb_image.h>
-#include <stb_image_write.h>
 // gstreamer
 #include <gst/gstformat.h>
 #include <gst/video/video.h>
@@ -345,19 +341,18 @@ std::string VideoStreamer::init(GstCaps *caps)
     // special case H264: can be Hardware accelerated
     bool found_harware_acceleration = false;
     if (config_.protocol == NetworkToolkit::UDP_H264 && Settings::application.render.gpu_decoding) {
-        for (auto config = NetworkToolkit::protocol_h264_send_pipeline.cbegin();
-             config != NetworkToolkit::protocol_h264_send_pipeline.cend(); ++config) {
+        for (auto config = NetworkToolkit::stream_h264_send_pipeline.cbegin();
+             config != NetworkToolkit::stream_h264_send_pipeline.cend() && !found_harware_acceleration; ++config) {
             if ( GstToolkit::has_feature(config->first) ) {
                 description += config->second;
                 found_harware_acceleration = true;
                 Log::Info("Video Streamer using hardware accelerated encoder (%s)", config->first.c_str());
-                break;
             }
         }
     }
     // general case: use defined protocols
     if (!found_harware_acceleration)
-        description += NetworkToolkit::protocol_send_pipeline[config_.protocol];
+        description += NetworkToolkit::stream_send_pipeline[config_.protocol];
     // parse pipeline descriptor
     GError *error = NULL;
@@ -471,7 +466,7 @@ std::string VideoStreamer::info() const
     if (!initialized_)
         ret << "Connecting";
     else if (active_) {
-        ret << NetworkToolkit::protocol_name[config_.protocol];
+        ret << NetworkToolkit::stream_protocol_label[config_.protocol];
         ret << " to ";
         ret << config_.client_name;
     }