mirror of
https://github.com/brunoherbelin/vimix.git
synced 2025-12-11 18:34:58 +01:00
Slight change in terminology to distinguish streaming from broadcasting
VideoStream is reserved for point-to-point video streaming (between vimix instances), while VideoBroadcast is for sending out to many potential clients.
This commit is contained in:
@@ -268,12 +268,12 @@ void InfoVisitor::visit (NetworkSource& s)
|
||||
std::ostringstream oss;
|
||||
if (brief_) {
|
||||
oss << ns->resolution().x << " x " << ns->resolution().y << ", ";
|
||||
oss << NetworkToolkit::protocol_name[ns->protocol()] << std::endl;
|
||||
oss << NetworkToolkit::stream_protocol_label[ns->protocol()] << std::endl;
|
||||
oss << "IP " << ns->serverAddress();
|
||||
}
|
||||
else {
|
||||
oss << s.connection() << std::endl;
|
||||
oss << NetworkToolkit::protocol_name[ns->protocol()];
|
||||
oss << NetworkToolkit::stream_protocol_label[ns->protocol()];
|
||||
oss << ", IP " << ns->serverAddress() << std::endl;
|
||||
oss << ns->resolution().x << " x " << ns->resolution().y << " ";
|
||||
}
|
||||
|
||||
@@ -60,7 +60,7 @@ void NetworkStream::ResponseListener::ProcessMessage( const osc::ReceivedMessage
|
||||
// someone is offering a stream
|
||||
osc::ReceivedMessage::const_iterator arg = m.ArgumentsBegin();
|
||||
conf.port = (arg++)->AsInt32();
|
||||
conf.protocol = (NetworkToolkit::Protocol) (arg++)->AsInt32();
|
||||
conf.protocol = (NetworkToolkit::StreamProtocol) (arg++)->AsInt32();
|
||||
conf.width = (arg++)->AsInt32();
|
||||
conf.height = (arg++)->AsInt32();
|
||||
|
||||
@@ -261,7 +261,7 @@ void NetworkStream::update()
|
||||
// general case : create pipeline and open
|
||||
if (!failed_) {
|
||||
// build the pipeline depending on stream info
|
||||
std::string pipelinestring = NetworkToolkit::protocol_receive_pipeline[config_.protocol];
|
||||
std::string pipelinestring = NetworkToolkit::stream_receive_pipeline[config_.protocol];
|
||||
|
||||
// find placeholder for PORT or SHH socket
|
||||
size_t xxxx = pipelinestring.find("XXXX");
|
||||
|
||||
@@ -19,7 +19,7 @@ public:
|
||||
void update() override;
|
||||
|
||||
glm::ivec2 resolution() const;
|
||||
inline NetworkToolkit::Protocol protocol() const { return config_.protocol; }
|
||||
inline NetworkToolkit::StreamProtocol protocol() const { return config_.protocol; }
|
||||
std::string clientAddress() const;
|
||||
std::string serverAddress() const;
|
||||
|
||||
|
||||
@@ -87,41 +87,44 @@
|
||||
*
|
||||
* */
|
||||
|
||||
const char* NetworkToolkit::protocol_name[NetworkToolkit::DEFAULT] = {
|
||||
const char* NetworkToolkit::stream_protocol_label[NetworkToolkit::DEFAULT] = {
|
||||
"RAW Images",
|
||||
"JPEG Stream",
|
||||
"H264 Stream",
|
||||
"JPEG Broadcast",
|
||||
"H264 Broadcast",
|
||||
"RGB Shared Memory"
|
||||
};
|
||||
|
||||
const std::vector<std::string> NetworkToolkit::protocol_send_pipeline {
|
||||
|
||||
"video/x-raw, format=RGB, framerate=30/1 ! queue max-size-buffers=3 ! rtpvrawpay ! application/x-rtp,sampling=RGB ! udpsink name=sink",
|
||||
"video/x-raw, format=I420, framerate=30/1 ! queue max-size-buffers=10 ! jpegenc quality=85 ! rtpjpegpay ! udpsink name=sink",
|
||||
"video/x-raw, format=I420, framerate=30/1 ! queue max-size-buffers=10 ! x264enc tune=\"zerolatency\" pass=4 quantizer=22 speed-preset=2 ! rtph264pay aggregate-mode=1 ! udpsink name=sink",
|
||||
"video/x-raw, format=I420, framerate=30/1 ! queue max-size-buffers=3 ! jpegenc idct-method=float ! rtpjpegpay ! rtpstreampay ! tcpserversink name=sink",
|
||||
"video/x-raw, format=I420, framerate=30/1 ! queue max-size-buffers=3 ! x264enc tune=\"zerolatency\" threads=2 ! rtph264pay ! rtpstreampay ! tcpserversink name=sink",
|
||||
const std::vector<std::string> NetworkToolkit::stream_send_pipeline {
|
||||
"video/x-raw, format=RGB, framerate=30/1 ! queue max-size-buffers=10 ! rtpvrawpay ! application/x-rtp,sampling=RGB ! udpsink name=sink",
|
||||
"video/x-raw, format=NV12, framerate=30/1 ! queue max-size-buffers=10 ! jpegenc ! rtpjpegpay ! udpsink name=sink",
|
||||
"video/x-raw, format=NV12, framerate=30/1 ! queue max-size-buffers=10 ! x264enc tune=zerolatency pass=4 quantizer=22 speed-preset=2 ! rtph264pay aggregate-mode=1 ! udpsink name=sink",
|
||||
"video/x-raw, format=RGB, framerate=30/1 ! queue max-size-buffers=10 ! shmsink buffer-time=100000 wait-for-connection=true name=sink"
|
||||
};
|
||||
|
||||
const std::vector<std::string> NetworkToolkit::protocol_receive_pipeline {
|
||||
|
||||
"udpsrc buffer-size=200000 port=XXXX caps=\"application/x-rtp,media=(string)video,encoding-name=(string)RAW,sampling=(string)RGB,width=(string)WWWW,height=(string)HHHH\" ! rtpvrawdepay ! queue max-size-buffers=10",
|
||||
"udpsrc buffer-size=200000 port=XXXX ! application/x-rtp,encoding-name=JPEG ! rtpjpegdepay ! queue max-size-buffers=10 ! jpegdec",
|
||||
"udpsrc buffer-size=200000 port=XXXX ! application/x-rtp,encoding-name=H264 ! rtph264depay ! queue max-size-buffers=10 ! avdec_h264",
|
||||
"tcpclientsrc timeout=1 port=XXXX ! queue max-size-buffers=30 ! application/x-rtp-stream,media=video,encoding-name=JPEG ! rtpstreamdepay ! rtpjpegdepay ! jpegdec",
|
||||
"tcpclientsrc timeout=1 port=XXXX ! queue max-size-buffers=30 ! application/x-rtp-stream,media=video,encoding-name=H264 ! rtpstreamdepay ! rtph264depay ! avdec_h264",
|
||||
const std::vector<std::string> NetworkToolkit::stream_receive_pipeline {
|
||||
"udpsrc port=XXXX caps=\"application/x-rtp,media=(string)video,encoding-name=(string)RAW,sampling=(string)RGB,width=(string)WWWW,height=(string)HHHH\" ! rtpvrawdepay ! queue max-size-buffers=10",
|
||||
"udpsrc port=XXXX caps=\"application/x-rtp,media=(string)video,encoding-name=(string)JPEG\" ! rtpjpegdepay ! queue max-size-buffers=10 ! jpegdec",
|
||||
"udpsrc port=XXXX caps=\"application/x-rtp,media=(string)video,encoding-name=(string)H264\" ! rtph264depay ! queue max-size-buffers=10 ! avdec_h264",
|
||||
"shmsrc socket-path=XXXX ! video/x-raw, format=RGB, framerate=30/1 ! queue max-size-buffers=10",
|
||||
};
|
||||
|
||||
const std::vector< std::pair<std::string, std::string> > NetworkToolkit::protocol_h264_send_pipeline {
|
||||
const std::vector< std::pair<std::string, std::string> > NetworkToolkit::stream_h264_send_pipeline {
|
||||
// {"vtenc_h264_hw", "video/x-raw, format=I420, framerate=30/1 ! queue max-size-buffers=10 ! vtenc_h264_hw realtime=1 allow-frame-reordering=0 ! rtph264pay aggregate-mode=1 ! udpsink name=sink"},
|
||||
{"nvh264enc", "video/x-raw, format=RGBA, framerate=30/1 ! queue max-size-buffers=10 ! nvh264enc rc-mode=1 zerolatency=true ! video/x-h264, profile=(string)main ! rtph264pay aggregate-mode=1 ! udpsink name=sink"},
|
||||
{"vaapih264enc", "video/x-raw, format=NV12, framerate=30/1 ! queue max-size-buffers=10 ! vaapih264enc rate-control=cqp init-qp=26 ! video/x-h264, profile=(string)main ! rtph264pay aggregate-mode=1 ! udpsink name=sink"}
|
||||
};
|
||||
|
||||
|
||||
const char* NetworkToolkit::broadcast_protocol_label[NetworkToolkit::BROADCAST_DEFAULT] = {
|
||||
"SRT"
|
||||
};
|
||||
|
||||
const std::vector<std::string> NetworkToolkit::broadcast_pipeline {
|
||||
"videoconvert ! x264enc tune=zerolatency ! video/x-h264, profile=high ! mpegtsmux ! srtsink uri=srt://:XXXX/",
|
||||
};
|
||||
//"video/x-raw, format=I420, framerate=30/1 ! queue max-size-buffers=3 ! jpegenc idct-method=float ! rtpjpegpay ! rtpstreampay ! tcpserversink name=sink",
|
||||
//"video/x-raw, format=I420, framerate=30/1 ! queue max-size-buffers=3 ! x264enc tune=\"zerolatency\" threads=2 ! rtph264pay ! rtpstreampay ! tcpserversink name=sink",
|
||||
|
||||
bool initialized_ = false;
|
||||
std::vector<std::string> ipstrings_;
|
||||
std::vector<unsigned long> iplongs_;
|
||||
|
||||
@@ -26,16 +26,18 @@ typedef enum {
|
||||
UDP_RAW = 0,
|
||||
UDP_JPEG,
|
||||
UDP_H264,
|
||||
TCP_JPEG,
|
||||
TCP_H264,
|
||||
SHM_RAW,
|
||||
DEFAULT
|
||||
} Protocol;
|
||||
} StreamProtocol;
|
||||
|
||||
extern const char* stream_protocol_label[DEFAULT];
|
||||
extern const std::vector<std::string> stream_send_pipeline;
|
||||
extern const std::vector< std::pair<std::string, std::string> > stream_h264_send_pipeline;
|
||||
extern const std::vector<std::string> stream_receive_pipeline;
|
||||
|
||||
struct StreamConfig {
|
||||
|
||||
Protocol protocol;
|
||||
StreamProtocol protocol;
|
||||
std::string client_name;
|
||||
std::string client_address;
|
||||
int port;
|
||||
@@ -51,8 +53,7 @@ struct StreamConfig {
|
||||
height = 0;
|
||||
}
|
||||
|
||||
inline StreamConfig& operator = (const StreamConfig& o)
|
||||
{
|
||||
inline StreamConfig& operator = (const StreamConfig& o) {
|
||||
if (this != &o) {
|
||||
this->client_name = o.client_name;
|
||||
this->client_address = o.client_address;
|
||||
@@ -65,10 +66,13 @@ struct StreamConfig {
|
||||
}
|
||||
};
|
||||
|
||||
extern const char* protocol_name[DEFAULT];
|
||||
extern const std::vector<std::string> protocol_send_pipeline;
|
||||
extern const std::vector< std::pair<std::string, std::string> > protocol_h264_send_pipeline;
|
||||
extern const std::vector<std::string> protocol_receive_pipeline;
|
||||
typedef enum {
|
||||
BROADCAST_SRT = 0,
|
||||
BROADCAST_DEFAULT
|
||||
} BroadcastProtocol;
|
||||
|
||||
extern const char* broadcast_protocol_label[BROADCAST_DEFAULT];
|
||||
extern const std::vector<std::string> broadcast_pipeline;
|
||||
|
||||
std::string hostname();
|
||||
std::vector<std::string> host_ips();
|
||||
|
||||
13
Streamer.cpp
13
Streamer.cpp
@@ -25,10 +25,6 @@
|
||||
// Desktop OpenGL function loader
|
||||
#include <glad/glad.h>
|
||||
|
||||
// standalone image loader
|
||||
#include <stb_image.h>
|
||||
#include <stb_image_write.h>
|
||||
|
||||
// gstreamer
|
||||
#include <gst/gstformat.h>
|
||||
#include <gst/video/video.h>
|
||||
@@ -345,19 +341,18 @@ std::string VideoStreamer::init(GstCaps *caps)
|
||||
// special case H264: can be Hardware accelerated
|
||||
bool found_harware_acceleration = false;
|
||||
if (config_.protocol == NetworkToolkit::UDP_H264 && Settings::application.render.gpu_decoding) {
|
||||
for (auto config = NetworkToolkit::protocol_h264_send_pipeline.cbegin();
|
||||
config != NetworkToolkit::protocol_h264_send_pipeline.cend(); ++config) {
|
||||
for (auto config = NetworkToolkit::stream_h264_send_pipeline.cbegin();
|
||||
config != NetworkToolkit::stream_h264_send_pipeline.cend() && !found_harware_acceleration; ++config) {
|
||||
if ( GstToolkit::has_feature(config->first) ) {
|
||||
description += config->second;
|
||||
found_harware_acceleration = true;
|
||||
Log::Info("Video Streamer using hardware accelerated encoder (%s)", config->first.c_str());
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
// general case: use defined protocols
|
||||
if (!found_harware_acceleration)
|
||||
description += NetworkToolkit::protocol_send_pipeline[config_.protocol];
|
||||
description += NetworkToolkit::stream_send_pipeline[config_.protocol];
|
||||
|
||||
// parse pipeline descriptor
|
||||
GError *error = NULL;
|
||||
@@ -471,7 +466,7 @@ std::string VideoStreamer::info() const
|
||||
if (!initialized_)
|
||||
ret << "Connecting";
|
||||
else if (active_) {
|
||||
ret << NetworkToolkit::protocol_name[config_.protocol];
|
||||
ret << NetworkToolkit::stream_protocol_label[config_.protocol];
|
||||
ret << " to ";
|
||||
ret << config_.client_name;
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user