First working implementation of Streamer, with TCP and SharedMemory.

Author: brunoherbelin
Date:   2020-10-18 13:13:07 +02:00
parent 59c07ceb96
commit d7893be541
13 changed files with 406 additions and 61 deletions


@@ -242,6 +242,7 @@ set(VMIX_SRCS
StreamSource.cpp
PatternSource.cpp
DeviceSource.cpp
NetworkSource.cpp
FrameBuffer.cpp
RenderingManager.cpp
UserInterfaceManager.cpp
@@ -255,6 +256,7 @@ set(VMIX_SRCS
GlmToolkit.cpp
SystemToolkit.cpp
tinyxml2Toolkit.cpp
NetworkToolkit.cpp
ActionManager.cpp
)

NetworkSource.cpp (new file, 84 lines)

@@ -0,0 +1,84 @@
#include <algorithm>
#include <sstream>
#include <glm/gtc/matrix_transform.hpp>
#include <gst/pbutils/pbutils.h>
#include <gst/gst.h>
#include "defines.h"
#include "ImageShader.h"
#include "Resource.h"
#include "Decorations.h"
#include "Stream.h"
#include "Visitor.h"
#include "Log.h"
#include "NetworkSource.h"
#ifndef NDEBUG
#define NETWORK_DEBUG
#endif
NetworkStream::NetworkStream(): Stream(), protocol_(NetworkToolkit::DEFAULT), address_("127.0.0.1"), port_(5000)
{
}
glm::ivec2 NetworkStream::resolution()
{
return glm::ivec2( width_, height_);
}
void NetworkStream::open( NetworkToolkit::Protocol protocol, const std::string &address, uint port )
{
protocol_ = protocol;
address_ = address;
port_ = port;
int w = 800;
int h = 600;
std::ostringstream pipeline;
pipeline << "tcpclientsrc port=" << port_ << " ";
pipeline << NetworkToolkit::protocol_receive_pipeline[protocol_];
pipeline << " ! videoconvert";
// (private) open stream
Stream::open(pipeline.str(), w, h);
}
NetworkSource::NetworkSource() : StreamSource()
{
// create stream
stream_ = (Stream *) new NetworkStream;
// set icons
overlays_[View::MIXING]->attach( new Symbol(Symbol::EMPTY, glm::vec3(0.8f, 0.8f, 0.01f)) );
overlays_[View::LAYER]->attach( new Symbol(Symbol::EMPTY, glm::vec3(0.8f, 0.8f, 0.01f)) );
}
void NetworkSource::connect(NetworkToolkit::Protocol protocol, const std::string &address, uint port)
{
Log::Notify("Creating Network Source '%s:%d'", address.c_str(), port);
networkstream()->open( protocol, address, port );
stream_->play(true);
}
void NetworkSource::accept(Visitor& v)
{
Source::accept(v);
if (!failed())
v.visit(*this);
}
NetworkStream *NetworkSource::networkstream() const
{
return dynamic_cast<NetworkStream *>(stream_);
}
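Not part of the commit, but a minimal usage sketch of the new class (the helper function and the hard-coded address and port are hypothetical): a NetworkSource that connects back to the TCP JPEG broadcast produced by VideoStreamer on the same machine.

#include "NetworkSource.h"
#include "NetworkToolkit.h"

// Hypothetical helper, for illustration only.
NetworkSource *create_local_receiver()
{
    NetworkSource *source = new NetworkSource;
    // TCP_JPEG selects protocol_receive_pipeline[0]; 5400 is the new default port in StreamingConfig.
    source->connect(NetworkToolkit::TCP_JPEG, "127.0.0.1", 5400);
    return source;
}

connect() assembles the tcpclientsrc description in NetworkStream::open() and starts playback immediately.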

NetworkSource.h (new file, 45 lines)

@@ -0,0 +1,45 @@
#ifndef NETWORKSOURCE_H
#define NETWORKSOURCE_H
#include "NetworkToolkit.h"
#include "StreamSource.h"
class NetworkStream : public Stream
{
public:
NetworkStream();
void open(NetworkToolkit::Protocol protocol, const std::string &address, uint port );
glm::ivec2 resolution();
inline NetworkToolkit::Protocol protocol() const { return protocol_; }
inline std::string address() const { return address_; }
inline uint port() const { return port_; }
private:
NetworkToolkit::Protocol protocol_;
std::string address_;
uint port_;
};
class NetworkSource : public StreamSource
{
public:
NetworkSource();
// Source interface
void accept (Visitor& v) override;
// StreamSource interface
Stream *stream() const override { return stream_; }
// specific interface
NetworkStream *networkstream() const;
void connect(NetworkToolkit::Protocol protocol, const std::string &address, uint port);
glm::ivec2 icon() const override { return glm::ivec2(11, 8); }
};
#endif // NETWORKSOURCE_H

NetworkToolkit.cpp (new file, 131 lines)

@@ -0,0 +1,131 @@
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <netdb.h>
#include <arpa/inet.h>
#include <netinet/in.h>
#include <sys/socket.h>
#include <sys/ioctl.h>
#include <sys/socket.h>
#include <sys/ioctl.h>
#include <linux/netdevice.h>
#include <arpa/inet.h>
#include <netinet/in.h>
#include <unistd.h>
#include "NetworkToolkit.h"
const char* NetworkToolkit::protocol_name[NetworkToolkit::DEFAULT] = {
"TCP Broadcast JPEG",
"TCP Broadcast H264",
"Shared Memory"
};
const std::vector<std::string> NetworkToolkit::protocol_broadcast_pipeline {
"video/x-raw, format=I420 ! jpegenc ! rtpjpegpay ! rtpstreampay ! tcpserversink name=sink",
"video/x-raw, format=I420 ! x264enc pass=4 quantizer=26 speed-preset=3 threads=4 ! rtph264pay ! rtpstreampay ! tcpserversink name=sink",
"video/x-raw, format=I420 ! jpegenc ! shmsink name=sink"
};
const std::vector<std::string> NetworkToolkit::protocol_receive_pipeline {
"application/x-rtp-stream,media=video,encoding-name=JPEG,payload=26 ! rtpstreamdepay ! rtpjitterbuffer ! rtpjpegdepay ! jpegdec",
"application/x-rtp-stream,media=video,encoding-name=H264,payload=96,clock-rate=90000 ! rtpstreamdepay ! rtpjitterbuffer ! rtph264depay ! avdec_h264",
"jpegdec"
};
/***
*
* TCP Server JPEG : broadcast
* SND:
* gst-launch-1.0 videotestsrc is-live=true ! jpegenc ! rtpjpegpay ! rtpstreampay ! tcpserversink port=5400
* RCV:
* gst-launch-1.0 tcpclientsrc port=5400 ! application/x-rtp-stream,encoding-name=JPEG ! rtpstreamdepay! rtpjpegdepay ! jpegdec ! autovideosink
*
* TCP Server H264 : broadcast
* SND:
* gst-launch-1.0 videotestsrc is-live=true ! x264enc ! rtph264pay ! rtpstreampay ! tcpserversink port=5400
* RCV:
* gst-launch-1.0 tcpclientsrc port=5400 ! application/x-rtp-stream,media=video,encoding-name=H264,payload=96,clock-rate=90000 ! rtpstreamdepay ! rtpjitterbuffer ! rtph264depay ! avdec_h264 ! autovideosink
*
* UDP unicast
* SND
* gst-launch-1.0 videotestsrc is-live=true ! jpegenc ! rtpjpegpay ! udpsink port=5000 host=127.0.0.1 sync=false
* RCV
* gst-launch-1.0 udpsrc port=5000 ! application/x-rtp,encoding-name=JPEG,payload=26 ! rtpjpegdepay ! jpegdec ! autovideosink
*
* UDP multicast : has to know the PORT and IP of all clients
* SND
* gst-launch-1.0 videotestsrc is-live=true ! jpegenc ! rtpjpegpay ! multiudpsink clients="127.0.0.1:5000,127.0.0.1:5001"
* RCV
* gst-launch-1.0 -v udpsrc address=127.0.0.1 port=5000 ! application/x-rtp,encoding-name=JPEG,payload=26 ! rtpjpegdepay ! jpegdec ! autovideosink
* gst-launch-1.0 -v udpsrc address=127.0.0.1 port=5001 ! application/x-rtp,encoding-name=JPEG,payload=26 ! rtpjpegdepay ! jpegdec ! autovideosink
*
* RAW UDP (caps has to match exactly, and depends on resolution)
* SND
* gst-launch-1.0 -v videotestsrc is-live=true ! video/x-raw,format=RGBA,width=1920,height=1080 ! rtpvrawpay ! udpsink port=5000 host=127.0.0.1
* RCV
* gst-launch-1.0 udpsrc port=5000 caps = "application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)RAW, sampling=(string)RGBA, depth=(string)8, width=(string)1920, height=(string)1080, colorimetry=(string)SMPTE240M, payload=(int)96, ssrc=(uint)2272750581, timestamp-offset=(uint)1699493959, seqnum-offset=(uint)14107, a-framerate=(string)30" ! rtpvrawdepay ! videoconvert ! autovideosink
*
* */
std::vector<std::string> NetworkToolkit::host_ips()
{
std::vector<std::string> ipstrings;
int s = socket(AF_INET, SOCK_STREAM, 0);
if (s > -1) {
struct ifconf ifconf;
struct ifreq ifr[50];
int ifs;
int i;
ifconf.ifc_buf = (char *) ifr;
ifconf.ifc_len = sizeof ifr;
if (ioctl(s, SIOCGIFCONF, &ifconf) > -1) {
ifs = ifconf.ifc_len / sizeof(ifr[0]);
for (i = 0; i < ifs; i++) {
char ip[INET_ADDRSTRLEN];
struct sockaddr_in *s_in = (struct sockaddr_in *) &ifr[i].ifr_addr;
if (inet_ntop(AF_INET, &s_in->sin_addr, ip, sizeof(ip))) {
if ( std::string(ip).compare("127.0.0.1") == 0 )
ipstrings.push_back( "localhost" );
else
ipstrings.push_back( std::string(ip) );
}
}
close(s);
}
}
// example output: localhost, 192.168.0.30, 10.164.239.1
// char hostbuffer[256];
// // retrieve hostname
// if ( gethostname(hostbuffer, sizeof(hostbuffer)) != -1 )
// {
// // retrieve host information
// struct hostent *host_entry;
// host_entry = gethostbyname(hostbuffer);
// if ( host_entry != NULL ) {
// // convert an Internet network
// // address into ASCII string
// char *IPbuffer = inet_ntoa(*((struct in_addr*) host_entry->h_addr_list[0]));
// ipstring = IPbuffer;
// }
// }
return ipstrings;
}
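A short illustrative snippet (not in the commit) of how host_ips() is meant to be consumed; it returns one entry per IPv4 interface and substitutes "localhost" for 127.0.0.1, which is how the streaming menu further down fills its Host combo.

// Hypothetical helper; assumes Log.h and NetworkToolkit.h are included.
void log_host_ips()
{
    for (const std::string &ip : NetworkToolkit::host_ips())
        Log::Info("Streaming reachable at %s", ip.c_str());
}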

NetworkToolkit.h (new file, 25 lines)

@@ -0,0 +1,25 @@
#ifndef NETWORKTOOLKIT_H
#define NETWORKTOOLKIT_H
#include <string>
#include <vector>
namespace NetworkToolkit
{
typedef enum {
TCP_JPEG = 0,
TCP_H264,
SHM_JPEG,
DEFAULT
} Protocol;
extern const char* protocol_name[DEFAULT];
extern const std::vector<std::string> protocol_broadcast_pipeline;
extern const std::vector<std::string> protocol_receive_pipeline;
std::vector<std::string> host_ips();
}
#endif // NETWORKTOOLKIT_H
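The Protocol value doubles as an index into the three tables above, which keeps a sender and its matching receiver paired by a single enum constant; a sketch of assumed usage (the function is hypothetical, not code from this commit):

#include "NetworkToolkit.h"

// Hypothetical pairing of sender and receiver descriptions for one protocol.
void describe(NetworkToolkit::Protocol p)   // e.g. NetworkToolkit::TCP_H264
{
    const char *label = NetworkToolkit::protocol_name[p];                 // "TCP Broadcast H264"
    std::string sender = NetworkToolkit::protocol_broadcast_pipeline[p];  // appended after "appsrc name=src ! videoconvert ! "
    std::string receiver = NetworkToolkit::protocol_receive_pipeline[p];  // appended after "tcpclientsrc port=N ! "
}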


@@ -133,6 +133,7 @@ const std::vector<std::string> VideoRecorder::profile_description {
// veryfast (3)
// faster (4)
// fast (5)
// "video/x-raw, format=I420 ! x264enc tune=\"zerolatency\" threads=4 ! video/x-h264, profile=baseline ! h264parse ! ",
"video/x-raw, format=I420 ! x264enc pass=4 quantizer=26 speed-preset=3 threads=4 ! video/x-h264, profile=baseline ! h264parse ! ",
"video/x-raw, format=Y444_10LE ! x264enc pass=4 quantizer=16 speed-preset=4 threads=4 ! video/x-h264, profile=(string)high-4:4:4 ! h264parse ! ",
// Control x265 encoder quality :
@@ -152,7 +153,7 @@ const std::vector<std::string> VideoRecorder::profile_description {
// default 28
// 24 for x265 should be visually transparent; anything lower will probably just waste file size
"video/x-raw, format=I420 ! x265enc tune=4 speed-preset=3 ! video/x-h265, profile=(string)main ! h265parse ! ",
"video/x-raw, format=I420 ! x265enc tune=6 speed-preset=4 option-string=\"crf=22\" ! video/x-h265, profile=(string)main ! h265parse ! ",
"video/x-raw, format=I420 ! x265enc tune=6 speed-preset=4 option-string=\"crf=24\" ! video/x-h265, profile=(string)main ! h265parse ! ",
// Apple ProRes encoding parameters
// pass
// cbr (0) Constant Bitrate Encoding


@@ -88,6 +88,13 @@ void Settings::Save()
RecordNode->SetAttribute("timeout", application.record.timeout);
pRoot->InsertEndChild(RecordNode);
// Stream
XMLElement *StreamNode = xmlDoc.NewElement( "Stream" );
StreamNode->SetAttribute("profile", application.stream.profile);
StreamNode->SetAttribute("ip", application.stream.ip.c_str());
StreamNode->SetAttribute("port", application.stream.port);
pRoot->InsertEndChild(StreamNode);
// Transition
XMLElement *TransitionNode = xmlDoc.NewElement( "Transition" );
TransitionNode->SetAttribute("auto_open", application.transition.auto_open);
@@ -251,6 +258,19 @@ void Settings::Load()
application.record.path = SystemToolkit::home_path();
}
// Stream
XMLElement * streamnode = pRoot->FirstChildElement("Stream");
if (streamnode != nullptr) {
streamnode->QueryIntAttribute("profile", &application.stream.profile);
streamnode->QueryIntAttribute("port", &application.stream.port);
const char *ip_ = streamnode->Attribute("ip");
if (ip_)
application.stream.ip = std::string(ip_);
else
application.stream.ip = "localhost";
}
// Source
XMLElement * sourceconfnode = pRoot->FirstChildElement("Source");
if (sourceconfnode != nullptr) {


@@ -81,9 +81,9 @@ struct StreamingConfig
int port;
int profile;
StreamingConfig() : ip("") {
StreamingConfig() : ip("localhost") {
profile = 0;
port = 5000;
port = 5400;
}
};


@@ -18,29 +18,10 @@
#include "FrameBuffer.h"
#include "Log.h"
#include "NetworkToolkit.h"
#include "Streamer.h"
- const char* VideoStreamer::profile_name[VideoStreamer::DEFAULT] = {
- "MJPEG RTP (UDP)",
- "MPEG4 RTP (UDP)",
- "H264 RTP (UDP)"
- };
- const std::vector<std::string> VideoStreamer::profile_description {
- "video/x-raw, format=I420 ! jpegenc ! rtpjpegpay ! udpsink name=sink",
- "video/x-raw, format=I420 ! avenc_mpeg4 ! rtpmp4vpay config-interval=3 ! udpsink name=sink",
- "video/x-raw, format=I420 ! x264enc pass=4 quantizer=26 speed-preset=3 threads=4 ! rtph264pay ! udpsink name=sink"
- };
- const std::vector<std::string> VideoStreamer::receiver_example {
- "gst-launch-1.0 udpsrc port=5000 ! application/x-rtp,encoding-name=JPEG,payload=26 ! rtpjpegdepay ! jpegdec ! autovideosink",
- "video/x-raw, format=I420 ! avenc_mpeg4 ! rtpmp4vpay config-interval=3 ! udpsink name=sink",
- "gst-launch-1.0 -v udpsrc port=5000 caps=\"application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96\" ! rtph264depay ! decodebin ! videoconvert ! autovideosink"
- };
VideoStreamer::VideoStreamer(): FrameGrabber(), frame_buffer_(nullptr), width_(0), height_(0),
streaming_(false), accept_buffer_(false), pipeline_(nullptr), src_(nullptr), timestamp_(0)
@@ -53,6 +34,8 @@ VideoStreamer::VideoStreamer(): FrameGrabber(), frame_buffer_(nullptr), width_(0
VideoStreamer::~VideoStreamer()
{
stop();
if (src_ != nullptr)
gst_object_unref (src_);
if (pipeline_ != nullptr) {
@@ -93,12 +76,9 @@ void VideoStreamer::addFrame (FrameBuffer *frame_buffer, float dt)
// create a gstreamer pipeline
std::string description = "appsrc name=src ! videoconvert ! ";
- if (Settings::application.stream.profile < 0 || Settings::application.stream.profile >= DEFAULT)
- Settings::application.stream.profile = UDP_MJPEG;
- description += profile_description[Settings::application.stream.profile];
- Settings::application.stream.ip = "127.0.0.1";
- // Settings::application.stream.port = 1000;
+ if (Settings::application.stream.profile < 0 || Settings::application.stream.profile >= NetworkToolkit::DEFAULT)
+ Settings::application.stream.profile = NetworkToolkit::TCP_JPEG;
+ description += NetworkToolkit::protocol_broadcast_pipeline[Settings::application.stream.profile];
// parse pipeline descriptor
GError *error = NULL;
@@ -111,10 +91,17 @@ void VideoStreamer::addFrame (FrameBuffer *frame_buffer, float dt)
}
// setup streaming sink
+ if (Settings::application.stream.profile == NetworkToolkit::TCP_JPEG || Settings::application.stream.profile == NetworkToolkit::TCP_H264) {
g_object_set (G_OBJECT (gst_bin_get_by_name (GST_BIN (pipeline_), "sink")),
- "host", "127.0.0.1",
- "port", Settings::application.stream.port,
- NULL);
+ "host", Settings::application.stream.ip.c_str(),
+ "port", Settings::application.stream.port, NULL);
+ }
+ else if (Settings::application.stream.profile == NetworkToolkit::SHM_JPEG) {
+ std::string path = SystemToolkit::full_filename(SystemToolkit::settings_path(), "shm_socket");
+ SystemToolkit::remove_file(path);
+ g_object_set (G_OBJECT (gst_bin_get_by_name (GST_BIN (pipeline_), "sink")),
+ "socket-path", path.c_str(), NULL);
+ }
// setup custom app source
src_ = GST_APP_SRC( gst_bin_get_by_name (GST_BIN (pipeline_), "src") );
@@ -163,7 +150,9 @@ void VideoStreamer::addFrame (FrameBuffer *frame_buffer, float dt)
}
// all good
Log::Info("VideoStreamer start (%s %d x %d)", profile_name[Settings::application.record.profile], width_, height_);
Log::Info("VideoStreamer start (%s %d x %d)", NetworkToolkit::protocol_name[Settings::application.stream.profile], width_, height_);
Log::Info("%s", description.c_str());
// start streaming !!
streaming_ = true;
@@ -282,8 +271,14 @@ void VideoStreamer::addFrame (FrameBuffer *frame_buffer, float dt)
void VideoStreamer::stop ()
{
// send end of stream
if (src_)
gst_app_src_end_of_stream (src_);
// Log::Info("VideoRecorder push EOS");
// make sure the shared memory socket is deleted
if (Settings::application.stream.profile == NetworkToolkit::SHM_JPEG) {
std::string path = SystemToolkit::full_filename(SystemToolkit::settings_path(), "shm_socket");
SystemToolkit::remove_file(path);
}
// stop recording
streaming_ = false;
@@ -291,10 +286,20 @@ void VideoStreamer::stop ()
std::string VideoStreamer::info()
{
- if (streaming_)
- return GstToolkit::time_to_string(timestamp_);
- else
- return "Closing stream...";
+ std::string ret = "Streaming terminated.";
+ if (streaming_) {
+ if (Settings::application.stream.profile == NetworkToolkit::TCP_JPEG || Settings::application.stream.profile == NetworkToolkit::TCP_H264) {
+ }
+ else if (Settings::application.stream.profile == NetworkToolkit::SHM_JPEG) {
+ ret = "Shared Memory";
+ }
+ }
+ return ret;
}
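Receiving the shared-memory output is not wired into NetworkStream yet (its open() always prepends tcpclientsrc), but the pieces are there; a hedged sketch of the description a local reader could hand to Stream::open(), assuming shmsrc delivers the whole JPEG buffers pushed by shmsink (helper name is hypothetical):

#include <sstream>
#include <string>
#include "SystemToolkit.h"
#include "NetworkToolkit.h"

// Sketch only: build a receive description for the socket created by VideoStreamer's shmsink.
std::string shm_receive_description()
{
    std::string path = SystemToolkit::full_filename(SystemToolkit::settings_path(), "shm_socket");
    std::ostringstream desc;
    desc << "shmsrc is-live=true socket-path=" << path << " ! ";
    desc << NetworkToolkit::protocol_receive_pipeline[NetworkToolkit::SHM_JPEG];  // "jpegdec"
    desc << " ! videoconvert";
    return desc.str();
}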


@@ -1,11 +1,10 @@
#ifndef STREAMER_H
#define STREAMER_H
#include <vector>
#include <gst/pbutils/pbutils.h>
#include <gst/app/gstappsrc.h>
#include "NetworkToolkit.h"
#include "FrameGrabber.h"
class VideoStreamer : public FrameGrabber
@@ -30,15 +29,6 @@ class VideoStreamer : public FrameGrabber
static void callback_enough_data (GstAppSrc *, gpointer user_data);
public:
- typedef enum {
- UDP_MJPEG = 0,
- UDP_MPEG4,
- UDP_h264,
- DEFAULT
- } Profile;
- static const char* profile_name[DEFAULT];
- static const std::vector<std::string> profile_description;
- static const std::vector<std::string> receiver_example;
VideoStreamer();
~VideoStreamer();


@@ -214,6 +214,17 @@ bool SystemToolkit::create_directory(const string& path)
// TODO : verify WIN32 implementation
}
bool SystemToolkit::remove_file(const string& path)
{
bool ret = true;
if (file_exists(path)) {
ret = (remove(path.c_str()) == 0);
}
return ret;
// TODO : verify WIN32 implementation
}
string SystemToolkit::settings_path()
{
// start from home folder
@@ -324,6 +335,9 @@ void SystemToolkit::execute(const string& command)
int r = system( command.c_str() );
#endif
}
// example :
// std::thread (SystemToolkit::execute,
// "gst-launch-1.0 udpsrc port=5000 ! application/x-rtp,encoding-name=JPEG,payload=26 ! rtpjpegdepay ! jpegdec ! autovideosink").detach();;


@@ -54,8 +54,11 @@ namespace SystemToolkit
// true of file exists
bool file_exists(const std::string& path);
// create directory and return true on success
bool create_directory(const std::string& path);
// remove file and return true if the file does not exist after this call
bool remove_file(const std::string& path);
// try to open the file with system
void open(const std::string& path);


@@ -1205,33 +1205,58 @@ void UserInterface::RenderPreview()
str->stop();
video_streamer_ = 0;
}
else {
if (Settings::application.stream.profile == NetworkToolkit::TCP_JPEG || Settings::application.stream.profile == NetworkToolkit::TCP_H264) {
// Options menu
ImGui::Separator();
ImGui::MenuItem("Connection parameters", nullptr, false, false);
static char dummy_str[512];
sprintf(dummy_str, "%s", Settings::application.stream.ip.c_str());
ImGui::SetNextItemWidth(IMGUI_RIGHT_ALIGN);
ImGui::InputText("Host", dummy_str, IM_ARRAYSIZE(dummy_str), ImGuiInputTextFlags_ReadOnly);
sprintf(dummy_str, "%d", Settings::application.stream.port);
ImGui::SetNextItemWidth(IMGUI_RIGHT_ALIGN);
ImGui::InputText("Port", dummy_str, IM_ARRAYSIZE(dummy_str), ImGuiInputTextFlags_ReadOnly);
}
else if (Settings::application.stream.profile == NetworkToolkit::SHM_JPEG)
{
ImGui::Separator();
ImGui::MenuItem("Shared Memory active", nullptr, false, false);
}
}
}
// start recording
else {
// detecting the absence of video streamer but the variable is still not 0: fix this!
if (video_streamer_ > 0)
video_streamer_ = 0;
- if ( ImGui::MenuItem( ICON_FA_SATELLITE_DISH " Stream") ) {
+ if ( ImGui::MenuItem( ICON_FA_PODCAST " Stream") ) {
FrameGrabber *fg = new VideoStreamer;
video_streamer_ = fg->id();
Mixer::manager().session()->addFrameGrabber(fg);
}
// select profile
ImGui::SetNextItemWidth(300);
ImGui::Combo("##StreamProfile", &Settings::application.stream.profile, VideoStreamer::profile_name, IM_ARRAYSIZE(VideoStreamer::profile_name) );
ImGui::Combo("##StreamProfile", &Settings::application.stream.profile, NetworkToolkit::protocol_name, IM_ARRAYSIZE(NetworkToolkit::protocol_name) );
if (Settings::application.stream.profile == NetworkToolkit::TCP_JPEG || Settings::application.stream.profile == NetworkToolkit::TCP_H264) {
// Options menu
ImGui::Separator();
ImGui::MenuItem("Options", nullptr, false, false);
ImGui::MenuItem("TCP Options", nullptr, false, false);
ImGui::SetNextItemWidth(IMGUI_RIGHT_ALIGN);
if (ImGui::BeginCombo("Host", Settings::application.stream.ip.c_str()))
{
static std::vector<std::string> ips = NetworkToolkit::host_ips();
for (size_t i = 0; i < ips.size(); i++) {
if (ImGui::Selectable( ips[i].c_str() ))
Settings::application.stream.ip = ips[i];
}
ImGui::EndCombo();
}
ImGui::SetNextItemWidth(IMGUI_RIGHT_ALIGN);
ImGui::InputInt("Port", &Settings::application.stream.port, 100, 1000);
Settings::application.stream.port = CLAMP(Settings::application.stream.port, 1000, 9000);
}
// if ( ImGui::MenuItem( "Test") ) {
// std::thread (SystemToolkit::execute,
// "gst-launch-1.0 udpsrc port=5000 ! application/x-rtp,encoding-name=JPEG,payload=26 ! rtpjpegdepay ! jpegdec ! autovideosink").detach();;
// }
}
ImGui::EndMenu();