working on streaming and clients

brunoherbelin
2020-10-20 00:28:44 +02:00
parent 15285ec151
commit 27239b7513
15 changed files with 254 additions and 96 deletions

View File

@@ -379,7 +379,7 @@ void DeviceSource::setDevice(const std::string &devicename)
     pipeline << " ! jpegdec";
     if ( device_.find("Screen") != std::string::npos )
-        pipeline << " ! videoconvert ! video/x-raw,format=RGB ! queue";
+        pipeline << " ! videoconvert ! video/x-raw,format=RGB ! queue max-size-buffers=3";
     pipeline << " ! videoconvert";
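Note: the added queue cap limits buffering on the screen-capture branch to three frames, so a slow consumer cannot accumulate latency. For illustration, roughly the pipeline string this branch now produces (a sketch; "ximagesrc" is an assumed capture element, not taken from this diff):

    // Sketch only: approximate pipeline for a "Screen" device after this change.
    // "ximagesrc" is an assumed source element name, not part of this commit.
    #include <sstream>
    #include <string>

    std::string screen_capture_pipeline()
    {
        std::ostringstream pipeline;
        pipeline << "ximagesrc";
        pipeline << " ! videoconvert ! video/x-raw,format=RGB";
        pipeline << " ! queue max-size-buffers=3";   // hold at most 3 frames
        pipeline << " ! videoconvert";
        return pipeline.str();
    }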

View File

@@ -42,6 +42,7 @@ public:
     virtual void stop() { }
     virtual std::string info() { return ""; }
     virtual double duration() { return 0.0; }
+    virtual bool busy() { return false; }
     inline bool finished() const { return finished_; }
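Note: the new busy() default lets frame grabbers report whether their appsrc is currently accepting frames; VideoRecorder and VideoStreamer override it below, and the preview window uses it to animate the streaming icon. A sketch of such a caller (ImGui headers and the icon macro are assumed to be available; the real UI code is in the UserInterface.cpp hunks further down):

    // Sketch: consuming the new busy() hook. Everything except busy() is an
    // assumption made for illustration.
    #include "imgui.h"

    void draw_streaming_indicator(FrameGrabber *streamer)
    {
        if (streamer == nullptr)
            return;
        // bright while frames are flowing, dimmed otherwise
        float alpha = streamer->busy() ? 0.8f : 0.2f;
        ImGui::PushStyleColor(ImGuiCol_Text, ImVec4(0.05f, 1.0f, 0.05f, alpha));
        ImGui::Text(ICON_FA_PODCAST);
        ImGui::PopStyleColor(1);
    }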

View File

@@ -25,6 +25,7 @@ using namespace tinyxml2;
#include "PatternSource.h" #include "PatternSource.h"
#include "DeviceSource.h" #include "DeviceSource.h"
#include "StreamSource.h" #include "StreamSource.h"
#include "NetworkSource.h"
#include "ActionManager.h" #include "ActionManager.h"
#include "Mixer.h" #include "Mixer.h"
@@ -306,6 +307,18 @@ Source * Mixer::createSourceDevice(const std::string &namedevice)
 }
+
+Source * Mixer::createSourceNetwork(uint protocol, const std::string &address)
+{
+    // ready to create a source
+    NetworkSource *s = new NetworkSource;
+    s->connect((NetworkToolkit::Protocol) protocol, address);
+    // propose a new name based on address
+    s->setName(address);
+    return s;
+}
+
 Source * Mixer::createSourceClone(const std::string &namesource)
 {
     // ready to create a source
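A call-site sketch for the new factory (the address literal is only an example; the navigator panel below makes the same call with a hard-coded address):

    // Sketch: create a TCP/JPEG network source and add it to the mixer.
    Source *s = Mixer::manager().createSourceNetwork(NetworkToolkit::TCP_JPEG, "127.0.0.1:5000");
    if (s != nullptr)
        Mixer::manager().addSource(s);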

View File

@@ -42,6 +42,7 @@ public:
     Source * createSourceStream (const std::string &gstreamerpipeline);
     Source * createSourcePattern(uint pattern, glm::ivec2 res);
     Source * createSourceDevice (const std::string &namedevice);
+    Source * createSourceNetwork(uint protocol, const std::string &address);

     // operations on sources
     void addSource (Source *s);

View File

@@ -5,11 +5,10 @@
 #include <gst/pbutils/pbutils.h>
 #include <gst/gst.h>

+#include "SystemToolkit.h"
 #include "defines.h"
-#include "ImageShader.h"
-#include "Resource.h"
-#include "Decorations.h"
 #include "Stream.h"
+#include "Decorations.h"
 #include "Visitor.h"
 #include "Log.h"
@@ -19,7 +18,7 @@
 #define NETWORK_DEBUG
 #endif

-NetworkStream::NetworkStream(): Stream(), protocol_(NetworkToolkit::DEFAULT), address_("127.0.0.1"), port_(5000)
+NetworkStream::NetworkStream(): Stream(), protocol_(NetworkToolkit::DEFAULT), host_("127.0.0.1"), port_(5000)
 {
 }
@@ -30,27 +29,136 @@ glm::ivec2 NetworkStream::resolution()
 }

-void NetworkStream::open( NetworkToolkit::Protocol protocol, const std::string &address, uint port )
+void NetworkStream::open(NetworkToolkit::Protocol protocol, const std::string &host, uint port )
 {
     protocol_ = protocol;
-    address_ = address;
+    host_ = host;
     port_ = port;

-    int w = 800;
-    int h = 600;
+    int w = 1920;
+    int h = 1080;

     std::ostringstream pipeline;
-    pipeline << "tcpclientsrc port=" << port_ << " ";
+    if (protocol_ == NetworkToolkit::TCP_JPEG || protocol_ == NetworkToolkit::TCP_H264) {
+        pipeline << "tcpclientsrc name=src timeout=1 host=" << host_ << " port=" << port_;
+    }
+    else if (protocol_ == NetworkToolkit::SHM_RAW) {
+        std::string path = SystemToolkit::full_filename(SystemToolkit::settings_path(), "shm_socket");
+        pipeline << "shmsrc name=src is-live=true socket-path=" << path;
+        // TODO SUPPORT multiple sockets shared memory
+    }

     pipeline << NetworkToolkit::protocol_receive_pipeline[protocol_];
     pipeline << " ! videoconvert";

-    // (private) open stream
-    Stream::open(pipeline.str(), w, h);
+    // if ( ping(&w, &h) )
+    // (private) open stream
+    Stream::open(pipeline.str(), w, h);
+    // else {
+    //     Log::Notify("Failed to connect to %s:%d", host.c_str(), port);
+    //     failed_ = true;
+    // }
 }

+bool NetworkStream::ping(int *w, int *h)
+{
+    bool ret = false;
+
+    std::ostringstream pipeline_desc;
+    if (protocol_ == NetworkToolkit::TCP_JPEG || protocol_ == NetworkToolkit::TCP_H264) {
+        pipeline_desc << "tcpclientsrc is-live=true host=" << host_ << " port=" << port_;
+    }
+    else if (protocol_ == NetworkToolkit::SHM_RAW) {
+        std::string path = SystemToolkit::full_filename(SystemToolkit::settings_path(), "shm_socket");
+        pipeline_desc << "shmsrc is-live=true socket-path=" << path;
+    }
+    pipeline_desc << " ! appsink name=sink";
+
+    GError *error = NULL;
+    GstElement *pipeline = gst_parse_launch (pipeline_desc.str().c_str(), &error);
+    if (error != NULL) return false;
+
+    GstElement *sink = gst_bin_get_by_name (GST_BIN (pipeline), "sink");
+    if (sink) {
+
+        GstAppSinkCallbacks callbacks;
+        callbacks.new_preroll = callback_sample;
+        callbacks.new_sample = callback_sample;
+        callbacks.eos = NULL;
+        gst_app_sink_set_callbacks (GST_APP_SINK(sink), &callbacks, this, NULL);
+        gst_app_sink_set_emit_signals (GST_APP_SINK(sink), false);
+
+        GstStateChangeReturn status = gst_element_set_state (pipeline, GST_STATE_PLAYING);
+        if (status == GST_STATE_CHANGE_FAILURE) {
+            ret = false;
+        }
+
+        GstState state;
+        gst_element_get_state (pipeline, &state, NULL, GST_CLOCK_TIME_NONE);
+        gst_element_set_state (pipeline, GST_STATE_PAUSED);
+        // ret = true;
+
+        // unref sink
+        gst_object_unref (sink);
+    }
+
+    // free pipeline
+    gst_object_unref (pipeline);
+
+    *w = 1920;
+    *h = 1080;
+
+    return ret;
+}
+
+GstFlowReturn NetworkStream::callback_sample (GstAppSink *sink, gpointer p)
+{
+    GstFlowReturn ret = GST_FLOW_OK;
+
+    Log::Info("callback_sample PING");
+
+    // non-blocking read new sample
+    GstSample *sample = gst_app_sink_pull_sample(sink);
+
+    // if got a valid sample
+    if (sample != NULL && !gst_app_sink_is_eos (sink)) {
+
+        const GstStructure *truc = gst_sample_get_info(sample);
+        GstCaps *cap = gst_sample_get_caps(sample);
+        gchar *c = gst_caps_to_string(cap);
+
+        // get buffer from sample (valid until sample is released)
+        // GstBuffer *buf = gst_sample_get_buffer (sample) ;
+
+        // // send frames to media player only if ready
+        // Stream *m = (Stream *)p;
+        // if (m && m->ready_) {
+        //     // fill frame with buffer
+        //     if ( !m->fill_frame(buf, Stream::SAMPLE) )
+        //         ret = GST_FLOW_ERROR;
+        // }
+    }
+    else
+        ret = GST_FLOW_FLUSHING;
+
+    // release sample
+    gst_sample_unref (sample);
+
+    return ret;
+}
+
 NetworkSource::NetworkSource() : StreamSource()
 {
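Note: ping() still reports a hard-coded 1920x1080; the negotiated size could instead be read from the caps of the sample received in callback_sample. A sketch using standard GStreamer calls (not part of this commit):

    // Sketch only: reading width/height from a GstSample's caps instead of
    // hard-coding the resolution.
    #include <gst/gst.h>

    static bool sample_resolution(GstSample *sample, int *w, int *h)
    {
        GstCaps *caps = gst_sample_get_caps(sample);
        if (caps == NULL)
            return false;
        GstStructure *s = gst_caps_get_structure(caps, 0);
        return gst_structure_get_int(s, "width", w) &&
               gst_structure_get_int(s, "height", h);
    }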
@@ -62,11 +170,21 @@ NetworkSource::NetworkSource() : StreamSource()
     overlays_[View::LAYER]->attach( new Symbol(Symbol::EMPTY, glm::vec3(0.8f, 0.8f, 0.01f)) );
 }

-void NetworkSource::connect(NetworkToolkit::Protocol protocol, const std::string &address, uint port)
+void NetworkSource::connect(NetworkToolkit::Protocol protocol, const std::string &address)
 {
-    Log::Notify("Creating Network Source '%s:%d'", address.c_str(), port);
+    Log::Notify("Creating Network Source '%s'", address.c_str());

-    networkstream()->open( protocol, address, port );
+    // extract host and port from "host:port"
+    std::string host = address.substr(0, address.find_last_of(":"));
+    std::string port_s = address.substr(address.find_last_of(":") + 1);
+    uint port = std::stoi(port_s);
+
+    // validate protocol
+    if (protocol < NetworkToolkit::TCP_JPEG || protocol >= NetworkToolkit::DEFAULT)
+        protocol = NetworkToolkit::TCP_JPEG;
+
+    // open network stream
+    networkstream()->open( protocol, host, port );
+
     stream_->play(true);
 }
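Note: connect() assumes the address always contains a ':' followed by a numeric port; std::stoi throws otherwise. A more defensive split, sketched only for illustration (the helper and the fallback port are not part of the commit):

    // Sketch: tolerant "host:port" parsing with a fallback port.
    #include <string>

    static void split_address(const std::string &address, std::string &host,
                              unsigned int &port, unsigned int fallback = 5000)
    {
        size_t sep = address.find_last_of(':');
        if (sep == std::string::npos) {
            host = address;
            port = fallback;
            return;
        }
        host = address.substr(0, sep);
        try {
            port = std::stoi(address.substr(sep + 1));
        } catch (...) {
            port = fallback;   // non-numeric or missing port
        }
    }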

View File

@@ -9,18 +9,21 @@ class NetworkStream : public Stream
 public:
     NetworkStream();

-    void open(NetworkToolkit::Protocol protocol, const std::string &address, uint port );
+    void open(NetworkToolkit::Protocol protocol, const std::string &host, uint port );
+    bool ping(int *w, int *h);

     glm::ivec2 resolution();

     inline NetworkToolkit::Protocol protocol() const { return protocol_; }
-    inline std::string address() const { return address_; }
+    inline std::string host() const { return host_; }
     inline uint port() const { return port_; }

 private:
     NetworkToolkit::Protocol protocol_;
-    std::string address_;
+    std::string host_;
     uint port_;
+
+    static GstFlowReturn callback_sample (GstAppSink *, gpointer );
 };

 class NetworkSource : public StreamSource
@@ -36,7 +39,7 @@ public:
     // specific interface
     NetworkStream *networkstream() const;
-    void connect(NetworkToolkit::Protocol protocol, const std::string &address, uint port);
+    void connect(NetworkToolkit::Protocol protocol, const std::string &address);

     glm::ivec2 icon() const override { return glm::ivec2(11, 8); }

View File

@@ -1,19 +1,10 @@
 #include <stdio.h>
-#include <stdlib.h>
 #include <unistd.h>
-#include <netdb.h>
-#include <arpa/inet.h>
-#include <netinet/in.h>
-#include <sys/socket.h>
 #include <sys/ioctl.h>
 #include <sys/socket.h>
-#include <sys/ioctl.h>
 #include <linux/netdevice.h>
 #include <arpa/inet.h>
-#include <netinet/in.h>
-#include <unistd.h>

 #include "NetworkToolkit.h"
@@ -26,17 +17,17 @@ const char* NetworkToolkit::protocol_name[NetworkToolkit::DEFAULT] = {
 const std::vector<std::string> NetworkToolkit::protocol_broadcast_pipeline {
-    "video/x-raw, format=I420 ! jpegenc ! rtpjpegpay ! rtpstreampay ! tcpserversink name=sink",
-    "video/x-raw, format=I420 ! x264enc pass=4 quantizer=26 speed-preset=3 threads=4 ! rtph264pay ! rtpstreampay ! tcpserversink name=sink",
-    "video/x-raw, format=I420 ! jpegenc ! shmsink name=sink"
+    "video/x-raw, format=I420 ! queue max-size-buffers=3 ! jpegenc ! rtpjpegpay ! rtpstreampay ! tcpserversink name=sink",
+    "video/x-raw, format=I420 ! queue max-size-buffers=3 ! x264enc tune=\"zerolatency\" threads=2 ! rtph264pay ! rtpstreampay ! tcpserversink name=sink",
+    "video/x-raw, format=RGB, framerate=30/1 ! queue max-size-buffers=3 ! shmsink buffer-time=100000 name=sink"
 };

 const std::vector<std::string> NetworkToolkit::protocol_receive_pipeline {
-    "application/x-rtp-stream,media=video,encoding-name=JPEG,payload=26 ! rtpstreamdepay ! rtpjitterbuffer ! rtpjpegdepay ! jpegdec",
-    "application/x-rtp-stream,media=video,encoding-name=H264,payload=96,clock-rate=90000 ! rtpstreamdepay ! rtpjitterbuffer ! rtph264depay ! avdec_h264",
-    "jpegdec"
+    " ! application/x-rtp-stream,media=video,encoding-name=JPEG,payload=26 ! rtpstreamdepay ! rtpjpegdepay ! jpegdec",
+    " ! application/x-rtp-stream,media=video,encoding-name=H264,payload=96,clock-rate=90000 ! rtpstreamdepay ! rtph264depay ! avdec_h264",
+    " ! video/x-raw, format=RGB, framerate=30/1"
 };
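Note: the receive fragments now start with " ! " so they can be appended directly to the source element built in NetworkStream::open(). For example, the full TCP_JPEG receiver that open() plus the appsink added by Stream compose, assuming the constructor defaults 127.0.0.1:5000:

    // Illustrative composition of the pieces above (not a new pipeline):
    const char *tcp_jpeg_receiver =
        "tcpclientsrc name=src timeout=1 host=127.0.0.1 port=5000"
        " ! application/x-rtp-stream,media=video,encoding-name=JPEG,payload=26"
        " ! rtpstreamdepay ! rtpjpegdepay ! jpegdec"
        " ! videoconvert ! appsink name=sink";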
@@ -73,6 +64,13 @@ const std::vector<std::string> NetworkToolkit::protocol_receive_pipeline {
  * RCV
  * gst-launch-1.0 udpsrc port=5000 caps = "application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)RAW, sampling=(string)RGBA, depth=(string)8, width=(string)1920, height=(string)1080, colorimetry=(string)SMPTE240M, payload=(int)96, ssrc=(uint)2272750581, timestamp-offset=(uint)1699493959, seqnum-offset=(uint)14107, a-framerate=(string)30" ! rtpvrawdepay ! videoconvert ! autovideosink
  *
+ *
+ * SHM RAW RGB
+ * SND
+ * gst-launch-1.0 videotestsrc is-live=true ! video/x-raw, format=RGB, framerate=30/1 ! shmsink socket-path=/tmp/blah
+ * RCV
+ * gst-launch-1.0 shmsrc is-live=true socket-path=/tmp/blah ! video/x-raw, format=RGB, framerate=30/1, width=320, height=240 ! videoconvert ! autovideosink
+ *
  * */
@@ -107,25 +105,5 @@ std::vector<std::string> NetworkToolkit::host_ips()
             close(s);
         }
     }

-    // localhost127.0.0.1, 192.168.0.30, 10.164.239.1,
-    // char hostbuffer[256];
-    // // retrieve hostname
-    // if ( gethostname(hostbuffer, sizeof(hostbuffer)) != -1 )
-    // {
-    //     // retrieve host information
-    //     struct hostent *host_entry;
-    //     host_entry = gethostbyname(hostbuffer);
-    //     if ( host_entry != NULL ) {
-    //         // convert an Internet network
-    //         // address into ASCII string
-    //         char *IPbuffer = inet_ntoa(*((struct in_addr*) host_entry->h_addr_list[0]));
-    //         ipstring = IPbuffer;
-    //     }
-    // }

     return ipstrings;
 }

View File

@@ -10,7 +10,7 @@ namespace NetworkToolkit
     typedef enum {
         TCP_JPEG = 0,
         TCP_H264,
-        SHM_JPEG,
+        SHM_RAW,
         DEFAULT
     } Protocol;
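Note: renaming the third profile to SHM_RAW matches the raw-RGB shared-memory pipelines above; DEFAULT stays last, so it keeps doubling as the protocol count used to index the name and pipeline tables. A small sketch of that idiom:

    // Sketch: DEFAULT as a sentinel/count, the way the tables and the UI combo
    // below iterate the protocols.
    #include <cstdio>

    static void list_protocols()
    {
        for (unsigned int n = 0; n < NetworkToolkit::DEFAULT; ++n)
            std::printf("%u: %s\n", n, NetworkToolkit::protocol_name[n]);
    }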

View File

@@ -400,6 +400,8 @@ void VideoRecorder::addFrame (FrameBuffer *frame_buffer, float dt)
     gst_app_src_push_buffer (src_, buffer);
     // NB: buffer will be unrefed by the appsrc

+    accept_buffer_ = false;
+
     // next timestamp
     timestamp_ += frame_duration_;
 }
@@ -461,6 +463,11 @@ double VideoRecorder::duration()
     return gst_guint64_to_gdouble( GST_TIME_AS_MSECONDS(timestamp_) ) / 1000.0;
 }

+bool VideoRecorder::busy()
+{
+    return accept_buffer_ ? true : false;
+}
+
 // appsrc needs data and we should start sending
 void VideoRecorder::callback_need_data (GstAppSrc *, guint , gpointer p)
 {
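Note: with accept_buffer_ cleared after every push, busy() reflects appsrc back-pressure: a frame is sent only while appsrc has asked for data. A self-contained sketch of that gating (the helper is hypothetical; VideoRecorder does the same inline in addFrame()):

    #include <gst/app/gstappsrc.h>

    // Sketch only: push a frame only when the need-data callback has re-armed
    // the flag, then disarm it until appsrc asks again.
    static void push_if_wanted(GstAppSrc *src, GstBuffer *buffer, bool &accept_buffer)
    {
        if (!accept_buffer) {
            gst_buffer_unref(buffer);         // drop the frame instead of queuing it
            return;
        }
        gst_app_src_push_buffer(src, buffer); // appsrc takes ownership of the buffer
        accept_buffer = false;                // wait for the next need-data callback
    }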

View File

@@ -65,9 +65,8 @@ public:
     void addFrame(FrameBuffer *frame_buffer, float dt) override;
     void stop() override;
     std::string info() override;
     double duration() override;
+    bool busy() override;

 };

View File

@@ -264,7 +264,7 @@ void Settings::Load()
     streamnode->QueryIntAttribute("profile", &application.stream.profile);
     streamnode->QueryIntAttribute("port", &application.stream.port);
-    const char *ip_ = recordnode->Attribute("ip");
+    const char *ip_ = streamnode->Attribute("ip");
     if (ip_)
         application.stream.ip = std::string(ip_);
     else

View File

@@ -97,7 +97,7 @@ void Stream::execute_open()
     // reset
     ready_ = false;

-    // Create the gstreamer pipeline possible :
+    // Add custom app sink to the gstreamer pipeline
     string description = description_;
     description += " ! appsink name=sink";
@@ -259,7 +259,7 @@ float Stream::aspectRatio() const
 void Stream::enable(bool on)
 {
-    if ( !ready_ )
+    if ( !ready_ || pipeline_ == nullptr)
         return;

     if ( enabled_ != on ) {
@@ -333,10 +333,8 @@ void Stream::play(bool on)
 #endif

         // activate live-source
-        if (live_) {
-            GstState state;
-            gst_element_get_state (pipeline_, &state, NULL, GST_CLOCK_TIME_NONE);
-        }
+        if (live_)
+            gst_element_get_state (pipeline_, NULL, NULL, GST_CLOCK_TIME_NONE);

         // reset time counter
         timecount_.reset();
@@ -526,6 +524,10 @@ void Stream::update()
     // unkock frame after reading it
     frame_[read_index].access.unlock();

+    if (need_loop) {
+        // stop on end of stream
+        play(false);
+    }
 }

 double Stream::updateFrameRate() const

View File

@@ -96,7 +96,7 @@ void VideoStreamer::addFrame (FrameBuffer *frame_buffer, float dt)
"host", Settings::application.stream.ip.c_str(), "host", Settings::application.stream.ip.c_str(),
"port", Settings::application.stream.port, NULL); "port", Settings::application.stream.port, NULL);
} }
else if (Settings::application.stream.profile == NetworkToolkit::SHM_JPEG) { else if (Settings::application.stream.profile == NetworkToolkit::SHM_RAW) {
std::string path = SystemToolkit::full_filename(SystemToolkit::settings_path(), "shm_socket"); std::string path = SystemToolkit::full_filename(SystemToolkit::settings_path(), "shm_socket");
SystemToolkit::remove_file(path); SystemToolkit::remove_file(path);
g_object_set (G_OBJECT (gst_bin_get_by_name (GST_BIN (pipeline_), "sink")), g_object_set (G_OBJECT (gst_bin_get_by_name (GST_BIN (pipeline_), "sink")),
@@ -226,10 +226,12 @@ Log::Info("%s", description.c_str());
     gst_buffer_unmap (buffer, &map);

     // push
     // Log::Info("VideoRecorder push data %ld", buffer->pts);
     gst_app_src_push_buffer (src_, buffer);
     // NB: buffer will be unrefed by the appsrc

+    accept_buffer_ = false;
+
     // next timestamp
     timestamp_ += frame_duration_;
 }
@@ -263,6 +265,12 @@ Log::Info("%s", description.c_str());
         finished_ = true;
     }

+    // make sure the shared memory socket is deleted
+    if (Settings::application.stream.profile == NetworkToolkit::SHM_RAW) {
+        std::string path = SystemToolkit::full_filename(SystemToolkit::settings_path(), "shm_socket");
+        SystemToolkit::remove_file(path);
+    }
 }
@@ -274,12 +282,6 @@ void VideoStreamer::stop ()
     if (src_)
         gst_app_src_end_of_stream (src_);

-    // make sure the shared memory socket is deleted
-    if (Settings::application.stream.profile == NetworkToolkit::SHM_JPEG) {
-        std::string path = SystemToolkit::full_filename(SystemToolkit::settings_path(), "shm_socket");
-        SystemToolkit::remove_file(path);
-    }

     // stop recording
     streaming_ = false;
 }
@@ -292,9 +294,10 @@ std::string VideoStreamer::info()
     if (Settings::application.stream.profile == NetworkToolkit::TCP_JPEG || Settings::application.stream.profile == NetworkToolkit::TCP_H264) {
+        ret = "TCP";
     }
-    else if (Settings::application.stream.profile == NetworkToolkit::SHM_JPEG) {
+    else if (Settings::application.stream.profile == NetworkToolkit::SHM_RAW) {
         ret = "Shared Memory";
     }
@@ -308,20 +311,25 @@ double VideoStreamer::duration()
     return gst_guint64_to_gdouble( GST_TIME_AS_MSECONDS(timestamp_) ) / 1000.0;
 }

+bool VideoStreamer::busy()
+{
+    return accept_buffer_ ? true : false;
+}
+
 // appsrc needs data and we should start sending
 void VideoStreamer::callback_need_data (GstAppSrc *, guint , gpointer p)
 {
-    // Log::Info("H264Recording callback_need_data");
     VideoStreamer *rec = (VideoStreamer *)p;
     if (rec) {
-        rec->accept_buffer_ = rec->streaming_ ? true : false;
+        rec->accept_buffer_ = true;
+        // Log::Info("VideoStreamer need_data");
     }
 }

 // appsrc has enough data and we can stop sending
 void VideoStreamer::callback_enough_data (GstAppSrc *, gpointer p)
 {
-    // Log::Info("H264Recording callback_enough_data");
+    // Log::Info("VideoStreamer enough_data");
     VideoStreamer *rec = (VideoStreamer *)p;
     if (rec) {
         rec->accept_buffer_ = false;
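Note: these two callbacks toggle accept_buffer_, which busy() exposes and addFrame() consults before pushing. How such callbacks are typically registered on the appsrc, shown as a standalone sketch with stand-in callbacks (the actual wiring code is not part of this diff):

    #include <gst/app/gstappsrc.h>

    static bool accept_buffer = false;   // stand-in for the member flag

    static void on_need_data (GstAppSrc *, guint, gpointer)  { accept_buffer = true;  }
    static void on_enough_data (GstAppSrc *, gpointer)       { accept_buffer = false; }

    static void wire_appsrc(GstAppSrc *src)
    {
        GstAppSrcCallbacks callbacks = { };
        callbacks.need_data   = on_need_data;     // mirrors callback_need_data above
        callbacks.enough_data = on_enough_data;   // mirrors callback_enough_data above
        callbacks.seek_data   = NULL;
        gst_app_src_set_callbacks(src, &callbacks, NULL, NULL);
    }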

View File

@@ -36,8 +36,8 @@ public:
     void addFrame(FrameBuffer *frame_buffer, float dt) override;
     void stop() override;
     std::string info() override;
     double duration() override;
+    bool busy() override;

 };

 #endif // STREAMER_H

View File

@@ -1135,31 +1135,35 @@ void UserInterface::RenderPreview()
     }

     if (ImGui::BeginMenu("Record"))
     {
-        if ( ImGui::MenuItem( ICON_FA_CAMERA_RETRO " Capture frame (PNG)") )
-            Mixer::manager().session()->addFrameGrabber(new PNGRecorder);

         // Stop recording menu if main recorder already exists
         if (rec) {
+            ImGui::PushStyleColor(ImGuiCol_Text, ImVec4(1.0, 0.05, 0.05, 0.8f));
             if ( ImGui::MenuItem( ICON_FA_SQUARE " Stop Record", CTRL_MOD "R") ) {
                 rec->stop();
                 video_recorder_ = 0;
             }
+            ImGui::PopStyleColor(1);
         }
         // start recording
         else {
             // detecting the absence of video recorder but the variable is still not 0: fix this!
             if (video_recorder_ > 0)
                 video_recorder_ = 0;
+            ImGui::PushStyleColor(ImGuiCol_Text, ImVec4(1.0, 0.05, 0.05, 0.8f));
             if ( ImGui::MenuItem( ICON_FA_CIRCLE " Record", CTRL_MOD "R") ) {
                 FrameGrabber *fg = new VideoRecorder;
                 video_recorder_ = fg->id();
                 Mixer::manager().session()->addFrameGrabber(fg);
             }
+            ImGui::PopStyleColor(1);
             // select profile
             ImGui::SetNextItemWidth(300);
             ImGui::Combo("##RecProfile", &Settings::application.record.profile, VideoRecorder::profile_name, IM_ARRAYSIZE(VideoRecorder::profile_name) );
         }
+        if ( ImGui::MenuItem( ICON_FA_CAMERA_RETRO " Capture frame (PNG)") )
+            Mixer::manager().session()->addFrameGrabber(new PNGRecorder);

         // Options menu
         ImGui::Separator();
         ImGui::MenuItem("Options", nullptr, false, false);
@@ -1201,11 +1205,13 @@ void UserInterface::RenderPreview()
     {
         // Stop recording menu if main recorder already exists
         if (str) {
+            ImGui::PushStyleColor(ImGuiCol_Text, ImVec4(0.05, 1.0, 0.05, 0.8f));
             if ( ImGui::MenuItem( ICON_FA_SQUARE " Stop Streaming") ) {
                 str->stop();
                 video_streamer_ = 0;
             }
-            else {
+            ImGui::PopStyleColor(1);
+            if (video_streamer_ > 0) {
                 if (Settings::application.stream.profile == NetworkToolkit::TCP_JPEG || Settings::application.stream.profile == NetworkToolkit::TCP_H264) {
                     // Options menu
                     ImGui::Separator();
@@ -1218,11 +1224,11 @@ void UserInterface::RenderPreview()
                     ImGui::SetNextItemWidth(IMGUI_RIGHT_ALIGN);
                     ImGui::InputText("Port", dummy_str, IM_ARRAYSIZE(dummy_str), ImGuiInputTextFlags_ReadOnly);
                 }
-                else if (Settings::application.stream.profile == NetworkToolkit::SHM_JPEG)
-                {
-                    ImGui::Separator();
-                    ImGui::MenuItem("Shared Memory active", nullptr, false, false);
-                }
+                // else if (Settings::application.stream.profile == NetworkToolkit::SHM_RAW)
+                // {
+                //     ImGui::Separator();
+                //     ImGui::MenuItem("Shared Memory active", nullptr, false, false);
+                // }
             }
         }
         // start recording
@@ -1230,11 +1236,13 @@ void UserInterface::RenderPreview()
             // detecting the absence of video streamer but the variable is still not 0: fix this!
             if (video_streamer_ > 0)
                 video_streamer_ = 0;
+            ImGui::PushStyleColor(ImGuiCol_Text, ImVec4(0.05, 1.0, 0.05, 0.8f));
             if ( ImGui::MenuItem( ICON_FA_PODCAST " Stream") ) {
                 FrameGrabber *fg = new VideoStreamer;
                 video_streamer_ = fg->id();
                 Mixer::manager().session()->addFrameGrabber(fg);
             }
+            ImGui::PopStyleColor(1);
             // select profile
             ImGui::SetNextItemWidth(300);
             ImGui::Combo("##StreamProfile", &Settings::application.stream.profile, NetworkToolkit::protocol_name, IM_ARRAYSIZE(NetworkToolkit::protocol_name) );
@@ -1272,6 +1280,14 @@ void UserInterface::RenderPreview()
     ImVec2 draw_pos = ImGui::GetCursorScreenPos();
     // preview image
     ImGui::Image((void*)(intptr_t)output->texture(), imagesize);
+    // tooltip overlay
+    if (ImGui::IsItemHovered())
+    {
+        ImDrawList* draw_list = ImGui::GetWindowDrawList();
+        draw_list->AddRectFilled(draw_pos, ImVec2(draw_pos.x + width, draw_pos.y + ImGui::GetTextLineHeightWithSpacing()), IMGUI_COLOR_OVERLAY);
+        ImGui::SetCursorScreenPos(draw_pos);
+        ImGui::Text(" %d x %d px, %d fps", output->width(), output->height(), int(1000.f / Mixer::manager().dt()) );
+    }
     // recording indicator overlay
     if (rec)
     {
@@ -1283,13 +1299,19 @@ void UserInterface::RenderPreview()
         ImGui::PopStyleColor(1);
         ImGui::PopFont();
     }
-    // tooltip overlay
-    if (ImGui::IsItemHovered())
+    // streaming indicator overlay
+    if (str)
     {
-        ImDrawList* draw_list = ImGui::GetWindowDrawList();
-        draw_list->AddRectFilled(draw_pos, ImVec2(draw_pos.x + width, draw_pos.y + ImGui::GetTextLineHeightWithSpacing()), IMGUI_COLOR_OVERLAY);
-        ImGui::SetCursorScreenPos(draw_pos);
-        ImGui::Text(" %d x %d px, %d fps", output->width(), output->height(), int(1000.f / Mixer::manager().dt()) );
+        float r = ImGui::GetTextLineHeightWithSpacing();
+        ImGui::SetCursorScreenPos(ImVec2(draw_pos.x + width - 2.f * r, draw_pos.y + r));
+        ImGuiToolkit::PushFont(ImGuiToolkit::FONT_LARGE);
+        if (str->busy())
+            ImGui::PushStyleColor(ImGuiCol_Text, ImVec4(0.05, 1.0, 0.05, 0.8f));
+        else
+            ImGui::PushStyleColor(ImGuiCol_Text, ImVec4(0.05, 1.0, 0.05, 0.2f));
+        ImGui::Text(ICON_FA_PODCAST);
+        ImGui::PopStyleColor(1);
+        ImGui::PopFont();
     }

     ImGui::End();
@@ -2268,14 +2290,13 @@ void Navigator::RenderNewPannel()
                                             Pattern::pattern_types[pattern_type]);
                 }
             }
-            // Hardware
+            // External source creator
             else if (Settings::application.source.new_type == 3){

                 ImGui::SetCursorPosY(2.f * width_);
                 ImGui::SetNextItemWidth(IMGUI_RIGHT_ALIGN);
-                if (ImGui::BeginCombo("##Hardware", "Select device"))
+                if (ImGui::BeginCombo("##External", "Select source"))
                 {
                     for (int d = 0; d < Device::manager().numDevices(); ++d){
                         std::string namedev = Device::manager().name(d);
@@ -2284,6 +2305,13 @@ void Navigator::RenderNewPannel()
                             new_source_preview_.setSource( Mixer::manager().createSourceDevice(namedev), namedev);
                         }
                     }
+                    for (uint n = 0; n < NetworkToolkit::DEFAULT; ++n){
+                        if (ImGui::Selectable( NetworkToolkit::protocol_name[n] )) {
+                            new_source_preview_.setSource( Mixer::manager().createSourceNetwork(n, "192.168.0.30:5400") );
+                        }
+                    }
                     ImGui::EndCombo();
                 }