Mirror of https://github.com/brunoherbelin/vimix.git (synced 2025-12-05 15:30:00 +01:00)
Deep redesign of frame grabbers to avoid capturing the same frame multiple times (slow) and to optimize distribution of grabbed frames to multiple recording and streaming pipelines (e.g. record H264 *and* share a stream on the network). New implementation of the Loopback source for Linux using v4l2loopback (experimental).
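As a minimal sketch of the new usage pattern (the two helper functions below are hypothetical, but the FrameGrabbing / FrameGrabber calls are the ones introduced by this commit): a recorder and a streamer are registered once with the FrameGrabbing manager, and every rendered frame is then captured a single time and handed to both.

    // Hypothetical call sites illustrating the redesign: one capture, many consumers.
    // Assumes NetworkToolkit::StreamConfig is available through Streamer.h.
    #include "FrameGrabber.h"
    #include "Recorder.h"
    #include "Streamer.h"

    void start_record_and_stream(NetworkToolkit::StreamConfig conf)
    {
        // both grabbers will receive the same grabbed GstBuffer
        FrameGrabbing::manager().add( new VideoRecorder );
        FrameGrabbing::manager().add( new VideoStreamer(conf) );
    }

    // called once per rendered frame (Session::update does exactly this in the diff below)
    void on_frame_rendered(FrameBuffer *frame, float dt)
    {
        FrameGrabbing::manager().grabFrame(frame, dt);
    }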
@@ -251,8 +251,10 @@ set(VMIX_SRCS
GarbageVisitor.cpp
SessionCreator.cpp
Mixer.cpp
FrameGrabber.cpp
Recorder.cpp
Streamer.cpp
Loopback.cpp
Settings.cpp
Screenshot.cpp
Resource.cpp
FrameGrabber.cpp (new file, 357 lines)
@@ -0,0 +1,357 @@
|
||||
#include <algorithm>
|
||||
|
||||
// Desktop OpenGL function loader
|
||||
#include <glad/glad.h>
|
||||
|
||||
// gstreamer
|
||||
#include <gst/gstformat.h>
|
||||
#include <gst/video/video.h>
|
||||
|
||||
#include "defines.h"
|
||||
#include "Log.h"
|
||||
#include "GstToolkit.h"
|
||||
#include "FrameBuffer.h"
|
||||
|
||||
#include "FrameGrabber.h"
|
||||
|
||||
|
||||
|
||||
FrameGrabbing::FrameGrabbing(): pbo_index_(0), pbo_next_index_(0), size_(0), width_(0), height_(0), use_alpha_(0), caps_(nullptr)
|
||||
{
|
||||
pbo_[0] = 0;
|
||||
pbo_[1] = 0;
|
||||
}
|
||||
|
||||
FrameGrabbing::~FrameGrabbing()
|
||||
{
|
||||
// stop and delete all frame grabbers
|
||||
clearAll();
|
||||
|
||||
// cleanup
|
||||
if (caps_!=nullptr)
|
||||
gst_caps_unref (caps_);
|
||||
if (pbo_[0])
|
||||
glDeleteBuffers(2, pbo_);
|
||||
}
|
||||
|
||||
void FrameGrabbing::add(FrameGrabber *rec)
|
||||
{
|
||||
if (rec != nullptr)
|
||||
grabbers_.push_back(rec);
|
||||
}
|
||||
|
||||
FrameGrabber *FrameGrabbing::front()
|
||||
{
|
||||
if (grabbers_.empty())
|
||||
return nullptr;
|
||||
else
|
||||
return grabbers_.front();
|
||||
}
|
||||
|
||||
struct fgId: public std::unary_function<FrameGrabber*, bool>
|
||||
{
|
||||
inline bool operator()(const FrameGrabber* elem) const {
|
||||
return (elem && elem->id() == _id);
|
||||
}
|
||||
fgId(uint64_t id) : _id(id) { }
|
||||
private:
|
||||
uint64_t _id;
|
||||
};
|
||||
|
||||
FrameGrabber *FrameGrabbing::get(uint64_t id)
|
||||
{
|
||||
if (id > 0 && grabbers_.size() > 0 )
|
||||
{
|
||||
std::list<FrameGrabber *>::iterator iter = std::find_if(grabbers_.begin(), grabbers_.end(), fgId(id));
|
||||
if (iter != grabbers_.end())
|
||||
return (*iter);
|
||||
}
|
||||
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
void FrameGrabbing::stopAll()
{
    std::list<FrameGrabber *>::iterator iter;
    for (iter = grabbers_.begin(); iter != grabbers_.end(); ++iter)
        (*iter)->stop();
}
|
||||
|
||||
void FrameGrabbing::clearAll()
|
||||
{
|
||||
std::list<FrameGrabber *>::iterator iter;
|
||||
for (iter=grabbers_.begin(); iter != grabbers_.end(); )
|
||||
{
|
||||
FrameGrabber *rec = *iter;
|
||||
rec->stop();
|
||||
iter = grabbers_.erase(iter);
|
||||
delete rec;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void FrameGrabbing::grabFrame(FrameBuffer *frame_buffer, float dt)
|
||||
{
|
||||
if (frame_buffer == nullptr)
|
||||
return;
|
||||
|
||||
// if different frame buffer from previous frame
|
||||
if ( frame_buffer->width() != width_ ||
|
||||
frame_buffer->height() != height_ ||
|
||||
frame_buffer->use_alpha() != use_alpha_) {
|
||||
|
||||
// define stream properties
|
||||
width_ = frame_buffer->width();
|
||||
height_ = frame_buffer->height();
|
||||
use_alpha_ = frame_buffer->use_alpha();
|
||||
size_ = width_ * height_ * (use_alpha_ ? 4 : 3);
|
||||
|
||||
// first time initialization
|
||||
if ( pbo_[0] == 0 )
|
||||
glGenBuffers(2, pbo_);
|
||||
|
||||
// re-affect pixel buffer object
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, pbo_[1]);
|
||||
glBufferData(GL_PIXEL_PACK_BUFFER, size_, NULL, GL_STREAM_READ);
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, pbo_[0]);
|
||||
glBufferData(GL_PIXEL_PACK_BUFFER, size_, NULL, GL_STREAM_READ);
|
||||
|
||||
// reset indices
|
||||
pbo_index_ = 0;
|
||||
pbo_next_index_ = 0;
|
||||
|
||||
// new caps
|
||||
if (caps_!=nullptr)
|
||||
gst_caps_unref (caps_);
|
||||
caps_ = gst_caps_new_simple ("video/x-raw",
|
||||
"format", G_TYPE_STRING, use_alpha_ ? "RGBA" : "RGB",
|
||||
"width", G_TYPE_INT, width_,
|
||||
"height", G_TYPE_INT, height_,
|
||||
"framerate", GST_TYPE_FRACTION, 30, 1,
|
||||
NULL);
|
||||
}
|
||||
|
||||
// fill a frame in buffer
|
||||
if (!grabbers_.empty() && size_ > 0) {
|
||||
|
||||
GstBuffer *buffer = nullptr;
|
||||
|
||||
// set buffer target for writing in a new frame
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, pbo_[pbo_index_]);
|
||||
|
||||
#ifdef USE_GLREADPIXEL
|
||||
// get frame
|
||||
frame_buffer->readPixels();
|
||||
#else
|
||||
glBindTexture(GL_TEXTURE_2D, frame_buffer->texture());
|
||||
glGetTexImage(GL_TEXTURE_2D, 0, use_alpha_ ? GL_RGBA : GL_RGB, GL_UNSIGNED_BYTE, 0);
|
||||
#endif
|
||||
|
||||
// update case ; alternating indices
|
||||
if ( pbo_next_index_ != pbo_index_ ) {
|
||||
|
||||
// set buffer target for saving the frame
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, pbo_[pbo_next_index_]);
|
||||
|
||||
// new buffer
|
||||
buffer = gst_buffer_new_and_alloc (size_);
|
||||
|
||||
// map gst buffer into a memory WRITE target
|
||||
GstMapInfo map;
|
||||
gst_buffer_map (buffer, &map, GST_MAP_WRITE);
|
||||
|
||||
// map PBO pixels into a memory READ pointer
|
||||
unsigned char* ptr = (unsigned char*) glMapBuffer(GL_PIXEL_PACK_BUFFER, GL_READ_ONLY);
|
||||
|
||||
// transfer pixels from PBO memory to buffer memory
|
||||
if (NULL != ptr)
|
||||
memmove(map.data, ptr, size_);
|
||||
|
||||
// un-map
|
||||
glUnmapBuffer(GL_PIXEL_PACK_BUFFER);
|
||||
gst_buffer_unmap (buffer, &map);
|
||||
}
|
||||
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
|
||||
|
||||
// alternate indices
|
||||
pbo_next_index_ = pbo_index_;
|
||||
pbo_index_ = (pbo_index_ + 1) % 2;
|
||||
|
||||
// a frame was successfully grabbed
|
||||
if (buffer != nullptr) {
|
||||
|
||||
// give the frame to all recorders
|
||||
std::list<FrameGrabber *>::iterator iter = grabbers_.begin();
|
||||
while (iter != grabbers_.end())
|
||||
{
|
||||
FrameGrabber *rec = *iter;
|
||||
rec->addFrame(buffer, caps_, dt);
|
||||
|
||||
if (rec->finished()) {
|
||||
iter = grabbers_.erase(iter);
|
||||
delete rec;
|
||||
}
|
||||
else
|
||||
iter++;
|
||||
}
|
||||
|
||||
// unref / free the frame
|
||||
gst_buffer_unref(buffer);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
FrameGrabber::FrameGrabber(): finished_(false), active_(false), accept_buffer_(false),
|
||||
pipeline_(nullptr), src_(nullptr), caps_(nullptr), timestamp_(0)
|
||||
{
|
||||
// unique id
|
||||
id_ = GlmToolkit::uniqueId();
|
||||
// configure fixed parameters
|
||||
frame_duration_ = gst_util_uint64_scale_int (1, GST_SECOND, 30); // 30 FPS
|
||||
timeframe_ = 2 * frame_duration_;
|
||||
}
|
||||
|
||||
FrameGrabber::~FrameGrabber()
|
||||
{
|
||||
if (src_ != nullptr)
|
||||
gst_object_unref (src_);
|
||||
if (caps_ != nullptr)
|
||||
gst_caps_unref (caps_);
|
||||
if (pipeline_ != nullptr) {
|
||||
gst_element_set_state (pipeline_, GST_STATE_NULL);
|
||||
gst_object_unref (pipeline_);
|
||||
}
|
||||
}
|
||||
|
||||
bool FrameGrabber::finished() const
|
||||
{
|
||||
return finished_;
|
||||
}
|
||||
|
||||
bool FrameGrabber::busy() const
|
||||
{
|
||||
if (active_)
|
||||
return accept_buffer_ ? true : false;
|
||||
else
|
||||
return false;
|
||||
}
|
||||
|
||||
double FrameGrabber::duration() const
|
||||
{
|
||||
return gst_guint64_to_gdouble( GST_TIME_AS_MSECONDS(timestamp_) ) / 1000.0;
|
||||
}
|
||||
|
||||
void FrameGrabber::stop ()
|
||||
{
|
||||
// send end of stream
|
||||
gst_app_src_end_of_stream (src_);
|
||||
|
||||
// stop recording
|
||||
active_ = false;
|
||||
}
|
||||
|
||||
std::string FrameGrabber::info() const
|
||||
{
|
||||
if (active_)
|
||||
return GstToolkit::time_to_string(timestamp_);
|
||||
else
|
||||
return "Inactive";
|
||||
}
|
||||
|
||||
// appsrc needs data and we should start sending
|
||||
void FrameGrabber::callback_need_data (GstAppSrc *, guint , gpointer p)
|
||||
{
|
||||
FrameGrabber *grabber = static_cast<FrameGrabber *>(p);
|
||||
if (grabber)
|
||||
grabber->accept_buffer_ = true;
|
||||
}
|
||||
|
||||
// appsrc has enough data and we can stop sending
|
||||
void FrameGrabber::callback_enough_data (GstAppSrc *, gpointer p)
|
||||
{
|
||||
FrameGrabber *grabber = static_cast<FrameGrabber *>(p);
|
||||
if (grabber)
|
||||
grabber->accept_buffer_ = false;
|
||||
}
|
||||
|
||||
void FrameGrabber::addFrame (GstBuffer *buffer, GstCaps *caps, float dt)
|
||||
{
|
||||
// ignore
|
||||
if (buffer == nullptr)
|
||||
return;
|
||||
|
||||
// first time initialization
|
||||
if (pipeline_ == nullptr)
|
||||
init(caps);
|
||||
|
||||
// cancel if finished
|
||||
if (finished_)
|
||||
return;
|
||||
|
||||
// stop if an incompatible frame buffer is given
|
||||
if ( !gst_caps_is_equal( caps_, caps ))
|
||||
{
|
||||
stop();
|
||||
// Log::Warning("FrameGrabber interrupted: new session (%s)\nincompatible with recording (%s)", gst_caps_to_string(frame.caps), gst_caps_to_string(caps_));
|
||||
Log::Warning("FrameGrabber interrupted because the resolution changed.");
|
||||
}
|
||||
|
||||
// store a frame if recording is active
|
||||
if (active_)
|
||||
{
|
||||
// calculate dt in ns
|
||||
timeframe_ += gst_gdouble_to_guint64( dt * 1000000.f);
|
||||
|
||||
// if more than one frame duration has elapsed (with 10% margin)
|
||||
// and if the encoder accepts data
|
||||
if ( timeframe_ > frame_duration_ - 3000000 && accept_buffer_) {
|
||||
|
||||
// set timing of buffer
|
||||
buffer->pts = timestamp_;
|
||||
buffer->duration = frame_duration_;
|
||||
|
||||
// increment ref counter to make sure the frame remains available
|
||||
gst_buffer_ref(buffer);
|
||||
|
||||
// push
|
||||
gst_app_src_push_buffer (src_, buffer);
|
||||
// NB: buffer will be unrefed by the appsrc
|
||||
|
||||
accept_buffer_ = false;
|
||||
|
||||
// next timestamp
|
||||
timestamp_ += frame_duration_;
|
||||
|
||||
// restart frame counter
|
||||
timeframe_ = 0;
|
||||
}
|
||||
}
|
||||
// did the recording terminate with sink receiving end-of-stream ?
|
||||
else {
|
||||
|
||||
if (!finished_)
|
||||
{
|
||||
// Wait for EOS message
|
||||
GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline_));
|
||||
GstMessage *msg = gst_bus_poll(bus, GST_MESSAGE_EOS, GST_TIME_AS_USECONDS(1));
|
||||
// received EOS
|
||||
if (msg) {
|
||||
// stop the pipeline
|
||||
GstStateChangeReturn ret = gst_element_set_state (pipeline_, GST_STATE_NULL);
|
||||
if (ret == GST_STATE_CHANGE_FAILURE)
|
||||
Log::Warning("FrameGrabber Could not stop.");
|
||||
|
||||
finished_ = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (finished_)
|
||||
terminate();
|
||||
|
||||
}
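The core of grabFrame() above is a two-PBO round-robin: the GPU-to-PBO transfer of the current frame is issued while the previous frame is mapped and copied out. A stripped-down sketch of that readback pattern in isolation (not vimix code; it assumes a current OpenGL context, glad already loaded, and an RGB texture of known size):

    // Minimal two-PBO asynchronous readback sketch.
    #include <glad/glad.h>
    #include <cstddef>
    #include <cstring>

    struct PboReader {
        GLuint pbo[2] = {0, 0};
        int index = 0, next = 0;
        size_t size = 0;

        void setup(int w, int h) {
            size = (size_t) w * h * 3;                 // RGB
            glGenBuffers(2, pbo);
            for (int i = 0; i < 2; ++i) {
                glBindBuffer(GL_PIXEL_PACK_BUFFER, pbo[i]);
                glBufferData(GL_PIXEL_PACK_BUFFER, size, NULL, GL_STREAM_READ);
            }
            glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
        }

        // returns true when 'dst' was filled with the frame captured on the previous call
        bool grab(GLuint tex, unsigned char *dst) {
            bool filled = false;
            // start the asynchronous transfer of the current frame into pbo[index]
            glBindBuffer(GL_PIXEL_PACK_BUFFER, pbo[index]);
            glBindTexture(GL_TEXTURE_2D, tex);
            glGetTexImage(GL_TEXTURE_2D, 0, GL_RGB, GL_UNSIGNED_BYTE, 0);
            // meanwhile, read back the previous frame from pbo[next]
            if (next != index) {
                glBindBuffer(GL_PIXEL_PACK_BUFFER, pbo[next]);
                void *ptr = glMapBuffer(GL_PIXEL_PACK_BUFFER, GL_READ_ONLY);
                if (ptr) { memmove(dst, ptr, size); filled = true; }
                glUnmapBuffer(GL_PIXEL_PACK_BUFFER);
            }
            glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
            // alternate indices, exactly as FrameGrabbing::grabFrame does
            next = index;
            index = (index + 1) % 2;
            return filled;
        }
    };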
|
||||
FrameGrabber.h (118 lines)
@@ -2,59 +2,123 @@
|
||||
#define FRAMEGRABBER_H
|
||||
|
||||
#include <atomic>
|
||||
#include <list>
|
||||
#include <string>
|
||||
|
||||
#include <gst/gst.h>
|
||||
#include <gst/app/gstappsrc.h>
|
||||
|
||||
#include "GlmToolkit.h"
|
||||
|
||||
class FrameBuffer;
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* @brief The FrameGrabber class defines the base class for all recorders
|
||||
* used to save images or videos from a frame buffer.
|
||||
*
|
||||
* The Mixer class calls addFrame() at each newly rendered frame for all of its recorders.
|
||||
* Every subclass shall at least implement init() and terminate()
|
||||
*
|
||||
* The FrameGrabbing manager calls addFrame() for all its grabbers.
|
||||
*/
|
||||
class FrameGrabber
|
||||
{
|
||||
friend class FrameGrabbing;
|
||||
|
||||
uint64_t id_;
|
||||
|
||||
public:
|
||||
FrameGrabber(): finished_(false), pbo_index_(0), pbo_next_index_(0), size_(0)
|
||||
{
|
||||
id_ = GlmToolkit::uniqueId();
|
||||
pbo_[0] = pbo_[1] = 0;
|
||||
}
|
||||
virtual ~FrameGrabber() {}
|
||||
FrameGrabber();
|
||||
virtual ~FrameGrabber();
|
||||
|
||||
inline uint64_t id() const { return id_; }
|
||||
struct hasId: public std::unary_function<FrameGrabber*, bool>
|
||||
{
|
||||
inline bool operator()(const FrameGrabber* elem) const {
|
||||
return (elem && elem->id() == _id);
|
||||
}
|
||||
hasId(uint64_t id) : _id(id) { }
|
||||
private:
|
||||
uint64_t _id;
|
||||
};
|
||||
|
||||
virtual void addFrame(FrameBuffer *frame_buffer, float dt) = 0;
|
||||
virtual void stop() { }
|
||||
virtual std::string info() { return ""; }
|
||||
virtual double duration() { return 0.0; }
|
||||
virtual bool busy() { return false; }
|
||||
|
||||
inline bool finished() const { return finished_; }
|
||||
virtual void stop();
|
||||
virtual std::string info() const;
|
||||
virtual double duration() const;
|
||||
virtual bool finished() const;
|
||||
virtual bool busy() const;
|
||||
|
||||
protected:
|
||||
|
||||
// only FrameGrabbing manager can add frame
|
||||
virtual void addFrame(GstBuffer *buffer, GstCaps *caps, float dt);
|
||||
|
||||
// only addFrame method shall call those
|
||||
virtual void init(GstCaps *caps) = 0;
|
||||
virtual void terminate() = 0;
|
||||
|
||||
// thread-safe testing termination
|
||||
std::atomic<bool> finished_;
|
||||
std::atomic<bool> active_;
|
||||
std::atomic<bool> accept_buffer_;
|
||||
|
||||
// gstreamer pipeline
|
||||
GstElement *pipeline_;
|
||||
GstAppSrc *src_;
|
||||
GstCaps *caps_;
|
||||
GstClockTime timeframe_;
|
||||
GstClockTime timestamp_;
|
||||
GstClockTime frame_duration_;
|
||||
|
||||
// gstreamer callbacks
|
||||
static void callback_need_data (GstAppSrc *, guint, gpointer user_data);
|
||||
static void callback_enough_data (GstAppSrc *, gpointer user_data);
|
||||
|
||||
// PBO
|
||||
guint pbo_[2];
|
||||
guint pbo_index_, pbo_next_index_;
|
||||
guint size_;
|
||||
};
|
||||
|
||||
/**
|
||||
* @brief The FrameGrabbing class manages all frame grabbers
|
||||
*
|
||||
* Session calls grabFrame after each render
|
||||
*
|
||||
*/
|
||||
class FrameGrabbing
|
||||
{
|
||||
friend class Session;
|
||||
|
||||
// Private Constructor
|
||||
FrameGrabbing();
|
||||
FrameGrabbing(FrameGrabbing const& copy); // Not Implemented
|
||||
FrameGrabbing& operator=(FrameGrabbing const& copy); // Not Implemented
|
||||
|
||||
public:
|
||||
|
||||
static FrameGrabbing& manager()
|
||||
{
|
||||
// The only instance
|
||||
static FrameGrabbing _instance;
|
||||
return _instance;
|
||||
}
|
||||
~FrameGrabbing();
|
||||
|
||||
inline uint width() const { return width_; }
|
||||
inline uint height() const { return height_; }
|
||||
|
||||
void add(FrameGrabber *rec);
|
||||
FrameGrabber *front();
|
||||
FrameGrabber *get(uint64_t id);
|
||||
void stopAll();
|
||||
void clearAll();
|
||||
|
||||
protected:
|
||||
|
||||
// only for friend Session
|
||||
void grabFrame(FrameBuffer *frame_buffer, float dt);
|
||||
|
||||
private:
|
||||
std::list<FrameGrabber *> grabbers_;
|
||||
guint pbo_[2];
|
||||
guint pbo_index_;
|
||||
guint pbo_next_index_;
|
||||
guint size_;
|
||||
guint width_;
|
||||
guint height_;
|
||||
bool use_alpha_;
|
||||
GstCaps *caps_;
|
||||
};
|
||||
|
||||
|
||||
|
||||
#endif // FRAMEGRABBER_H
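For reference, a minimal hypothetical subclass (not part of this commit) showing the contract above: init() builds an appsrc-fed pipeline the same way Loopback and the recorders do, terminate() cleans up, and the inherited addFrame() takes care of pushing buffers.

    // Hypothetical minimal FrameGrabber subclass, for illustration only.
    // It dumps raw frames to fakesink instead of encoding or streaming them.
    #include "FrameGrabber.h"

    class NullGrabber : public FrameGrabber
    {
        void init(GstCaps *caps) override
        {
            GError *error = NULL;
            pipeline_ = gst_parse_launch("appsrc name=src ! fakesink", &error);
            if (error != NULL) {
                g_clear_error(&error);
                finished_ = true;
                return;
            }
            // configure the appsrc with the caps provided by FrameGrabbing
            src_ = GST_APP_SRC( gst_bin_get_by_name(GST_BIN(pipeline_), "src") );
            caps_ = gst_caps_copy(caps);
            gst_app_src_set_caps(src_, caps_);

            GstAppSrcCallbacks callbacks;
            callbacks.need_data   = FrameGrabber::callback_need_data;
            callbacks.enough_data = FrameGrabber::callback_enough_data;
            callbacks.seek_data   = NULL;   // stream type is not seekable
            gst_app_src_set_callbacks(src_, &callbacks, this, NULL);

            gst_element_set_state(pipeline_, GST_STATE_PLAYING);
            active_ = true;
        }
        void terminate() override { /* nothing extra to clean up for fakesink */ }

    public:
        NullGrabber() : FrameGrabber() {}
    };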
|
||||
|
||||
Loopback.cpp (new file, 239 lines)
@@ -0,0 +1,239 @@
|
||||
#include <thread>
|
||||
|
||||
// Desktop OpenGL function loader
|
||||
#include <glad/glad.h>
|
||||
|
||||
// gstreamer
|
||||
#include <gst/gstformat.h>
|
||||
#include <gst/video/video.h>
|
||||
|
||||
#include "defines.h"
|
||||
#include "Settings.h"
|
||||
#include "GstToolkit.h"
|
||||
#include "SystemToolkit.h"
|
||||
#include "FrameBuffer.h"
|
||||
#include "Log.h"
|
||||
|
||||
#include "Loopback.h"
|
||||
|
||||
bool Loopback::system_loopback_initialized = false;
|
||||
|
||||
#if defined(LINUX)
|
||||
|
||||
/**
|
||||
*
|
||||
* Linux Video4Linux (v4l2) loopback device
|
||||
*
|
||||
* 1) Linux system has to have the v4l2loopback package
|
||||
* See documentation at https://github.com/umlaeute/v4l2loopback
|
||||
*
|
||||
* $ sudo -A apt install v4l2loopback-dkms
|
||||
*
|
||||
* 2) The user (with sudo) has to load a v4l2loopback device
|
||||
*
|
||||
* $ sudo -A modprobe v4l2loopback exclusive_caps=1 video_nr=10
|
||||
*
|
||||
* 3) To do that, the user has to enter the sudo password
|
||||
*
|
||||
* The command line above should be preceded by
|
||||
* export SUDO_ASKPASS="/tmp/mysudo.sh"
|
||||
*
|
||||
* where mysudo.sh contains the following:
|
||||
* #!/bin/bash
|
||||
* zenity --password --title=Authentication
|
||||
*
|
||||
* 4) Optionally, we can set the dynamic properties of the stream
|
||||
*
|
||||
* $ sudo v4l2loopback-ctl set-caps "RGBA:640x480" /dev/video10
|
||||
* $ sudo v4l2loopback-ctl set-fps 30 /dev/video10
|
||||
*
|
||||
* 5) Finally, the gstreamer pipeline can write into v4l2sink
|
||||
*
|
||||
* gst-launch-1.0 videotestsrc ! v4l2sink device=/dev/video10
|
||||
*
|
||||
*
|
||||
* Useful command lines for debug
|
||||
* $ v4l2-ctl --all -d 10
|
||||
* $ gst-launch-1.0 v4l2src device=/dev/video10 ! videoconvert ! autovideosink
|
||||
* $ gst-launch-1.0 videotestsrc ! v4l2sink device=/dev/video1
|
||||
*/
|
||||
|
||||
#include <stdio.h>
|
||||
#include <sys/stat.h>
|
||||
#include <fcntl.h>
|
||||
#include <unistd.h>
|
||||
|
||||
std::string Loopback::system_loopback_name = "/dev/video10";
|
||||
std::string Loopback::system_loopback_pipeline = "appsrc name=src ! videoconvert ! v4l2sink sync=false name=sink";
|
||||
|
||||
bool Loopback::initializeSystemLoopback()
|
||||
{
|
||||
if (!Loopback::systemLoopbackInitialized()) {
|
||||
|
||||
// create script for asking sudo password
|
||||
std::string sudoscript = SystemToolkit::full_filename(SystemToolkit::settings_path(), "sudo.sh");
|
||||
FILE *file = fopen(sudoscript.c_str(), "w");
|
||||
if (file) {
|
||||
fprintf(file, "#!/bin/bash\n");
|
||||
fprintf(file, "zenity --password --title=Authentication\n");
|
||||
fclose(file);
|
||||
|
||||
// make script executable
|
||||
int fildes = 0;
|
||||
fildes = open(sudoscript.c_str(), O_RDWR);
|
||||
fchmod(fildes, S_IRWXU | S_IRWXG | S_IROTH | S_IWOTH);
|
||||
close(fildes);
|
||||
|
||||
// create command line for installing v4l2loopback
|
||||
std::string cmdline = "export SUDO_ASKPASS=\"" + sudoscript + "\"\n";
|
||||
cmdline += "sudo -A apt install v4l2loopback-dkms 2>&1\n";
|
||||
cmdline += "sudo -A modprobe v4l2loopback exclusive_caps=1 video_nr=10 card_label=\"vimix loopback\" 2>&1\n";
|
||||
|
||||
// execute v4l2 command line
|
||||
std::string report;
|
||||
FILE *fp = popen(cmdline.c_str(), "r");
|
||||
if (fp != NULL) {
|
||||
|
||||
// get stdout content from command line
|
||||
char linestdout[PATH_MAX];
|
||||
while (fgets(linestdout, PATH_MAX, fp) != NULL)
|
||||
report += linestdout;
|
||||
|
||||
// error reported by pclose?
|
||||
if (pclose(fp) != 0 )
|
||||
Log::Warning("Failed to initialize system v4l2loopback\n%s", report.c_str());
|
||||
// okay, probably all good...
|
||||
else
|
||||
system_loopback_initialized = true;
|
||||
}
|
||||
else
|
||||
Log::Warning("Failed to initialize system v4l2loopback\nCannot execute command line");
|
||||
|
||||
}
|
||||
else
|
||||
Log::Warning("Failed to initialize system v4l2loopback\nCannot create script", sudoscript.c_str());
|
||||
}
|
||||
|
||||
return system_loopback_initialized;
|
||||
}
|
||||
|
||||
bool Loopback::systemLoopbackInitialized()
|
||||
{
|
||||
// test if already initialized
|
||||
if (!system_loopback_initialized) {
|
||||
// check the existence of loopback device
|
||||
if ( SystemToolkit::file_exists(system_loopback_name) )
|
||||
system_loopback_initialized = true;
|
||||
}
|
||||
|
||||
return system_loopback_initialized;
|
||||
}
|
||||
|
||||
#else
|
||||
|
||||
std::string Loopback::system_loopback_name = "undefined";
|
||||
std::string Loopback::system_loopback_pipeline = "";
|
||||
|
||||
|
||||
bool Loopback::initializeSystemLoopback()
|
||||
{
|
||||
system_loopback_initialized = false;
|
||||
return false;
|
||||
}
|
||||
|
||||
bool Loopback::systemLoopbackInitialized()
|
||||
{
|
||||
return false;
|
||||
}
|
||||
#endif
|
||||
|
||||
|
||||
Loopback::Loopback() : FrameGrabber()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
void Loopback::init(GstCaps *caps)
|
||||
{
|
||||
// ignore
|
||||
if (caps == nullptr)
|
||||
return;
|
||||
|
||||
if (!Loopback::systemLoopbackInitialized()){
|
||||
Log::Warning("Loopback system shall be initialized first.");
|
||||
finished_ = true;
|
||||
return;
|
||||
}
|
||||
|
||||
// create a gstreamer pipeline
|
||||
std::string description = Loopback::system_loopback_pipeline;
|
||||
|
||||
// parse pipeline descriptor
|
||||
GError *error = NULL;
|
||||
pipeline_ = gst_parse_launch (description.c_str(), &error);
|
||||
if (error != NULL) {
|
||||
Log::Warning("Loopback Could not construct pipeline %s:\n%s", description.c_str(), error->message);
|
||||
g_clear_error (&error);
|
||||
finished_ = true;
|
||||
return;
|
||||
}
|
||||
|
||||
// setup device sink
|
||||
g_object_set (G_OBJECT (gst_bin_get_by_name (GST_BIN (pipeline_), "sink")),
|
||||
"device", Loopback::system_loopback_name.c_str(),
|
||||
NULL);
|
||||
|
||||
// setup custom app source
|
||||
src_ = GST_APP_SRC( gst_bin_get_by_name (GST_BIN (pipeline_), "src") );
|
||||
if (src_) {
|
||||
|
||||
g_object_set (G_OBJECT (src_),
|
||||
"stream-type", GST_APP_STREAM_TYPE_STREAM,
|
||||
"is-live", TRUE,
|
||||
"format", GST_FORMAT_TIME,
|
||||
// "do-timestamp", TRUE,
|
||||
NULL);
|
||||
|
||||
// Direct encoding (no buffering)
|
||||
gst_app_src_set_max_bytes( src_, 0 );
|
||||
|
||||
// instruct src to use the required caps
|
||||
caps_ = gst_caps_copy( caps );
|
||||
gst_app_src_set_caps (src_, caps_);
|
||||
|
||||
// setup callbacks
|
||||
GstAppSrcCallbacks callbacks;
|
||||
callbacks.need_data = FrameGrabber::callback_need_data;
|
||||
callbacks.enough_data = FrameGrabber::callback_enough_data;
|
||||
callbacks.seek_data = NULL; // stream type is not seekable
|
||||
gst_app_src_set_callbacks (src_, &callbacks, this, NULL);
|
||||
|
||||
}
|
||||
else {
|
||||
Log::Warning("Loopback Could not configure source");
|
||||
finished_ = true;
|
||||
return;
|
||||
}
|
||||
|
||||
// start recording
|
||||
GstStateChangeReturn ret = gst_element_set_state (pipeline_, GST_STATE_PLAYING);
|
||||
if (ret == GST_STATE_CHANGE_FAILURE) {
|
||||
Log::Warning("Loopback Could not open %s", Loopback::system_loopback_name.c_str());
|
||||
finished_ = true;
|
||||
return;
|
||||
}
|
||||
|
||||
// all good
|
||||
#if defined(LINUX)
|
||||
Log::Notify("Loopback started (v4l2loopback on %s)", Loopback::system_loopback_name.c_str());
|
||||
#else
|
||||
Log::Notify("Loopback started (%s)", Loopback::system_loopback_name.c_str());
|
||||
#endif
|
||||
// start
|
||||
active_ = true;
|
||||
}
|
||||
|
||||
void Loopback::terminate()
|
||||
{
|
||||
Log::Notify("Loopback to %s terminated.", Loopback::system_loopback_name.c_str());
|
||||
}
|
||||
Loopback.h (new file, 31 lines)
@@ -0,0 +1,31 @@
|
||||
#ifndef LOOPBACK_H
|
||||
#define LOOPBACK_H
|
||||
|
||||
#include <vector>
|
||||
|
||||
#include <gst/pbutils/pbutils.h>
|
||||
#include <gst/app/gstappsrc.h>
|
||||
|
||||
#include "FrameGrabber.h"
|
||||
|
||||
|
||||
class Loopback : public FrameGrabber
|
||||
{
|
||||
static std::string system_loopback_pipeline;
|
||||
static std::string system_loopback_name;
|
||||
static bool system_loopback_initialized;
|
||||
|
||||
void init(GstCaps *caps) override;
|
||||
void terminate() override;
|
||||
|
||||
public:
|
||||
|
||||
Loopback();
|
||||
|
||||
static bool systemLoopbackInitialized();
|
||||
static bool initializeSystemLoopback();
|
||||
|
||||
};
|
||||
|
||||
|
||||
#endif // LOOPBACK_H
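The call site that actually creates a Loopback grabber is not part of this diff; based on the API above, a plausible (hypothetical) startup sequence would be:

    // Hypothetical call site (not in this commit): make sure the v4l2loopback
    // device exists, then register a Loopback grabber with the manager.
    #include "Loopback.h"

    void start_loopback_output()
    {
        if ( !Loopback::systemLoopbackInitialized() ) {
            // asks for the sudo password and runs modprobe v4l2loopback (Linux only)
            if ( !Loopback::initializeSystemLoopback() )
                return;
        }
        // frames will be pushed to /dev/video10 starting from the next grabFrame()
        FrameGrabbing::manager().add( new Loopback );
    }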
|
||||
@@ -815,9 +815,6 @@ void Mixer::swap()
|
||||
session_ = back_session_;
|
||||
back_session_ = tmp;
|
||||
|
||||
// swap recorders
|
||||
back_session_->transferFrameGrabber(session_);
|
||||
|
||||
// attach new session's nodes to views
|
||||
for (auto source_iter = session_->begin(); source_iter != session_->end(); source_iter++)
|
||||
attach(*source_iter);
|
||||
@@ -850,9 +847,6 @@ void Mixer::swap()
|
||||
// reset History manager
|
||||
Action::manager().clear();
|
||||
|
||||
// inform streaming manager
|
||||
Streaming::manager().setSession(session_);
|
||||
|
||||
// notification
|
||||
Log::Notify("Session %s loaded. %d source(s) created.", session_->filename().c_str(), session_->numSource());
|
||||
}
|
||||
|
||||
@@ -97,14 +97,11 @@ void wait_for_stream_(UdpListeningReceiveSocket *receiver)
|
||||
void NetworkStream::connect(const std::string &nameconnection)
|
||||
{
|
||||
// start fresh
|
||||
if (connected())
|
||||
disconnect();
|
||||
received_config_ = false;
|
||||
connected_ = false;
|
||||
if (receiver_) {
|
||||
delete receiver_;
|
||||
receiver_ = nullptr;
|
||||
close();
|
||||
}
|
||||
|
||||
// refuse self referencing
|
||||
if (nameconnection.compare(Connection::manager().info().name) == 0) {
|
||||
Log::Warning("Cannot create self-referencing Network Source '%s'", nameconnection.c_str());
|
||||
failed_ = true;
|
||||
@@ -190,7 +187,11 @@ void NetworkStream::disconnect()
|
||||
// send OSC message to streamer
|
||||
UdpTransmitSocket socket( IpEndpointName(streamer_.address.c_str(), streamer_.port_stream_request) );
|
||||
socket.Send( p.Data(), p.Size() );
|
||||
|
||||
connected_ = false;
|
||||
}
|
||||
|
||||
close();
|
||||
}
|
||||
|
||||
|
||||
@@ -291,7 +292,7 @@ NetworkStream *NetworkSource::networkStream() const
|
||||
void NetworkSource::setConnection(const std::string &nameconnection)
|
||||
{
|
||||
connection_name_ = nameconnection;
|
||||
Log::Notify("Creating Network Source '%s'", connection_name_.c_str());
|
||||
Log::Notify("Network Source connecting to '%s'", connection_name_.c_str());
|
||||
|
||||
// open network stream
|
||||
networkStream()->connect( connection_name_ );
|
||||
|
||||
Recorder.cpp (511 lines)
@@ -22,95 +22,103 @@
|
||||
|
||||
PNGRecorder::PNGRecorder() : FrameGrabber()
|
||||
{
|
||||
}
|
||||
|
||||
void PNGRecorder::init(GstCaps *caps)
|
||||
{
|
||||
// ignore
|
||||
if (caps == nullptr)
|
||||
return;
|
||||
|
||||
// create a gstreamer pipeline
|
||||
std::string description = "appsrc name=src ! videoconvert ! pngenc ! filesink name=sink";
|
||||
|
||||
// parse pipeline descriptor
|
||||
GError *error = NULL;
|
||||
pipeline_ = gst_parse_launch (description.c_str(), &error);
|
||||
if (error != NULL) {
|
||||
Log::Warning("PNG Capture Could not construct pipeline %s:\n%s", description.c_str(), error->message);
|
||||
g_clear_error (&error);
|
||||
finished_ = true;
|
||||
return;
|
||||
}
|
||||
|
||||
// verify location path (path is always terminated by the OS dependent separator)
|
||||
std::string path = SystemToolkit::path_directory(Settings::application.record.path);
|
||||
if (path.empty())
|
||||
path = SystemToolkit::home_path();
|
||||
|
||||
filename_ = path + "vimix_" + SystemToolkit::date_time_string() + ".png";
|
||||
|
||||
}
|
||||
// setup file sink
|
||||
g_object_set (G_OBJECT (gst_bin_get_by_name (GST_BIN (pipeline_), "sink")),
|
||||
"location", filename_.c_str(),
|
||||
"sync", FALSE,
|
||||
NULL);
|
||||
|
||||
// setup custom app source
|
||||
src_ = GST_APP_SRC( gst_bin_get_by_name (GST_BIN (pipeline_), "src") );
|
||||
if (src_) {
|
||||
|
||||
g_object_set (G_OBJECT (src_),
|
||||
"stream-type", GST_APP_STREAM_TYPE_STREAM,
|
||||
"is-live", TRUE,
|
||||
"format", GST_FORMAT_TIME,
|
||||
// "do-timestamp", TRUE,
|
||||
NULL);
|
||||
|
||||
// Direct encoding (no buffering)
|
||||
gst_app_src_set_max_bytes( src_, 0 );
|
||||
|
||||
// instruct src to use the required caps
|
||||
caps_ = gst_caps_copy( caps );
|
||||
gst_app_src_set_caps (src_, caps_);
|
||||
|
||||
// setup callbacks
|
||||
GstAppSrcCallbacks callbacks;
|
||||
callbacks.need_data = FrameGrabber::callback_need_data;
|
||||
callbacks.enough_data = FrameGrabber::callback_enough_data;
|
||||
callbacks.seek_data = NULL; // stream type is not seekable
|
||||
gst_app_src_set_callbacks (src_, &callbacks, this, NULL);
|
||||
|
||||
// Thread to perform slow operation of saving to file
|
||||
void save_png(std::string filename, unsigned char *data, uint w, uint h, uint c)
|
||||
{
|
||||
// got data to save ?
|
||||
if (data) {
|
||||
// save file
|
||||
stbi_write_png(filename.c_str(), w, h, c, data, w * c);
|
||||
// notify
|
||||
Log::Notify("Capture %s ready (%d x %d %d)", filename.c_str(), w, h, c);
|
||||
// done
|
||||
free(data);
|
||||
}
|
||||
}
|
||||
|
||||
void PNGRecorder::addFrame(FrameBuffer *frame_buffer, float)
|
||||
{
|
||||
// ignore
|
||||
if (frame_buffer == nullptr)
|
||||
return;
|
||||
|
||||
// get what is needed from frame buffer
|
||||
uint w = frame_buffer->width();
|
||||
uint h = frame_buffer->height();
|
||||
uint c = frame_buffer->use_alpha() ? 4 : 3;
|
||||
|
||||
// first iteration: initialize and get frame
|
||||
if (size_ < 1)
|
||||
{
|
||||
// init size
|
||||
size_ = w * h * c;
|
||||
|
||||
// create PBO
|
||||
glGenBuffers(2, pbo_);
|
||||
|
||||
// set writing PBO
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, pbo_[0]);
|
||||
glBufferData(GL_PIXEL_PACK_BUFFER, size_, NULL, GL_STREAM_READ);
|
||||
|
||||
#ifdef USE_GLREADPIXEL
|
||||
// get frame
|
||||
frame_buffer->readPixels();
|
||||
#else
|
||||
glBindTexture(GL_TEXTURE_2D, frame_buffer->texture());
|
||||
glGetTexImage(GL_TEXTURE_2D, 0, GL_RGB, GL_UNSIGNED_BYTE, 0);
|
||||
#endif
|
||||
}
|
||||
// second iteration; get frame and save file
|
||||
else {
|
||||
|
||||
// set reading PBO
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, pbo_[0]);
|
||||
|
||||
// get pixels
|
||||
unsigned char* ptr = (unsigned char*) glMapBuffer(GL_PIXEL_PACK_BUFFER, GL_READ_ONLY);
|
||||
if (NULL != ptr) {
|
||||
// prepare memory buffer
|
||||
unsigned char * data = (unsigned char*) malloc(size_);
|
||||
// transfer frame to data
|
||||
memmove(data, ptr, size_);
|
||||
// save in separate thread
|
||||
std::thread(save_png, filename_, data, w, h, c).detach();
|
||||
}
|
||||
// unmap
|
||||
glUnmapBuffer(GL_PIXEL_PACK_BUFFER);
|
||||
|
||||
// ok done
|
||||
if (pbo_[0] > 0)
|
||||
glDeleteBuffers(2, pbo_);
|
||||
|
||||
// recorded one frame
|
||||
Log::Warning("PNG Capture Could not configure source");
|
||||
finished_ = true;
|
||||
return;
|
||||
}
|
||||
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
|
||||
// start pipeline
|
||||
GstStateChangeReturn ret = gst_element_set_state (pipeline_, GST_STATE_PLAYING);
|
||||
if (ret == GST_STATE_CHANGE_FAILURE) {
|
||||
Log::Warning("PNG Capture Could not record %s", filename_.c_str());
|
||||
finished_ = true;
|
||||
return;
|
||||
}
|
||||
|
||||
// unsigned char * data = (unsigned char*) malloc(size);
|
||||
// GLenum format = frame_buffer->use_alpha() ? GL_RGBA : GL_RGB;
|
||||
// glGetTextureSubImage( frame_buffer->texture(), 0, 0, 0, 0, w, h, 1, format, GL_UNSIGNED_BYTE, size, data);
|
||||
// all good
|
||||
Log::Info("PNG Capture started.");
|
||||
|
||||
// start recording !!
|
||||
active_ = true;
|
||||
}
|
||||
|
||||
void PNGRecorder::terminate()
|
||||
{
|
||||
Log::Notify("PNG Capture %s is ready.", filename_.c_str());
|
||||
}
|
||||
|
||||
void PNGRecorder::addFrame(GstBuffer *buffer, GstCaps *caps, float dt)
|
||||
{
|
||||
FrameGrabber::addFrame(buffer, caps, dt);
|
||||
|
||||
// PNG Recorder specific :
|
||||
// stop after one frame
|
||||
if (timestamp_ > 0) {
|
||||
stop();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
const char* VideoRecorder::profile_name[VideoRecorder::DEFAULT] = {
|
||||
"H264 (Realtime)",
|
||||
"H264 (High 4:4:4)",
|
||||
@@ -189,306 +197,115 @@ const std::vector<std::string> VideoRecorder::profile_description {
|
||||
// "qtmux ! filesink name=sink";
|
||||
|
||||
|
||||
VideoRecorder::VideoRecorder() : FrameGrabber(), frame_buffer_(nullptr), width_(0), height_(0),
|
||||
recording_(false), accept_buffer_(false), pipeline_(nullptr), src_(nullptr), timestamp_(0)
|
||||
VideoRecorder::VideoRecorder() : FrameGrabber()
|
||||
{
|
||||
|
||||
// configure fix parameter
|
||||
frame_duration_ = gst_util_uint64_scale_int (1, GST_SECOND, 30); // 30 FPS
|
||||
timeframe_ = 2 * frame_duration_;
|
||||
}
|
||||
|
||||
VideoRecorder::~VideoRecorder()
|
||||
void VideoRecorder::init(GstCaps *caps)
|
||||
{
|
||||
if (src_ != nullptr)
|
||||
gst_object_unref (src_);
|
||||
if (pipeline_ != nullptr) {
|
||||
gst_element_set_state (pipeline_, GST_STATE_NULL);
|
||||
gst_object_unref (pipeline_);
|
||||
}
|
||||
|
||||
if (pbo_[0] > 0)
|
||||
glDeleteBuffers(2, pbo_);
|
||||
}
|
||||
|
||||
void VideoRecorder::addFrame (FrameBuffer *frame_buffer, float dt)
|
||||
{
|
||||
// TODO : avoid software videoconvert by using a GPU shader to produce Y444 frames
|
||||
|
||||
// ignore
|
||||
if (frame_buffer == nullptr)
|
||||
if (caps == nullptr)
|
||||
return;
|
||||
|
||||
// first frame for initialization
|
||||
if (frame_buffer_ == nullptr) {
|
||||
// create a gstreamer pipeline
|
||||
std::string description = "appsrc name=src ! videoconvert ! ";
|
||||
if (Settings::application.record.profile < 0 || Settings::application.record.profile >= DEFAULT)
|
||||
Settings::application.record.profile = H264_STANDARD;
|
||||
description += profile_description[Settings::application.record.profile];
|
||||
|
||||
// set frame buffer as input
|
||||
frame_buffer_ = frame_buffer;
|
||||
// verify location path (path is always terminated by the OS dependent separator)
|
||||
std::string path = SystemToolkit::path_directory(Settings::application.record.path);
|
||||
if (path.empty())
|
||||
path = SystemToolkit::home_path();
|
||||
|
||||
// define stream properties
|
||||
width_ = frame_buffer_->width();
|
||||
height_ = frame_buffer_->height();
|
||||
size_ = width_ * height_ * (frame_buffer_->use_alpha() ? 4 : 3);
|
||||
// setup filename & muxer
|
||||
if( Settings::application.record.profile == JPEG_MULTI) {
|
||||
std::string folder = path + "vimix_" + SystemToolkit::date_time_string();
|
||||
filename_ = SystemToolkit::full_filename(folder, "%05d.jpg");
|
||||
if (SystemToolkit::create_directory(folder))
|
||||
description += "multifilesink name=sink";
|
||||
}
|
||||
else if( Settings::application.record.profile == VP8) {
|
||||
filename_ = path + "vimix_" + SystemToolkit::date_time_string() + ".webm";
|
||||
description += "webmmux ! filesink name=sink";
|
||||
}
|
||||
else {
|
||||
filename_ = path + "vimix_" + SystemToolkit::date_time_string() + ".mov";
|
||||
description += "qtmux ! filesink name=sink";
|
||||
}
|
||||
|
||||
// create PBOs
|
||||
glGenBuffers(2, pbo_);
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, pbo_[1]);
|
||||
glBufferData(GL_PIXEL_PACK_BUFFER, size_, NULL, GL_STREAM_READ);
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, pbo_[0]);
|
||||
glBufferData(GL_PIXEL_PACK_BUFFER, size_, NULL, GL_STREAM_READ);
|
||||
// parse pipeline descriptor
|
||||
GError *error = NULL;
|
||||
pipeline_ = gst_parse_launch (description.c_str(), &error);
|
||||
if (error != NULL) {
|
||||
Log::Warning("VideoRecorder Could not construct pipeline %s:\n%s", description.c_str(), error->message);
|
||||
g_clear_error (&error);
|
||||
finished_ = true;
|
||||
return;
|
||||
}
|
||||
|
||||
// create a gstreamer pipeline
|
||||
std::string description = "appsrc name=src ! videoconvert ! ";
|
||||
if (Settings::application.record.profile < 0 || Settings::application.record.profile >= DEFAULT)
|
||||
Settings::application.record.profile = H264_STANDARD;
|
||||
description += profile_description[Settings::application.record.profile];
|
||||
// setup file sink
|
||||
g_object_set (G_OBJECT (gst_bin_get_by_name (GST_BIN (pipeline_), "sink")),
|
||||
"location", filename_.c_str(),
|
||||
"sync", FALSE,
|
||||
NULL);
|
||||
|
||||
// verify location path (path is always terminated by the OS dependent separator)
|
||||
std::string path = SystemToolkit::path_directory(Settings::application.record.path);
|
||||
if (path.empty())
|
||||
path = SystemToolkit::home_path();
|
||||
// setup custom app source
|
||||
src_ = GST_APP_SRC( gst_bin_get_by_name (GST_BIN (pipeline_), "src") );
|
||||
if (src_) {
|
||||
|
||||
// setup filename & muxer
|
||||
if( Settings::application.record.profile == JPEG_MULTI) {
|
||||
std::string folder = path + "vimix_" + SystemToolkit::date_time_string();
|
||||
filename_ = SystemToolkit::full_filename(folder, "%05d.jpg");
|
||||
if (SystemToolkit::create_directory(folder))
|
||||
description += "multifilesink name=sink";
|
||||
}
|
||||
else if( Settings::application.record.profile == VP8) {
|
||||
filename_ = path + "vimix_" + SystemToolkit::date_time_string() + ".webm";
|
||||
description += "webmmux ! filesink name=sink";
|
||||
}
|
||||
else {
|
||||
filename_ = path + "vimix_" + SystemToolkit::date_time_string() + ".mov";
|
||||
description += "qtmux ! filesink name=sink";
|
||||
}
|
||||
g_object_set (G_OBJECT (src_),
|
||||
"stream-type", GST_APP_STREAM_TYPE_STREAM,
|
||||
"is-live", TRUE,
|
||||
"format", GST_FORMAT_TIME,
|
||||
// "do-timestamp", TRUE,
|
||||
NULL);
|
||||
|
||||
// parse pipeline descriptor
|
||||
GError *error = NULL;
|
||||
pipeline_ = gst_parse_launch (description.c_str(), &error);
|
||||
if (error != NULL) {
|
||||
Log::Warning("VideoRecorder Could not construct pipeline %s:\n%s", description.c_str(), error->message);
|
||||
g_clear_error (&error);
|
||||
finished_ = true;
|
||||
return;
|
||||
}
|
||||
// Direct encoding (no buffering)
|
||||
gst_app_src_set_max_bytes( src_, 0 );
|
||||
|
||||
// setup file sink
|
||||
g_object_set (G_OBJECT (gst_bin_get_by_name (GST_BIN (pipeline_), "sink")),
|
||||
"location", filename_.c_str(),
|
||||
"sync", FALSE,
|
||||
NULL);
|
||||
// instruct src to use the required caps
|
||||
caps_ = gst_caps_copy( caps );
|
||||
gst_app_src_set_caps (src_, caps_);
|
||||
|
||||
// setup custom app source
|
||||
src_ = GST_APP_SRC( gst_bin_get_by_name (GST_BIN (pipeline_), "src") );
|
||||
if (src_) {
|
||||
// setup callbacks
|
||||
GstAppSrcCallbacks callbacks;
|
||||
callbacks.need_data = FrameGrabber::callback_need_data;
|
||||
callbacks.enough_data = FrameGrabber::callback_enough_data;
|
||||
callbacks.seek_data = NULL; // stream type is not seekable
|
||||
gst_app_src_set_callbacks (src_, &callbacks, this, NULL);
|
||||
|
||||
g_object_set (G_OBJECT (src_),
|
||||
"stream-type", GST_APP_STREAM_TYPE_STREAM,
|
||||
"is-live", TRUE,
|
||||
"format", GST_FORMAT_TIME,
|
||||
// "do-timestamp", TRUE,
|
||||
NULL);
|
||||
}
|
||||
else {
|
||||
Log::Warning("VideoRecorder Could not configure source");
|
||||
finished_ = true;
|
||||
return;
|
||||
}
|
||||
|
||||
// Direct encoding (no buffering)
|
||||
gst_app_src_set_max_bytes( src_, 0 );
|
||||
// gst_app_src_set_max_bytes( src_, 2 * buf_size_);
|
||||
// start recording
|
||||
GstStateChangeReturn ret = gst_element_set_state (pipeline_, GST_STATE_PLAYING);
|
||||
if (ret == GST_STATE_CHANGE_FAILURE) {
|
||||
Log::Warning("VideoRecorder Could not record %s", filename_.c_str());
|
||||
finished_ = true;
|
||||
return;
|
||||
}
|
||||
|
||||
// instruct src to use the required caps
|
||||
GstCaps *caps = gst_caps_new_simple ("video/x-raw",
|
||||
"format", G_TYPE_STRING, frame_buffer_->use_alpha() ? "RGBA" : "RGB",
|
||||
"width", G_TYPE_INT, width_,
|
||||
"height", G_TYPE_INT, height_,
|
||||
"framerate", GST_TYPE_FRACTION, 30, 1,
|
||||
NULL);
|
||||
gst_app_src_set_caps (src_, caps);
|
||||
gst_caps_unref (caps);
|
||||
|
||||
// setup callbacks
|
||||
GstAppSrcCallbacks callbacks;
|
||||
callbacks.need_data = callback_need_data;
|
||||
callbacks.enough_data = callback_enough_data;
|
||||
callbacks.seek_data = NULL; // stream type is not seekable
|
||||
gst_app_src_set_callbacks (src_, &callbacks, this, NULL);
|
||||
|
||||
}
|
||||
else {
|
||||
Log::Warning("VideoRecorder Could not configure capture source");
|
||||
finished_ = true;
|
||||
return;
|
||||
}
|
||||
|
||||
// start recording
|
||||
GstStateChangeReturn ret = gst_element_set_state (pipeline_, GST_STATE_PLAYING);
|
||||
if (ret == GST_STATE_CHANGE_FAILURE) {
|
||||
Log::Warning("VideoRecorder Could not record %s", filename_.c_str());
|
||||
finished_ = true;
|
||||
return;
|
||||
}
|
||||
|
||||
// all good
|
||||
Log::Info("VideoRecorder start (%s %d x %d)", profile_name[Settings::application.record.profile], width_, height_);
|
||||
|
||||
// start recording !!
|
||||
recording_ = true;
|
||||
}
|
||||
// frame buffer changed ?
|
||||
else if (frame_buffer_ != frame_buffer) {
|
||||
|
||||
// if an incompatible frame buffer is given: stop recorder
|
||||
if ( frame_buffer->width() != width_ ||
|
||||
frame_buffer->height() != height_ ||
|
||||
frame_buffer->use_alpha() != frame_buffer_->use_alpha()) {
|
||||
|
||||
stop();
|
||||
Log::Warning("Recording interrupted: new session (%d x %d) incompatible with recording (%d x %d)", frame_buffer->width(), frame_buffer->height(), width_, height_);
|
||||
}
|
||||
else {
|
||||
// accepting a new frame buffer as input
|
||||
frame_buffer_ = frame_buffer;
|
||||
}
|
||||
}
|
||||
|
||||
// store a frame if recording is active
|
||||
if (recording_ && size_ > 0)
|
||||
{
|
||||
// calculate dt in ns
|
||||
timeframe_ += gst_gdouble_to_guint64( dt * 1000000.f);
|
||||
|
||||
// if time is passed one frame duration (with 10% margin)
|
||||
// and if the encoder accepts data
|
||||
if ( timeframe_ > frame_duration_ - 3000000 && accept_buffer_) {
|
||||
|
||||
// set buffer target for writing in a new frame
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, pbo_[pbo_index_]);
|
||||
|
||||
#ifdef USE_GLREADPIXEL
|
||||
// get frame
|
||||
frame_buffer->readPixels();
|
||||
#else
|
||||
glBindTexture(GL_TEXTURE_2D, frame_buffer->texture());
|
||||
glGetTexImage(GL_TEXTURE_2D, 0, GL_RGB, GL_UNSIGNED_BYTE, 0);
|
||||
#endif
|
||||
|
||||
// update case ; alternating indices
|
||||
if ( pbo_next_index_ != pbo_index_ ) {
|
||||
|
||||
// set buffer target for saving the frame
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, pbo_[pbo_next_index_]);
|
||||
|
||||
// new buffer
|
||||
GstBuffer *buffer = gst_buffer_new_and_alloc (size_);
|
||||
|
||||
// set timing of buffer
|
||||
buffer->pts = timestamp_;
|
||||
buffer->duration = frame_duration_;
|
||||
|
||||
// map gst buffer into a memory WRITE target
|
||||
GstMapInfo map;
|
||||
gst_buffer_map (buffer, &map, GST_MAP_WRITE);
|
||||
|
||||
// map PBO pixels into a memory READ pointer
|
||||
unsigned char* ptr = (unsigned char*) glMapBuffer(GL_PIXEL_PACK_BUFFER, GL_READ_ONLY);
|
||||
|
||||
// transfer pixels from PBO memory to buffer memory
|
||||
if (NULL != ptr)
|
||||
memmove(map.data, ptr, size_);
|
||||
|
||||
// un-map
|
||||
glUnmapBuffer(GL_PIXEL_PACK_BUFFER);
|
||||
gst_buffer_unmap (buffer, &map);
|
||||
|
||||
// push
|
||||
// Log::Info("VideoRecorder push data %ld", buffer->pts);
|
||||
gst_app_src_push_buffer (src_, buffer);
|
||||
// NB: buffer will be unrefed by the appsrc
|
||||
|
||||
accept_buffer_ = false;
|
||||
|
||||
// next timestamp
|
||||
timestamp_ += frame_duration_;
|
||||
}
|
||||
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
|
||||
|
||||
// alternate indices
|
||||
pbo_next_index_ = pbo_index_;
|
||||
pbo_index_ = (pbo_index_ + 1) % 2;
|
||||
|
||||
// restart frame counter
|
||||
timeframe_ = 0;
|
||||
}
|
||||
|
||||
}
|
||||
// did the recording terminate with sink receiving end-of-stream ?
|
||||
else
|
||||
{
|
||||
// Wait for EOS message
|
||||
GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline_));
|
||||
GstMessage *msg = gst_bus_poll(bus, GST_MESSAGE_EOS, GST_TIME_AS_USECONDS(1));
|
||||
|
||||
if (msg) {
|
||||
// Log::Info("received EOS");
|
||||
// stop the pipeline
|
||||
GstStateChangeReturn ret = gst_element_set_state (pipeline_, GST_STATE_NULL);
|
||||
if (ret == GST_STATE_CHANGE_FAILURE)
|
||||
Log::Warning("VideoRecorder Could not stop");
|
||||
else
|
||||
Log::Notify("Recording %s ready.", filename_.c_str());
|
||||
|
||||
finished_ = true;
|
||||
}
|
||||
}
|
||||
// all good
|
||||
Log::Info("Video Recording started (%s)", profile_name[Settings::application.record.profile]);
|
||||
|
||||
// start recording !!
|
||||
active_ = true;
|
||||
}
|
||||
|
||||
void VideoRecorder::stop ()
|
||||
void VideoRecorder::terminate()
|
||||
{
|
||||
// send end of stream
|
||||
gst_app_src_end_of_stream (src_);
|
||||
// Log::Info("VideoRecorder push EOS");
|
||||
|
||||
// stop recording
|
||||
recording_ = false;
|
||||
Log::Notify("Video Recording %s is ready.", filename_.c_str());
|
||||
}
|
||||
|
||||
std::string VideoRecorder::info()
|
||||
std::string VideoRecorder::info() const
|
||||
{
|
||||
if (recording_)
|
||||
if (active_)
|
||||
return GstToolkit::time_to_string(timestamp_);
|
||||
else
|
||||
return "Saving file...";
|
||||
}
|
||||
|
||||
|
||||
double VideoRecorder::duration()
|
||||
{
|
||||
return gst_guint64_to_gdouble( GST_TIME_AS_MSECONDS(timestamp_) ) / 1000.0;
|
||||
}
|
||||
|
||||
bool VideoRecorder::busy()
|
||||
{
|
||||
return accept_buffer_ ? true : false;
|
||||
}
|
||||
|
||||
// appsrc needs data and we should start sending
|
||||
void VideoRecorder::callback_need_data (GstAppSrc *, guint , gpointer p)
|
||||
{
|
||||
// Log::Info("H264Recording callback_need_data");
|
||||
VideoRecorder *rec = (VideoRecorder *)p;
|
||||
if (rec) {
|
||||
rec->accept_buffer_ = rec->recording_ ? true : false;
|
||||
}
|
||||
}
|
||||
|
||||
// appsrc has enough data and we can stop sending
|
||||
void VideoRecorder::callback_enough_data (GstAppSrc *, gpointer p)
|
||||
{
|
||||
// Log::Info("H264Recording callback_enough_data");
|
||||
VideoRecorder *rec = (VideoRecorder *)p;
|
||||
if (rec) {
|
||||
rec->accept_buffer_ = false;
|
||||
}
|
||||
}
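Both the base FrameGrabber::addFrame() and the recorders above use the same fixed-rate pacing before pushing to appsrc. In isolation the logic reduces to the following sketch (standalone, with the same 30 FPS constant and ~10% margin as the code in this commit; variable names are illustrative):

    // Sketch of the fixed 30 FPS pacing used when pushing buffers to appsrc.
    // 'accept_buffer' mirrors the need-data / enough-data callbacks above.
    #include <gst/gst.h>
    #include <gst/app/gstappsrc.h>

    GstClockTime frame_duration = gst_util_uint64_scale_int(1, GST_SECOND, 30); // 30 FPS
    GstClockTime timeframe = 0, timestamp = 0;

    void push_if_due(GstAppSrc *src, GstBuffer *buffer, float dt_ms, bool accept_buffer)
    {
        timeframe += gst_gdouble_to_guint64(dt_ms * 1000000.f);   // dt converted to ns
        // push when roughly one frame duration has elapsed and the encoder wants data
        if (timeframe > frame_duration - 3000000 && accept_buffer) {
            buffer->pts = timestamp;
            buffer->duration = frame_duration;
            gst_buffer_ref(buffer);                 // appsrc takes ownership of one ref
            gst_app_src_push_buffer(src, buffer);
            timestamp += frame_duration;            // next timestamp
            timeframe = 0;                          // restart the frame counter
        }
    }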
|
||||
|
||||
|
||||
Recorder.h (34 lines)
@@ -15,7 +15,12 @@ class PNGRecorder : public FrameGrabber
|
||||
public:
|
||||
|
||||
PNGRecorder();
|
||||
void addFrame(FrameBuffer *frame_buffer, float) override;
|
||||
|
||||
protected:
|
||||
|
||||
void init(GstCaps *caps) override;
|
||||
void terminate() override;
|
||||
void addFrame(GstBuffer *buffer, GstCaps *caps, float dt) override;
|
||||
|
||||
};
|
||||
|
||||
@@ -24,24 +29,8 @@ class VideoRecorder : public FrameGrabber
|
||||
{
|
||||
std::string filename_;
|
||||
|
||||
// Frame buffer information
|
||||
FrameBuffer *frame_buffer_;
|
||||
uint width_;
|
||||
uint height_;
|
||||
|
||||
// operation
|
||||
std::atomic<bool> recording_;
|
||||
std::atomic<bool> accept_buffer_;
|
||||
|
||||
// gstreamer pipeline
|
||||
GstElement *pipeline_;
|
||||
GstAppSrc *src_;
|
||||
GstClockTime timeframe_;
|
||||
GstClockTime timestamp_;
|
||||
GstClockTime frame_duration_;
|
||||
|
||||
static void callback_need_data (GstAppSrc *, guint, gpointer user_data);
|
||||
static void callback_enough_data (GstAppSrc *, gpointer user_data);
|
||||
void init(GstCaps *caps) override;
|
||||
void terminate() override;
|
||||
|
||||
public:
|
||||
|
||||
@@ -60,13 +49,8 @@ public:
|
||||
static const std::vector<std::string> profile_description;
|
||||
|
||||
VideoRecorder();
|
||||
~VideoRecorder();
|
||||
std::string info() const override;
|
||||
|
||||
void addFrame(FrameBuffer *frame_buffer, float dt) override;
|
||||
void stop() override;
|
||||
std::string info() override;
|
||||
double duration() override;
|
||||
bool busy() override;
|
||||
};
|
||||
|
||||
|
||||
|
||||
Session.cpp (143 lines)
@@ -33,9 +33,6 @@ Session::Session() : failedSource_(nullptr), active_(true), fading_target_(0.f)
|
||||
|
||||
Session::~Session()
|
||||
{
|
||||
// delete all recorders
|
||||
clearAllFrameGrabbers();
|
||||
|
||||
// delete all sources
|
||||
for(auto it = sources_.begin(); it != sources_.end(); ) {
|
||||
// erase this source from the list
|
||||
@@ -89,22 +86,34 @@ void Session::update(float dt)
|
||||
// draw render view in Frame Buffer
|
||||
render_.draw();
|
||||
|
||||
// send frame to recorders
|
||||
std::list<FrameGrabber *>::iterator iter;
|
||||
for (iter=grabbers_.begin(); iter != grabbers_.end(); )
|
||||
{
|
||||
FrameGrabber *rec = *iter;
|
||||
// grab frames to recorders & streamers
|
||||
FrameGrabbing::manager().grabFrame(render_.frame(), dt);
|
||||
|
||||
rec->addFrame(render_.frame(), dt);
|
||||
// // send frame to recorders
|
||||
// std::list<FrameGrabber *>::iterator iter = iter=grabbers_.begin();
|
||||
// // if there is at least once frame grabber
|
||||
// if (iter != grabbers_.end()) {
|
||||
// // grab a frame (once for all recorders)
|
||||
// FrameGrabber::Buffer buf = FrameGrabber::grabFrame(render_.frame());
|
||||
|
||||
if (rec->finished()) {
|
||||
iter = grabbers_.erase(iter);
|
||||
delete rec;
|
||||
}
|
||||
else {
|
||||
iter++;
|
||||
}
|
||||
}
|
||||
// // give the frame to all recorders
|
||||
// while (iter != grabbers_.end())
|
||||
// {
|
||||
// FrameGrabber *rec = *iter;
|
||||
// rec->addFrame(buf, dt);
|
||||
|
||||
// if (rec->finished()) {
|
||||
// iter = grabbers_.erase(iter);
|
||||
// delete rec;
|
||||
// }
|
||||
// else {
|
||||
// iter++;
|
||||
// }
|
||||
// }
|
||||
|
||||
// gst_buffer_unref(buf.buffer);
|
||||
// // gst_clear_buffer(&buf.buffer);
|
||||
// }
|
||||
}
|
||||
|
||||
|
||||
@@ -295,64 +304,64 @@ int Session::index(SourceList::iterator it) const
|
||||
return index;
|
||||
}
|
||||
|
||||
void Session::addFrameGrabber(FrameGrabber *rec)
|
||||
{
|
||||
if (rec != nullptr)
|
||||
grabbers_.push_back(rec);
|
||||
}
|
||||
//void Session::addFrameGrabber(FrameGrabber *rec)
|
||||
//{
|
||||
// if (rec != nullptr)
|
||||
// grabbers_.push_back(rec);
|
||||
//}
|
||||
|
||||
|
||||
FrameGrabber *Session::frontFrameGrabber()
|
||||
{
|
||||
if (grabbers_.empty())
|
||||
return nullptr;
|
||||
else
|
||||
return grabbers_.front();
|
||||
}
|
||||
//FrameGrabber *Session::frontFrameGrabber()
|
||||
//{
|
||||
// if (grabbers_.empty())
|
||||
// return nullptr;
|
||||
// else
|
||||
// return grabbers_.front();
|
||||
//}
|
||||
|
||||
FrameGrabber *Session::getFrameGrabber(uint64_t id)
|
||||
{
|
||||
if (id > 0 && grabbers_.size() > 0 )
|
||||
{
|
||||
std::list<FrameGrabber *>::iterator iter = std::find_if(grabbers_.begin(), grabbers_.end(), FrameGrabber::hasId(id));
|
||||
if (iter != grabbers_.end())
|
||||
return (*iter);
|
||||
}
|
||||
//FrameGrabber *Session::getFrameGrabber(uint64_t id)
|
||||
//{
|
||||
// if (id > 0 && grabbers_.size() > 0 )
|
||||
// {
|
||||
// std::list<FrameGrabber *>::iterator iter = std::find_if(grabbers_.begin(), grabbers_.end(), FrameGrabber::hasId(id));
|
||||
// if (iter != grabbers_.end())
|
||||
// return (*iter);
|
||||
// }
|
||||
|
||||
return nullptr;
|
||||
}
|
||||
// return nullptr;
|
||||
//}
|
||||
|
||||
void Session::stopAllFrameGrabbers()
|
||||
{
|
||||
std::list<FrameGrabber *>::iterator iter;
|
||||
for (iter=grabbers_.begin(); iter != grabbers_.end(); )
|
||||
(*iter)->stop();
|
||||
}
|
||||
//void Session::stopAllFrameGrabbers()
|
||||
//{
|
||||
// std::list<FrameGrabber *>::iterator iter;
|
||||
// for (iter=grabbers_.begin(); iter != grabbers_.end(); )
|
||||
// (*iter)->stop();
|
||||
//}
|
||||
|
||||
void Session::clearAllFrameGrabbers()
|
||||
{
|
||||
std::list<FrameGrabber *>::iterator iter;
|
||||
for (iter=grabbers_.begin(); iter != grabbers_.end(); )
|
||||
{
|
||||
FrameGrabber *rec = *iter;
|
||||
rec->stop();
|
||||
iter = grabbers_.erase(iter);
|
||||
delete rec;
|
||||
}
|
||||
}
|
||||
//void Session::clearAllFrameGrabbers()
|
||||
//{
|
||||
// std::list<FrameGrabber *>::iterator iter;
|
||||
// for (iter=grabbers_.begin(); iter != grabbers_.end(); )
|
||||
// {
|
||||
// FrameGrabber *rec = *iter;
|
||||
// rec->stop();
|
||||
// iter = grabbers_.erase(iter);
|
||||
// delete rec;
|
||||
// }
|
||||
//}
|
||||
|
||||
void Session::transferFrameGrabber(Session *dest)
|
||||
{
|
||||
if (dest == nullptr)
|
||||
return;
|
||||
//void Session::transferFrameGrabber(Session *dest)
|
||||
//{
|
||||
// if (dest == nullptr)
|
||||
// return;
|
||||
|
||||
std::list<FrameGrabber *>::iterator iter;
|
||||
for (iter=grabbers_.begin(); iter != grabbers_.end(); )
|
||||
{
|
||||
dest->grabbers_.push_back(*iter);
|
||||
iter = grabbers_.erase(iter);
|
||||
}
|
||||
}
|
||||
// std::list<FrameGrabber *>::iterator iter;
|
||||
// for (iter=grabbers_.begin(); iter != grabbers_.end(); )
|
||||
// {
|
||||
// dest->grabbers_.push_back(*iter);
|
||||
// iter = grabbers_.erase(iter);
|
||||
// }
|
||||
//}
|
||||
|
||||
|
||||
void Session::lock()
|
||||
|
||||
Session.h (14 lines)
@@ -58,13 +58,13 @@ public:
|
||||
// get frame result of render
|
||||
inline FrameBuffer *frame () const { return render_.frame(); }
|
||||
|
||||
// Recorders
|
||||
void addFrameGrabber(FrameGrabber *rec);
|
||||
FrameGrabber *frontFrameGrabber();
|
||||
FrameGrabber *getFrameGrabber(uint64_t id);
|
||||
void stopAllFrameGrabbers();
|
||||
void clearAllFrameGrabbers();
|
||||
void transferFrameGrabber(Session *dest);
|
||||
// // Recorders
|
||||
// void addFrameGrabber(FrameGrabber *rec);
|
||||
// FrameGrabber *frontFrameGrabber();
|
||||
// FrameGrabber *getFrameGrabber(uint64_t id);
|
||||
// void stopAllFrameGrabbers();
|
||||
// void clearAllFrameGrabbers();
|
||||
// void transferFrameGrabber(Session *dest);
|
||||
|
||||
// configure rendering resolution
|
||||
void setResolution(glm::vec3 resolution);
|
||||
|
||||
Streamer.cpp (414 lines)
@@ -78,7 +78,7 @@ void wait_for_request_(UdpListeningReceiveSocket *receiver)
|
||||
receiver->Run();
|
||||
}
|
||||
|
||||
Streaming::Streaming() : enabled_(false), session_(nullptr), width_(0), height_(0)
|
||||
Streaming::Streaming() : enabled_(false)
|
||||
{
|
||||
int port = Connection::manager().info().port_stream_request;
|
||||
receiver_ = new UdpListeningReceiveSocket(IpEndpointName( IpEndpointName::ANY_ADDRESS, port ), &listener_ );
|
||||
@@ -140,22 +140,6 @@ void Streaming::enable(bool on)
|
||||
}
|
||||
}
|
||||
|
||||
void Streaming::setSession(Session *se)
|
||||
{
|
||||
if (se != nullptr && session_ != se) {
|
||||
session_ = se;
|
||||
FrameBuffer *f = session_->frame();
|
||||
width_ = f->width();
|
||||
height_ = f->height();
|
||||
}
|
||||
else {
|
||||
session_ = nullptr;
|
||||
width_ = 0;
|
||||
height_ = 0;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void Streaming::removeStream(const std::string &sender, int port)
|
||||
{
|
||||
// get ip of sender
|
||||
@@ -238,13 +222,16 @@ void Streaming::addStream(const std::string &sender, int reply_to, const std::st
|
||||
conf.client_address = sender_ip;
|
||||
conf.client_name = clientname;
|
||||
conf.port = std::stoi(sender_port); // this port seems free, so re-use it!
|
||||
conf.width = width_;
|
||||
conf.height = height_;
|
||||
conf.width = FrameGrabbing::manager().width();
|
||||
conf.height = FrameGrabbing::manager().height();
|
||||
|
||||
// TEMP DISABLED : TODO Fix snap to allow system wide shared access
|
||||
|
||||
// offer SHM if same IP that our host IP (i.e. on the same machine)
|
||||
if( NetworkToolkit::is_host_ip(conf.client_address) )
|
||||
conf.protocol = NetworkToolkit::SHM_RAW;
|
||||
// any other IP : offer network streaming
|
||||
else
|
||||
// if( NetworkToolkit::is_host_ip(conf.client_address) )
|
||||
// conf.protocol = NetworkToolkit::SHM_RAW;
|
||||
// // any other IP : offer network streaming
|
||||
// else
|
||||
conf.protocol = NetworkToolkit::UDP_JPEG;
|
||||
|
||||
// build OSC message
|
||||
@@ -272,290 +259,141 @@ void Streaming::addStream(const std::string &sender, int reply_to, const std::st
|
||||
streamers_lock_.unlock();
|
||||
|
||||
// start streamer
|
||||
session_->addFrameGrabber(streamer);
|
||||
FrameGrabbing::manager().add(streamer);
|
||||
}
|
||||
|
||||
|
||||
VideoStreamer::VideoStreamer(NetworkToolkit::StreamConfig conf): FrameGrabber(), frame_buffer_(nullptr), width_(0), height_(0),
|
||||
streaming_(false), accept_buffer_(false), pipeline_(nullptr), src_(nullptr), timestamp_(0)
|
||||
VideoStreamer::VideoStreamer(NetworkToolkit::StreamConfig conf): FrameGrabber(), config_(conf)
|
||||
{
|
||||
// configure fix parameter
|
||||
frame_duration_ = gst_util_uint64_scale_int (1, GST_SECOND, 30); // 30 FPS
|
||||
timeframe_ = 2 * frame_duration_;
|
||||
|
||||
config_ = conf;
|
||||
}
|
||||
|
||||
VideoStreamer::~VideoStreamer()
|
||||
void VideoStreamer::init(GstCaps *caps)
|
||||
{
|
||||
if (src_ != nullptr)
|
||||
gst_object_unref (src_);
|
||||
if (pipeline_ != nullptr) {
|
||||
gst_element_set_state (pipeline_, GST_STATE_NULL);
|
||||
gst_object_unref (pipeline_);
|
||||
}
|
||||
|
||||
if (pbo_[0] > 0)
|
||||
glDeleteBuffers(2, pbo_);
|
||||
}
|
||||
|
||||
|
||||
void VideoStreamer::addFrame (FrameBuffer *frame_buffer, float dt)
{
// ignore
if (frame_buffer == nullptr)
if (caps == nullptr)
return;

// first frame for initialization
if (frame_buffer_ == nullptr) {
// check that config matches the given buffer properties
gint w = 0, h = 0;
GstStructure *capstruct = gst_caps_get_structure (caps, 0);
if ( gst_structure_has_field (capstruct, "width"))
gst_structure_get_int (capstruct, "width", &w);
if ( gst_structure_has_field (capstruct, "height"))
gst_structure_get_int (capstruct, "height", &h);
if ( config_.width != w || config_.height != h) {
Log::Warning("Streaming cannot start: given frames (%d x %d) incompatible with stream (%d x %d)",
w, h, config_.width, config_.height);
finished_ = true;
return;
}

// set frame buffer as input
frame_buffer_ = frame_buffer;
// prevent erroneous protocol values
if (config_.protocol < 0 || config_.protocol >= NetworkToolkit::DEFAULT)
config_.protocol = NetworkToolkit::UDP_JPEG;

// define frames properties
width_ = frame_buffer_->width();
height_ = frame_buffer_->height();
size_ = width_ * height_ * (frame_buffer_->use_alpha() ? 4 : 3);
// create a gstreamer pipeline
std::string description = "appsrc name=src ! videoconvert ! ";
description += NetworkToolkit::protocol_send_pipeline[config_.protocol];

// if an incompatible frame buffer given: cancel streaming
if ( config_.width != width_ || config_.height != height_) {
Log::Warning("Streaming cannot start: given frames (%d x %d) incompatible with stream (%d x %d)",
width_, height_, config_.width, config_.height);
finished_ = true;
return;
}
// parse pipeline descriptor
GError *error = NULL;
pipeline_ = gst_parse_launch (description.c_str(), &error);
if (error != NULL) {
Log::Warning("VideoStreamer Could not construct pipeline %s:\n%s", description.c_str(), error->message);
g_clear_error (&error);
finished_ = true;
return;
}

// create PBOs
glGenBuffers(2, pbo_);
glBindBuffer(GL_PIXEL_PACK_BUFFER, pbo_[1]);
glBufferData(GL_PIXEL_PACK_BUFFER, size_, NULL, GL_STREAM_READ);
glBindBuffer(GL_PIXEL_PACK_BUFFER, pbo_[0]);
glBufferData(GL_PIXEL_PACK_BUFFER, size_, NULL, GL_STREAM_READ);
// setup streaming sink
if (config_.protocol == NetworkToolkit::UDP_JPEG || config_.protocol == NetworkToolkit::UDP_H264) {
g_object_set (G_OBJECT (gst_bin_get_by_name (GST_BIN (pipeline_), "sink")),
"host", config_.client_address.c_str(),
"port", config_.port, NULL);
}
else if (config_.protocol == NetworkToolkit::SHM_RAW) {
std::string path = SystemToolkit::full_filename(SystemToolkit::temp_path(), "shm");
path += std::to_string(config_.port);
g_object_set (G_OBJECT (gst_bin_get_by_name (GST_BIN (pipeline_), "sink")),
"socket-path", path.c_str(), NULL);
}

// prevent erroneous protocol values
if (config_.protocol < 0 || config_.protocol >= NetworkToolkit::DEFAULT)
config_.protocol = NetworkToolkit::UDP_JPEG;
// setup custom app source
src_ = GST_APP_SRC( gst_bin_get_by_name (GST_BIN (pipeline_), "src") );
if (src_) {

// create a gstreamer pipeline
std::string description = "appsrc name=src ! videoconvert ! ";
description += NetworkToolkit::protocol_send_pipeline[config_.protocol];
g_object_set (G_OBJECT (src_),
"stream-type", GST_APP_STREAM_TYPE_STREAM,
"is-live", TRUE,
"format", GST_FORMAT_TIME,
// "do-timestamp", TRUE,
NULL);

// parse pipeline descriptor
GError *error = NULL;
pipeline_ = gst_parse_launch (description.c_str(), &error);
if (error != NULL) {
Log::Warning("VideoStreamer Could not construct pipeline %s:\n%s", description.c_str(), error->message);
g_clear_error (&error);
finished_ = true;
return;
}
// Direct encoding (no buffering)
gst_app_src_set_max_bytes( src_, 0 );

// setup streaming sink
if (config_.protocol == NetworkToolkit::UDP_JPEG || config_.protocol == NetworkToolkit::UDP_H264) {
g_object_set (G_OBJECT (gst_bin_get_by_name (GST_BIN (pipeline_), "sink")),
"host", config_.client_address.c_str(),
"port", config_.port, NULL);
}
else if (config_.protocol == NetworkToolkit::SHM_RAW) {
// TODO rename SHM socket "shm_PORT"
std::string path = SystemToolkit::full_filename(SystemToolkit::temp_path(), "shm");
path += std::to_string(config_.port);
g_object_set (G_OBJECT (gst_bin_get_by_name (GST_BIN (pipeline_), "sink")),
"socket-path", path.c_str(), NULL);
}
// instruct src to use the required caps
caps_ = gst_caps_copy( caps );
gst_app_src_set_caps (src_, caps_);

// setup custom app source
src_ = GST_APP_SRC( gst_bin_get_by_name (GST_BIN (pipeline_), "src") );
if (src_) {
// setup callbacks
GstAppSrcCallbacks callbacks;
callbacks.need_data = FrameGrabber::callback_need_data;
callbacks.enough_data = FrameGrabber::callback_enough_data;
callbacks.seek_data = NULL; // stream type is not seekable
gst_app_src_set_callbacks (src_, &callbacks, this, NULL);

g_object_set (G_OBJECT (src_),
"stream-type", GST_APP_STREAM_TYPE_STREAM,
"is-live", TRUE,
"format", GST_FORMAT_TIME,
NULL);
}
else {
Log::Warning("VideoStreamer Could not configure capture source");
finished_ = true;
return;
}

// Direct encoding (no buffering)
gst_app_src_set_max_bytes( src_, 0 );
// start the pipeline
GstStateChangeReturn ret = gst_element_set_state (pipeline_, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
Log::Warning("VideoStreamer failed");
finished_ = true;
return;
}

// instruct src to use the required caps
GstCaps *caps = gst_caps_new_simple ("video/x-raw",
"format", G_TYPE_STRING, frame_buffer_->use_alpha() ? "RGBA" : "RGB",
"width", G_TYPE_INT, width_,
"height", G_TYPE_INT, height_,
"framerate", GST_TYPE_FRACTION, 30, 1,
NULL);
gst_app_src_set_caps (src_, caps);
gst_caps_unref (caps);
// all good
Log::Notify("Streaming to %s.", config_.client_name.c_str());

// setup callbacks
GstAppSrcCallbacks callbacks;
callbacks.need_data = callback_need_data;
callbacks.enough_data = callback_enough_data;
callbacks.seek_data = NULL; // stream type is not seekable
gst_app_src_set_callbacks (src_, &callbacks, this, NULL);
// start streaming !!
active_ = true;
}

}
else {
Log::Warning("VideoStreamer Could not configure capture source");
finished_ = true;
return;
}

// start the pipeline
GstStateChangeReturn ret = gst_element_set_state (pipeline_, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
Log::Warning("VideoStreamer failed");
finished_ = true;
return;
}

// all good
Log::Info("Streaming video to %s (%d x %d)",
config_.client_name.c_str(), width_, height_);

// start streaming !!
streaming_ = true;

}
// frame buffer changed ?
else if (frame_buffer_ != frame_buffer) {

// if an incompatible frame buffer given: stop streaming
if ( frame_buffer->width() != width_ ||
frame_buffer->height() != height_ ||
frame_buffer->use_alpha() != frame_buffer_->use_alpha()) {

Streaming::manager().removeStream(config_.client_address, config_.port);
Log::Warning("Streaming interrupted: new session (%d x %d) incompatible with stream (%d x %d)", frame_buffer->width(), frame_buffer->height(), width_, height_);
}
else {
// accepting a new frame buffer as input
frame_buffer_ = frame_buffer;
}
}

// store a frame if streaming is active
if (streaming_ && size_ > 0)
{
// calculate dt in ns
timeframe_ += gst_gdouble_to_guint64( dt * 1000000.f);

// if time has passed one frame duration (with 10% margin)
// and if the encoder accepts data
if ( timeframe_ > frame_duration_ - 3000000 && accept_buffer_) {

// set buffer target for writing in a new frame
glBindBuffer(GL_PIXEL_PACK_BUFFER, pbo_[pbo_index_]);

#ifdef USE_GLREADPIXEL
// get frame
frame_buffer->readPixels();
#else
glBindTexture(GL_TEXTURE_2D, frame_buffer->texture());
glGetTexImage(GL_TEXTURE_2D, 0, GL_RGB, GL_UNSIGNED_BYTE, 0);
#endif

// update case ; alternating indices
if ( pbo_next_index_ != pbo_index_ ) {

// set buffer target for saving the frame
glBindBuffer(GL_PIXEL_PACK_BUFFER, pbo_[pbo_next_index_]);

// new buffer
GstBuffer *buffer = gst_buffer_new_and_alloc (size_);

// set timing of buffer
buffer->pts = timestamp_;
buffer->duration = frame_duration_;

// map gst buffer into a memory WRITE target
GstMapInfo map;
gst_buffer_map (buffer, &map, GST_MAP_WRITE);

// map PBO pixels into a memory READ pointer
unsigned char* ptr = (unsigned char*) glMapBuffer(GL_PIXEL_PACK_BUFFER, GL_READ_ONLY);

// transfer pixels from PBO memory to buffer memory
if (NULL != ptr)
memmove(map.data, ptr, size_);

// un-map
glUnmapBuffer(GL_PIXEL_PACK_BUFFER);
gst_buffer_unmap (buffer, &map);

// push
gst_app_src_push_buffer (src_, buffer);
// NB: buffer will be unrefed by the appsrc

accept_buffer_ = false;

// next timestamp
timestamp_ += frame_duration_;
}

glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);

// alternate indices
pbo_next_index_ = pbo_index_;
pbo_index_ = (pbo_index_ + 1) % 2;

// restart frame counter
timeframe_ = 0;
}

}
// did the streaming receive end-of-stream ?
else if (!finished_)
{
// Wait for EOS message
GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline_));
GstMessage *msg = gst_bus_poll(bus, GST_MESSAGE_EOS, GST_TIME_AS_USECONDS(4));

if (msg) {
// stop the pipeline
GstStateChangeReturn ret = gst_element_set_state (pipeline_, GST_STATE_NULL);
#ifdef STREAMER_DEBUG
if (ret == GST_STATE_CHANGE_FAILURE)
Log::Info("Streaming to %s:%d could not stop properly.", config_.client_address.c_str(), config_.port);
else
Log::Info("Streaming to %s:%d ending...", config_.client_address.c_str(), config_.port);
#endif
finished_ = true;
}
}
// finished !
else {

// send EOS
gst_app_src_end_of_stream (src_);

// make sure the shared memory socket is deleted
if (config_.protocol == NetworkToolkit::SHM_RAW) {
std::string path = SystemToolkit::full_filename(SystemToolkit::settings_path(), "shm");
path += std::to_string(config_.port);
SystemToolkit::remove_file(path);
}

Log::Notify("Streaming to %s finished after %s s.", config_.client_name.c_str(),
GstToolkit::time_to_string(timestamp_).c_str());

}
void VideoStreamer::terminate()
{
// send EOS
gst_app_src_end_of_stream (src_);

// make sure the shared memory socket is deleted
if (config_.protocol == NetworkToolkit::SHM_RAW) {
std::string path = SystemToolkit::full_filename(SystemToolkit::temp_path(), "shm");
path += std::to_string(config_.port);
SystemToolkit::remove_file(path);
}

Log::Notify("Streaming to %s finished after %s s.", config_.client_name.c_str(),
GstToolkit::time_to_string(timestamp_).c_str());
}

void VideoStreamer::stop ()
{
// stop streaming
streaming_ = false;
finished_ = true;
FrameGrabber::stop ();

// force finished
finished_ = true;
}

std::string VideoStreamer::info()
std::string VideoStreamer::info() const
{
std::ostringstream ret;
if (streaming_) {
if (active_) {
ret << NetworkToolkit::protocol_name[config_.protocol];
ret << " to ";
ret << config_.client_name;
@@ -564,37 +402,3 @@ std::string VideoStreamer::info()
ret << "Streaming terminated.";
return ret.str();
}


double VideoStreamer::duration()
{
return gst_guint64_to_gdouble( GST_TIME_AS_MSECONDS(timestamp_) ) / 1000.0;
}

bool VideoStreamer::busy()
{
if (streaming_)
return accept_buffer_ ? true : false;
else
return false;
}

// appsrc needs data and we should start sending
void VideoStreamer::callback_need_data (GstAppSrc *, guint , gpointer p)
{
VideoStreamer *rec = (VideoStreamer *)p;
if (rec) {
rec->accept_buffer_ = true;
// Log::Info("VideoStreamer need_data");
}
}

// appsrc has enough data and we can stop sending
void VideoStreamer::callback_enough_data (GstAppSrc *, gpointer p)
{
// Log::Info("VideoStreamer enough_data");
VideoStreamer *rec = (VideoStreamer *)p;
if (rec) {
rec->accept_buffer_ = false;
}
}
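Side note: these two callbacks only flip accept_buffer_; the actual pacing happens in addFrame() above, which pushes a buffer only when a frame period has elapsed and the encoder is asking for data. Below is a standalone sketch of that throttling pattern, written as an analogy with placeholder timing values; it is not code from this commit.

#include <atomic>
#include <chrono>
#include <cstdio>
#include <thread>

static std::atomic<bool> accept_buffer{false};

void on_need_data()   { accept_buffer = true;  }   // analogue of callback_need_data
void on_enough_data() { accept_buffer = false; }   // analogue of callback_enough_data

int main()
{
    // consumer thread: periodically asks for one more frame
    std::thread consumer([] {
        for (int i = 0; i < 5; ++i) {
            std::this_thread::sleep_for(std::chrono::milliseconds(40));
            on_need_data();
        }
    });

    const auto frame_duration = std::chrono::milliseconds(33);  // ~30 fps, placeholder
    auto timeframe = std::chrono::milliseconds(0);
    int frames_sent = 0;

    // producer loop: equivalent of repeated addFrame() calls
    for (int tick = 0; tick < 30; ++tick) {
        std::this_thread::sleep_for(std::chrono::milliseconds(10));
        timeframe += std::chrono::milliseconds(10);              // accumulate elapsed time
        if (timeframe >= frame_duration && accept_buffer) {      // time elapsed AND demand
            std::printf("frame %d pushed\n", ++frames_sent);
            accept_buffer = false;                               // like clearing accept_buffer_ after a push
            timeframe = std::chrono::milliseconds(0);            // restart frame counter
        }
    }
    consumer.join();
    return 0;
}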

32
Streamer.h
@@ -44,7 +44,6 @@ public:

void enable(bool on);
inline bool enabled() const { return enabled_; }
void setSession(Session *se);
void removeStreams(const std::string &clientname);
void removeStream(const std::string &sender, int port);

@@ -61,10 +60,6 @@ private:
StreamingRequestListener listener_;
UdpListeningReceiveSocket *receiver_;

Session *session_;
int width_;
int height_;

std::vector<VideoStreamer *> streamers_;
std::mutex streamers_lock_;
};
@@ -73,39 +68,18 @@ class VideoStreamer : public FrameGrabber
{
friend class Streaming;

void init(GstCaps *caps) override;
void terminate() override;
void stop() override;

// Frame buffer information
FrameBuffer *frame_buffer_;
uint width_;
uint height_;

// connection information
NetworkToolkit::StreamConfig config_;

// operation
std::atomic<bool> streaming_;
std::atomic<bool> accept_buffer_;

// gstreamer pipeline
GstElement *pipeline_;
GstAppSrc *src_;
GstClockTime timeframe_;
GstClockTime timestamp_;
GstClockTime frame_duration_;

static void callback_need_data (GstAppSrc *, guint, gpointer user_data);
static void callback_enough_data (GstAppSrc *, gpointer user_data);

public:

VideoStreamer(NetworkToolkit::StreamConfig conf);
~VideoStreamer();
std::string info() const override;

void addFrame(FrameBuffer *frame_buffer, float dt) override;
std::string info() override;
double duration() override;
bool busy() override;
};

#endif // STREAMER_H
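Usage note: with the declaration above, a streamer is built from a StreamConfig and handed to the frame-grabbing manager. A rough sketch follows; header names and field values are assumptions for illustration only, and in vimix this is actually done by Streaming::addStream() in response to a connection request.

// Sketch only: assumes the project headers provide these declarations.
#include "NetworkToolkit.h"   // assumed header for StreamConfig / protocol enums
#include "FrameGrabber.h"     // assumed header for the FrameGrabbing manager
#include "Streamer.h"

void start_example_stream()
{
    NetworkToolkit::StreamConfig conf;
    conf.client_name    = "example-client";   // placeholder peer name
    conf.client_address = "127.0.0.1";        // placeholder peer address
    conf.port           = 5400;               // placeholder free port
    conf.protocol       = NetworkToolkit::UDP_JPEG;
    conf.width          = FrameGrabbing::manager().width();
    conf.height         = FrameGrabbing::manager().height();

    // the manager keeps the grabber and feeds it a frame on each render loop
    FrameGrabbing::manager().add(new VideoStreamer(conf));
}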
@@ -266,6 +266,7 @@ string SystemToolkit::temp_path()

temp += PATH_SEP;
return temp;
// TODO : verify WIN32 implementation
}

string SystemToolkit::full_filename(const std::string& path, const string &filename)

@@ -52,6 +52,7 @@ using namespace std;
#include "Mixer.h"
#include "Recorder.h"
#include "Streamer.h"
#include "Loopback.h"
#include "Selection.h"
#include "FrameBuffer.h"
#include "MediaPlayer.h"
@@ -164,8 +165,9 @@ UserInterface::UserInterface()
currentTextEdit = "";
screenshot_step = 0;

// keep hold on frame grabbers
video_recorder_ = 0;
// video_streamer_ = 0;
webcam_emulator_ = 0;
}

bool UserInterface::Init()
@@ -303,7 +305,7 @@ void UserInterface::handleKeyboard()
}
else if (ImGui::IsKeyPressed( GLFW_KEY_R )) {
// toggle recording
FrameGrabber *rec = Mixer::manager().session()->getFrameGrabber(video_recorder_);
FrameGrabber *rec = FrameGrabbing::manager().get(video_recorder_);
if (rec) {
rec->stop();
video_recorder_ = 0;
@@ -311,7 +313,7 @@ void UserInterface::handleKeyboard()
else {
FrameGrabber *fg = new VideoRecorder;
video_recorder_ = fg->id();
Mixer::manager().session()->addFrameGrabber(fg);
FrameGrabbing::manager().add(fg);
}
}
else if (ImGui::IsKeyPressed( GLFW_KEY_Z )) {
@@ -791,7 +793,7 @@ void UserInterface::Render()
&Settings::application.widget.stats_timer);

// management of video_recorder
FrameGrabber *rec = Mixer::manager().session()->getFrameGrabber(video_recorder_);
FrameGrabber *rec = FrameGrabbing::manager().get(video_recorder_);
if (rec && rec->duration() > Settings::application.record.timeout ){
rec->stop();
video_recorder_ = 0;
@@ -1085,6 +1087,7 @@ void UserInterface::RenderHistory()

void UserInterface::RenderPreview()
{
bool openInitializeSystemLoopback = false;
struct CustomConstraints // Helper functions for aspect-ratio constraints
{
static void AspectRatio(ImGuiSizeCallbackData* data) {
@@ -1107,7 +1110,8 @@ void UserInterface::RenderPreview()

}

FrameGrabber *rec = Mixer::manager().session()->getFrameGrabber(video_recorder_);
FrameGrabber *rec = FrameGrabbing::manager().get(video_recorder_);
FrameGrabber *cam = FrameGrabbing::manager().get(webcam_emulator_);

// return from thread for folder opening
if ( !recordFolderFileDialogs.empty() ) {
@@ -1139,6 +1143,9 @@ void UserInterface::RenderPreview()
}
if (ImGui::BeginMenu("Record"))
{
if ( ImGui::MenuItem( ICON_FA_CAMERA_RETRO " Capture frame (PNG)") )
FrameGrabbing::manager().add(new PNGRecorder);

// Stop recording menu if main recorder already exists
if (rec) {
ImGui::PushStyleColor(ImGuiCol_Text, ImVec4(IMGUI_COLOR_RECORD, 0.8f));
@@ -1157,16 +1164,14 @@ void UserInterface::RenderPreview()
if ( ImGui::MenuItem( ICON_FA_CIRCLE " Record", CTRL_MOD "R") ) {
FrameGrabber *fg = new VideoRecorder;
video_recorder_ = fg->id();
Mixer::manager().session()->addFrameGrabber(fg);
FrameGrabbing::manager().add(fg);
}
ImGui::PopStyleColor(1);
// select profile
ImGui::SetNextItemWidth(300);
ImGui::Combo("##RecProfile", &Settings::application.record.profile, VideoRecorder::profile_name, IM_ARRAYSIZE(VideoRecorder::profile_name) );
ImGui::Combo("Codec", &Settings::application.record.profile, VideoRecorder::profile_name, IM_ARRAYSIZE(VideoRecorder::profile_name) );
}

if ( ImGui::MenuItem( ICON_FA_CAMERA_RETRO " Capture frame (PNG)") )
Mixer::manager().session()->addFrameGrabber(new PNGRecorder);

// Options menu
ImGui::Separator();
@@ -1205,8 +1210,28 @@ void UserInterface::RenderPreview()

ImGui::EndMenu();
}
if (ImGui::BeginMenu("Stream"))
if (ImGui::BeginMenu("Share stream"))
{
#if defined(LINUX)
bool on = cam != nullptr;
if ( ImGui::MenuItem( ICON_FA_CAMERA " Emulate video camera", NULL, &on) ) {
if (on && cam == nullptr) {
if (webcam_emulator_ > 0)
webcam_emulator_ = 0;
if (Loopback::systemLoopbackInitialized()) {
FrameGrabber *fg = new Loopback;
webcam_emulator_ = fg->id();
FrameGrabbing::manager().add(fg);
}
else
openInitializeSystemLoopback = true;
}
if (!on && cam != nullptr) {
cam->stop();
webcam_emulator_ = 0;
}
}
#endif
ImGui::PushStyleColor(ImGuiCol_Text, ImVec4(IMGUI_COLOR_STREAM, 0.9f));
if ( ImGui::MenuItem( ICON_FA_SHARE_ALT " Accept connections", NULL, &Settings::application.accept_connections) ) {
Streaming::manager().enable(Settings::application.accept_connections);
@@ -1216,7 +1241,7 @@ void UserInterface::RenderPreview()
{
static char dummy_str[512];
sprintf(dummy_str, "%s", Connection::manager().info().name.c_str());
ImGui::InputText("My network ID", dummy_str, IM_ARRAYSIZE(dummy_str), ImGuiInputTextFlags_ReadOnly);
ImGui::InputText("My ID", dummy_str, IM_ARRAYSIZE(dummy_str), ImGuiInputTextFlags_ReadOnly);

std::vector<std::string> ls = Streaming::manager().listStreams();
if (ls.size()>0) {
@@ -1232,7 +1257,6 @@ void UserInterface::RenderPreview()
ImGui::EndMenuBar();
}


float width = ImGui::GetContentRegionAvail().x;

ImVec2 imagesize ( width, width / ar);
@@ -1274,8 +1298,44 @@ void UserInterface::RenderPreview()
ImGui::PopFont();
}


ImGui::End();
}

#if defined(LINUX)
if (openInitializeSystemLoopback && !ImGui::IsPopupOpen("Initialize System Loopback"))
ImGui::OpenPopup("Initialize System Loopback");
if (ImGui::BeginPopupModal("Initialize System Loopback", NULL, ImGuiWindowFlags_AlwaysAutoResize))
{
int w = 600;
ImGui::Text("In order to enable the video4linux camera loopback,\n"
"'v4l2loopack' has to be installed and initialized on your machine\n\n"
|
||||
"To do so, the following commands should be executed (admin rights):\n");
|
||||

static char dummy_str[512];
sprintf(dummy_str, "sudo apt install v4l2loopback-dkms");
ImGui::SetNextItemWidth(w + 20);
ImGui::InputText("##cmd1", dummy_str, IM_ARRAYSIZE(dummy_str), ImGuiInputTextFlags_ReadOnly);
sprintf(dummy_str, "sudo modprobe v4l2loopback exclusive_caps=1 video_nr=10 card_label=\"vimix loopback\"");
ImGui::SetNextItemWidth(w + 20);
ImGui::InputText("##cmd2", dummy_str, IM_ARRAYSIZE(dummy_str), ImGuiInputTextFlags_ReadOnly);

ImGui::Separator();
if (ImGui::Button("Cancel, I'll do it.\n(try again later)", ImVec2(w/2, 0))) { ImGui::CloseCurrentPopup(); }
ImGui::SameLine();
ImGui::SetItemDefaultFocus();
if (ImGui::Button("Ok, let vimix try.\n(sudo password required)", ImVec2(w/2, 0)) ) {
if (Loopback::initializeSystemLoopback()) {
FrameGrabber *fg = new Loopback;
webcam_emulator_ = fg->id();
FrameGrabbing::manager().add(fg);
}
ImGui::CloseCurrentPopup();
}

ImGui::EndPopup();
}
#endif
}

void UserInterface::showMediaPlayer(MediaPlayer *mp)

@@ -117,7 +117,7 @@ class UserInterface

// frame grabbers
uint64_t video_recorder_;
// uint64_t video_streamer_;
uint64_t webcam_emulator_;

// Private Constructor
UserInterface();