Creation of the base class StreamSource. It is intended to become the
base class for all gstreamer pipeline sources; DeviceSource and
PatternSource inherit from it. A generic stream source class
(GenericStreamSource) is created for development tests and hacking.
brunoherbelin
2020-09-22 22:58:37 +02:00
parent 9251aff19f
commit b7d54dfadf
15 changed files with 392 additions and 272 deletions
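
A minimal usage sketch (not part of the commit) of the new generic stream source, based on the Mixer::createSourceStream() and ToolBox changes below. The function name and the pipeline description "videotestsrc ! videoconvert" are illustrative assumptions, not taken from the diff:

#include "Mixer.h"

// Illustrative only: create a generic stream source from a gstreamer
// launch description, the same way the new ToolBox input field does.
void createTestStreamSource()
{
    // createSourceStream() builds a GenericStreamSource and proposes a
    // name from the first characters of the description (see Mixer.cpp)
    Source *s = Mixer::manager().createSourceStream("videotestsrc ! videoconvert");
    // hand the source over to the Mixer, as the ToolBox button does
    Mixer::manager().addSource(s);
}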

CMakeLists.txt

@@ -238,6 +238,7 @@ set(VMIX_SRCS
Stream.cpp Stream.cpp
MediaPlayer.cpp MediaPlayer.cpp
MediaSource.cpp MediaSource.cpp
StreamSource.cpp
PatternSource.cpp PatternSource.cpp
DeviceSource.cpp DeviceSource.cpp
FrameBuffer.cpp FrameBuffer.cpp

DeviceSource.cpp

@@ -27,7 +27,6 @@ void Device::open( uint device )
{ {
device_ = CLAMP(device, 0, 2); device_ = CLAMP(device, 0, 2);
single_frame_ = false;
live_ = true; live_ = true;
// std::string desc = "v4l2src ! video/x-raw,width=320,height=240,framerate=30/1 ! videoconvert"; // std::string desc = "v4l2src ! video/x-raw,width=320,height=240,framerate=30/1 ! videoconvert";
@@ -38,133 +37,34 @@ void Device::open( uint device )
std::string desc = "ximagesrc endx=640 endy=480 ! video/x-raw,framerate=5/1 ! videoconvert ! queue"; std::string desc = "ximagesrc endx=640 endy=480 ! video/x-raw,framerate=5/1 ! videoconvert ! queue";
// (private) open stream // (private) open stream
open(desc); Stream::open(desc);
} }
void Device::open(std::string gstreamer_description) DeviceSource::DeviceSource() : StreamSource()
{
// set gstreamer pipeline source
description_ = gstreamer_description;
// close before re-openning
if (isOpen())
close();
execute_open();
}
DeviceSource::DeviceSource() : Source()
{ {
// create stream // create stream
stream_ = new Device(); stream_ = (Stream *) new Device();
// create surface // icon in mixing view
devicesurface_ = new Surface(renderingshader_); overlays_[View::MIXING]->attach( new Symbol(Symbol::EMPTY, glm::vec3(0.8f, 0.8f, 0.01f)) );
} overlays_[View::LAYER]->attach( new Symbol(Symbol::EMPTY, glm::vec3(0.8f, 0.8f, 0.01f)) );
DeviceSource::~DeviceSource()
{
// delete media surface & stream
delete devicesurface_;
delete stream_;
}
bool DeviceSource::failed() const
{
return stream_->failed();
}
uint DeviceSource::texture() const
{
return stream_->texture();
}
void DeviceSource::replaceRenderingShader()
{
devicesurface_->replaceShader(renderingshader_);
} }
void DeviceSource::setDevice(int id) void DeviceSource::setDevice(int id)
{ {
Log::Notify("Openning device %d", id); Log::Notify("Openning device %d", id);
stream_->open(id); device()->open(id);
stream_->play(true); stream_->play(true);
} }
void DeviceSource::init()
{
if ( stream_->isOpen() ) {
// update video
stream_->update();
// once the texture of media player is created
if (stream_->texture() != Resource::getTextureBlack()) {
// get the texture index from media player, apply it to the media surface
devicesurface_->setTextureIndex( stream_->texture() );
// create Frame buffer matching size of media player
float height = float(stream_->width()) / stream_->aspectRatio();
FrameBuffer *renderbuffer = new FrameBuffer(stream_->width(), (uint)height, true);
// set the renderbuffer of the source and attach rendering nodes
attach(renderbuffer);
// icon in mixing view
overlays_[View::MIXING]->attach( new Symbol(Symbol::EMPTY, glm::vec3(0.8f, 0.8f, 0.01f)) );
overlays_[View::LAYER]->attach( new Symbol(Symbol::EMPTY, glm::vec3(0.8f, 0.8f, 0.01f)) );
// done init
initialized_ = true;
Log::Info("Source Device linked to Stream %d.", stream_->description().c_str());
// force update of activation mode
active_ = true;
touch();
}
}
}
void DeviceSource::setActive (bool on)
{
bool was_active = active_;
Source::setActive(on);
// change status of media player (only if status changed)
if ( active_ != was_active ) {
stream_->enable(active_);
}
}
void DeviceSource::update(float dt)
{
Source::update(dt);
// update stream
stream_->update();
}
void DeviceSource::render()
{
if (!initialized_)
init();
else {
// render the media player into frame buffer
static glm::mat4 projection = glm::ortho(-1.f, 1.f, 1.f, -1.f, -1.f, 1.f);
renderbuffer_->begin();
devicesurface_->draw(glm::identity<glm::mat4>(), projection);
renderbuffer_->end();
}
}
void DeviceSource::accept(Visitor& v) void DeviceSource::accept(Visitor& v)
{ {
Source::accept(v); Source::accept(v);
v.visit(*this); v.visit(*this);
} }
Device *DeviceSource::device() const
{
return dynamic_cast<Device *>(stream_);
}

DeviceSource.h

@@ -1,8 +1,7 @@
#ifndef DEVICESOURCE_H #ifndef DEVICESOURCE_H
#define DEVICESOURCE_H #define DEVICESOURCE_H
#include "Stream.h" #include "StreamSource.h"
#include "Source.h"
class Device : public Stream class Device : public Stream
{ {
@@ -14,35 +13,24 @@ public:
glm::ivec2 resolution(); glm::ivec2 resolution();
private: private:
void open( std::string description ) override;
uint device_; uint device_;
}; };
class DeviceSource : public Source class DeviceSource : public StreamSource
{ {
public: public:
DeviceSource(); DeviceSource();
~DeviceSource();
// implementation of source API // Source interface
void update (float dt) override;
void setActive (bool on) override;
void render() override;
bool failed() const override;
uint texture() const override;
void accept (Visitor& v) override; void accept (Visitor& v) override;
// Pattern specific interface // StreamSource interface
inline Device *device() const { return stream_; } Stream *stream() const override { return stream_; }
// specific interface
Device *device() const;
void setDevice(int id); void setDevice(int id);
protected:
void init() override;
void replaceRenderingShader() override;
Surface *devicesurface_;
Device *stream_;
}; };
#endif // DEVICESOURCE_H #endif // DEVICESOURCE_H

Mixer.cpp

@@ -24,6 +24,7 @@ using namespace tinyxml2;
#include "MediaSource.h" #include "MediaSource.h"
#include "PatternSource.h" #include "PatternSource.h"
#include "DeviceSource.h" #include "DeviceSource.h"
#include "StreamSource.h"
#include "Mixer.h" #include "Mixer.h"
@@ -266,6 +267,18 @@ Source * Mixer::createSourceRender()
return s; return s;
} }
Source * Mixer::createSourceStream(const std::string &gstreamerpipeline)
{
// ready to create a source
GenericStreamSource *s = new GenericStreamSource;
s->setDescription(gstreamerpipeline);
// propose a new name based on pattern name
renameSource(s, gstreamerpipeline.substr(0,10));
return s;
}
Source * Mixer::createSourcePattern(int pattern, glm::ivec2 res) Source * Mixer::createSourcePattern(int pattern, glm::ivec2 res)
{ {
// ready to create a source // ready to create a source
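
With the renameSource(s, gstreamerpipeline.substr(0,10)) call above, a description such as "videotestsrc ! videoconvert" (an arbitrary example, not from the commit) would get the proposed name "videotests", i.e. the first ten characters of the pipeline string.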

Mixer.h

@@ -39,6 +39,7 @@ public:
Source * createSourceFile (const std::string &path); Source * createSourceFile (const std::string &path);
Source * createSourceClone (const std::string &namesource = ""); Source * createSourceClone (const std::string &namesource = "");
Source * createSourceRender (); Source * createSourceRender ();
Source * createSourceStream (const std::string &gstreamerpipeline);
Source * createSourcePattern(int pattern, glm::ivec2 res); Source * createSourcePattern(int pattern, glm::ivec2 res);
Source * createSourceDevice (int id); Source * createSourceDevice (int id);

PatternSource.cpp

@@ -5,7 +5,6 @@
#include "defines.h" #include "defines.h"
#include "ImageShader.h" #include "ImageShader.h"
#include "ImageProcessingShader.h"
#include "Resource.h" #include "Resource.h"
#include "Primitives.h" #include "Primitives.h"
#include "Stream.h" #include "Stream.h"
@@ -118,7 +117,6 @@ void Pattern::open( uint pattern )
std::ostringstream oss; std::ostringstream oss;
oss << " kx2=" << (int)(aspectRatio() * 10.f) << " ky2=10 kt=4"; oss << " kx2=" << (int)(aspectRatio() * 10.f) << " ky2=10 kt=4";
gstreamer_pattern += oss.str(); // Zone plate gstreamer_pattern += oss.str(); // Zone plate
single_frame_ = false;
} }
break; break;
default: default:
@@ -129,133 +127,129 @@ void Pattern::open( uint pattern )
single_frame_ = type_ < 15; single_frame_ = type_ < 15;
// (private) open stream // (private) open stream
open(gstreamer_pattern); Stream::open(gstreamer_pattern);
} }
void Pattern::open(std::string gstreamer_pattern) PatternSource::PatternSource(glm::ivec2 resolution) : StreamSource()
{
// set gstreamer pipeline source
description_ = gstreamer_pattern;
// close before re-openning
if (isOpen())
close();
execute_open();
}
PatternSource::PatternSource(glm::ivec2 resolution) : Source()
{ {
// create stream // create stream
stream_ = new Pattern(resolution); stream_ = (Stream *) new Pattern(resolution);
// create surface // // create surface
patternsurface_ = new Surface(renderingshader_); // surface_ = new Surface(renderingshader_);
}
PatternSource::~PatternSource() overlays_[View::MIXING]->attach( new Symbol(Symbol::PATTERN, glm::vec3(0.8f, 0.8f, 0.01f)) );
{ overlays_[View::LAYER]->attach( new Symbol(Symbol::PATTERN, glm::vec3(0.8f, 0.8f, 0.01f)) );
// delete media surface & stream
delete patternsurface_;
delete stream_;
}
bool PatternSource::failed() const
{
return stream_->failed();
}
uint PatternSource::texture() const
{
return stream_->texture();
}
void PatternSource::replaceRenderingShader()
{
patternsurface_->replaceShader(renderingshader_);
} }
void PatternSource::setPattern(int id) void PatternSource::setPattern(int id)
{ {
Log::Notify("Creating pattern %s", Pattern::pattern_types[id].c_str()); Log::Notify("Creating pattern %s", Pattern::pattern_types[id].c_str());
stream_->open(id); pattern()->open( (uint) id );
stream_->play(true); stream_->play(true);
} }
void PatternSource::init()
{
if ( stream_->isOpen() ) {
// update video
stream_->update();
// once the texture of media player is created
if (stream_->texture() != Resource::getTextureBlack()) {
// get the texture index from media player, apply it to the media surface
patternsurface_->setTextureIndex( stream_->texture() );
// create Frame buffer matching size of media player
float height = float(stream_->width()) / stream_->aspectRatio();
FrameBuffer *renderbuffer = new FrameBuffer(stream_->width(), (uint)height, true);
// set the renderbuffer of the source and attach rendering nodes
attach(renderbuffer);
// icon in mixing view
overlays_[View::MIXING]->attach( new Symbol(Symbol::PATTERN, glm::vec3(0.8f, 0.8f, 0.01f)) );
overlays_[View::LAYER]->attach( new Symbol(Symbol::PATTERN, glm::vec3(0.8f, 0.8f, 0.01f)) );
// done init
initialized_ = true;
Log::Info("Source Pattern linked to Stream %d.", stream_->description().c_str());
// force update of activation mode
active_ = true;
touch();
}
}
}
void PatternSource::setActive (bool on)
{
bool was_active = active_;
Source::setActive(on);
// change status of media player (only if status changed)
if ( active_ != was_active ) {
stream_->enable(active_);
}
}
void PatternSource::update(float dt)
{
Source::update(dt);
// update stream
stream_->update();
}
void PatternSource::render()
{
if (!initialized_)
init();
else {
// render the media player into frame buffer
static glm::mat4 projection = glm::ortho(-1.f, 1.f, 1.f, -1.f, -1.f, 1.f);
renderbuffer_->begin();
patternsurface_->draw(glm::identity<glm::mat4>(), projection);
renderbuffer_->end();
}
}
void PatternSource::accept(Visitor& v) void PatternSource::accept(Visitor& v)
{ {
Source::accept(v); Source::accept(v);
v.visit(*this); v.visit(*this);
} }
Pattern *PatternSource::pattern() const
{
return dynamic_cast<Pattern *>(stream_);
}
//PatternSource::~PatternSource()
//{
// // delete media surface & stream
// delete patternsurface_;
// delete stream_;
//}
//bool PatternSource::failed() const
//{
// return stream_->failed();
//}
//uint PatternSource::texture() const
//{
// return stream_->texture();
//}
//void PatternSource::replaceRenderingShader()
//{
// patternsurface_->replaceShader(renderingshader_);
//}
//void PatternSource::init()
//{
// if ( stream_->isOpen() ) {
// // update video
// stream_->update();
// // once the texture of media player is created
// if (stream_->texture() != Resource::getTextureBlack()) {
// // get the texture index from media player, apply it to the media surface
// patternsurface_->setTextureIndex( stream_->texture() );
// // create Frame buffer matching size of media player
// float height = float(stream_->width()) / stream_->aspectRatio();
// FrameBuffer *renderbuffer = new FrameBuffer(stream_->width(), (uint)height, true);
// // set the renderbuffer of the source and attach rendering nodes
// attach(renderbuffer);
// // icon in mixing view
// overlays_[View::MIXING]->attach( new Symbol(Symbol::PATTERN, glm::vec3(0.8f, 0.8f, 0.01f)) );
// overlays_[View::LAYER]->attach( new Symbol(Symbol::PATTERN, glm::vec3(0.8f, 0.8f, 0.01f)) );
// // done init
// initialized_ = true;
// Log::Info("Source Pattern linked to Stream %d.", stream_->description().c_str());
// // force update of activation mode
// active_ = true;
// touch();
// }
// }
//}
//void PatternSource::setActive (bool on)
//{
// bool was_active = active_;
// Source::setActive(on);
// // change status of media player (only if status changed)
// if ( active_ != was_active ) {
// stream_->enable(active_);
// }
//}
//void PatternSource::update(float dt)
//{
// Source::update(dt);
// // update stream
// stream_->update();
//}
//void PatternSource::render()
//{
// if (!initialized_)
// init();
// else {
// // render the media player into frame buffer
// static glm::mat4 projection = glm::ortho(-1.f, 1.f, 1.f, -1.f, -1.f, 1.f);
// renderbuffer_->begin();
// patternsurface_->draw(glm::identity<glm::mat4>(), projection);
// renderbuffer_->end();
// }
//}

PatternSource.h

@@ -3,8 +3,7 @@
#include <vector> #include <vector>
#include "Stream.h" #include "StreamSource.h"
#include "Source.h"
class Pattern : public Stream class Pattern : public Stream
{ {
@@ -18,36 +17,24 @@ public:
inline uint type() const { return type_; } inline uint type() const { return type_; }
private: private:
void open( std::string description ) override;
uint type_; uint type_;
}; };
class PatternSource : public Source class PatternSource : public StreamSource
{ {
public: public:
PatternSource(glm::ivec2 resolution); PatternSource(glm::ivec2 resolution);
~PatternSource();
// implementation of source API // Source interface
void update (float dt) override;
void setActive (bool on) override;
void render() override;
bool failed() const override;
uint texture() const override;
void accept (Visitor& v) override; void accept (Visitor& v) override;
// Pattern specific interface // StreamSource interface
inline Pattern *pattern() const { return stream_; } Stream *stream() const override { return stream_; }
// specific interface
Pattern *pattern() const;
void setPattern(int id); void setPattern(int id);
protected:
void init() override;
void replaceRenderingShader() override;
Surface *patternsurface_;
Pattern *stream_;
}; };
#endif // PATTERNSOURCE_H #endif // PATTERNSOURCE_H

View File

@@ -8,7 +8,9 @@
#include "Source.h" #include "Source.h"
#include "MediaSource.h" #include "MediaSource.h"
#include "SessionSource.h" #include "SessionSource.h"
#include "StreamSource.h"
#include "PatternSource.h" #include "PatternSource.h"
#include "DeviceSource.h"
#include "Session.h" #include "Session.h"
#include "ImageShader.h" #include "ImageShader.h"
#include "ImageProcessingShader.h" #include "ImageProcessingShader.h"

Source.cpp

@@ -179,7 +179,6 @@ void Source::accept(Visitor& v)
v.visit(*this); v.visit(*this);
} }
Source::Mode Source::mode() const Source::Mode Source::mode() const
{ {
return mode_; return mode_;

Stream.cpp

@@ -33,7 +33,7 @@ Stream::Stream()
width_ = 800; width_ = 800;
height_ = 600; height_ = 600;
single_frame_ = true; single_frame_ = false;
ready_ = false; ready_ = false;
failed_ = false; failed_ = false;
enabled_ = true; enabled_ = true;
@@ -71,6 +71,18 @@ guint Stream::texture() const
} }
void Stream::open(const std::string &gstreamer_description)
{
// set gstreamer pipeline source
description_ = gstreamer_description;
// close before re-openning
if (isOpen())
close();
execute_open();
}
std::string Stream::description() const std::string Stream::description() const
{ {
@@ -541,7 +553,7 @@ double Stream::updateFrameRate() const
bool Stream::fill_frame(GstBuffer *buf, FrameStatus status) bool Stream::fill_frame(GstBuffer *buf, FrameStatus status)
{ {
Log::Info("Stream fill frame"); // Log::Info("Stream fill frame");
// Do NOT overwrite an unread EOS // Do NOT overwrite an unread EOS
if ( frame_[write_index_].status == EOS ) if ( frame_[write_index_].status == EOS )
@@ -593,7 +605,9 @@ bool Stream::fill_frame(GstBuffer *buf, FrameStatus status)
// else; null buffer for EOS: give a position // else; null buffer for EOS: give a position
else { else {
frame_[write_index_].status = EOS; frame_[write_index_].status = EOS;
#ifdef STREAM_DEBUG
Log::Info("Stream EOS"); Log::Info("Stream EOS");
#endif
} }
// unlock access to frame // unlock access to frame
@@ -657,7 +671,7 @@ GstFlowReturn Stream::callback_new_sample (GstAppSink *sink, gpointer p)
{ {
GstFlowReturn ret = GST_FLOW_OK; GstFlowReturn ret = GST_FLOW_OK;
Log::Info("callback_new_sample"); // Log::Info("callback_new_sample");
// non-blocking read new sample // non-blocking read new sample
GstSample *sample = gst_app_sink_pull_sample(sink); GstSample *sample = gst_app_sink_pull_sample(sink);

Stream.h

@@ -29,11 +29,11 @@ public:
/** /**
* Open a media using gstreamer pipeline keyword * Open a media using gstreamer pipeline keyword
* */ * */
virtual void open( std::string description ) = 0; void open(const std::string &gstreamer_description );
/** /**
* Get description string * Get description string
* */ * */
virtual std::string description() const; std::string description() const;
/** /**
* True if a media was oppenned * True if a media was oppenned
* */ * */

StreamSource.cpp (new file, 131 lines)

@@ -0,0 +1,131 @@
#include <sstream>
#include <glm/gtc/matrix_transform.hpp>
#include "StreamSource.h"
#include "defines.h"
#include "ImageShader.h"
#include "Resource.h"
#include "Primitives.h"
#include "Stream.h"
#include "Visitor.h"
#include "Log.h"
GenericStreamSource::GenericStreamSource() : StreamSource()
{
// create stream
stream_ = new Stream;
// icon in mixing view
overlays_[View::MIXING]->attach( new Symbol(Symbol::EMPTY, glm::vec3(0.8f, 0.8f, 0.01f)) );
overlays_[View::LAYER]->attach( new Symbol(Symbol::EMPTY, glm::vec3(0.8f, 0.8f, 0.01f)) );
}
void GenericStreamSource::setDescription(const std::string &desc)
{
Log::Notify("Creating Stream %s", desc);
stream_->open(desc);
stream_->play(true);
}
void GenericStreamSource::accept(Visitor& v)
{
Source::accept(v);
v.visit(*this);
}
StreamSource::StreamSource() : Source()
{
// create surface
surface_ = new Surface(renderingshader_);
}
StreamSource::~StreamSource()
{
// delete media surface & stream
delete surface_;
delete stream_;
}
bool StreamSource::failed() const
{
return stream_->failed();
}
uint StreamSource::texture() const
{
return stream_->texture();
}
void StreamSource::replaceRenderingShader()
{
surface_->replaceShader(renderingshader_);
}
void StreamSource::init()
{
if ( stream_->isOpen() ) {
// update video
stream_->update();
// once the texture of media player is created
if (stream_->texture() != Resource::getTextureBlack()) {
// get the texture index from media player, apply it to the media surface
surface_->setTextureIndex( stream_->texture() );
// create Frame buffer matching size of media player
float height = float(stream_->width()) / stream_->aspectRatio();
FrameBuffer *renderbuffer = new FrameBuffer(stream_->width(), (uint)height, true);
// set the renderbuffer of the source and attach rendering nodes
attach(renderbuffer);
// done init
initialized_ = true;
Log::Info("Source Stream linked to Stream %d.", stream_->description().c_str());
// force update of activation mode
active_ = true;
touch();
}
}
}
void StreamSource::setActive (bool on)
{
bool was_active = active_;
Source::setActive(on);
// change status of media player (only if status changed)
if ( active_ != was_active ) {
stream_->enable(active_);
}
}
void StreamSource::update(float dt)
{
Source::update(dt);
// update stream
stream_->update();
}
void StreamSource::render()
{
if (!initialized_)
init();
else {
// render the media player into frame buffer
static glm::mat4 projection = glm::ortho(-1.f, 1.f, 1.f, -1.f, -1.f, 1.f);
renderbuffer_->begin();
surface_->draw(glm::identity<glm::mat4>(), projection);
renderbuffer_->end();
}
}

StreamSource.h (new file, 73 lines)

@@ -0,0 +1,73 @@
#ifndef STREAMSOURCE_H
#define STREAMSOURCE_H
#include "Stream.h"
#include "Source.h"
/**
* @brief The StreamSource class
*
* StreamSource is a virtual base class
* (because stream() = 0)
* based on the virtual base class Source
* that implements the update and display
* of a Stream object (gstreamer generic)
*
* StreamSource does *not* create a stream
* in its constructor to let this for the
* specific implementation of the subclass.
* Therefore it cannot be instanciated and
* it cannot give access to its stream.
*
*/
class StreamSource: public Source
{
public:
StreamSource();
virtual ~StreamSource();
// implementation of source API
void update (float dt) override;
void setActive (bool on) override;
void render() override;
bool failed() const override;
uint texture() const override;
// pure virtual interface
virtual Stream *stream() const = 0;
protected:
void init() override;
void replaceRenderingShader() override;
Surface *surface_;
Stream *stream_;
};
/**
* @brief The GenericStreamSource class
*
* Implements the StreamSource
* with an initialization
* using a generic description
* of the gstreamer pipeline.
*
* It can be instanciated.
*/
class GenericStreamSource : public StreamSource
{
public:
GenericStreamSource();
// Source interface
void accept (Visitor& v) override;
// StreamSource interface
Stream *stream() const override { return stream_; }
// specific interface
void setDescription(const std::string &desc);
};
#endif // STREAMSOURCE_H
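
A hypothetical subclass, sketched here to illustrate the pattern the header documents (the name CustomStreamSource is made up, not part of the commit): the subclass creates its own Stream in its constructor, since StreamSource deliberately does not, and exposes it through stream().

#include "StreamSource.h"

// Sketch of a concrete StreamSource (hypothetical): the constructor
// creates the Stream it owns, StreamSource::~StreamSource() deletes it,
// and stream() gives access to it.
class CustomStreamSource : public StreamSource
{
public:
    CustomStreamSource() : StreamSource()
    {
        // subclass responsibility: StreamSource itself never creates stream_
        stream_ = new Stream;
    }

    // Source interface; a dedicated Visitor::visit() overload would
    // normally be added as well, as done for GenericStreamSource
    void accept (Visitor& v) override { Source::accept(v); }

    // StreamSource interface
    Stream *stream() const override { return stream_; }
};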

UserInterfaceManager.cpp

@@ -53,6 +53,8 @@ using namespace std;
#include "MediaPlayer.h" #include "MediaPlayer.h"
#include "MediaSource.h" #include "MediaSource.h"
#include "PatternSource.h" #include "PatternSource.h"
#include "DeviceSource.h"
#include "StreamSource.h"
#include "PickingVisitor.h" #include "PickingVisitor.h"
#include "ImageShader.h" #include "ImageShader.h"
#include "ImageProcessingShader.h" #include "ImageProcessingShader.h"
@@ -854,7 +856,7 @@ void ToolBox::Render()
{ {
if (ImGui::BeginMenu("Render")) if (ImGui::BeginMenu("Render"))
{ {
if ( ImGui::MenuItem( ICON_FA_CAMERA_RETRO " Screenshot") ) if ( ImGui::MenuItem( ICON_FA_CAMERA_RETRO " Screenshot", "F12") )
UserInterface::manager().StartScreenshot(); UserInterface::manager().StartScreenshot();
ImGui::EndMenu(); ImGui::EndMenu();
@@ -871,6 +873,17 @@ void ToolBox::Render()
} }
static char buf1[64] = "";
ImGui::InputText("gstreamer pipeline", buf1, 64);
if (ImGui::Button("Create Generic Stream Source") )
{
// GenericStreamSource *s =
Mixer::manager().addSource( Mixer::manager().createSourceStream(buf1) );
}
//
// display histogram of update time and plot framerate
//
// keep array of 120 values, i.e. approx 2 seconds of recording // keep array of 120 values, i.e. approx 2 seconds of recording
static float framerate_values[2][120] = {{}}; static float framerate_values[2][120] = {{}};
static float sum[2] = { 0.f, 0.f }; static float sum[2] = { 0.f, 0.f };

Visitor.h

@@ -28,6 +28,8 @@ class ImageProcessingShader;
class Source; class Source;
class MediaSource; class MediaSource;
class PatternSource; class PatternSource;
class DeviceSource;
class GenericStreamSource;
class SessionSource; class SessionSource;
class RenderSource; class RenderSource;
class CloneSource; class CloneSource;
@@ -64,6 +66,8 @@ public:
// utility // utility
virtual void visit (Source&) {} virtual void visit (Source&) {}
virtual void visit (MediaSource&) {} virtual void visit (MediaSource&) {}
virtual void visit (GenericStreamSource&) {}
virtual void visit (DeviceSource&) {}
virtual void visit (PatternSource&) {} virtual void visit (PatternSource&) {}
virtual void visit (SessionSource&) {} virtual void visit (SessionSource&) {}
virtual void visit (RenderSource&) {} virtual void visit (RenderSource&) {}