Video Recording Buffer management

Implemented methods to supervise encoding in FrameGrabber, avoid running out of buffer, and give user a selection of buffer sizes for recording.
This commit is contained in:
Bruno
2021-08-08 23:58:35 +02:00
parent ddd9bb4e99
commit 5a18dbaf37
9 changed files with 161 additions and 91 deletions

View File

@@ -214,8 +214,8 @@ void FrameGrabbing::grabFrame(FrameBuffer *frame_buffer, float dt)
FrameGrabber::FrameGrabber(): finished_(false), expecting_finished_(false), active_(false), accept_buffer_(false), FrameGrabber::FrameGrabber(): finished_(false), active_(false), endofstream_(false), accept_buffer_(false), buffering_full_(false),
pipeline_(nullptr), src_(nullptr), caps_(nullptr), timer_(nullptr), timestamp_(0) pipeline_(nullptr), src_(nullptr), caps_(nullptr), timer_(nullptr), timestamp_(0), frame_count_(0), buffering_size_(MIN_BUFFER_SIZE)
{ {
// unique id // unique id
id_ = BaseToolkit::uniqueId(); id_ = BaseToolkit::uniqueId();
@@ -255,12 +255,11 @@ uint64_t FrameGrabber::duration() const
void FrameGrabber::stop () void FrameGrabber::stop ()
{ {
// send end of stream
expecting_finished_ = true;
gst_app_src_end_of_stream (src_);
// stop recording // stop recording
active_ = false; active_ = false;
// send end of stream
gst_app_src_end_of_stream (src_);
} }
std::string FrameGrabber::info() const std::string FrameGrabber::info() const
@@ -285,7 +284,6 @@ void FrameGrabber::callback_enough_data (GstAppSrc *, gpointer p)
FrameGrabber *grabber = static_cast<FrameGrabber *>(p); FrameGrabber *grabber = static_cast<FrameGrabber *>(p);
if (grabber) if (grabber)
grabber->accept_buffer_ = false; grabber->accept_buffer_ = false;
} }
GstPadProbeReturn FrameGrabber::callback_event_probe(GstPad *, GstPadProbeInfo * info, gpointer p) GstPadProbeReturn FrameGrabber::callback_event_probe(GstPad *, GstPadProbeInfo * info, gpointer p)
@@ -295,7 +293,7 @@ GstPadProbeReturn FrameGrabber::callback_event_probe(GstPad *, GstPadProbeInfo *
{ {
FrameGrabber *grabber = static_cast<FrameGrabber *>(p); FrameGrabber *grabber = static_cast<FrameGrabber *>(p);
if (grabber) if (grabber)
grabber->finished_ = true; grabber->endofstream_ = true;
} }
return GST_PAD_PROBE_OK; return GST_PAD_PROBE_OK;
@@ -316,22 +314,17 @@ void FrameGrabber::addFrame (GstBuffer *buffer, GstCaps *caps, float dt)
gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, FrameGrabber::callback_event_probe, this, NULL); gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, FrameGrabber::callback_event_probe, this, NULL);
gst_object_unref (pad); gst_object_unref (pad);
} }
// terminate properly if finished
if (finished_)
terminate();
// stop if an incompatible frame buffer given // stop if an incompatible frame buffer given
else if ( !gst_caps_is_equal( caps_, caps )) else if ( !gst_caps_is_equal( caps_, caps ))
{ {
stop(); stop();
Log::Warning("FrameGrabber interrupted because the resolution changed."); Log::Warning("Frame capture interrupted because the resolution changed.");
} }
// store a frame if recording is active // store a frame if recording is active and if the encoder accepts data
// and if the encoder accepts data if (active_)
else if (active_ && accept_buffer_)
{ {
if (accept_buffer_) {
GstClockTime t = 0; GstClockTime t = 0;
// initialize timer on first occurrence // initialize timer on first occurrence
@@ -343,16 +336,41 @@ void FrameGrabber::addFrame (GstBuffer *buffer, GstCaps *caps, float dt)
// time since timer starts (first frame registered) // time since timer starts (first frame registered)
t = gst_clock_get_time(timer_) - timer_firstframe_; t = gst_clock_get_time(timer_) - timer_firstframe_;
// if time is zero (first frame) // if time is zero (first frame) or if delta time is passed one frame duration (with a margin)
// of if delta time is passed one frame duration (with a margin)
if ( t == 0 || (t - timestamp_) > (frame_duration_ - 3000) ) { if ( t == 0 || (t - timestamp_) > (frame_duration_ - 3000) ) {
// round t to multiple of frame duration // round time to a multiple of frame duration
t = ( t / frame_duration_) * frame_duration_; t = ( t / frame_duration_) * frame_duration_;
// set timing of buffer // set frame presentation time stamp
buffer->pts = t; buffer->pts = t;
// if time since last timestamp is more than 1 frame
if (t - timestamp_ > frame_duration_) {
// compute duration
buffer->duration = t - timestamp_; buffer->duration = t - timestamp_;
// keep timestamp for next addFrame to one frame later
timestamp_ = t + frame_duration_;
}
// normal case (not delayed)
else {
// normal frame duration
buffer->duration = frame_duration_;
// keep timestamp for next addFrame
timestamp_ = t;
}
// when buffering is full, refuse buffer every frame
if (buffering_full_)
accept_buffer_ = false;
else
{
// enter buffering_full_ mode if the space left in buffering is for only few frames
// (this prevents filling the buffer entirely)
// if ( (double) gst_app_src_get_current_level_bytes(src_) / (double) buffering_size_ > 0.8) // 80% test
if ( buffering_size_ - gst_app_src_get_current_level_bytes(src_) < 4 * gst_buffer_get_size(buffer))
buffering_full_ = true;
}
// increment ref counter to make sure the frame remains available // increment ref counter to make sure the frame remains available
gst_buffer_ref(buffer); gst_buffer_ref(buffer);
@@ -361,9 +379,32 @@ void FrameGrabber::addFrame (GstBuffer *buffer, GstCaps *caps, float dt)
gst_app_src_push_buffer (src_, buffer); gst_app_src_push_buffer (src_, buffer);
// NB: buffer will be unrefed by the appsrc // NB: buffer will be unrefed by the appsrc
// keep timestamp for next addFrame // count frames
timestamp_ = t; frame_count_++;
}
} }
} }
// if we received and end of stream (from callback_event_probe)
if (endofstream_)
{
// try to stop properly when interrupted
if (active_) {
// de-activate and re-send EOS
stop();
// inform
Log::Warning("Frame capture : interrupted after %s.", GstToolkit::time_to_string(timestamp_).c_str());
Log::Info("Frame capture: no space left on drive / encoding buffer full.");
}
// terminate properly if finished
else
{
finished_ = true;
terminate();
}
}
} }

View File

@@ -14,6 +14,8 @@
// https://stackoverflow.com/questions/38140527/glreadpixels-vs-glgetteximage // https://stackoverflow.com/questions/38140527/glreadpixels-vs-glgetteximage
#define USE_GLREADPIXEL #define USE_GLREADPIXEL
#define MIN_BUFFER_SIZE 33177600 // 33177600 bytes = 1 frame 4K, 9 frames 720p
class FrameBuffer; class FrameBuffer;
@@ -53,9 +55,9 @@ protected:
virtual void terminate() = 0; virtual void terminate() = 0;
// thread-safe testing termination // thread-safe testing termination
std::atomic<bool> expecting_finished_;
std::atomic<bool> finished_; std::atomic<bool> finished_;
std::atomic<bool> active_; std::atomic<bool> active_;
std::atomic<bool> endofstream_;
std::atomic<bool> accept_buffer_; std::atomic<bool> accept_buffer_;
// gstreamer pipeline // gstreamer pipeline
@@ -65,6 +67,9 @@ protected:
GstClockTime timestamp_; GstClockTime timestamp_;
GstClockTime frame_duration_; GstClockTime frame_duration_;
guint64 frame_count_;
guint64 buffering_size_;
std::atomic<bool> buffering_full_;
GstClockTime timer_firstframe_; GstClockTime timer_firstframe_;
GstClock *timer_; GstClock *timer_;

View File

@@ -152,7 +152,6 @@ bool Loopback::systemLoopbackInitialized()
Loopback::Loopback() : FrameGrabber() Loopback::Loopback() : FrameGrabber()
{ {
frame_duration_ = gst_util_uint64_scale_int (1, GST_SECOND, 60); frame_duration_ = gst_util_uint64_scale_int (1, GST_SECOND, 60);
} }
void Loopback::init(GstCaps *caps) void Loopback::init(GstCaps *caps)
@@ -190,25 +189,26 @@ void Loopback::init(GstCaps *caps)
if (src_) { if (src_) {
g_object_set (G_OBJECT (src_), g_object_set (G_OBJECT (src_),
"stream-type", GST_APP_STREAM_TYPE_STREAM,
"is-live", TRUE, "is-live", TRUE,
"format", GST_FORMAT_TIME,
// "do-timestamp", TRUE,
NULL); NULL);
// Direct encoding (no buffering) // configure stream
gst_app_src_set_max_bytes( src_, 0 ); gst_app_src_set_stream_type( src_, GST_APP_STREAM_TYPE_STREAM);
gst_app_src_set_latency( src_, -1, 0);
// Set buffer size
gst_app_src_set_max_bytes( src_, buffering_size_ );
// instruct src to use the required caps // instruct src to use the required caps
caps_ = gst_caps_copy( caps ); caps_ = gst_caps_copy( caps );
gst_app_src_set_caps (src_, caps_); gst_app_src_set_caps( src_, caps_);
// setup callbacks // setup callbacks
GstAppSrcCallbacks callbacks; GstAppSrcCallbacks callbacks;
callbacks.need_data = FrameGrabber::callback_need_data; callbacks.need_data = FrameGrabber::callback_need_data;
callbacks.enough_data = FrameGrabber::callback_enough_data; callbacks.enough_data = FrameGrabber::callback_enough_data;
callbacks.seek_data = NULL; // stream type is not seekable callbacks.seek_data = NULL; // stream type is not seekable
gst_app_src_set_callbacks (src_, &callbacks, this, NULL); gst_app_src_set_callbacks( src_, &callbacks, this, NULL);
} }
else { else {
@@ -237,6 +237,5 @@ void Loopback::init(GstCaps *caps)
void Loopback::terminate() void Loopback::terminate()
{ {
active_ = false;
Log::Notify("Loopback to %s terminated.", Loopback::system_loopback_name.c_str()); Log::Notify("Loopback to %s terminated.", Loopback::system_loopback_name.c_str());
} }

View File

@@ -60,12 +60,13 @@ void PNGRecorder::init(GstCaps *caps)
if (src_) { if (src_) {
g_object_set (G_OBJECT (src_), g_object_set (G_OBJECT (src_),
"stream-type", GST_APP_STREAM_TYPE_STREAM,
"is-live", TRUE, "is-live", TRUE,
"format", GST_FORMAT_TIME,
// "do-timestamp", TRUE,
NULL); NULL);
// configure stream
gst_app_src_set_stream_type( src_, GST_APP_STREAM_TYPE_STREAM);
gst_app_src_set_latency( src_, -1, 0);
// Direct encoding (no buffering) // Direct encoding (no buffering)
gst_app_src_set_max_bytes( src_, 0 ); gst_app_src_set_max_bytes( src_, 0 );
@@ -104,7 +105,6 @@ void PNGRecorder::init(GstCaps *caps)
void PNGRecorder::terminate() void PNGRecorder::terminate()
{ {
active_ = false;
Log::Notify("PNG Capture %s is ready.", filename_.c_str()); Log::Notify("PNG Capture %s is ready.", filename_.c_str());
} }
@@ -178,7 +178,7 @@ const std::vector<std::string> VideoRecorder::profile_description {
// 2 standard // 2 standard
// 3 hq // 3 hq
// 4 4444 // 4 4444
"avenc_prores_ks pass=2 profile=2 quantizer=26 ! ", "video/x-raw, format=I422_10LE ! avenc_prores_ks pass=2 profile=2 quantizer=26 ! ",
"video/x-raw, format=Y444_10LE ! avenc_prores_ks pass=2 profile=4 quantizer=12 ! ", "video/x-raw, format=Y444_10LE ! avenc_prores_ks pass=2 profile=4 quantizer=12 ! ",
// VP8 WebM encoding // VP8 WebM encoding
"vp8enc end-usage=vbr cpu-used=8 max-quantizer=35 deadline=100000 target-bitrate=200000 keyframe-max-dist=360 token-partitions=2 static-threshold=100 ! ", "vp8enc end-usage=vbr cpu-used=8 max-quantizer=35 deadline=100000 target-bitrate=200000 keyframe-max-dist=360 token-partitions=2 static-threshold=100 ! ",
@@ -198,8 +198,13 @@ const std::vector<std::string> VideoRecorder::profile_description {
// "qtmux ! filesink name=sink"; // "qtmux ! filesink name=sink";
VideoRecorder::VideoRecorder() : FrameGrabber() const char* VideoRecorder::buffering_preset_name[VIDEO_RECORDER_BUFFERING_NUM_PRESET] = { "30 MB", "100 MB", "200 MB", "500 MB", "1 GB", "2 GB" };
const guint64 VideoRecorder::buffering_preset_value[VIDEO_RECORDER_BUFFERING_NUM_PRESET] = { MIN_BUFFER_SIZE, 104857600, 209715200, 524288000, 1073741824, 2147483648};
VideoRecorder::VideoRecorder(guint64 buffersize) : FrameGrabber()
{ {
buffering_size_ = MAX( MIN_BUFFER_SIZE, buffersize);
} }
void VideoRecorder::init(GstCaps *caps) void VideoRecorder::init(GstCaps *caps)
@@ -239,7 +244,7 @@ void VideoRecorder::init(GstCaps *caps)
GError *error = NULL; GError *error = NULL;
pipeline_ = gst_parse_launch (description.c_str(), &error); pipeline_ = gst_parse_launch (description.c_str(), &error);
if (error != NULL) { if (error != NULL) {
Log::Warning("VideoRecorder Could not construct pipeline %s:\n%s", description.c_str(), error->message); Log::Warning("Video Recording : Could not construct pipeline %s:\n%s", description.c_str(), error->message);
g_clear_error (&error); g_clear_error (&error);
finished_ = true; finished_ = true;
return; return;
@@ -256,14 +261,17 @@ void VideoRecorder::init(GstCaps *caps)
if (src_) { if (src_) {
g_object_set (G_OBJECT (src_), g_object_set (G_OBJECT (src_),
"stream-type", GST_APP_STREAM_TYPE_STREAM,
"is-live", TRUE, "is-live", TRUE,
"format", GST_FORMAT_TIME, "format", GST_FORMAT_TIME,
// "do-timestamp", TRUE, // "do-timestamp", TRUE,
NULL); NULL);
// Direct encoding (no buffering) // configure stream
gst_app_src_set_max_bytes( src_, 0 ); gst_app_src_set_stream_type( src_, GST_APP_STREAM_TYPE_STREAM);
gst_app_src_set_latency( src_, -1, 0);
// Set buffer size
gst_app_src_set_max_bytes( src_, buffering_size_);
// instruct src to use the required caps // instruct src to use the required caps
caps_ = gst_caps_copy( caps ); caps_ = gst_caps_copy( caps );
@@ -278,7 +286,7 @@ void VideoRecorder::init(GstCaps *caps)
} }
else { else {
Log::Warning("VideoRecorder Could not configure source"); Log::Warning("Video Recording : Could not configure source");
finished_ = true; finished_ = true;
return; return;
} }
@@ -286,7 +294,7 @@ void VideoRecorder::init(GstCaps *caps)
// start recording // start recording
GstStateChangeReturn ret = gst_element_set_state (pipeline_, GST_STATE_PLAYING); GstStateChangeReturn ret = gst_element_set_state (pipeline_, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) { if (ret == GST_STATE_CHANGE_FAILURE) {
Log::Warning("VideoRecorder Could not record %s", filename_.c_str()); Log::Warning("Video Recording : Could not record %s", filename_.c_str());
finished_ = true; finished_ = true;
return; return;
} }
@@ -300,10 +308,15 @@ void VideoRecorder::init(GstCaps *caps)
void VideoRecorder::terminate() void VideoRecorder::terminate()
{ {
active_ = false; // stop the pipeline
gst_element_set_state (pipeline_, GST_STATE_NULL);
if (!expecting_finished_) guint64 N = MAX( (guint64) timestamp_ / (guint64) frame_duration_, frame_count_);
Log::Warning("Video Recording interrupted (no more disk space?)."); float loss = 100.f * ((float) (N - frame_count_) ) / (float) N;
Log::Info("Video Recording : %ld frames in %s (aiming for %ld, %.0f%% lost)", frame_count_, GstToolkit::time_to_string(timestamp_).c_str(), N, loss);
Log::Info("Video Recording : try with a lower resolution / a larger buffer size / a faster codec.");
if (loss > 20.f)
Log::Warning("Video Recording lost %.0f%% of frames.", loss);
Log::Notify("Video Recording %s is ready.", filename_.c_str()); Log::Notify("Video Recording %s is ready.", filename_.c_str());
} }
@@ -312,6 +325,8 @@ std::string VideoRecorder::info() const
{ {
if (active_) if (active_)
return GstToolkit::time_to_string(timestamp_); return GstToolkit::time_to_string(timestamp_);
else else if (!endofstream_)
return "Saving file..."; return "Saving file...";
else
return "...";
} }

View File

@@ -25,6 +25,8 @@ protected:
}; };
#define VIDEO_RECORDER_BUFFERING_NUM_PRESET 6
class VideoRecorder : public FrameGrabber class VideoRecorder : public FrameGrabber
{ {
std::string filename_; std::string filename_;
@@ -48,7 +50,10 @@ public:
static const char* profile_name[DEFAULT]; static const char* profile_name[DEFAULT];
static const std::vector<std::string> profile_description; static const std::vector<std::string> profile_description;
VideoRecorder(); static const char* buffering_preset_name[VIDEO_RECORDER_BUFFERING_NUM_PRESET];
static const guint64 buffering_preset_value[VIDEO_RECORDER_BUFFERING_NUM_PRESET];
VideoRecorder(guint64 buffersize = 0);
std::string info() const override; std::string info() const override;
}; };

View File

@@ -102,6 +102,7 @@ void Settings::Save()
RecordNode->SetAttribute("profile", application.record.profile); RecordNode->SetAttribute("profile", application.record.profile);
RecordNode->SetAttribute("timeout", application.record.timeout); RecordNode->SetAttribute("timeout", application.record.timeout);
RecordNode->SetAttribute("delay", application.record.delay); RecordNode->SetAttribute("delay", application.record.delay);
RecordNode->SetAttribute("buffering_mode", application.record.buffering_mode);
pRoot->InsertEndChild(RecordNode); pRoot->InsertEndChild(RecordNode);
// Transition // Transition
@@ -314,6 +315,7 @@ void Settings::Load()
recordnode->QueryIntAttribute("profile", &application.record.profile); recordnode->QueryIntAttribute("profile", &application.record.profile);
recordnode->QueryUnsignedAttribute("timeout", &application.record.timeout); recordnode->QueryUnsignedAttribute("timeout", &application.record.timeout);
recordnode->QueryIntAttribute("delay", &application.record.delay); recordnode->QueryIntAttribute("delay", &application.record.delay);
recordnode->QueryIntAttribute("buffering_mode", &application.record.buffering_mode);
const char *path_ = recordnode->Attribute("path"); const char *path_ = recordnode->Attribute("path");
if (path_) if (path_)

View File

@@ -77,11 +77,13 @@ struct RecordConfig
int profile; int profile;
uint timeout; uint timeout;
int delay; int delay;
int buffering_mode;
RecordConfig() : path("") { RecordConfig() : path("") {
profile = 0; profile = 0;
timeout = RECORD_MAX_TIMEOUT; timeout = RECORD_MAX_TIMEOUT;
delay = 0; delay = 0;
buffering_mode = 0;
} }
}; };

View File

@@ -345,14 +345,17 @@ void VideoStreamer::init(GstCaps *caps)
if (src_) { if (src_) {
g_object_set (G_OBJECT (src_), g_object_set (G_OBJECT (src_),
"stream-type", GST_APP_STREAM_TYPE_STREAM,
"is-live", TRUE, "is-live", TRUE,
"format", GST_FORMAT_TIME, "format", GST_FORMAT_TIME,
// "do-timestamp", TRUE, // "do-timestamp", TRUE,
NULL); NULL);
// Direct encoding (no buffering) // configure stream
gst_app_src_set_max_bytes( src_, 0 ); gst_app_src_set_stream_type( src_, GST_APP_STREAM_TYPE_STREAM);
gst_app_src_set_latency( src_, -1, 0);
// Set buffer size
gst_app_src_set_max_bytes( src_, buffering_size_ );
// instruct src to use the required caps // instruct src to use the required caps
caps_ = gst_caps_copy( caps ); caps_ = gst_caps_copy( caps );
@@ -389,8 +392,6 @@ void VideoStreamer::init(GstCaps *caps)
void VideoStreamer::terminate() void VideoStreamer::terminate()
{ {
active_ = false;
// send EOS // send EOS
gst_app_src_end_of_stream (src_); gst_app_src_end_of_stream (src_);

View File

@@ -275,7 +275,9 @@ void UserInterface::handleKeyboard()
// video_recorder_ = nullptr; // video_recorder_ = nullptr;
} }
else { else {
_video_recorders.emplace_back( std::async(std::launch::async, delayTrigger, new VideoRecorder, std::chrono::seconds(Settings::application.record.delay)) ); _video_recorders.emplace_back( std::async(std::launch::async, delayTrigger,
new VideoRecorder(VideoRecorder::buffering_preset_value[Settings::application.record.buffering_mode]),
std::chrono::seconds(Settings::application.record.delay)) );
} }
} }
} }
@@ -1100,7 +1102,9 @@ void UserInterface::RenderPreview()
else { else {
ImGui::PushStyleColor(ImGuiCol_Text, ImVec4(IMGUI_COLOR_RECORD, 0.9f)); ImGui::PushStyleColor(ImGuiCol_Text, ImVec4(IMGUI_COLOR_RECORD, 0.9f));
if ( ImGui::MenuItem( ICON_FA_CIRCLE " Record", CTRL_MOD "R") ) { if ( ImGui::MenuItem( ICON_FA_CIRCLE " Record", CTRL_MOD "R") ) {
_video_recorders.emplace_back( std::async(std::launch::async, delayTrigger, new VideoRecorder, std::chrono::seconds(Settings::application.record.delay)) ); _video_recorders.emplace_back( std::async(std::launch::async, delayTrigger,
new VideoRecorder(VideoRecorder::buffering_preset_value[Settings::application.record.buffering_mode]),
std::chrono::seconds(Settings::application.record.delay)) );
} }
ImGui::PopStyleColor(1); ImGui::PopStyleColor(1);
// select profile // select profile
@@ -1138,6 +1142,11 @@ void UserInterface::RenderPreview()
ImGui::SetNextItemWidth(IMGUI_RIGHT_ALIGN); ImGui::SetNextItemWidth(IMGUI_RIGHT_ALIGN);
ImGui::SliderInt("Trigger", &Settings::application.record.delay, 0, 5, ImGui::SliderInt("Trigger", &Settings::application.record.delay, 0, 5,
Settings::application.record.delay < 1 ? "Immediate" : "After %d s"); Settings::application.record.delay < 1 ? "Immediate" : "After %d s");
ImGui::SetNextItemWidth(IMGUI_RIGHT_ALIGN);
ImGui::SliderInt("Buffer", &Settings::application.record.buffering_mode, 0, VIDEO_RECORDER_BUFFERING_NUM_PRESET-1,
VideoRecorder::buffering_preset_name[Settings::application.record.buffering_mode]);
} }
ImGui::EndMenu(); ImGui::EndMenu();
} }
@@ -4548,8 +4557,6 @@ void Navigator::RenderMainPannelSettings()
ImGui::SetCursorPosY(width_); ImGui::SetCursorPosY(width_);
// Appearance // Appearance
// ImGuiToolkit::Icon(3, 2);
// ImGui::SameLine(0, 10);
ImGui::Text("Appearance"); ImGui::Text("Appearance");
ImGui::SetNextItemWidth(IMGUI_RIGHT_ALIGN); ImGui::SetNextItemWidth(IMGUI_RIGHT_ALIGN);
if ( ImGui::DragFloat("Scale", &Settings::application.scale, 0.01, 0.5f, 2.0f, "%.1f")) if ( ImGui::DragFloat("Scale", &Settings::application.scale, 0.01, 0.5f, 2.0f, "%.1f"))
@@ -4562,8 +4569,6 @@ void Navigator::RenderMainPannelSettings()
// Options // Options
ImGui::Spacing(); ImGui::Spacing();
// ImGuiToolkit::Icon(2, 2);
// ImGui::SameLine(0, 10);
ImGui::Text("Options"); ImGui::Text("Options");
ImGuiToolkit::ButtonSwitch( ICON_FA_MOUSE_POINTER " Smooth cursor", &Settings::application.smooth_cursor); ImGuiToolkit::ButtonSwitch( ICON_FA_MOUSE_POINTER " Smooth cursor", &Settings::application.smooth_cursor);
ImGuiToolkit::ButtonSwitch( ICON_FA_TACHOMETER_ALT " Metrics", &Settings::application.widget.stats); ImGuiToolkit::ButtonSwitch( ICON_FA_TACHOMETER_ALT " Metrics", &Settings::application.widget.stats);
@@ -4578,13 +4583,8 @@ void Navigator::RenderMainPannelSettings()
// system preferences // system preferences
ImGui::Spacing(); ImGui::Spacing();
//#ifdef LINUX
// ImGuiToolkit::Icon(12, 6);
//#else
// ImGuiToolkit::Icon(6, 0);
//#endif
// ImGui::SameLine(0, 10);
ImGui::Text("System"); ImGui::Text("System");
static bool need_restart = false; static bool need_restart = false;
static bool vsync = (Settings::application.render.vsync > 0); static bool vsync = (Settings::application.render.vsync > 0);
static bool blit = Settings::application.render.blit; static bool blit = Settings::application.render.blit;