Bad idea: do not use NVIDIA hardware decoder for JPEG

It just crashes without apparent reason, and is not very useful anyway.
author Bruno Herbelin
date 2021-12-04 23:05:12 +01:00
parent ebd9fab312
commit bf3fc61ef7
2 changed files with 2 additions and 4 deletions

@@ -214,10 +214,10 @@ string GstToolkit::gst_version()
 #if GST_GL_HAVE_PLATFORM_GLX
     // https://gstreamer.freedesktop.org/documentation/nvcodec/index.html?gi-language=c#plugin-nvcodec
     // list ordered with higher priority first (e.g. nvidia proprietary before vaapi)
-    const char *plugins[12] = { "nvh264dec", "nvh265dec", "nvmpeg2videodec", "nvmpeg4videodec", "nvvp8dec", "nvvp9dec", "nvjpegdec",
+    const char *plugins[11] = { "nvh264dec", "nvh265dec", "nvmpeg2videodec", "nvmpeg4videodec", "nvvp8dec", "nvvp9dec",
     "vaapidecodebin", "omxmpeg4videodec", "omxmpeg2dec", "omxh264dec", "vdpaumpegdec",
     };
-    const int N = 12;
+    const int N = 11;
 #elif GST_GL_HAVE_PLATFORM_CGL
     const char *plugins[2] = { "vtdec_hw", "vtdechw" };
     const int N = 2;
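
The array above only names candidate decoders in priority order; it does not by itself change what decodebin picks. A minimal sketch of how such a list can be applied, assuming the intent is to raise the rank of each installed plugin via the GStreamer registry (the helper name promote_gpu_decoders is invented for illustration, not taken from this repository):

#include <gst/gst.h>

// Hypothetical helper, not part of this commit: raise the rank of every
// listed decoder found in the registry so that decodebin prefers it over
// software decoders. Earlier entries receive a higher rank, matching the
// priority comment in the diff above. Requires gst_init() to have run.
void promote_gpu_decoders(const char *plugins[], int N)
{
    GstRegistry *registry = gst_registry_get();
    for (int i = 0; i < N; ++i) {
        GstPluginFeature *feature = gst_registry_lookup_feature(registry, plugins[i]);
        if (feature == NULL)
            continue; // this decoder is not installed on the system
        gst_plugin_feature_set_rank(feature, GST_RANK_PRIMARY + N - i);
        gst_object_unref(feature);
    }
}

Under that scheme, removing "nvjpegdec" from the array is enough to leave its rank untouched, so GStreamer falls back to the ordinary software JPEG decoder.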

@@ -854,8 +854,6 @@ void MediaPlayer::init_texture(guint index)
         // initialize decoderName once
         Log::Info("MediaPlayer %s Uses %s decoding and OpenGL PBO texturing.", std::to_string(id_).c_str(), decoderName().c_str());
     }
-    else
-        Log::Info("MediaPlayer %s Uses %s decoding.", std::to_string(id_).c_str(), decoderName().c_str());
     glBindTexture(GL_TEXTURE_2D, 0);
 }
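
Since the whole point of this commit is that a registered decoder can still be unusable, presence in the registry proves little. A defensive probe (illustrative only; decoder_available is not part of this commit) can at least verify that an element instantiates:

#include <gst/gst.h>

/* Hypothetical sketch, not from this repository: try to create the
 * element as a cheap sanity check. Call gst_init() before using this. */
static gboolean decoder_available(const char *name)
{
    GstElement *e = gst_element_factory_make(name, NULL);
    if (e == NULL)
        return FALSE;  /* no such factory, or it refused to create */
    gst_object_unref(e);
    return TRUE;
}

Such a probe cannot catch an element that crashes only while decoding, which is presumably why nvjpegdec is dropped from the list outright instead of being checked at runtime.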