From 9a0c8df92157fc624767d03d2447bd2f6422d788 Mon Sep 17 00:00:00 2001 From: Rafal Kolanski Date: Mon, 15 Apr 2024 19:11:37 +1000 Subject: [PATCH] set b_ref_mode=0 for ffmpeg NVENC encoder Older NVIDIA cards (e.g. GTX 1650) don't support this feature, and ffmpeg somehow force-enabled it, despite docs saying it should be set to "auto", and --help saying the default is "disabled". Setting it to "disabled" in the encoder works around this issue. ALVR does not currently use multiple B frames, so disabling this feature should make no difference in practice, and no visible difference was observed when testing. The feature is documented as: "Using B frame as a reference improves subjective and objective encoded quality with no performance impact. Hence the users enabling multiple B frames are strongly recommended to enable this feature." Co-Authored-By: David Rosca --- alvr/server/cpp/platform/linux/EncodePipelineNvEnc.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/alvr/server/cpp/platform/linux/EncodePipelineNvEnc.cpp b/alvr/server/cpp/platform/linux/EncodePipelineNvEnc.cpp index bf72234142..64bf3108c4 100644 --- a/alvr/server/cpp/platform/linux/EncodePipelineNvEnc.cpp +++ b/alvr/server/cpp/platform/linux/EncodePipelineNvEnc.cpp @@ -151,6 +151,8 @@ alvr::EncodePipelineNvEnc::EncodePipelineNvEnc(Renderer *render, // Delay isn't actually a delay instead its how many surfaces to encode at a time av_opt_set_int(encoder_ctx->priv_data, "delay", 1, 0); av_opt_set_int(encoder_ctx->priv_data, "forced-idr", 1, 0); + // work around ffmpeg default not working for older NVIDIA cards + av_opt_set_int(encoder_ctx->priv_data, "b_ref_mode", 0, 0); encoder_ctx->pix_fmt = AV_PIX_FMT_CUDA; encoder_ctx->width = width;