Fix rate control for AMD cards using VAAPI (#2821)
This commit is contained in:
@@ -27,6 +27,8 @@ struct sockaddr;
|
||||
struct AVFrame;
|
||||
struct AVBufferRef;
|
||||
struct AVHWFramesContext;
|
||||
struct AVCodecContext;
|
||||
struct AVDictionary;
|
||||
|
||||
// Forward declarations of boost classes to avoid having to include boost headers
|
||||
// here, which results in issues with Windows.h and WinSock2.h include order.
|
||||
@@ -398,6 +400,13 @@ namespace platf {
|
||||
virtual void
|
||||
init_hwframes(AVHWFramesContext *frames) {};
|
||||
|
||||
/**
|
||||
 * @brief Provides a hook to allow platform-specific code to adjust codec options.
|
||||
* @note Implementations may set or modify codec options prior to codec initialization.
|
||||
*/
|
||||
virtual void
|
||||
init_codec_options(AVCodecContext *ctx, AVDictionary *options) {};
|
||||
|
||||
/**
|
||||
* @brief Prepare to derive a context.
|
||||
* @note Implementations may make modifications required before context derivation
|
||||
|
||||
@@ -129,6 +129,16 @@ namespace va {
|
||||
return 0;
|
||||
}
|
||||
|
||||
void
|
||||
init_codec_options(AVCodecContext *ctx, AVDictionary *options) override {
|
||||
// Don't set the RC buffer size when using H.264 on Intel GPUs. It causes
|
||||
// major encoding quality degradation.
|
||||
auto vendor = vaQueryVendorString(va_display);
|
||||
if (ctx->codec_id != AV_CODEC_ID_H264 || (vendor && !strstr(vendor, "Intel"))) {
|
||||
ctx->rc_buffer_size = ctx->bit_rate * ctx->framerate.den / ctx->framerate.num;
|
||||
}
|
||||
}
|
||||
|
||||
int
|
||||
set_frame(AVFrame *frame, AVBufferRef *hw_frames_ctx_buf) override {
|
||||
this->hwframe.reset(frame);
|
||||
|
||||
Reference in New Issue
Block a user