diff --git a/README.md b/README.md
index f085a75..6474477 100644
--- a/README.md
+++ b/README.md
@@ -3,9 +3,10 @@
 This library wraps hardware video encoding and scaling in a simple interface.
 There are no performance loses (at the cost of library flexibility).
 
-Currently it supports VAAPI and NVENC (no scaling).\
+Currently it supports VAAPI, NVENC, software-like wrappers (e.g. nvmpi) and software (e.g. libx264).\
 Various codecs are supported (H.264, HEVC, ...).\
-VBR and CQP modes are supported (e.g. streaming and later editting).
+VBR and CQP modes are supported (e.g. streaming and later editing).\
+Scaling is only supported for VAAPI.
 
 See library [documentation](https://bmegli.github.io/hardware-video-encoder/group__interface.html).
 
@@ -78,6 +79,9 @@ make
 
 ## Nvidia NVENC
 ./hve-encode-raw-h264 10 h264_nvenc
+
+## Software
+./hve-encode-raw-h264 10 libx264
 ```
 
 ``` bash
diff --git a/examples/hve_encode_raw_h264.c b/examples/hve_encode_raw_h264.c
index 1b4a2a2..d358ca9 100644
--- a/examples/hve_encode_raw_h264.c
+++ b/examples/hve_encode_raw_h264.c
@@ -143,6 +143,7 @@ int process_user_input(int argc, char* argv[])
 	fprintf(stderr, "%s 10 h264_vaapi\n", argv[0]);
 	fprintf(stderr, "%s 10 h264_vaapi /dev/dri/renderD128\n", argv[0]);
 	fprintf(stderr, "%s 10 h264_nvenc\n", argv[0]);
+	fprintf(stderr, "%s 10 libx264 # (software encoder)\n", argv[0]);
 
 	return -1;
 }
diff --git a/hve.c b/hve.c
index 0bbe9fa..4a108f9 100644
--- a/hve.c
+++ b/hve.c
@@ -42,7 +42,7 @@ struct hve
 
 static struct hve *hve_close_and_return_null(struct hve *h, const char *msg);
 
-static int init_hwframes_context(struct hve* h, const struct hve_config *config);
+static int init_hwframes_context(struct hve* h, const struct hve_config *config, enum AVHWDeviceType device_type);
 static int init_hardware_scaling(struct hve *h, const struct hve_config *config);
 
 static enum AVHWDeviceType hve_hw_device_type(const char *encoder);
@@ -74,16 +74,11 @@ struct hve *hve_init(const struct hve_config *config)
 	//specified encoder or NULL / empty string for H.264 VAAPI
 	const char *encoder = (config->encoder != NULL && config->encoder[0] != '\0') ? config->encoder : "h264_vaapi";
 
-	//specified device or NULL / empty string for default
-	const char *device = (config->device != NULL && config->device[0] != '\0') ? config->device : NULL;
 
 	enum AVHWDeviceType device_type = hve_hw_device_type(encoder);
 
 	if(device_type == AV_HWDEVICE_TYPE_NONE)
-		return hve_close_and_return_null(h, "hve doesn't support selected encoder");
-
-	if( (err = av_hwdevice_ctx_create(&h->hw_device_ctx, device_type, device, NULL, 0) ) < 0)
-		return hve_close_and_return_null(h, "failed to create hardware device context");
+		fprintf(stderr, "hve: not using hardware device type (encoder wrapper, software or hardware not supported by hve)\n");
 
 	if(!(codec = avcodec_find_encoder_by_name(encoder)))
 		return hve_close_and_return_null(h, "could not find encoder");
@@ -100,10 +95,6 @@ struct hve *hve_init(const struct hve_config *config)
 	h->avctx->time_base = (AVRational){ 1, config->framerate };
 	h->avctx->framerate = (AVRational){ config->framerate, 1 };
 	h->avctx->sample_aspect_ratio = (AVRational){ 1, 1 };
-	h->avctx->pix_fmt = hve_hw_pixel_format(device_type);
-
-	if(h->avctx->pix_fmt == AV_PIX_FMT_NONE)
-		return hve_close_and_return_null(h, "could not find hardware pixel format for encoder");
 
 	if(config->profile)
 		h->avctx->profile = config->profile;
@@ -123,8 +114,11 @@ struct hve *hve_init(const struct hve_config *config)
 		return hve_close_and_return_null(h, NULL);
 	}
 
-	if((err = init_hwframes_context(h, config)) < 0)
-		return hve_close_and_return_null(h, "failed to set hwframe context");
+	h->avctx->pix_fmt = h->sw_pix_fmt;
+
+	if(device_type != AV_HWDEVICE_TYPE_NONE)
+		if((err = init_hwframes_context(h, config, device_type)) < 0)
+			return hve_close_and_return_null(h, "failed to set hwframe context");
 
 	AVDictionary *opts = NULL;
 
@@ -207,12 +201,21 @@ static struct hve *hve_close_and_return_null(struct hve *h, const char *msg)
 	return NULL;
 }
 
-static int init_hwframes_context(struct hve* h, const struct hve_config *config)
+static int init_hwframes_context(struct hve* h, const struct hve_config *config, enum AVHWDeviceType device_type)
 {
 	AVBufferRef* hw_frames_ref;
 	AVHWFramesContext* frames_ctx = NULL;
 	int err = 0, depth;
 
+	//specified device or NULL / empty string for default
+	const char *device = (config->device != NULL && config->device[0] != '\0') ? config->device : NULL;
+
+	if( (h->avctx->pix_fmt = hve_hw_pixel_format(device_type)) == AV_PIX_FMT_NONE)
+		return HVE_ERROR_MSG("could not find hardware pixel format for encoder");
+
+	if( av_hwdevice_ctx_create(&h->hw_device_ctx, device_type, device, NULL, 0) < 0)
+		return HVE_ERROR_MSG("failed to create hardware device context");
+
 	if(!(hw_frames_ref = av_hwframe_ctx_alloc(h->hw_device_ctx)))
 		return HVE_ERROR_MSG("failed to create hardware frame context");
 
@@ -349,7 +352,7 @@ static enum AVPixelFormat hve_hw_pixel_format(enum AVHWDeviceType type)
 {
 	if(type == AV_HWDEVICE_TYPE_VAAPI)
 		return AV_PIX_FMT_VAAPI;
-	else if(type == AV_PIX_FMT_CUDA);
+	else if(type == AV_HWDEVICE_TYPE_CUDA)
 		return AV_PIX_FMT_CUDA;
 
 	return AV_PIX_FMT_NONE;
@@ -410,8 +413,9 @@ int hve_send_frame(struct hve *h,struct hve_frame *frame)
 	memcpy(h->sw_frame->linesize, frame->linesize, sizeof(frame->linesize));
 	memcpy(h->sw_frame->data, frame->data, sizeof(frame->data));
 
-	if(hw_upload(h) < 0)
-		return HVE_ERROR_MSG("failed to upload frame data to hardware");
+	if(h->hw_device_ctx)
+		if(hw_upload(h) < 0)
+			return HVE_ERROR_MSG("failed to upload frame data to hardware");
 
 	if(h->filter_graph)
 		return scale_encode(h);
@@ -463,7 +467,9 @@ static int scale_encode(struct hve *h)
 
 static int encode(struct hve *h)
 {
-	if(avcodec_send_frame(h->avctx, h->hw_frame) < 0)
+	AVFrame *frame = h->hw_frame ? h->hw_frame : h->sw_frame;
+
+	if(avcodec_send_frame(h->avctx, frame) < 0)
 		return HVE_ERROR_MSG("send_frame error");
 
 	return HVE_OK;
diff --git a/hve.h b/hve.h
index 9306ff2..d0b1065 100644
--- a/hve.h
+++ b/hve.h
@@ -48,7 +48,7 @@ struct hve;
  *
  * The width and height are dimmensions of the encoded data.
  *
- * To enable hardware accelerated scaling specify non-zero
+ * To enable hardware accelerated scaling (VAAPI only) specify non-zero
  * input_width and input_height different from width and height.
  *
  * The device can be:
@@ -63,10 +63,11 @@ struct hve;
  * - NULL or empty string for "h264_vaapi"
  * - valid ffmpeg encoder
  *
- * You may check encoders supported by your hardware with ffmpeg:
+ * You may check encoders supported by your hardware/software with ffmpeg:
  * @code
  * ffmpeg -encoders | grep vaapi
  * ffmpeg -encoders | grep nvenc
+ * ffmpeg -encoders | grep h264
  * @endcode
  *
  * Encoders typically can be:
@@ -78,10 +79,13 @@ struct hve;
  * - vp9_vaapi
  * - h264_nvenc
  * - hevc_nvenc
+ * - h264_nvmpi (custom Jetson specific FFmpeg build)
+ * - hevc_nvmpi (custom Jetson specific FFmpeg build)
+ * - libx264 (software)
  *
  * The pixel_format (format of what you upload) typically can be:
  * - nv12 (this is generally safe choice)
- * - yuv420p
+ * - yuv420p (required for some encoders)
  * - yuyv422
  * - uyvy422
  * - yuv422p
@@ -93,6 +97,8 @@ struct hve;
  * @code
  * ffmpeg -h encoder=h264_vaapi
  * ffmpeg -h encoder=h264_nvenc
+ * ffmpeg -h encoder=h264_nvmpi
+ * ffmpeg -h encoder=libx264
 * @endcode
 *
 * There are no software color conversions in this library.
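Below is a minimal usage sketch for the new software path added by this patch. It is not part of the diff: it assumes the `hve_config` fields documented in `hve.h` (`width`, `height`, `framerate`, `pixel_format`, `encoder`, `device`) and the `hve_init`/`hve_close` calls used by `examples/hve_encode_raw_h264.c`, so adjust the names to the struct definition in your checkout.

```c
// Usage sketch (not part of this patch): configure hve for the libx264
// software encoder. With a software encoder hve_init() no longer creates
// a hardware device context and frames are sent to the codec directly.
#include "hve.h"

#include <stdio.h>

int main(void)
{
	struct hve_config config = {0};

	config.width = 1280;
	config.height = 720;
	config.framerate = 30;
	config.pixel_format = "yuv420p"; //software format expected by libx264
	config.encoder = "libx264";      //software encoder, no VAAPI/NVENC device needed
	config.device = NULL;            //ignored when no hardware device is created

	struct hve *encoder = hve_init(&config);

	if(!encoder)
	{
		fprintf(stderr, "failed to initialize hve with libx264\n");
		return 1;
	}

	//push raw frames with hve_send_frame and collect encoded data
	//as shown in examples/hve_encode_raw_h264.c

	hve_close(encoder);
	return 0;
}
```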