From 95abdcf66b7c471d40732d791c967f76b5f5f9c5 Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Sat, 11 Aug 2018 18:22:18 -0500 Subject: [PATCH 001/109] FFmpeg4 support. Compile warnings fixes. Credit goes to many people, including ferdnyc, peterM, and other awesome folks! --- cmake/Modules/FindFFmpeg.cmake | 70 ++++++++++++++++++++++++------- include/CrashHandler.h | 8 ++-- include/FFmpegUtilities.h | 76 +++++++++++++++++++++++++++++++++- include/FFmpegWriter.h | 4 +- include/Frame.h | 2 +- include/FrameMapper.h | 2 +- include/ZmqLogger.h | 12 +++++- src/Clip.cpp | 11 ++--- src/EffectInfo.cpp | 1 + src/FFmpegReader.cpp | 30 +++++++------- src/FFmpegWriter.cpp | 51 ++++++++++++++++------- src/FrameMapper.cpp | 14 +++---- src/Timeline.cpp | 22 +++++----- tests/ReaderBase_Tests.cpp | 4 +- 14 files changed, 228 insertions(+), 79 deletions(-) diff --git a/cmake/Modules/FindFFmpeg.cmake b/cmake/Modules/FindFFmpeg.cmake index 4af6cc93..45a27a9e 100644 --- a/cmake/Modules/FindFFmpeg.cmake +++ b/cmake/Modules/FindFFmpeg.cmake @@ -91,6 +91,20 @@ FIND_LIBRARY( AVRESAMPLE_LIBRARY avresample avresample-2 avresample-3 $ENV{FFMPEGDIR}/lib/ffmpeg/ $ENV{FFMPEGDIR}/bin/ ) +#FindSwresample +FIND_PATH( SWRESAMPLE_INCLUDE_DIR libswresample/swresample.h + PATHS /usr/include/ + /usr/include/ffmpeg/ + $ENV{FFMPEGDIR}/include/ + $ENV{FFMPEGDIR}/include/ffmpeg/ ) + +FIND_LIBRARY( SWRESAMPLE_LIBRARY swresample + PATHS /usr/lib/ + /usr/lib/ffmpeg/ + $ENV{FFMPEGDIR}/lib/ + $ENV{FFMPEGDIR}/lib/ffmpeg/ + $ENV{FFMPEGDIR}/bin/ ) + SET( FFMPEG_FOUND FALSE ) IF ( AVFORMAT_INCLUDE_DIR AND AVFORMAT_LIBRARY ) @@ -117,27 +131,51 @@ IF ( AVRESAMPLE_INCLUDE_DIR AND AVRESAMPLE_LIBRARY ) SET ( AVRESAMPLE_FOUND TRUE ) ENDIF ( AVRESAMPLE_INCLUDE_DIR AND AVRESAMPLE_LIBRARY ) -IF ( AVFORMAT_INCLUDE_DIR OR AVCODEC_INCLUDE_DIR OR AVUTIL_INCLUDE_DIR OR AVDEVICE_FOUND OR SWSCALE_FOUND OR AVRESAMPLE_FOUND ) +IF ( SWRESAMPLE_INCLUDE_DIR AND SWRESAMPLE_LIBRARY ) + SET ( SWRESAMPLE_FOUND TRUE ) +ENDIF ( 
SWRESAMPLE_INCLUDE_DIR AND SWRESAMPLE_LIBRARY ) + +IF ( AVFORMAT_INCLUDE_DIR OR AVCODEC_INCLUDE_DIR OR AVUTIL_INCLUDE_DIR OR AVDEVICE_FOUND OR SWSCALE_FOUND OR AVRESAMPLE_FOUND OR SWRESAMPLE_FOUND ) SET ( FFMPEG_FOUND TRUE ) - SET ( FFMPEG_INCLUDE_DIR - ${AVFORMAT_INCLUDE_DIR} - ${AVCODEC_INCLUDE_DIR} - ${AVUTIL_INCLUDE_DIR} - ${AVDEVICE_INCLUDE_DIR} - ${SWSCALE_INCLUDE_DIR} - ${AVRESAMPLE_INCLUDE_DIR} ) + IF ( SWRESAMPLE_FOUND ) + SET ( FFMPEG_INCLUDE_DIR + ${AVFORMAT_INCLUDE_DIR} + ${AVCODEC_INCLUDE_DIR} + ${AVUTIL_INCLUDE_DIR} + ${AVDEVICE_INCLUDE_DIR} + ${SWSCALE_INCLUDE_DIR} + ${AVRESAMPLE_INCLUDE_DIR} + ${SWRESAMPLE_INCLUDE_DIR} ) - SET ( FFMPEG_LIBRARIES - ${AVFORMAT_LIBRARY} - ${AVCODEC_LIBRARY} - ${AVUTIL_LIBRARY} - ${AVDEVICE_LIBRARY} - ${SWSCALE_LIBRARY} - ${AVRESAMPLE_LIBRARY} ) + SET ( FFMPEG_LIBRARIES + ${AVFORMAT_LIBRARY} + ${AVCODEC_LIBRARY} + ${AVUTIL_LIBRARY} + ${AVDEVICE_LIBRARY} + ${SWSCALE_LIBRARY} + ${AVRESAMPLE_LIBRARY} + ${SWRESAMPLE_LIBRARY} ) + ELSE () + SET ( FFMPEG_INCLUDE_DIR + ${AVFORMAT_INCLUDE_DIR} + ${AVCODEC_INCLUDE_DIR} + ${AVUTIL_INCLUDE_DIR} + ${AVDEVICE_INCLUDE_DIR} + ${SWSCALE_INCLUDE_DIR} + ${AVRESAMPLE_INCLUDE_DIR} ) + + SET ( FFMPEG_LIBRARIES + ${AVFORMAT_LIBRARY} + ${AVCODEC_LIBRARY} + ${AVUTIL_LIBRARY} + ${AVDEVICE_LIBRARY} + ${SWSCALE_LIBRARY} + ${AVRESAMPLE_LIBRARY} ) + ENDIF () -ENDIF ( AVFORMAT_INCLUDE_DIR OR AVCODEC_INCLUDE_DIR OR AVUTIL_INCLUDE_DIR OR AVDEVICE_FOUND OR SWSCALE_FOUND OR AVRESAMPLE_FOUND ) +ENDIF ( AVFORMAT_INCLUDE_DIR OR AVCODEC_INCLUDE_DIR OR AVUTIL_INCLUDE_DIR OR AVDEVICE_FOUND OR SWSCALE_FOUND OR AVRESAMPLE_FOUND OR SWRESAMPLE_FOUND ) MARK_AS_ADVANCED( FFMPEG_LIBRARY_DIR diff --git a/include/CrashHandler.h b/include/CrashHandler.h index e3a4bbe5..12c79a86 100644 --- a/include/CrashHandler.h +++ b/include/CrashHandler.h @@ -53,13 +53,15 @@ namespace openshot { class CrashHandler { private: /// Default constructor - CrashHandler(){}; // Don't allow user to create an instance of this singleton + 
CrashHandler(){return;}; // Don't allow user to create an instance of this singleton /// Default copy method - CrashHandler(CrashHandler const&){}; // Don't allow the user to copy this instance + //CrashHandler(CrashHandler const&){}; // Don't allow the user to copy this instance + CrashHandler(CrashHandler const&) = delete; // Don't allow the user to copy this instance /// Default assignment operator - CrashHandler & operator=(CrashHandler const&){}; // Don't allow the user to assign this instance + //CrashHandler & operator=(CrashHandler const&){}; // Don't allow the user to assign this instance + CrashHandler & operator=(CrashHandler const&) = delete; // Don't allow the user to assign this instance /// Private variable to keep track of singleton instance static CrashHandler *m_pInstance; diff --git a/include/FFmpegUtilities.h b/include/FFmpegUtilities.h index 578c6586..346da541 100644 --- a/include/FFmpegUtilities.h +++ b/include/FFmpegUtilities.h @@ -43,7 +43,15 @@ #include #include #include + // Change this to the first version swrescale works + #if (LIBAVFORMAT_VERSION_MAJOR >= 57) + #define USE_SW + #endif + #ifdef USE_SW + #include + #else #include + #endif #include #include #include @@ -106,7 +114,65 @@ #define PIX_FMT_YUV420P AV_PIX_FMT_YUV420P #endif - #if IS_FFMPEG_3_2 + #ifdef USE_SW + #define SWR_CONVERT(ctx, out, linesize, out_count, in, linesize2, in_count) \ + swr_convert(ctx, out, out_count, (const uint8_t **)in, in_count) + #define SWR_ALLOC() swr_alloc() + #define SWR_CLOSE(ctx) {} + #define SWR_FREE(ctx) swr_free(ctx) + #define SWR_INIT(ctx) swr_init(ctx) + #define SWRCONTEXT SwrContext + #else + #define SWR_CONVERT(ctx, out, linesize, out_count, in, linesize2, in_count) \ + avresample_convert(ctx, out, linesize, out_count, (uint8_t **)in, linesize2, in_count) + #define SWR_ALLOC() avresample_alloc_context() + #define SWR_CLOSE(ctx) avresample_close(ctx) + #define SWR_FREE(ctx) avresample_free(ctx) + #define SWR_INIT(ctx) avresample_open(ctx) + 
#define SWRCONTEXT AVAudioResampleContext + #endif + + + #if (LIBAVFORMAT_VERSION_MAJOR >= 58) + #define AV_REGISTER_ALL + #define AVCODEC_REGISTER_ALL + #define AV_FILENAME url + #define MY_INPUT_BUFFER_PADDING_SIZE AV_INPUT_BUFFER_PADDING_SIZE + #define AV_ALLOCATE_FRAME() av_frame_alloc() + #define AV_ALLOCATE_IMAGE(av_frame, pix_fmt, width, height) av_image_alloc(av_frame->data, av_frame->linesize, width, height, pix_fmt, 1) + #define AV_RESET_FRAME(av_frame) av_frame_unref(av_frame) + #define AV_FREE_FRAME(av_frame) av_frame_free(av_frame) + #define AV_FREE_PACKET(av_packet) av_packet_unref(av_packet) + #define AV_FREE_CONTEXT(av_context) avcodec_free_context(&av_context) + #define AV_GET_CODEC_TYPE(av_stream) av_stream->codecpar->codec_type + #define AV_FIND_DECODER_CODEC_ID(av_stream) av_stream->codecpar->codec_id + auto AV_GET_CODEC_CONTEXT = [](AVStream* av_stream, AVCodec* av_codec) { \ + AVCodecContext *context = avcodec_alloc_context3(av_codec); \ + avcodec_parameters_to_context(context, av_stream->codecpar); \ + return context; \ + }; + #define AV_GET_CODEC_PAR_CONTEXT(av_stream, av_codec) av_codec; + #define AV_GET_CODEC_FROM_STREAM(av_stream,codec_in) + #define AV_GET_CODEC_ATTRIBUTES(av_stream, av_context) av_stream->codecpar + #define AV_GET_CODEC_PIXEL_FORMAT(av_stream, av_context) (AVPixelFormat) av_stream->codecpar->format + #define AV_GET_SAMPLE_FORMAT(av_stream, av_context) av_stream->codecpar->format + #define AV_GET_IMAGE_SIZE(pix_fmt, width, height) av_image_get_buffer_size(pix_fmt, width, height, 1) + #define AV_COPY_PICTURE_DATA(av_frame, buffer, pix_fmt, width, height) av_image_fill_arrays(av_frame->data, av_frame->linesize, buffer, pix_fmt, width, height, 1) + #define AV_OUTPUT_CONTEXT(output_context, path) avformat_alloc_output_context2( output_context, NULL, NULL, path) + #define AV_OPTION_FIND(priv_data, name) av_opt_find(priv_data, name, NULL, 0, 0) + #define AV_OPTION_SET( av_stream, priv_data, name, value, avcodec) 
av_opt_set(priv_data, name, value, 0); avcodec_parameters_from_context(av_stream->codecpar, avcodec); + #define AV_FORMAT_NEW_STREAM(oc, st_codec, av_codec, av_st) av_st = avformat_new_stream(oc, NULL);\ + if (!av_st) \ + throw OutOfMemory("Could not allocate memory for the video stream.", path); \ + c = avcodec_alloc_context3(av_codec); \ + st_codec = c; \ + av_st->codecpar->codec_id = av_codec->id; + #define AV_COPY_PARAMS_FROM_CONTEXT(av_stream, av_codec) avcodec_parameters_from_context(av_stream->codecpar, av_codec); + #elif IS_FFMPEG_3_2 + #define AV_REGISTER_ALL av_register_all(); + #define AVCODEC_REGISTER_ALL avcodec_register_all(); + #define AV_FILENAME filename + #define MY_INPUT_BUFFER_PADDING_SIZE FF_INPUT_BUFFER_PADDING_SIZE #define AV_ALLOCATE_FRAME() av_frame_alloc() #define AV_ALLOCATE_IMAGE(av_frame, pix_fmt, width, height) av_image_alloc(av_frame->data, av_frame->linesize, width, height, pix_fmt, 1) #define AV_RESET_FRAME(av_frame) av_frame_unref(av_frame) @@ -138,6 +204,10 @@ av_st->codecpar->codec_id = av_codec->id; #define AV_COPY_PARAMS_FROM_CONTEXT(av_stream, av_codec) avcodec_parameters_from_context(av_stream->codecpar, av_codec); #elif LIBAVFORMAT_VERSION_MAJOR >= 55 + #define AV_REGISTER_ALL av_register_all(); + #define AVCODEC_REGISTER_ALL avcodec_register_all(); + #define AV_FILENAME filename + #define MY_INPUT_BUFFER_PADDING_SIZE FF_INPUT_BUFFER_PADDING_SIZE #define AV_ALLOCATE_FRAME() av_frame_alloc() #define AV_ALLOCATE_IMAGE(av_frame, pix_fmt, width, height) avpicture_alloc((AVPicture *) av_frame, pix_fmt, width, height) #define AV_RESET_FRAME(av_frame) av_frame_unref(av_frame) @@ -164,6 +234,10 @@ c = av_st->codec; #define AV_COPY_PARAMS_FROM_CONTEXT(av_stream, av_codec) #else + #define AV_REGISTER_ALL av_register_all(); + #define AVCODEC_REGISTER_ALL avcodec_register_all(); + #define AV_FILENAME filename + #define MY_INPUT_BUFFER_PADDING_SIZE FF_INPUT_BUFFER_PADDING_SIZE #define AV_ALLOCATE_FRAME() avcodec_alloc_frame() #define 
AV_ALLOCATE_IMAGE(av_frame, pix_fmt, width, height) avpicture_alloc((AVPicture *) av_frame, pix_fmt, width, height) #define AV_RESET_FRAME(av_frame) avcodec_get_frame_defaults(av_frame) diff --git a/include/FFmpegWriter.h b/include/FFmpegWriter.h index 8343002e..7eefacb7 100644 --- a/include/FFmpegWriter.h +++ b/include/FFmpegWriter.h @@ -174,8 +174,8 @@ namespace openshot int initial_audio_input_frame_size; int audio_input_position; int audio_encoder_buffer_size; - AVAudioResampleContext *avr; - AVAudioResampleContext *avr_planar; + SWRCONTEXT *avr; + SWRCONTEXT *avr_planar; /* Resample options */ int original_sample_rate; diff --git a/include/Frame.h b/include/Frame.h index a7ad509f..eba7f8bb 100644 --- a/include/Frame.h +++ b/include/Frame.h @@ -62,7 +62,7 @@ #include "AudioResampler.h" #include "Fraction.h" - +#pragma SWIG nowarn=362 using namespace std; namespace openshot diff --git a/include/FrameMapper.h b/include/FrameMapper.h index 06511666..216fe73f 100644 --- a/include/FrameMapper.h +++ b/include/FrameMapper.h @@ -146,7 +146,7 @@ namespace openshot ReaderBase *reader; // The source video reader CacheMemory final_cache; // Cache of actual Frame objects bool is_dirty; // When this is true, the next call to GetFrame will re-init the mapping - AVAudioResampleContext *avr; // Audio resampling context object + SWRCONTEXT *avr; // Audio resampling context object // Internal methods used by init void AddField(int64_t frame); diff --git a/include/ZmqLogger.h b/include/ZmqLogger.h index c134f2cf..e825ed0e 100644 --- a/include/ZmqLogger.h +++ b/include/ZmqLogger.h @@ -72,11 +72,19 @@ namespace openshot { /// Default constructor ZmqLogger(){}; // Don't allow user to create an instance of this singleton +#if __GNUC__ >=7 /// Default copy method - ZmqLogger(ZmqLogger const&){}; // Don't allow the user to copy this instance + ZmqLogger(ZmqLogger const&) = delete; // Don't allow the user to assign this instance /// Default assignment operator - ZmqLogger & 
operator=(ZmqLogger const&){}; // Don't allow the user to assign this instance + ZmqLogger & operator=(ZmqLogger const&) = delete; // Don't allow the user to assign this instance +#else + /// Default copy method + ZmqLogger(ZmqLogger const&) {}; // Don't allow the user to assign this instance + + /// Default assignment operator + ZmqLogger & operator=(ZmqLogger const&); // Don't allow the user to assign this instance +#endif /// Private variable to keep track of singleton instance static ZmqLogger * m_pInstance; diff --git a/src/Clip.cpp b/src/Clip.cpp index 913fd71f..63e77412 100644 --- a/src/Clip.cpp +++ b/src/Clip.cpp @@ -925,13 +925,14 @@ void Clip::SetJsonValue(Json::Value root) { if (!existing_effect["type"].isNull()) { // Create instance of effect - e = EffectInfo().CreateEffect(existing_effect["type"].asString()); + if (e = EffectInfo().CreateEffect(existing_effect["type"].asString())) { - // Load Json into Effect - e->SetJsonValue(existing_effect); + // Load Json into Effect + e->SetJsonValue(existing_effect); - // Add Effect to Timeline - AddEffect(e); + // Add Effect to Timeline + AddEffect(e); + } } } } diff --git a/src/EffectInfo.cpp b/src/EffectInfo.cpp index 23bc9d02..f9e4c409 100644 --- a/src/EffectInfo.cpp +++ b/src/EffectInfo.cpp @@ -82,6 +82,7 @@ EffectBase* EffectInfo::CreateEffect(string effect_type) { else if (effect_type == "Wave") return new Wave(); + return NULL; } // Generate Json::JsonValue for this object diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 0b100050..ceccbe23 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -40,8 +40,8 @@ FFmpegReader::FFmpegReader(string path) current_video_frame(0), has_missing_frames(false), num_packets_since_video_frame(0), num_checks_since_final(0), packet(NULL) { // Initialize FFMpeg, and register all formats and codecs - av_register_all(); - avcodec_register_all(); + AV_REGISTER_ALL + AVCODEC_REGISTER_ALL // Init cache 
working_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * info.fps.ToDouble() * 2, info.width, info.height, info.sample_rate, info.channels); @@ -61,8 +61,8 @@ FFmpegReader::FFmpegReader(string path, bool inspect_reader) current_video_frame(0), has_missing_frames(false), num_packets_since_video_frame(0), num_checks_since_final(0), packet(NULL) { // Initialize FFMpeg, and register all formats and codecs - av_register_all(); - avcodec_register_all(); + AV_REGISTER_ALL + AVCODEC_REGISTER_ALL // Init cache working_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * info.fps.ToDouble() * 2, info.width, info.height, info.sample_rate, info.channels); @@ -974,7 +974,7 @@ void FFmpegReader::ProcessAudioPacket(int64_t requested_frame, int64_t target_fr int data_size = 0; // re-initialize buffer size (it gets changed in the avcodec_decode_audio2 method call) - int buf_size = AVCODEC_MAX_AUDIO_FRAME_SIZE + FF_INPUT_BUFFER_PADDING_SIZE; + int buf_size = AVCODEC_MAX_AUDIO_FRAME_SIZE + MY_INPUT_BUFFER_PADDING_SIZE; #pragma omp critical (ProcessAudioPacket) { #if IS_FFMPEG_3_2 @@ -1079,7 +1079,7 @@ void FFmpegReader::ProcessAudioPacket(int64_t requested_frame, int64_t target_fr // Allocate audio buffer - int16_t *audio_buf = new int16_t[AVCODEC_MAX_AUDIO_FRAME_SIZE + FF_INPUT_BUFFER_PADDING_SIZE]; + int16_t *audio_buf = new int16_t[AVCODEC_MAX_AUDIO_FRAME_SIZE + MY_INPUT_BUFFER_PADDING_SIZE]; ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (ReSample)", "packet_samples", packet_samples, "info.channels", info.channels, "info.sample_rate", info.sample_rate, "aCodecCtx->sample_fmt", AV_GET_SAMPLE_FORMAT(aStream, aCodecCtx), "AV_SAMPLE_FMT_S16", AV_SAMPLE_FMT_S16, "", -1); @@ -1089,11 +1089,11 @@ void FFmpegReader::ProcessAudioPacket(int64_t requested_frame, int64_t target_fr audio_converted->nb_samples = audio_frame->nb_samples; av_samples_alloc(audio_converted->data, audio_converted->linesize, info.channels, audio_frame->nb_samples, AV_SAMPLE_FMT_S16, 0); - 
AVAudioResampleContext *avr = NULL; + SWRCONTEXT *avr = NULL; int nb_samples = 0; // setup resample context - avr = avresample_alloc_context(); + avr = SWR_ALLOC(); av_opt_set_int(avr, "in_channel_layout", AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout, 0); av_opt_set_int(avr, "out_channel_layout", AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout, 0); av_opt_set_int(avr, "in_sample_fmt", AV_GET_SAMPLE_FORMAT(aStream, aCodecCtx), 0); @@ -1102,10 +1102,10 @@ void FFmpegReader::ProcessAudioPacket(int64_t requested_frame, int64_t target_fr av_opt_set_int(avr, "out_sample_rate", info.sample_rate, 0); av_opt_set_int(avr, "in_channels", info.channels, 0); av_opt_set_int(avr, "out_channels", info.channels, 0); - int r = avresample_open(avr); + int r = SWR_INIT(avr); // Convert audio samples - nb_samples = avresample_convert(avr, // audio resample context + nb_samples = SWR_CONVERT(avr, // audio resample context audio_converted->data, // output data pointers audio_converted->linesize[0], // output plane size, in bytes. 
(0 if unknown) audio_converted->nb_samples, // maximum number of samples that the output buffer can hold @@ -1117,8 +1117,8 @@ void FFmpegReader::ProcessAudioPacket(int64_t requested_frame, int64_t target_fr memcpy(audio_buf, audio_converted->data[0], audio_converted->nb_samples * av_get_bytes_per_sample(AV_SAMPLE_FMT_S16) * info.channels); // Deallocate resample buffer - avresample_close(avr); - avresample_free(&avr); + SWR_CLOSE(avr); + SWR_FREE(&avr); avr = NULL; // Free AVFrames @@ -1344,7 +1344,7 @@ void FFmpegReader::Seek(int64_t requested_frame) { seek_target = ConvertFrameToVideoPTS(requested_frame - buffer_amount); if (av_seek_frame(pFormatCtx, info.video_stream_index, seek_target, AVSEEK_FLAG_BACKWARD) < 0) { - fprintf(stderr, "%s: error while seeking video stream\n", pFormatCtx->filename); + fprintf(stderr, "%s: error while seeking video stream\n", pFormatCtx->AV_FILENAME); } else { // VIDEO SEEK @@ -1358,7 +1358,7 @@ void FFmpegReader::Seek(int64_t requested_frame) { seek_target = ConvertFrameToAudioPTS(requested_frame - buffer_amount); if (av_seek_frame(pFormatCtx, info.audio_stream_index, seek_target, AVSEEK_FLAG_BACKWARD) < 0) { - fprintf(stderr, "%s: error while seeking audio stream\n", pFormatCtx->filename); + fprintf(stderr, "%s: error while seeking audio stream\n", pFormatCtx->AV_FILENAME); } else { // AUDIO SEEK @@ -1853,6 +1853,8 @@ void FFmpegReader::CheckWorkingFrames(bool end_of_stream, int64_t requested_fram void FFmpegReader::CheckFPS() { check_fps = true; + AV_ALLOCATE_IMAGE(pFrame, AV_GET_CODEC_PIXEL_FORMAT(pStream, pCodecCtx), info.width, info.height); + int first_second_counter = 0; int second_second_counter = 0; int third_second_counter = 0; diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index ede07a43..ed4fc3fb 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -46,7 +46,7 @@ FFmpegWriter::FFmpegWriter(string path) : info.has_video = false; // Initialize FFMpeg, and register all formats and codecs - 
av_register_all(); + AV_REGISTER_ALL // auto detect format auto_detect_format(); @@ -299,7 +299,7 @@ void FFmpegWriter::SetOption(StreamType stream, string name, string value) /// Determine if codec name is valid bool FFmpegWriter::IsValidCodec(string codec_name) { // Initialize FFMpeg, and register all formats and codecs - av_register_all(); + AV_REGISTER_ALL // Find the codec (if any) if (avcodec_find_encoder_by_name(codec_name.c_str()) == NULL) @@ -342,7 +342,7 @@ void FFmpegWriter::WriteHeader() } // Force the output filename (which doesn't always happen for some reason) - snprintf(oc->filename, sizeof(oc->filename), "%s", path.c_str()); + snprintf(oc->AV_FILENAME, sizeof(oc->AV_FILENAME), "%s", path.c_str()); // Write the stream header, if any // TODO: add avoptions / parameters instead of NULL @@ -559,8 +559,10 @@ void FFmpegWriter::flush_encoders() { if (info.has_audio && audio_codec && AV_GET_CODEC_TYPE(audio_st) == AVMEDIA_TYPE_AUDIO && AV_GET_CODEC_ATTRIBUTES(audio_st, audio_codec)->frame_size <= 1) return; +#if (LIBAVFORMAT_VERSION_MAJOR < 58) if (info.has_video && video_codec && AV_GET_CODEC_TYPE(video_st) == AVMEDIA_TYPE_VIDEO && (oc->oformat->flags & AVFMT_RAWPICTURE) && AV_FIND_DECODER_CODEC_ID(video_st) == AV_CODEC_ID_RAWVIDEO) return; +#endif int error_code = 0; int stop_encoding = 1; @@ -751,14 +753,14 @@ void FFmpegWriter::close_audio(AVFormatContext *oc, AVStream *st) // Deallocate resample buffer if (avr) { - avresample_close(avr); - avresample_free(&avr); + SWR_CLOSE(avr); + SWR_FREE(&avr); avr = NULL; } if (avr_planar) { - avresample_close(avr_planar); - avresample_free(&avr_planar); + SWR_CLOSE(avr_planar); + SWR_FREE(&avr_planar); avr_planar = NULL; } } @@ -898,7 +900,11 @@ AVStream* FFmpegWriter::add_audio_stream() // some formats want stream headers to be separate if (oc->oformat->flags & AVFMT_GLOBALHEADER) +#if (LIBAVCODEC_VERSION_MAJOR >= 57) + c->flags |= AV_CODEC_FLAG_GLOBAL_HEADER; +#else c->flags |= CODEC_FLAG_GLOBAL_HEADER; 
+#endif AV_COPY_PARAMS_FROM_CONTEXT(st, c); ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::add_audio_stream", "c->codec_id", c->codec_id, "c->bit_rate", c->bit_rate, "c->channels", c->channels, "c->sample_fmt", c->sample_fmt, "c->channel_layout", c->channel_layout, "c->sample_rate", c->sample_rate); @@ -970,7 +976,11 @@ AVStream* FFmpegWriter::add_video_stream() c->mb_decision = 2; // some formats want stream headers to be separate if (oc->oformat->flags & AVFMT_GLOBALHEADER) +#if (LIBAVCODEC_VERSION_MAJOR >= 57) + c->flags |= AV_CODEC_FLAG_GLOBAL_HEADER; +#else c->flags |= CODEC_FLAG_GLOBAL_HEADER; +#endif // Find all supported pixel formats for this codec const PixelFormat* supported_pixel_formats = codec->pix_fmts; @@ -987,10 +997,12 @@ AVStream* FFmpegWriter::add_video_stream() // Raw video should use RGB24 c->pix_fmt = PIX_FMT_RGB24; +#if (LIBAVFORMAT_VERSION_MAJOR < 58) if (strcmp(fmt->name, "gif") != 0) // If not GIF format, skip the encoding process // Set raw picture flag (so we don't encode this video) oc->oformat->flags |= AVFMT_RAWPICTURE; +#endif } else { // Set the default codec c->pix_fmt = PIX_FMT_YUV420P; @@ -998,7 +1010,11 @@ AVStream* FFmpegWriter::add_video_stream() } AV_COPY_PARAMS_FROM_CONTEXT(st, c); +#if (LIBAVFORMAT_VERSION_MAJOR < 58) ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::add_video_stream (" + (string)fmt->name + " : " + (string)av_get_pix_fmt_name(c->pix_fmt) + ")", "c->codec_id", c->codec_id, "c->bit_rate", c->bit_rate, "c->pix_fmt", c->pix_fmt, "oc->oformat->flags", oc->oformat->flags, "AVFMT_RAWPICTURE", AVFMT_RAWPICTURE, "", -1); +#else + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::add_video_stream (" + (string)fmt->name + " : " + (string)av_get_pix_fmt_name(c->pix_fmt) + ")", "c->codec_id", c->codec_id, "c->bit_rate", c->bit_rate, "c->pix_fmt", c->pix_fmt, "oc->oformat->flags", oc->oformat->flags, "", -1, "", -1); +#endif return st; } @@ -1073,7 +1089,7 @@ void 
FFmpegWriter::open_audio(AVFormatContext *oc, AVStream *st) av_dict_set(&st->metadata, iter->first.c_str(), iter->second.c_str(), 0); } - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::open_audio", "audio_codec->thread_count", audio_codec->thread_count, "audio_input_frame_size", audio_input_frame_size, "buffer_size", AVCODEC_MAX_AUDIO_FRAME_SIZE + FF_INPUT_BUFFER_PADDING_SIZE, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::open_audio", "audio_codec->thread_count", audio_codec->thread_count, "audio_input_frame_size", audio_input_frame_size, "buffer_size", AVCODEC_MAX_AUDIO_FRAME_SIZE + MY_INPUT_BUFFER_PADDING_SIZE, "", -1, "", -1, "", -1); } @@ -1239,7 +1255,7 @@ void FFmpegWriter::write_audio_packets(bool final) // setup resample context if (!avr) { - avr = avresample_alloc_context(); + avr = SWR_ALLOC(); av_opt_set_int(avr, "in_channel_layout", channel_layout_in_frame, 0); av_opt_set_int(avr, "out_channel_layout", info.channel_layout, 0); av_opt_set_int(avr, "in_sample_fmt", AV_SAMPLE_FMT_S16, 0); @@ -1248,12 +1264,12 @@ void FFmpegWriter::write_audio_packets(bool final) av_opt_set_int(avr, "out_sample_rate", info.sample_rate, 0); av_opt_set_int(avr, "in_channels", channels_in_frame, 0); av_opt_set_int(avr, "out_channels", info.channels, 0); - avresample_open(avr); + SWR_INIT(avr); } int nb_samples = 0; // Convert audio samples - nb_samples = avresample_convert(avr, // audio resample context + nb_samples = SWR_CONVERT(avr, // audio resample context audio_converted->data, // output data pointers audio_converted->linesize[0], // output plane size, in bytes. 
(0 if unknown) audio_converted->nb_samples, // maximum number of samples that the output buffer can hold @@ -1314,7 +1330,7 @@ void FFmpegWriter::write_audio_packets(bool final) // setup resample context if (!avr_planar) { - avr_planar = avresample_alloc_context(); + avr_planar = SWR_ALLOC(); av_opt_set_int(avr_planar, "in_channel_layout", info.channel_layout, 0); av_opt_set_int(avr_planar, "out_channel_layout", info.channel_layout, 0); av_opt_set_int(avr_planar, "in_sample_fmt", output_sample_fmt, 0); @@ -1323,7 +1339,7 @@ void FFmpegWriter::write_audio_packets(bool final) av_opt_set_int(avr_planar, "out_sample_rate", info.sample_rate, 0); av_opt_set_int(avr_planar, "in_channels", info.channels, 0); av_opt_set_int(avr_planar, "out_channels", info.channels, 0); - avresample_open(avr_planar); + SWR_INIT(avr_planar); } // Create input frame (and allocate arrays) @@ -1346,7 +1362,7 @@ void FFmpegWriter::write_audio_packets(bool final) av_samples_alloc(frame_final->data, frame_final->linesize, info.channels, frame_final->nb_samples, audio_codec->sample_fmt, 0); // Convert audio samples - int nb_samples = avresample_convert(avr_planar, // audio resample context + int nb_samples = SWR_CONVERT(avr_planar, // audio resample context frame_final->data, // output data pointers frame_final->linesize[0], // output plane size, in bytes. 
(0 if unknown) frame_final->nb_samples, // maximum number of samples that the output buffer can hold @@ -1577,6 +1593,9 @@ void FFmpegWriter::process_video_packet(std::shared_ptr frame) // write video frame bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* frame_final) { +#if (LIBAVFORMAT_VERSION_MAJOR >= 58) + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet", "frame->number", frame->number, "oc->oformat->flags", oc->oformat->flags, "", -1, "", -1, "", -1, "", -1); +#else ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet", "frame->number", frame->number, "oc->oformat->flags & AVFMT_RAWPICTURE", oc->oformat->flags & AVFMT_RAWPICTURE, "", -1, "", -1, "", -1, "", -1); if (oc->oformat->flags & AVFMT_RAWPICTURE) { @@ -1604,7 +1623,9 @@ bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra // Deallocate packet AV_FREE_PACKET(&pkt); - } else { + } else +#endif + { AVPacket pkt; av_init_packet(&pkt); diff --git a/src/FrameMapper.cpp b/src/FrameMapper.cpp index f49cbc4d..c4c68f5a 100644 --- a/src/FrameMapper.cpp +++ b/src/FrameMapper.cpp @@ -650,8 +650,8 @@ void FrameMapper::Close() // Deallocate resample buffer if (avr) { - avresample_close(avr); - avresample_free(&avr); + SWR_CLOSE(avr); + SWR_FREE(&avr); avr = NULL; } } @@ -741,8 +741,8 @@ void FrameMapper::ChangeMapping(Fraction target_fps, PulldownType target_pulldow // Deallocate resample buffer if (avr) { - avresample_close(avr); - avresample_free(&avr); + SWR_CLOSE(avr); + SWR_FREE(&avr); avr = NULL; } @@ -817,7 +817,7 @@ void FrameMapper::ResampleMappedAudio(std::shared_ptr frame, int64_t orig // setup resample context if (!avr) { - avr = avresample_alloc_context(); + avr = SWR_ALLOC(); av_opt_set_int(avr, "in_channel_layout", channel_layout_in_frame, 0); av_opt_set_int(avr, "out_channel_layout", info.channel_layout, 0); av_opt_set_int(avr, "in_sample_fmt", AV_SAMPLE_FMT_S16, 0); @@ -826,11 +826,11 @@ void 
FrameMapper::ResampleMappedAudio(std::shared_ptr frame, int64_t orig av_opt_set_int(avr, "out_sample_rate", info.sample_rate, 0); av_opt_set_int(avr, "in_channels", channels_in_frame, 0); av_opt_set_int(avr, "out_channels", info.channels, 0); - avresample_open(avr); + SWR_INIT(avr); } // Convert audio samples - nb_samples = avresample_convert(avr, // audio resample context + nb_samples = SWR_CONVERT(avr, // audio resample context audio_converted->data, // output data pointers audio_converted->linesize[0], // output plane size, in bytes. (0 if unknown) audio_converted->nb_samples, // maximum number of samples that the output buffer can hold diff --git a/src/Timeline.cpp b/src/Timeline.cpp index d042aeeb..35b91283 100644 --- a/src/Timeline.cpp +++ b/src/Timeline.cpp @@ -1000,13 +1000,14 @@ void Timeline::SetJsonValue(Json::Value root) { if (!existing_effect["type"].isNull()) { // Create instance of effect - e = EffectInfo().CreateEffect(existing_effect["type"].asString()); + if (e = EffectInfo().CreateEffect(existing_effect["type"].asString())) { - // Load Json into Effect - e->SetJsonValue(existing_effect); + // Load Json into Effect + e->SetJsonValue(existing_effect); - // Add Effect to Timeline - AddEffect(e); + // Add Effect to Timeline + AddEffect(e); + } } } } @@ -1270,13 +1271,14 @@ void Timeline::apply_json_to_effects(Json::Value change, EffectBase* existing_ef EffectBase *e = NULL; // Init the matching effect object - e = EffectInfo().CreateEffect(effect_type); + if (e = EffectInfo().CreateEffect(effect_type)) { - // Load Json into Effect - e->SetJsonValue(change["value"]); + // Load Json into Effect + e->SetJsonValue(change["value"]); - // Add Effect to Timeline - AddEffect(e); + // Add Effect to Timeline + AddEffect(e); + } } else if (change_type == "update") { diff --git a/tests/ReaderBase_Tests.cpp b/tests/ReaderBase_Tests.cpp index 9d435304..70ca90d5 100644 --- a/tests/ReaderBase_Tests.cpp +++ b/tests/ReaderBase_Tests.cpp @@ -44,9 +44,9 @@ 
TEST(ReaderBase_Derived_Class) std::shared_ptr GetFrame(int64_t number) { std::shared_ptr f(new Frame()); return f; } void Close() { }; void Open() { }; - string Json() { }; + string Json() { return NULL; }; void SetJson(string value) { }; - Json::Value JsonValue() { }; + Json::Value JsonValue() { return (int) NULL; }; void SetJsonValue(Json::Value root) { }; bool IsOpen() { return true; }; string Name() { return "TestReader"; }; From 8216795c33a4297cb4728a0a53c88a6fc06c3ac2 Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Sun, 12 Aug 2018 00:15:23 -0500 Subject: [PATCH 002/109] Adding environment checking to enable/disable omp taskwait after each video/audio frame is processed. This is experimental for some users with crashes. --- include/FFmpegReader.h | 1 + include/OpenMPUtilities.h | 22 ++++++++++++++++++++-- src/FFmpegReader.cpp | 18 ++++++++++++++---- 3 files changed, 35 insertions(+), 6 deletions(-) diff --git a/include/FFmpegReader.h b/include/FFmpegReader.h index 6072756a..e2c4863a 100644 --- a/include/FFmpegReader.h +++ b/include/FFmpegReader.h @@ -105,6 +105,7 @@ namespace openshot bool check_interlace; bool check_fps; bool has_missing_frames; + bool use_omp_threads; CacheMemory working_cache; CacheMemory missing_frames; diff --git a/include/OpenMPUtilities.h b/include/OpenMPUtilities.h index 8a95a950..c0f5597b 100644 --- a/include/OpenMPUtilities.h +++ b/include/OpenMPUtilities.h @@ -29,8 +29,26 @@ #define OPENSHOT_OPENMP_UTILITIES_H #include +#include +#include - // Calculate the # of OpenMP Threads to allow - #define OPEN_MP_NUM_PROCESSORS omp_get_num_procs() +// Calculate the # of OpenMP Threads to allow +#define OPEN_MP_NUM_PROCESSORS omp_get_num_procs() + +using namespace std; + +namespace openshot { + + // Check if OS2_OMP_THREADS environment variable is present, and return + // if multiple threads should be used with OMP + static bool IsOMPEnabled() { + char* OS2_OMP_THREADS = getenv("OS2_OMP_THREADS"); + if (OS2_OMP_THREADS != NULL && 
strcmp(OS2_OMP_THREADS, "0") == 0) + return false; + else + return true; + } + +} #endif diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index ceccbe23..736e95ee 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -37,7 +37,8 @@ FFmpegReader::FFmpegReader(string path) audio_pts_offset(99999), video_pts_offset(99999), path(path), is_video_seek(true), check_interlace(false), check_fps(false), enable_seek(true), is_open(false), seek_audio_frame_found(0), seek_video_frame_found(0), prev_samples(0), prev_pts(0), pts_total(0), pts_counter(0), is_duration_known(false), largest_frame_processed(0), - current_video_frame(0), has_missing_frames(false), num_packets_since_video_frame(0), num_checks_since_final(0), packet(NULL) { + current_video_frame(0), has_missing_frames(false), num_packets_since_video_frame(0), num_checks_since_final(0), + packet(NULL), use_omp_threads(true) { // Initialize FFMpeg, and register all formats and codecs AV_REGISTER_ALL @@ -58,7 +59,8 @@ FFmpegReader::FFmpegReader(string path, bool inspect_reader) audio_pts_offset(99999), video_pts_offset(99999), path(path), is_video_seek(true), check_interlace(false), check_fps(false), enable_seek(true), is_open(false), seek_audio_frame_found(0), seek_video_frame_found(0), prev_samples(0), prev_pts(0), pts_total(0), pts_counter(0), is_duration_known(false), largest_frame_processed(0), - current_video_frame(0), has_missing_frames(false), num_packets_since_video_frame(0), num_checks_since_final(0), packet(NULL) { + current_video_frame(0), has_missing_frames(false), num_packets_since_video_frame(0), num_checks_since_final(0), + packet(NULL), use_omp_threads(true) { // Initialize FFMpeg, and register all formats and codecs AV_REGISTER_ALL @@ -227,6 +229,9 @@ void FFmpegReader::Open() missing_frames.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels); final_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, 
info.sample_rate, info.channels); + // Initialize OMP threading support + use_omp_threads = openshot::IsOMPEnabled(); + // Mark as "open" is_open = true; } @@ -633,8 +638,13 @@ std::shared_ptr FFmpegReader::ReadStream(int64_t requested_frame) ProcessAudioPacket(requested_frame, location.frame, location.sample_start); } + if (!use_omp_threads) { + // Wait on each OMP task to complete before moving on to the next one. This slows + // down processing considerably, but might be more stable on some systems. + #pragma omp taskwait + } + // Check if working frames are 'finished' - bool is_cache_found = false; if (!is_seeking) { // Check for any missing frames CheckMissingFrame(requested_frame); @@ -644,7 +654,7 @@ std::shared_ptr FFmpegReader::ReadStream(int64_t requested_frame) } // Check if requested 'final' frame is available - is_cache_found = (final_cache.GetFrame(requested_frame) != NULL); + bool is_cache_found = (final_cache.GetFrame(requested_frame) != NULL); // Increment frames processed packets_processed++; From 6b5e2d427bc7e026b0d0037986c25efaa4de0ae8 Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Sun, 12 Aug 2018 00:36:03 -0500 Subject: [PATCH 003/109] Moving `omp taskwait` to after the ProcessVideoPacket() method, since that is the only place it is useful. --- src/FFmpegReader.cpp | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 736e95ee..adf957f1 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -607,6 +607,12 @@ std::shared_ptr FFmpegReader::ReadStream(int64_t requested_frame) // Process Video Packet ProcessVideoPacket(requested_frame); + + if (!use_omp_threads) { + // Wait on each OMP task to complete before moving on to the next one. This slows + // down processing considerably, but might be more stable on some systems. 
+ #pragma omp taskwait + } } } @@ -638,12 +644,6 @@ std::shared_ptr FFmpegReader::ReadStream(int64_t requested_frame) ProcessAudioPacket(requested_frame, location.frame, location.sample_start); } - if (!use_omp_threads) { - // Wait on each OMP task to complete before moving on to the next one. This slows - // down processing considerably, but might be more stable on some systems. - #pragma omp taskwait - } - // Check if working frames are 'finished' if (!is_seeking) { // Check for any missing frames From 340803e31eb3385d32a0b8de5e24f227c2b07a1b Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Fri, 31 Aug 2018 21:36:23 -0700 Subject: [PATCH 004/109] Initial rudimentary support for hardware acceleration (encode and decode) Only Linux vaapi for now --- include/FFmpegReader.h | 3 + include/FFmpegUtilities.h | 3 + src/FFmpegReader.cpp | 132 ++++++++++++++++++++++++++++- src/FFmpegWriter.cpp | 171 +++++++++++++++++++++++++++++++++++++- 4 files changed, 301 insertions(+), 8 deletions(-) diff --git a/include/FFmpegReader.h b/include/FFmpegReader.h index e2c4863a..fcc995ae 100644 --- a/include/FFmpegReader.h +++ b/include/FFmpegReader.h @@ -97,6 +97,9 @@ namespace openshot AVFormatContext *pFormatCtx; int i, videoStream, audioStream; AVCodecContext *pCodecCtx, *aCodecCtx; + #if (LIBAVFORMAT_VERSION_MAJOR >= 57) + AVBufferRef *hw_device_ctx = NULL; //PM + #endif AVStream *pStream, *aStream; AVPacket *packet; AVFrame *pFrame; diff --git a/include/FFmpegUtilities.h b/include/FFmpegUtilities.h index 346da541..0cc08f52 100644 --- a/include/FFmpegUtilities.h +++ b/include/FFmpegUtilities.h @@ -42,6 +42,9 @@ extern "C" { #include #include + #if (LIBAVFORMAT_VERSION_MAJOR >= 57) + #include //PM + #endif #include // Change this to the first version swrescale works #if (LIBAVFORMAT_VERSION_MAJOR >= 57) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index adf957f1..86b7cb0d 100644 --- a/src/FFmpegReader.cpp +++ 
b/src/FFmpegReader.cpp @@ -32,6 +32,9 @@ using namespace openshot; +int hw_de_on = 1; // Is set in UI +int hw_de_supported = 0; // Is set by FFmpegReader + FFmpegReader::FFmpegReader(string path) : last_frame(0), is_seeking(0), seeking_pts(0), seeking_frame(0), seek_count(0), audio_pts_offset(99999), video_pts_offset(99999), path(path), is_video_seek(true), check_interlace(false), @@ -103,6 +106,45 @@ bool AudioLocation::is_near(AudioLocation location, int samples_per_frame, int64 return false; } +#if IS_FFMPEG_3_2 +#if defined(__linux__) +#pragma message "You are compiling with experimental hardware decode" + +static enum AVPixelFormat get_vaapi_format(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) +{ + const enum AVPixelFormat *p; + + for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { + if (*p == AV_PIX_FMT_VAAPI) + return *p; + } + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using VA-API.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + hw_de_supported = 0; + return AV_PIX_FMT_NONE; +} + +int is_hardware_decode_supported(int codecid) +{ + int ret; + switch (codecid) { + case AV_CODEC_ID_H264: + case AV_CODEC_ID_MPEG2VIDEO: + case AV_CODEC_ID_VC1: + case AV_CODEC_ID_WMV1: + case AV_CODEC_ID_WMV2: + case AV_CODEC_ID_WMV3: + ret = 1; + break; + default : + ret = 0; + break; + } + return ret; +} + +#endif +#endif + void FFmpegReader::Open() { // Open reader if not already open @@ -111,6 +153,14 @@ void FFmpegReader::Open() // Initialize format context pFormatCtx = NULL; + char * val = getenv( "OS2_DECODE_HW" ); + if (val == NULL) { + hw_de_on = 0; + } + else{ + hw_de_on = (val[0] == '1')? 
1 : 0; + } + // Open video file if (avformat_open_input(&pFormatCtx, path.c_str(), NULL, NULL) != 0) throw InvalidFile("File could not be opened.", path); @@ -151,7 +201,11 @@ void FFmpegReader::Open() // Get codec and codec context from stream AVCodec *pCodec = avcodec_find_decoder(codecId); pCodecCtx = AV_GET_CODEC_CONTEXT(pStream, pCodec); - + #if IS_FFMPEG_3_2 + #if defined(__linux__) + hw_de_supported = is_hardware_decode_supported(pCodecCtx->codec_id); + #endif + #endif // Set number of threads equal to number of processors (not to exceed 16) pCodecCtx->thread_count = min(OPEN_MP_NUM_PROCESSORS, 16); @@ -163,6 +217,23 @@ void FFmpegReader::Open() AVDictionary *opts = NULL; av_dict_set(&opts, "strict", "experimental", 0); + #if IS_FFMPEG_3_2 + #if defined(__linux__) + if (hw_de_on & hw_de_supported) { + // Open Hardware Acceleration + hw_device_ctx = NULL; + pCodecCtx->get_format = get_vaapi_format; + if (av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_VAAPI, NULL, NULL, 0) >= 0) { + if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { + throw InvalidCodec("Hardware device reference create failed.", path); + } + } + else { + throw InvalidCodec("Hardware device create failed.", path); + } + } + #endif + #endif // Open video codec if (avcodec_open2(pCodecCtx, pCodec, &opts) < 0) throw InvalidCodec("A video codec was found, but could not be opened.", path); @@ -252,6 +323,16 @@ void FFmpegReader::Close() { avcodec_flush_buffers(pCodecCtx); AV_FREE_CONTEXT(pCodecCtx); + #if IS_FFMPEG_3_2 + #if defined(__linux__) + if (hw_de_on) { + if (hw_device_ctx) { + av_buffer_unref(&hw_device_ctx); + hw_device_ctx = NULL; + } + } + #endif + #endif } if (info.has_audio) { @@ -703,9 +784,13 @@ std::shared_ptr FFmpegReader::ReadStream(int64_t requested_frame) int FFmpegReader::GetNextPacket() { int found_packet = 0; - AVPacket *next_packet = new AVPacket(); + AVPacket *next_packet; + #pragma omp critical(getnextpacket) + { + next_packet = new AVPacket(); 
found_packet = av_read_frame(pFormatCtx, next_packet); + if (packet) { // Remove previous packet before getting next one RemoveAVPacket(packet); @@ -717,7 +802,7 @@ int FFmpegReader::GetNextPacket() // Update current packet pointer packet = next_packet; } - +} // Return if packet was found (or error number) return found_packet; } @@ -734,17 +819,51 @@ bool FFmpegReader::GetAVFrame() { #if IS_FFMPEG_3_2 frameFinished = 0; + ret = avcodec_send_packet(pCodecCtx, packet); + if (ret < 0 || ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) { ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Packet not sent)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); } else { + AVFrame *next_frame2; + #if defined(__linux__) + if (hw_de_on && hw_de_supported) { + next_frame2 = AV_ALLOCATE_FRAME(); + } + else + #endif + { + next_frame2 = next_frame; + } pFrame = new AVFrame(); while (ret >= 0) { - ret = avcodec_receive_frame(pCodecCtx, next_frame); + ret = avcodec_receive_frame(pCodecCtx, next_frame2); if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) { break; } + if (ret != 0) { + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (invalid return frame received)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + } + #if defined(__linux__) + if (hw_de_on && hw_de_supported) { + int err; + if (next_frame2->format == AV_PIX_FMT_VAAPI) { + next_frame->format = AV_PIX_FMT_YUV420P; + if ((err = av_hwframe_transfer_data(next_frame,next_frame2,0)) < 0) { + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Failed to transfer data to output frame)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + } + if ((err = av_frame_copy_props(next_frame,next_frame2)) < 0) { + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Failed to copy props to output frame)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + } + } + } + else + #endif + { // No hardware acceleration used -> no copy from GPU memory needed + next_frame = 
next_frame2; + } + //} // TODO also handle possible further frames // Use only the first frame like avcodec_decode_video2 if (frameFinished == 0 ) { @@ -759,6 +878,11 @@ bool FFmpegReader::GetAVFrame() } } } + #if defined(__linux__) + if (hw_de_on && hw_de_supported) { + AV_FREE_FRAME(&next_frame2); + } + #endif } #else avcodec_decode_video2(pCodecCtx, next_frame, &frameFinished, packet); diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index ed4fc3fb..a62e8287 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -32,6 +32,49 @@ using namespace openshot; +#if IS_FFMPEG_3_2 +int hw_en_on = 1; // Is set in UI +int hw_en_supported = 0; // Is set by FFmpegWriter +static AVBufferRef *hw_device_ctx = NULL; +AVFrame *hw_frame = NULL; + +static int set_hwframe_ctx(AVCodecContext *ctx, AVBufferRef *hw_device_ctx, int64_t width, int64_t height) +{ + AVBufferRef *hw_frames_ref; + AVHWFramesContext *frames_ctx = NULL; + int err = 0; + + if (!(hw_frames_ref = av_hwframe_ctx_alloc(hw_device_ctx))) { + fprintf(stderr, "Failed to create VAAPI frame context.\n"); + return -1; + } + frames_ctx = (AVHWFramesContext *)(hw_frames_ref->data); + frames_ctx->format = AV_PIX_FMT_VAAPI; + frames_ctx->sw_format = AV_PIX_FMT_NV12; + frames_ctx->width = width; + frames_ctx->height = height; + frames_ctx->initial_pool_size = 20; + if ((err = av_hwframe_ctx_init(hw_frames_ref)) < 0) { + fprintf(stderr, "Failed to initialize VAAPI frame context." 
+ "Error code: %s\n",av_err2str(err)); + av_buffer_unref(&hw_frames_ref); + return err; + } + ctx->hw_frames_ctx = av_buffer_ref(hw_frames_ref); + if (!ctx->hw_frames_ctx) + err = AVERROR(ENOMEM); + + av_buffer_unref(&hw_frames_ref); + return err; +} +#endif + +#if IS_FFMPEG_3_2 +#if defined(__linux__) +#pragma message "You are compiling with experimental hardware encode" +#endif +#endif + FFmpegWriter::FFmpegWriter(string path) : path(path), fmt(NULL), oc(NULL), audio_st(NULL), video_st(NULL), audio_pts(0), video_pts(0), samples(NULL), audio_outbuf(NULL), audio_outbuf_size(0), audio_input_frame_size(0), audio_input_position(0), @@ -116,7 +159,26 @@ void FFmpegWriter::SetVideoOptions(bool has_video, string codec, Fraction fps, i // Set the video options if (codec.length() > 0) { - AVCodec *new_codec = avcodec_find_encoder_by_name(codec.c_str()); + AVCodec *new_codec; + // Check if the codec selected is a hardware accelerated codec + #if IS_FFMPEG_3_2 + #if defined(__linux__) + if ( (strcmp(codec.c_str(),"h264_vaapi") == 0)) { + new_codec = avcodec_find_encoder_by_name(codec.c_str()); + hw_en_on = 1; + hw_en_supported = 1; + } + else { + new_codec = avcodec_find_encoder_by_name(codec.c_str()); + hw_en_on = 0; + hw_en_supported = 0; + } + #else // is FFmpeg 3 but not linux + new_codec = avcodec_find_encoder_by_name(codec.c_str()); + #endif //__linux__ + #else // not ffmpeg 3 + new_codec = avcodec_find_encoder_by_name(codec.c_str()); + #endif //IS_FFMPEG_3_2 if (new_codec == NULL) throw InvalidCodec("A valid video codec could not be found for this file.", path); else { @@ -506,6 +568,7 @@ void FFmpegWriter::write_queued_frames() is_writing = false; } // end omp single + } // end omp parallel // Raise exception from main thread @@ -735,6 +798,16 @@ void FFmpegWriter::close_video(AVFormatContext *oc, AVStream *st) { AV_FREE_CONTEXT(video_codec); video_codec = NULL; + #if IS_FFMPEG_3_2 + #if defined(__linux__) + if (hw_en_on && hw_en_supported) { + if (hw_device_ctx) { + 
av_buffer_unref(&hw_device_ctx); + hw_device_ctx = NULL; + } + } + #endif + #endif } // Close the audio codec @@ -1102,6 +1175,23 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) // Set number of threads equal to number of processors (not to exceed 16) video_codec->thread_count = min(OPEN_MP_NUM_PROCESSORS, 16); + #if IS_FFMPEG_3_2 + #if defined(__linux__) + if (hw_en_on && hw_en_supported) { + // Use the hw device given in the environment variable HW_DEVICE_SET or the default if not set + char *dev_hw = getenv( "HW_DEVICE_SET" ); + // Check if it is there and writable + if( dev_hw != NULL && access( dev_hw, W_OK ) == -1 ) { + dev_hw = NULL; // use default + } + if (av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_VAAPI, + dev_hw, NULL, 0) < 0) { + cerr << "FFmpegWriter::open_video : Codec name: " << info.vcodec.c_str() << " ERROR creating\n"; + throw InvalidCodec("Could not create hwdevice", path); + } + } + #endif + #endif /* find the video encoder */ codec = avcodec_find_encoder_by_name(info.vcodec.c_str()); if (!codec) @@ -1117,6 +1207,24 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) AVDictionary *opts = NULL; av_dict_set(&opts, "strict", "experimental", 0); + #if IS_FFMPEG_3_2 + #if defined(__linux__) + if (hw_en_on && hw_en_supported) { + video_codec->max_b_frames = 0; // At least this GPU doesn't support b-frames + video_codec->pix_fmt = AV_PIX_FMT_VAAPI; + video_codec->profile = FF_PROFILE_H264_BASELINE | FF_PROFILE_H264_CONSTRAINED; + av_opt_set(video_codec->priv_data,"preset","slow",0); + av_opt_set(video_codec->priv_data,"tune","zerolatency",0); + av_opt_set(video_codec->priv_data, "vprofile", "baseline", AV_OPT_SEARCH_CHILDREN); + // set hw_frames_ctx for encoder's AVCodecContext + int err; + if ((err = set_hwframe_ctx(video_codec, hw_device_ctx, info.width, info.height)) < 0) { + fprintf(stderr, "Failed to set hwframe context.\n"); + } + } + #endif + #endif + /* open the codec */ if 
(avcodec_open2(video_codec, codec, &opts) < 0) throw InvalidCodec("Could not open codec", path); @@ -1566,7 +1674,15 @@ void FFmpegWriter::process_video_packet(std::shared_ptr frame) // Init AVFrame for source image & final (converted image) frame_source = allocate_avframe(PIX_FMT_RGBA, source_image_width, source_image_height, &bytes_source, (uint8_t*) pixels); #if IS_FFMPEG_3_2 - AVFrame *frame_final = allocate_avframe((AVPixelFormat)(video_st->codecpar->format), info.width, info.height, &bytes_final, NULL); + AVFrame *frame_final; + #if defined(__linux__) + if (hw_en_on && hw_en_supported) { + frame_final = allocate_avframe(AV_PIX_FMT_NV12, info.width, info.height, &bytes_final, NULL); + } else + #endif + { + frame_final = allocate_avframe((AVPixelFormat)(video_st->codecpar->format), info.width, info.height, &bytes_final, NULL); + } #else AVFrame *frame_final = allocate_avframe(video_codec->pix_fmt, info.width, info.height, &bytes_final, NULL); #endif @@ -1641,14 +1757,41 @@ bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra // Assign the initial AVFrame PTS from the frame counter frame_final->pts = write_video_count; - + #if IS_FFMPEG_3_2 + #if defined(__linux__) + if (hw_en_on && hw_en_supported) { + if (!(hw_frame = av_frame_alloc())) { + fprintf(stderr, "Error code: av_hwframe_alloc\n"); + } + if (av_hwframe_get_buffer(video_codec->hw_frames_ctx, hw_frame, 0) < 0) { + fprintf(stderr, "Error code: av_hwframe_get_buffer\n"); + } + if (!hw_frame->hw_frames_ctx) { + fprintf(stderr, "Error hw_frames_ctx.\n"); + } + hw_frame->format = AV_PIX_FMT_NV12; + if ( av_hwframe_transfer_data(hw_frame, frame_final, 0) < 0) { + fprintf(stderr, "Error while transferring frame data to surface.\n"); + } + av_frame_copy_props(hw_frame, frame_final); + } + #endif + #endif /* encode the image */ int got_packet_ptr = 0; int error_code = 0; #if IS_FFMPEG_3_2 // Write video packet (latest version of FFmpeg) int frameFinished = 0; - int ret = 
avcodec_send_frame(video_codec, frame_final); + int ret; + #if defined(__linux__) + #if IS_FFMPEG_3_2 + if (hw_en_on && hw_en_supported) { + ret = avcodec_send_frame(video_codec, hw_frame); //hw_frame!!! + } else + #endif + #endif + ret = avcodec_send_frame(video_codec, frame_final); error_code = ret; if (ret < 0 ) { ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet (Frame not sent)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); @@ -1709,6 +1852,7 @@ bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra //pkt.pts = pkt.dts = write_video_count; // set the timestamp +// av_packet_rescale_ts(&pkt, video_st->time_base,video_codec->time_base); if (pkt.pts != AV_NOPTS_VALUE) pkt.pts = av_rescale_q(pkt.pts, video_codec->time_base, video_st->time_base); if (pkt.dts != AV_NOPTS_VALUE) @@ -1732,6 +1876,16 @@ bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra // Deallocate packet AV_FREE_PACKET(&pkt); + #if IS_FFMPEG_3_2 + #if defined(__linux__) + if (hw_en_on && hw_en_supported) { + if (hw_frame) { + av_frame_free(&hw_frame); + hw_frame = NULL; + } + } + #endif + #endif } // Success @@ -1752,7 +1906,16 @@ void FFmpegWriter::InitScalers(int source_width, int source_height) for (int x = 0; x < num_of_rescalers; x++) { // Init the software scaler from FFMpeg + #if IS_FFMPEG_3_2 + #if defined(__linux__) + if (hw_en_on && hw_en_supported) { + img_convert_ctx = sws_getContext(source_width, source_height, PIX_FMT_RGBA, info.width, info.height, AV_PIX_FMT_NV12, SWS_BILINEAR, NULL, NULL, NULL); + } else + #endif + #endif + { img_convert_ctx = sws_getContext(source_width, source_height, PIX_FMT_RGBA, info.width, info.height, AV_GET_CODEC_PIXEL_FORMAT(video_st, video_st->codec), SWS_BILINEAR, NULL, NULL, NULL); + } // Add rescaler to vector image_rescalers.push_back(img_convert_ctx); From 384b6e0bc3e08435c641eab38a237f0c0b35039b Mon Sep 17 00:00:00 2001 From: eisneinechse 
<42617957+eisneinechse@users.noreply.github.com> Date: Fri, 31 Aug 2018 21:50:23 -0700 Subject: [PATCH 005/109] Set limit of threads for OpenMP and ffmpeg by setting the environment variables LIMIT_OMP_THREADS and LIMIT_FF_THREADS If they are not set the normal values are used --- include/OpenMPUtilities.h | 3 ++- src/FFmpegReader.cpp | 4 ++-- src/FFmpegWriter.cpp | 4 ++-- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/include/OpenMPUtilities.h b/include/OpenMPUtilities.h index c0f5597b..6ebde218 100644 --- a/include/OpenMPUtilities.h +++ b/include/OpenMPUtilities.h @@ -33,7 +33,8 @@ #include // Calculate the # of OpenMP Threads to allow -#define OPEN_MP_NUM_PROCESSORS omp_get_num_procs() +#define OPEN_MP_NUM_PROCESSORS ((getenv( "LIMIT_OMP_THREADS" )==NULL) ? omp_get_num_procs() : (min(omp_get_num_procs(), max(2, atoi(getenv( "LIMIT_OMP_THREADS" ))) ))) +#define FF_NUM_PROCESSORS ((getenv( "LIMIT_FF_THREADS" )==NULL) ? omp_get_num_procs() : (min(omp_get_num_procs(), max(2, atoi(getenv( "LIMIT_FF_THREADS" ))) ))) using namespace std; diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 86b7cb0d..3386d8ef 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -207,7 +207,7 @@ void FFmpegReader::Open() #endif #endif // Set number of threads equal to number of processors (not to exceed 16) - pCodecCtx->thread_count = min(OPEN_MP_NUM_PROCESSORS, 16); + pCodecCtx->thread_count = min(FF_NUM_PROCESSORS, 16); if (pCodec == NULL) { throw InvalidCodec("A valid video codec could not be found for this file.", path); @@ -262,7 +262,7 @@ void FFmpegReader::Open() aCodecCtx = AV_GET_CODEC_CONTEXT(aStream, aCodec); // Set number of threads equal to number of processors (not to exceed 16) - aCodecCtx->thread_count = min(OPEN_MP_NUM_PROCESSORS, 16); + aCodecCtx->thread_count = min(FF_NUM_PROCESSORS, 16); if (aCodec == NULL) { throw InvalidCodec("A valid audio codec could not be found for this file.", path); diff --git a/src/FFmpegWriter.cpp 
b/src/FFmpegWriter.cpp index a62e8287..5c902021 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -1099,7 +1099,7 @@ void FFmpegWriter::open_audio(AVFormatContext *oc, AVStream *st) AV_GET_CODEC_FROM_STREAM(st, audio_codec) // Set number of threads equal to number of processors (not to exceed 16) - audio_codec->thread_count = min(OPEN_MP_NUM_PROCESSORS, 16); + audio_codec->thread_count = min(FF_NUM_PROCESSORS, 16); // Find the audio encoder codec = avcodec_find_encoder_by_name(info.acodec.c_str()); @@ -1173,7 +1173,7 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) AV_GET_CODEC_FROM_STREAM(st, video_codec) // Set number of threads equal to number of processors (not to exceed 16) - video_codec->thread_count = min(OPEN_MP_NUM_PROCESSORS, 16); + video_codec->thread_count = min(FF_NUM_PROCESSORS, 16); #if IS_FFMPEG_3_2 #if defined(__linux__) From 314177bceba3280d3d2c465dc9e34f533252ef88 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sun, 2 Sep 2018 18:46:04 -0700 Subject: [PATCH 006/109] Let the user choose which installed graphics card to use for decoding HW_DE_DEVICE_SET and/or encoding HW_EN_DEVICE_SET Possible options are /dev/dri/renderD128 for the first, /dev/dri/renderD129 for the second, and so on. 
--- src/FFmpegReader.cpp | 8 +++++++- src/FFmpegWriter.cpp | 4 ++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 3386d8ef..7910693c 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -221,9 +221,15 @@ void FFmpegReader::Open() #if defined(__linux__) if (hw_de_on & hw_de_supported) { // Open Hardware Acceleration + // Use the hw device given in the environment variable HW_DE_DEVICE_SET or the default if not set + char *dev_hw = getenv( "HW_DE_DEVICE_SET" ); + // Check if it is there and writable + if( dev_hw != NULL && access( dev_hw, W_OK ) == -1 ) { + dev_hw = NULL; // use default + } hw_device_ctx = NULL; pCodecCtx->get_format = get_vaapi_format; - if (av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_VAAPI, NULL, NULL, 0) >= 0) { + if (av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_VAAPI, dev_hw, NULL, 0) >= 0) { if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { throw InvalidCodec("Hardware device reference create failed.", path); } diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index 5c902021..24ccec0a 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -1178,8 +1178,8 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) #if IS_FFMPEG_3_2 #if defined(__linux__) if (hw_en_on && hw_en_supported) { - // Use the hw device given in the environment variable HW_DEVICE_SET or the default if not set - char *dev_hw = getenv( "HW_DEVICE_SET" ); + // Use the hw device given in the environment variable HW_EN_DEVICE_SET or the default if not set + char *dev_hw = getenv( "HW_EN_DEVICE_SET" ); // Check if it is there and writable if( dev_hw != NULL && access( dev_hw, W_OK ) == -1 ) { dev_hw = NULL; // use default From 063faefa694ac72b242c039666c7946cb58212a5 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Tue, 4 Sep 2018 10:08:01 -0700 Subject: [PATCH 007/109] Hardware acceleration 
for Windows and Mac, still disabled but code is there. This should show where modifications are to be made to support Linux, Mac, and Windows Only decoding, encoding will follow soon --- src/FFmpegReader.cpp | 155 ++++++++++++++++++++++++++++++++++++++----- 1 file changed, 140 insertions(+), 15 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 7910693c..5ee6a7b2 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -107,8 +107,8 @@ bool AudioLocation::is_near(AudioLocation location, int samples_per_frame, int64 } #if IS_FFMPEG_3_2 -#if defined(__linux__) #pragma message "You are compiling with experimental hardware decode" +#if defined(__linux__) static enum AVPixelFormat get_vaapi_format(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) { @@ -141,8 +141,109 @@ int is_hardware_decode_supported(int codecid) } return ret; } - #endif + +#if defined(_WIN32) +// Works for Windows 64 and Windows 32 +// FIXME Here goes the detection for Windows +// AV_HWDEVICE_TYPE_DXVA2 AV_PIX_FMT_DXVA2_VLD AV_HWDEVICE_TYPE_D3D11VA AV_PIX_FMT_D3D11 + +static enum AVPixelFormat get_dxva2_format(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) +{ + const enum AVPixelFormat *p; + + for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { + if (*p == AV_PIX_FMT_DXVA2_VLD) + return *p; + } + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using DXVA2.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + hw_de_supported = 0; + return AV_PIX_FMT_NONE; +} + +int is_hardware_decode_supported(int codecid) +{ + /* int ret; + switch (codecid) { + case AV_CODEC_ID_H264: + case AV_CODEC_ID_MPEG2VIDEO: + case AV_CODEC_ID_VC1: + case AV_CODEC_ID_WMV1: + case AV_CODEC_ID_WMV2: + case AV_CODEC_ID_WMV3: + ret = 1; + break; + default : + ret = 0; + break; + } + return ret;*/ + return 0; +} +#endif + +#if defined(__APPLE__) +// FIXME Here goes the detection for Mac +// Constants for MAC: AV_HWDEVICE_TYPE_QSV 
AV_PIX_FMT_QSV +int is_hardware_decode_supported(int codecid) +{ +/* int ret; + switch (codecid) { + case AV_CODEC_ID_H264: + case AV_CODEC_ID_MPEG2VIDEO: + case AV_CODEC_ID_VC1: + case AV_CODEC_ID_WMV1: + case AV_CODEC_ID_WMV2: + case AV_CODEC_ID_WMV3: + ret = 1; + break; + default : + ret = 0; + break; + } + return ret;*/ + return 0; +} +static int get_qsv_format(AVCodecContext *avctx, const enum AVPixelFormat *pix_fmts) +{ + while (*pix_fmts != AV_PIX_FMT_NONE) { + if (*pix_fmts == AV_PIX_FMT_QSV) { + DecodeContext *decode = avctx->opaque; + AVHWFramesContext *frames_ctx; + AVQSVFramesContext *frames_hwctx; + int ret; + + /* create a pool of surfaces to be used by the decoder */ + avctx->hw_frames_ctx = av_hwframe_ctx_alloc(decode->hw_device_ref); + if (!avctx->hw_frames_ctx) + return AV_PIX_FMT_NONE; + frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data; + frames_hwctx = frames_ctx->hwctx; + + frames_ctx->format = AV_PIX_FMT_QSV; + frames_ctx->sw_format = avctx->sw_pix_fmt; + frames_ctx->width = FFALIGN(avctx->coded_width, 32); + frames_ctx->height = FFALIGN(avctx->coded_height, 32); + frames_ctx->initial_pool_size = 32; + + frames_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET; + + ret = av_hwframe_ctx_init(avctx->hw_frames_ctx); + if (ret < 0) + return AV_PIX_FMT_NONE; + + return AV_PIX_FMT_QSV; + } + + pix_fmts++; + } + + fprintf(stderr, "The QSV pixel format not offered in get_format()\n"); + + return AV_PIX_FMT_NONE; +} +#endif + #endif void FFmpegReader::Open() @@ -202,9 +303,9 @@ void FFmpegReader::Open() AVCodec *pCodec = avcodec_find_decoder(codecId); pCodecCtx = AV_GET_CODEC_CONTEXT(pStream, pCodec); #if IS_FFMPEG_3_2 - #if defined(__linux__) +// #if defined(__linux__) hw_de_supported = is_hardware_decode_supported(pCodecCtx->codec_id); - #endif +// #endif #endif // Set number of threads equal to number of processors (not to exceed 16) pCodecCtx->thread_count = min(FF_NUM_PROCESSORS, 16); @@ -218,8 +319,8 @@ void 
FFmpegReader::Open() av_dict_set(&opts, "strict", "experimental", 0); #if IS_FFMPEG_3_2 - #if defined(__linux__) - if (hw_de_on & hw_de_supported) { +// #if defined(__linux__) + if (hw_de_on && hw_de_supported) { // Open Hardware Acceleration // Use the hw device given in the environment variable HW_DE_DEVICE_SET or the default if not set char *dev_hw = getenv( "HW_DE_DEVICE_SET" ); @@ -228,8 +329,23 @@ void FFmpegReader::Open() dev_hw = NULL; // use default } hw_device_ctx = NULL; +// FIXME get_XXX_format +// FIXME AV_HWDEVICE_TYPE_.... +// IMPORTANT: The get_format has different names because even for one plattform +// like Linux there are different modes of access like vaapi and vdpau and these +// should be chosen by the user in the future + #if defined(__linux__) pCodecCtx->get_format = get_vaapi_format; if (av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_VAAPI, dev_hw, NULL, 0) >= 0) { + #endif + #if defined(_WIN32) + pCodecCtx->get_format = get_dxva2_format; + if (av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_DXVA2, dev_hw, NULL, 0) >= 0) { + #endif + #if defined(__APPLE__) + pCodecCtx->get_format = get_qsv_format; + if (av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_QSV, dev_hw, NULL, 0) >= 0) { + #endif if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { throw InvalidCodec("Hardware device reference create failed.", path); } @@ -238,7 +354,7 @@ void FFmpegReader::Open() throw InvalidCodec("Hardware device create failed.", path); } } - #endif +// #endif #endif // Open video codec if (avcodec_open2(pCodecCtx, pCodec, &opts) < 0) @@ -330,14 +446,14 @@ void FFmpegReader::Close() avcodec_flush_buffers(pCodecCtx); AV_FREE_CONTEXT(pCodecCtx); #if IS_FFMPEG_3_2 - #if defined(__linux__) +// #if defined(__linux__) if (hw_de_on) { if (hw_device_ctx) { av_buffer_unref(&hw_device_ctx); hw_device_ctx = NULL; } } - #endif +// #endif #endif } if (info.has_audio) @@ -833,12 +949,12 @@ bool FFmpegReader::GetAVFrame() } else { AVFrame 
*next_frame2; - #if defined(__linux__) +// #if defined(__linux__) if (hw_de_on && hw_de_supported) { next_frame2 = AV_ALLOCATE_FRAME(); } else - #endif +// #endif { next_frame2 = next_frame; } @@ -851,10 +967,19 @@ bool FFmpegReader::GetAVFrame() if (ret != 0) { ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (invalid return frame received)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); } - #if defined(__linux__) +// #if defined(__linux__) if (hw_de_on && hw_de_supported) { int err; +// FIXME AV_PIX_FMT_VAAPI + #if defined(__linux__) if (next_frame2->format == AV_PIX_FMT_VAAPI) { + #endif + #if defined(__WIN32__) + if (next_frame2->format == AV_PIX_FMT_DXVA2_VLD) { + #endif + #if defined(__APPLE__) + if (next_frame2->format == AV_PIX_FMT_QSV) { + #endif next_frame->format = AV_PIX_FMT_YUV420P; if ((err = av_hwframe_transfer_data(next_frame,next_frame2,0)) < 0) { ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Failed to transfer data to output frame)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); @@ -865,7 +990,7 @@ bool FFmpegReader::GetAVFrame() } } else - #endif +// #endif { // No hardware acceleration used -> no copy from GPU memory needed next_frame = next_frame2; } @@ -884,11 +1009,11 @@ bool FFmpegReader::GetAVFrame() } } } - #if defined(__linux__) +// #if defined(__linux__) if (hw_de_on && hw_de_supported) { AV_FREE_FRAME(&next_frame2); } - #endif +// #endif } #else avcodec_decode_video2(pCodecCtx, next_frame, &frameFinished, packet); From be979cd78c7562d3a3ec2208b7d6f7bf50dd3f80 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Thu, 6 Sep 2018 08:28:50 -0700 Subject: [PATCH 008/109] Accelerated encode now supported by Windows and Mac. Only tested on Linux though due to absense of hardware/software. Tested to compile on Ubuntu 14.04, 16.04, 18.04, and 18.10 Acceleration only available on systems with ffmpeg 3.2 and up Very early code, work in progress. 
Issues to be fixed soon: if hardware cannot decode because the size is too big it keeps trying. more interfaces supported like vdpau in Linux error handling user interface Many commented lines of code are still in the source to help people start who may want to help. --- src/FFmpegWriter.cpp | 80 ++++++++++++++++++++++++++++++++++---------- 1 file changed, 62 insertions(+), 18 deletions(-) diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index 24ccec0a..d0a542ae 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -49,7 +49,13 @@ static int set_hwframe_ctx(AVCodecContext *ctx, AVBufferRef *hw_device_ctx, int6 return -1; } frames_ctx = (AVHWFramesContext *)(hw_frames_ref->data); + #if defined(__linux__) frames_ctx->format = AV_PIX_FMT_VAAPI; + #elif defined(_WIN32) + frames_ctx->format = AV_PIX_FMT_DXVA2_VLD; + #elif defined(__APPLE__) + frames_ctx->format = AV_PIX_FMT_QSV; + #endif frames_ctx->sw_format = AV_PIX_FMT_NV12; frames_ctx->width = width; frames_ctx->height = height; @@ -70,9 +76,9 @@ static int set_hwframe_ctx(AVCodecContext *ctx, AVBufferRef *hw_device_ctx, int6 #endif #if IS_FFMPEG_3_2 -#if defined(__linux__) +//#if defined(__linux__) #pragma message "You are compiling with experimental hardware encode" -#endif +//#endif #endif FFmpegWriter::FFmpegWriter(string path) : @@ -171,6 +177,28 @@ void FFmpegWriter::SetVideoOptions(bool has_video, string codec, Fraction fps, i else { new_codec = avcodec_find_encoder_by_name(codec.c_str()); hw_en_on = 0; + hw_en_supported = 0; + } + #elif defined(_WIN32) + if ( (strcmp(codec.c_str(),"h264_dxva2") == 0)) { + new_codec = avcodec_find_encoder_by_name(codec.c_str()); + hw_en_on = 1; + hw_en_supported = 1; + } + else { + new_codec = avcodec_find_encoder_by_name(codec.c_str()); + hw_en_on = 0; + hw_en_supported = 0; + } + #elif defined(__APPLE__) + if ( (strcmp(codec.c_str(),"h264_qsv") == 0)) { + new_codec = avcodec_find_encoder_by_name(codec.c_str()); + hw_en_on = 1; + hw_en_supported = 1; + } + 
else { + new_codec = avcodec_find_encoder_by_name(codec.c_str()); + hw_en_on = 0; hw_en_supported = 0; } #else // is FFmpeg 3 but not linux @@ -799,14 +827,14 @@ void FFmpegWriter::close_video(AVFormatContext *oc, AVStream *st) AV_FREE_CONTEXT(video_codec); video_codec = NULL; #if IS_FFMPEG_3_2 - #if defined(__linux__) +// #if defined(__linux__) if (hw_en_on && hw_en_supported) { if (hw_device_ctx) { av_buffer_unref(&hw_device_ctx); hw_device_ctx = NULL; } } - #endif +// #endif #endif } @@ -1176,8 +1204,8 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) video_codec->thread_count = min(FF_NUM_PROCESSORS, 16); #if IS_FFMPEG_3_2 - #if defined(__linux__) if (hw_en_on && hw_en_supported) { + #if defined(__linux__) // Use the hw device given in the environment variable HW_EN_DEVICE_SET or the default if not set char *dev_hw = getenv( "HW_EN_DEVICE_SET" ); // Check if it is there and writable @@ -1189,8 +1217,20 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) cerr << "FFmpegWriter::open_video : Codec name: " << info.vcodec.c_str() << " ERROR creating\n"; throw InvalidCodec("Could not create hwdevice", path); } + #elif defined(_WIN32) + if (av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_DXVA2, + NULL, NULL, 0) < 0) { + cerr << "FFmpegWriter::open_video : Codec name: " << info.vcodec.c_str() << " ERROR creating\n"; + throw InvalidCodec("Could not create hwdevice", path); + } + #elif defined(__APPLE__) + if (av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_QSV, + NULL, NULL, 0) < 0) { + cerr << "FFmpegWriter::open_video : Codec name: " << info.vcodec.c_str() << " ERROR creating\n"; + throw InvalidCodec("Could not create hwdevice", path); + } + #endif } - #endif #endif /* find the video encoder */ codec = avcodec_find_encoder_by_name(info.vcodec.c_str()); @@ -1208,10 +1248,15 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) av_dict_set(&opts, "strict", "experimental", 0); #if IS_FFMPEG_3_2 - #if 
defined(__linux__) if (hw_en_on && hw_en_supported) { video_codec->max_b_frames = 0; // At least this GPU doesn't support b-frames + #if defined(__linux__) video_codec->pix_fmt = AV_PIX_FMT_VAAPI; + #elif defined(_WIN32) + video_codec->pix_fmt = AV_PIX_FMT_DXVA2_VLD + #elif defined(__APPLE__) + video_codec->pix_fmt = AV_PIX_FMT_QSV + #endif video_codec->profile = FF_PROFILE_H264_BASELINE | FF_PROFILE_H264_CONSTRAINED; av_opt_set(video_codec->priv_data,"preset","slow",0); av_opt_set(video_codec->priv_data,"tune","zerolatency",0); @@ -1222,7 +1267,6 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) fprintf(stderr, "Failed to set hwframe context.\n"); } } - #endif #endif /* open the codec */ @@ -1675,11 +1719,11 @@ void FFmpegWriter::process_video_packet(std::shared_ptr frame) frame_source = allocate_avframe(PIX_FMT_RGBA, source_image_width, source_image_height, &bytes_source, (uint8_t*) pixels); #if IS_FFMPEG_3_2 AVFrame *frame_final; - #if defined(__linux__) +// #if defined(__linux__) if (hw_en_on && hw_en_supported) { frame_final = allocate_avframe(AV_PIX_FMT_NV12, info.width, info.height, &bytes_final, NULL); } else - #endif +// #endif { frame_final = allocate_avframe((AVPixelFormat)(video_st->codecpar->format), info.width, info.height, &bytes_final, NULL); } @@ -1758,7 +1802,7 @@ bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra // Assign the initial AVFrame PTS from the frame counter frame_final->pts = write_video_count; #if IS_FFMPEG_3_2 - #if defined(__linux__) +// #if defined(__linux__) if (hw_en_on && hw_en_supported) { if (!(hw_frame = av_frame_alloc())) { fprintf(stderr, "Error code: av_hwframe_alloc\n"); @@ -1775,7 +1819,7 @@ bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra } av_frame_copy_props(hw_frame, frame_final); } - #endif +// #endif #endif /* encode the image */ int got_packet_ptr = 0; @@ -1784,13 +1828,13 @@ bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra 
// Write video packet (latest version of FFmpeg) int frameFinished = 0; int ret; - #if defined(__linux__) +// #if defined(__linux__) #if IS_FFMPEG_3_2 if (hw_en_on && hw_en_supported) { ret = avcodec_send_frame(video_codec, hw_frame); //hw_frame!!! } else #endif - #endif +// #endif ret = avcodec_send_frame(video_codec, frame_final); error_code = ret; if (ret < 0 ) { @@ -1877,14 +1921,14 @@ bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra // Deallocate packet AV_FREE_PACKET(&pkt); #if IS_FFMPEG_3_2 - #if defined(__linux__) +// #if defined(__linux__) if (hw_en_on && hw_en_supported) { if (hw_frame) { av_frame_free(&hw_frame); hw_frame = NULL; } } - #endif +// #endif #endif } @@ -1907,11 +1951,11 @@ void FFmpegWriter::InitScalers(int source_width, int source_height) { // Init the software scaler from FFMpeg #if IS_FFMPEG_3_2 - #if defined(__linux__) +// #if defined(__linux__) if (hw_en_on && hw_en_supported) { img_convert_ctx = sws_getContext(source_width, source_height, PIX_FMT_RGBA, info.width, info.height, AV_PIX_FMT_NV12, SWS_BILINEAR, NULL, NULL, NULL); } else - #endif +// #endif #endif { img_convert_ctx = sws_getContext(source_width, source_height, PIX_FMT_RGBA, info.width, info.height, AV_GET_CODEC_PIXEL_FORMAT(video_st, video_st->codec), SWS_BILINEAR, NULL, NULL, NULL); From 6925f6f7c2fdbf179d549bee5ee4c73b6a221aec Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Fri, 7 Sep 2018 10:44:18 -0700 Subject: [PATCH 009/109] Use the static scheduler in ordered clause. Otherwise OpenMP uses a scheduler it thinks is best which can be dynamic or guided. Both sometimes let other threads continue before the block is finished. That will crash the program with high thread counts and a cache that is not large enough to hold old enough frames, which leads to a crash when in some cases like transitions two different frames are used although one is no longer in the cache. 
The static scheduler always waits until the block is finished before enabling other threads. --- src/Timeline.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/Timeline.cpp b/src/Timeline.cpp index 35b91283..34dab1d8 100644 --- a/src/Timeline.cpp +++ b/src/Timeline.cpp @@ -717,7 +717,8 @@ std::shared_ptr Timeline::GetFrame(int64_t requested_frame) #pragma omp parallel { // Loop through all requested frames - #pragma omp for ordered firstprivate(nearby_clips, requested_frame, minimum_frames) + // The scheduler has to be static! + #pragma omp for ordered schedule(static,1) firstprivate(nearby_clips, requested_frame, minimum_frames) for (int64_t frame_number = requested_frame; frame_number < requested_frame + minimum_frames; frame_number++) { // Debug output From f7dd2b18c38612d41f6c05a30e8e0c88c49d010c Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sat, 8 Sep 2018 16:31:03 -0700 Subject: [PATCH 010/109] First adjustment to later include NVENC (nvidia encoder) --- src/FFmpegWriter.cpp | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index d0a542ae..e01e2f83 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -35,6 +35,8 @@ using namespace openshot; #if IS_FFMPEG_3_2 int hw_en_on = 1; // Is set in UI int hw_en_supported = 0; // Is set by FFmpegWriter +AVPixelFormat hw_en_av_pix_fmt = AV_PIX_FMT_NONE; +AVHWDeviceType hw_en_av_device_type = AV_HWDEVICE_TYPE_VAAPI; static AVBufferRef *hw_device_ctx = NULL; AVFrame *hw_frame = NULL; @@ -50,7 +52,7 @@ static int set_hwframe_ctx(AVCodecContext *ctx, AVBufferRef *hw_device_ctx, int6 } frames_ctx = (AVHWFramesContext *)(hw_frames_ref->data); #if defined(__linux__) - frames_ctx->format = AV_PIX_FMT_VAAPI; + frames_ctx->format = hw_en_av_pix_fmt; #elif defined(_WIN32) frames_ctx->format = AV_PIX_FMT_DXVA2_VLD; #elif defined(__APPLE__) @@ -173,6 +175,8 @@ void 
FFmpegWriter::SetVideoOptions(bool has_video, string codec, Fraction fps, i new_codec = avcodec_find_encoder_by_name(codec.c_str()); hw_en_on = 1; hw_en_supported = 1; + hw_en_av_pix_fmt = AV_PIX_FMT_VAAPI; + hw_en_av_device_type = AV_HWDEVICE_TYPE_VAAPI; } else { new_codec = avcodec_find_encoder_by_name(codec.c_str()); @@ -1212,7 +1216,7 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) if( dev_hw != NULL && access( dev_hw, W_OK ) == -1 ) { dev_hw = NULL; // use default } - if (av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_VAAPI, + if (av_hwdevice_ctx_create(&hw_device_ctx, hw_en_av_device_type, dev_hw, NULL, 0) < 0) { cerr << "FFmpegWriter::open_video : Codec name: " << info.vcodec.c_str() << " ERROR creating\n"; throw InvalidCodec("Could not create hwdevice", path); @@ -1251,7 +1255,7 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) if (hw_en_on && hw_en_supported) { video_codec->max_b_frames = 0; // At least this GPU doesn't support b-frames #if defined(__linux__) - video_codec->pix_fmt = AV_PIX_FMT_VAAPI; + video_codec->pix_fmt = hw_en_av_pix_fmt; #elif defined(_WIN32) video_codec->pix_fmt = AV_PIX_FMT_DXVA2_VLD #elif defined(__APPLE__) From 16c8302f485d530f8c2513c4f96d6fbb43df310e Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sat, 8 Sep 2018 16:53:53 -0700 Subject: [PATCH 011/109] Basic support for nvidia encode (decode later) --- src/FFmpegWriter.cpp | 55 ++++++++++++++++++-------------------------- 1 file changed, 23 insertions(+), 32 deletions(-) diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index e01e2f83..924f3490 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -47,23 +47,17 @@ static int set_hwframe_ctx(AVCodecContext *ctx, AVBufferRef *hw_device_ctx, int6 int err = 0; if (!(hw_frames_ref = av_hwframe_ctx_alloc(hw_device_ctx))) { - fprintf(stderr, "Failed to create VAAPI frame context.\n"); + fprintf(stderr, "Failed to create HW 
frame context.\n"); return -1; } frames_ctx = (AVHWFramesContext *)(hw_frames_ref->data); - #if defined(__linux__) frames_ctx->format = hw_en_av_pix_fmt; - #elif defined(_WIN32) - frames_ctx->format = AV_PIX_FMT_DXVA2_VLD; - #elif defined(__APPLE__) - frames_ctx->format = AV_PIX_FMT_QSV; - #endif frames_ctx->sw_format = AV_PIX_FMT_NV12; frames_ctx->width = width; frames_ctx->height = height; frames_ctx->initial_pool_size = 20; if ((err = av_hwframe_ctx_init(hw_frames_ref)) < 0) { - fprintf(stderr, "Failed to initialize VAAPI frame context." + fprintf(stderr, "Failed to initialize HW frame context." "Error code: %s\n",av_err2str(err)); av_buffer_unref(&hw_frames_ref); return err; @@ -178,16 +172,27 @@ void FFmpegWriter::SetVideoOptions(bool has_video, string codec, Fraction fps, i hw_en_av_pix_fmt = AV_PIX_FMT_VAAPI; hw_en_av_device_type = AV_HWDEVICE_TYPE_VAAPI; } - else { - new_codec = avcodec_find_encoder_by_name(codec.c_str()); - hw_en_on = 0; - hw_en_supported = 0; - } + else { + if ( (strcmp(codec.c_str(),"h264_nvenc") == 0)) { + new_codec = avcodec_find_encoder_by_name(codec.c_str()); + hw_en_on = 1; + hw_en_supported = 1; + hw_en_av_pix_fmt = AV_PIX_FMT_CUDA; + hw_en_av_device_type = AV_HWDEVICE_TYPE_CUDA; + } + else { + new_codec = avcodec_find_encoder_by_name(codec.c_str()); + hw_en_on = 0; + hw_en_supported = 0; + } + } #elif defined(_WIN32) if ( (strcmp(codec.c_str(),"h264_dxva2") == 0)) { new_codec = avcodec_find_encoder_by_name(codec.c_str()); hw_en_on = 1; hw_en_supported = 1; + hw_en_av_pix_fmt = AV_PIX_FMT_DXVA2_VLD; + hw_en_av_device_type = AV_HWDEVICE_TYPE_DXVA2; } else { new_codec = avcodec_find_encoder_by_name(codec.c_str()); @@ -199,6 +204,8 @@ void FFmpegWriter::SetVideoOptions(bool has_video, string codec, Fraction fps, i new_codec = avcodec_find_encoder_by_name(codec.c_str()); hw_en_on = 1; hw_en_supported = 1; + hw_en_av_pix_fmt = AV_PIX_FMT_QSV; + hw_en_av_device_type = AV_HWDEVICE_TYPE_QSV; } else { new_codec = 
avcodec_find_encoder_by_name(codec.c_str()); @@ -1216,24 +1223,14 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) if( dev_hw != NULL && access( dev_hw, W_OK ) == -1 ) { dev_hw = NULL; // use default } + #else + dev_hw = NULL; // use default + #endif if (av_hwdevice_ctx_create(&hw_device_ctx, hw_en_av_device_type, dev_hw, NULL, 0) < 0) { cerr << "FFmpegWriter::open_video : Codec name: " << info.vcodec.c_str() << " ERROR creating\n"; throw InvalidCodec("Could not create hwdevice", path); } - #elif defined(_WIN32) - if (av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_DXVA2, - NULL, NULL, 0) < 0) { - cerr << "FFmpegWriter::open_video : Codec name: " << info.vcodec.c_str() << " ERROR creating\n"; - throw InvalidCodec("Could not create hwdevice", path); - } - #elif defined(__APPLE__) - if (av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_QSV, - NULL, NULL, 0) < 0) { - cerr << "FFmpegWriter::open_video : Codec name: " << info.vcodec.c_str() << " ERROR creating\n"; - throw InvalidCodec("Could not create hwdevice", path); - } - #endif } #endif /* find the video encoder */ @@ -1254,13 +1251,7 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) #if IS_FFMPEG_3_2 if (hw_en_on && hw_en_supported) { video_codec->max_b_frames = 0; // At least this GPU doesn't support b-frames - #if defined(__linux__) video_codec->pix_fmt = hw_en_av_pix_fmt; - #elif defined(_WIN32) - video_codec->pix_fmt = AV_PIX_FMT_DXVA2_VLD - #elif defined(__APPLE__) - video_codec->pix_fmt = AV_PIX_FMT_QSV - #endif video_codec->profile = FF_PROFILE_H264_BASELINE | FF_PROFILE_H264_CONSTRAINED; av_opt_set(video_codec->priv_data,"preset","slow",0); av_opt_set(video_codec->priv_data,"tune","zerolatency",0); From e7c1ced0da1590e79a992d4ac8634edd46bc66a2 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sat, 8 Sep 2018 21:17:24 -0700 Subject: [PATCH 012/109] Cleanup import video hardware accelerated and first attempt 
with nvidia cards. Still no error handling when the dimensions of the video are too large --- src/FFmpegReader.cpp | 242 ++++++++++++++++++------------------------- 1 file changed, 98 insertions(+), 144 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 5ee6a7b2..7d9c27ca 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -34,6 +34,8 @@ using namespace openshot; int hw_de_on = 1; // Is set in UI int hw_de_supported = 0; // Is set by FFmpegReader +AVPixelFormat hw_de_av_pix_fmt = AV_PIX_FMT_NONE; +AVHWDeviceType hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; FFmpegReader::FFmpegReader(string path) : last_frame(0), is_seeking(0), seeking_pts(0), seeking_frame(0), seek_count(0), @@ -108,112 +110,60 @@ bool AudioLocation::is_near(AudioLocation location, int samples_per_frame, int64 #if IS_FFMPEG_3_2 #pragma message "You are compiling with experimental hardware decode" -#if defined(__linux__) static enum AVPixelFormat get_vaapi_format(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) { const enum AVPixelFormat *p; for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { - if (*p == AV_PIX_FMT_VAAPI) - return *p; + if (*p == AV_PIX_FMT_VAAPI) { + hw_de_av_pix_fmt = AV_PIX_FMT_VAAPI; + hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; + return *p; + } + if (*p == AV_PIX_FMT_CUDA) { + hw_de_av_pix_fmt = AV_PIX_FMT_CUDA; + hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA; + return *p; + } } ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using VA-API.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); hw_de_supported = 0; return AV_PIX_FMT_NONE; } -int is_hardware_decode_supported(int codecid) -{ - int ret; - switch (codecid) { - case AV_CODEC_ID_H264: - case AV_CODEC_ID_MPEG2VIDEO: - case AV_CODEC_ID_VC1: - case AV_CODEC_ID_WMV1: - case AV_CODEC_ID_WMV2: - case AV_CODEC_ID_WMV3: - ret = 1; - break; - default : - ret = 0; - break; - } - return ret; -} -#endif - -#if defined(_WIN32) -// Works for 
Windows 64 and Windows 32 -// FIXME Here goes the detection for Windows -// AV_HWDEVICE_TYPE_DXVA2 AV_PIX_FMT_DXVA2_VLD AV_HWDEVICE_TYPE_D3D11VA AV_PIX_FMT_D3D11 - -static enum AVPixelFormat get_dxva2_format(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) +static enum AVPixelFormat get_dx_format(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) { const enum AVPixelFormat *p; for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { - if (*p == AV_PIX_FMT_DXVA2_VLD) - return *p; + if (*p == AV_PIX_FMT_DXVA2_VLD) { + hw_de_av_pix_fmt = AV_PIX_FMT_DXVA2_VLD; + hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; + return *p; + } + if (*p == AV_PIX_FMT_D3D11) { + hw_de_av_pix_fmt = AV_PIX_FMT_D3D11; + hw_de_av_device_type = AV_HWDEVICE_TYPE_D3D11VA; + return *p; + } } ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using DXVA2.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); hw_de_supported = 0; + hw_de_av_pix_fmt = AV_PIX_FMT_NONE; return AV_PIX_FMT_NONE; } -int is_hardware_decode_supported(int codecid) -{ - /* int ret; - switch (codecid) { - case AV_CODEC_ID_H264: - case AV_CODEC_ID_MPEG2VIDEO: - case AV_CODEC_ID_VC1: - case AV_CODEC_ID_WMV1: - case AV_CODEC_ID_WMV2: - case AV_CODEC_ID_WMV3: - ret = 1; - break; - default : - ret = 0; - break; - } - return ret;*/ - return 0; -} -#endif - -#if defined(__APPLE__) -// FIXME Here goes the detection for Mac -// Constants for MAC: AV_HWDEVICE_TYPE_QSV AV_PIX_FMT_QSV -int is_hardware_decode_supported(int codecid) -{ -/* int ret; - switch (codecid) { - case AV_CODEC_ID_H264: - case AV_CODEC_ID_MPEG2VIDEO: - case AV_CODEC_ID_VC1: - case AV_CODEC_ID_WMV1: - case AV_CODEC_ID_WMV2: - case AV_CODEC_ID_WMV3: - ret = 1; - break; - default : - ret = 0; - break; - } - return ret;*/ - return 0; -} static int get_qsv_format(AVCodecContext *avctx, const enum AVPixelFormat *pix_fmts) { - while (*pix_fmts != AV_PIX_FMT_NONE) { + /* while (*pix_fmts != AV_PIX_FMT_NONE) { if (*pix_fmts == 
AV_PIX_FMT_QSV) { DecodeContext *decode = avctx->opaque; AVHWFramesContext *frames_ctx; AVQSVFramesContext *frames_hwctx; int ret; - /* create a pool of surfaces to be used by the decoder */ + // create a pool of surfaces to be used by the decoder avctx->hw_frames_ctx = av_hwframe_ctx_alloc(decode->hw_device_ref); if (!avctx->hw_frames_ctx) return AV_PIX_FMT_NONE; @@ -239,10 +189,40 @@ static int get_qsv_format(AVCodecContext *avctx, const enum AVPixelFormat *pix_f } fprintf(stderr, "The QSV pixel format not offered in get_format()\n"); + */ + const enum AVPixelFormat *p; + for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { + if (*p == AV_PIX_FMT_QSV) { + hw_de_av_pix_fmt = AV_PIX_FMT_QSV; + hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; + return *p; + } + } + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using QSV.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + hw_de_supported = 0; + hw_de_av_pix_fmt = AV_PIX_FMT_NONE; return AV_PIX_FMT_NONE; } -#endif + +int is_hardware_decode_supported(int codecid) +{ + int ret; + switch (codecid) { + case AV_CODEC_ID_H264: + case AV_CODEC_ID_MPEG2VIDEO: + case AV_CODEC_ID_VC1: + case AV_CODEC_ID_WMV1: + case AV_CODEC_ID_WMV2: + case AV_CODEC_ID_WMV3: + ret = 1; + break; + default : + ret = 0; + break; + } + return ret; +} #endif @@ -303,9 +283,7 @@ void FFmpegReader::Open() AVCodec *pCodec = avcodec_find_decoder(codecId); pCodecCtx = AV_GET_CODEC_CONTEXT(pStream, pCodec); #if IS_FFMPEG_3_2 -// #if defined(__linux__) - hw_de_supported = is_hardware_decode_supported(pCodecCtx->codec_id); -// #endif + hw_de_supported = is_hardware_decode_supported(pCodecCtx->codec_id); #endif // Set number of threads equal to number of processors (not to exceed 16) pCodecCtx->thread_count = min(FF_NUM_PROCESSORS, 16); @@ -319,42 +297,36 @@ void FFmpegReader::Open() av_dict_set(&opts, "strict", "experimental", 0); #if IS_FFMPEG_3_2 -// #if defined(__linux__) - if (hw_de_on && hw_de_supported) { - 
// Open Hardware Acceleration - // Use the hw device given in the environment variable HW_DE_DEVICE_SET or the default if not set - char *dev_hw = getenv( "HW_DE_DEVICE_SET" ); - // Check if it is there and writable - if( dev_hw != NULL && access( dev_hw, W_OK ) == -1 ) { - dev_hw = NULL; // use default - } - hw_device_ctx = NULL; -// FIXME get_XXX_format -// FIXME AV_HWDEVICE_TYPE_.... -// IMPORTANT: The get_format has different names because even for one plattform -// like Linux there are different modes of access like vaapi and vdpau and these -// should be chosen by the user in the future - #if defined(__linux__) - pCodecCtx->get_format = get_vaapi_format; - if (av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_VAAPI, dev_hw, NULL, 0) >= 0) { - #endif - #if defined(_WIN32) - pCodecCtx->get_format = get_dxva2_format; - if (av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_DXVA2, dev_hw, NULL, 0) >= 0) { - #endif - #if defined(__APPLE__) - pCodecCtx->get_format = get_qsv_format; - if (av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_QSV, dev_hw, NULL, 0) >= 0) { - #endif - if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { - throw InvalidCodec("Hardware device reference create failed.", path); - } - } - else { - throw InvalidCodec("Hardware device create failed.", path); + if (hw_de_on && hw_de_supported) { + // Open Hardware Acceleration + // Use the hw device given in the environment variable HW_DE_DEVICE_SET or the default if not set + char *dev_hw = getenv( "HW_DE_DEVICE_SET" ); + // Check if it is there and writable + if( dev_hw != NULL && access( dev_hw, W_OK ) == -1 ) { + dev_hw = NULL; // use default + } + hw_device_ctx = NULL; + #if defined(__linux__) + pCodecCtx->get_format = get_vaapi_format; + //if (av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_VAAPI, dev_hw, NULL, 0) >= 0) { + #endif + #if defined(_WIN32) + pCodecCtx->get_format = get_dx_format; + //if (av_hwdevice_ctx_create(&hw_device_ctx, 
AV_HWDEVICE_TYPE_DXVA2, dev_hw, NULL, 0) >= 0) { + #endif + #if defined(__APPLE__) + pCodecCtx->get_format = get_qsv_format; + //if (av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_QSV, dev_hw, NULL, 0) >= 0) { + #endif + if (av_hwdevice_ctx_create(&hw_device_ctx, hw_de_av_device_type, dev_hw, NULL, 0) >= 0) { + if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { + throw InvalidCodec("Hardware device reference create failed.", path); } } -// #endif + else { + throw InvalidCodec("Hardware device create failed.", path); + } + } #endif // Open video codec if (avcodec_open2(pCodecCtx, pCodec, &opts) < 0) @@ -446,14 +418,12 @@ void FFmpegReader::Close() avcodec_flush_buffers(pCodecCtx); AV_FREE_CONTEXT(pCodecCtx); #if IS_FFMPEG_3_2 -// #if defined(__linux__) - if (hw_de_on) { - if (hw_device_ctx) { - av_buffer_unref(&hw_device_ctx); - hw_device_ctx = NULL; - } + if (hw_de_on) { + if (hw_device_ctx) { + av_buffer_unref(&hw_device_ctx); + hw_device_ctx = NULL; } -// #endif + } #endif } if (info.has_audio) @@ -949,37 +919,25 @@ bool FFmpegReader::GetAVFrame() } else { AVFrame *next_frame2; -// #if defined(__linux__) if (hw_de_on && hw_de_supported) { next_frame2 = AV_ALLOCATE_FRAME(); } else -// #endif { next_frame2 = next_frame; } pFrame = new AVFrame(); while (ret >= 0) { ret = avcodec_receive_frame(pCodecCtx, next_frame2); - if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) { - break; - } + if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) { + break; + } if (ret != 0) { ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (invalid return frame received)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); } -// #if defined(__linux__) if (hw_de_on && hw_de_supported) { int err; -// FIXME AV_PIX_FMT_VAAPI - #if defined(__linux__) - if (next_frame2->format == AV_PIX_FMT_VAAPI) { - #endif - #if defined(__WIN32__) - if (next_frame2->format == AV_PIX_FMT_DXVA2_VLD) { - #endif - #if defined(__APPLE__) - if (next_frame2->format == 
AV_PIX_FMT_QSV) { - #endif + if (next_frame2->format == hw_de_av_pix_fmt) { next_frame->format = AV_PIX_FMT_YUV420P; if ((err = av_hwframe_transfer_data(next_frame,next_frame2,0)) < 0) { ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Failed to transfer data to output frame)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); @@ -990,11 +948,9 @@ bool FFmpegReader::GetAVFrame() } } else -// #endif { // No hardware acceleration used -> no copy from GPU memory needed next_frame = next_frame2; } - //} // TODO also handle possible further frames // Use only the first frame like avcodec_decode_video2 if (frameFinished == 0 ) { @@ -1009,11 +965,9 @@ bool FFmpegReader::GetAVFrame() } } } -// #if defined(__linux__) - if (hw_de_on && hw_de_supported) { - AV_FREE_FRAME(&next_frame2); - } -// #endif + if (hw_de_on && hw_de_supported) { + AV_FREE_FRAME(&next_frame2); + } } #else avcodec_decode_video2(pCodecCtx, next_frame, &frameFinished, packet); From 0191ff5dbb6a4934e5a28a65d0488f626def93ae Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sat, 8 Sep 2018 21:32:04 -0700 Subject: [PATCH 013/109] Further cleanup --- src/FFmpegReader.cpp | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 7d9c27ca..5602e40c 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -300,23 +300,23 @@ void FFmpegReader::Open() if (hw_de_on && hw_de_supported) { // Open Hardware Acceleration // Use the hw device given in the environment variable HW_DE_DEVICE_SET or the default if not set - char *dev_hw = getenv( "HW_DE_DEVICE_SET" ); + char *dev_hw = NULL; + #if defined(__linux__) + dev_hw = getenv( "HW_DE_DEVICE_SET" ); // Check if it is there and writable if( dev_hw != NULL && access( dev_hw, W_OK ) == -1 ) { dev_hw = NULL; // use default } + #endif hw_device_ctx = NULL; #if defined(__linux__) pCodecCtx->get_format = get_vaapi_format; - //if 
(av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_VAAPI, dev_hw, NULL, 0) >= 0) { #endif #if defined(_WIN32) pCodecCtx->get_format = get_dx_format; - //if (av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_DXVA2, dev_hw, NULL, 0) >= 0) { #endif #if defined(__APPLE__) pCodecCtx->get_format = get_qsv_format; - //if (av_hwdevice_ctx_create(&hw_device_ctx, AV_HWDEVICE_TYPE_QSV, dev_hw, NULL, 0) >= 0) { #endif if (av_hwdevice_ctx_create(&hw_device_ctx, hw_de_av_device_type, dev_hw, NULL, 0) >= 0) { if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { From 36cbba2a3d8f5df2aebc0c867ce9a52ded8790f2 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sat, 8 Sep 2018 21:55:23 -0700 Subject: [PATCH 014/109] More cleanup --- src/FFmpegReader.cpp | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 5602e40c..70b235df 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -111,6 +111,7 @@ bool AudioLocation::is_near(AudioLocation location, int samples_per_frame, int64 #if IS_FFMPEG_3_2 #pragma message "You are compiling with experimental hardware decode" +#if defined(__linux__) static enum AVPixelFormat get_vaapi_format(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) { const enum AVPixelFormat *p; @@ -131,7 +132,9 @@ static enum AVPixelFormat get_vaapi_format(AVCodecContext *ctx, const enum AVPix hw_de_supported = 0; return AV_PIX_FMT_NONE; } +#endif +#if defined(_WIN32) static enum AVPixelFormat get_dx_format(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) { const enum AVPixelFormat *p; @@ -153,7 +156,9 @@ static enum AVPixelFormat get_dx_format(AVCodecContext *ctx, const enum AVPixelF hw_de_av_pix_fmt = AV_PIX_FMT_NONE; return AV_PIX_FMT_NONE; } +#endif +#if defined(__APPLE__) static int get_qsv_format(AVCodecContext *avctx, const enum AVPixelFormat *pix_fmts) { /* while (*pix_fmts != AV_PIX_FMT_NONE) { @@ -204,6 +209,7 @@ 
static int get_qsv_format(AVCodecContext *avctx, const enum AVPixelFormat *pix_f hw_de_av_pix_fmt = AV_PIX_FMT_NONE; return AV_PIX_FMT_NONE; } +#endif int is_hardware_decode_supported(int codecid) { From e7c94e700add4af1ba2235efc6b886083e070ab8 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sat, 8 Sep 2018 22:19:41 -0700 Subject: [PATCH 015/109] hide dx11 --- src/FFmpegReader.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 70b235df..9f4bb514 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -145,11 +145,11 @@ static enum AVPixelFormat get_dx_format(AVCodecContext *ctx, const enum AVPixelF hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; return *p; } - if (*p == AV_PIX_FMT_D3D11) { +/* if (*p == AV_PIX_FMT_D3D11) { hw_de_av_pix_fmt = AV_PIX_FMT_D3D11; hw_de_av_device_type = AV_HWDEVICE_TYPE_D3D11VA; return *p; - } + }*/ } ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using DXVA2.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); hw_de_supported = 0; From d6f52ead3bfd5ae3646e6226c4371fa7d52c644e Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sat, 8 Sep 2018 22:30:16 -0700 Subject: [PATCH 016/109] Only use the hw accel variables when ffmpeg >= 3.2 --- src/FFmpegReader.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 9f4bb514..c067f3fe 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -32,10 +32,12 @@ using namespace openshot; +#if IS_FFMPEG_3_2 int hw_de_on = 1; // Is set in UI int hw_de_supported = 0; // Is set by FFmpegReader AVPixelFormat hw_de_av_pix_fmt = AV_PIX_FMT_NONE; AVHWDeviceType hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; +#endif FFmpegReader::FFmpegReader(string path) : last_frame(0), is_seeking(0), seeking_pts(0), seeking_frame(0), 
seek_count(0), From 2a80ccacaac4e028664de5968ede58bb4f997e6a Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sat, 8 Sep 2018 22:57:46 -0700 Subject: [PATCH 017/109] Let hw_de_on be visible to all versions of ffmpeg --- src/FFmpegReader.cpp | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index c067f3fe..4ce92889 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -32,9 +32,9 @@ using namespace openshot; -#if IS_FFMPEG_3_2 int hw_de_on = 1; // Is set in UI int hw_de_supported = 0; // Is set by FFmpegReader +#if IS_FFMPEG_3_2 AVPixelFormat hw_de_av_pix_fmt = AV_PIX_FMT_NONE; AVHWDeviceType hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; #endif @@ -147,11 +147,11 @@ static enum AVPixelFormat get_dx_format(AVCodecContext *ctx, const enum AVPixelF hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; return *p; } -/* if (*p == AV_PIX_FMT_D3D11) { + if (*p == AV_PIX_FMT_D3D11) { hw_de_av_pix_fmt = AV_PIX_FMT_D3D11; hw_de_av_device_type = AV_HWDEVICE_TYPE_D3D11VA; return *p; - }*/ + } } ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using DXVA2.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); hw_de_supported = 0; @@ -291,7 +291,9 @@ void FFmpegReader::Open() AVCodec *pCodec = avcodec_find_decoder(codecId); pCodecCtx = AV_GET_CODEC_CONTEXT(pStream, pCodec); #if IS_FFMPEG_3_2 - hw_de_supported = is_hardware_decode_supported(pCodecCtx->codec_id); + if (hw_de_on) { + hw_de_supported = is_hardware_decode_supported(pCodecCtx->codec_id); + } #endif // Set number of threads equal to number of processors (not to exceed 16) pCodecCtx->thread_count = min(FF_NUM_PROCESSORS, 16); @@ -309,13 +311,11 @@ void FFmpegReader::Open() // Open Hardware Acceleration // Use the hw device given in the environment variable HW_DE_DEVICE_SET or the default if not set char *dev_hw = NULL; - #if defined(__linux__) dev_hw = 
getenv( "HW_DE_DEVICE_SET" ); // Check if it is there and writable if( dev_hw != NULL && access( dev_hw, W_OK ) == -1 ) { dev_hw = NULL; // use default } - #endif hw_device_ctx = NULL; #if defined(__linux__) pCodecCtx->get_format = get_vaapi_format; From c29bf21c75ee2342709021ded86e262f37f0f176 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sun, 9 Sep 2018 09:05:16 -0700 Subject: [PATCH 018/109] Simplifications of FFmpegReader and start of setting parameters per input file --- include/FFmpegReader.h | 5 +++ src/FFmpegReader.cpp | 83 ++++-------------------------------------- 2 files changed, 12 insertions(+), 76 deletions(-) diff --git a/include/FFmpegReader.h b/include/FFmpegReader.h index fcc995ae..caf68e5e 100644 --- a/include/FFmpegReader.h +++ b/include/FFmpegReader.h @@ -146,6 +146,11 @@ namespace openshot int64_t largest_frame_processed; int64_t current_video_frame; // can't reliably use PTS of video to determine this + //int hw_de_supported = 0; // Is set by FFmpegReader + //AVPixelFormat hw_de_av_pix_fmt = AV_PIX_FMT_NONE; + + int is_hardware_decode_supported(int codecid); + /// Check for the correct frames per second value by scanning the 1st few seconds of video packets. 
void CheckFPS(); diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 4ce92889..083de926 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -113,12 +113,12 @@ bool AudioLocation::is_near(AudioLocation location, int samples_per_frame, int64 #if IS_FFMPEG_3_2 #pragma message "You are compiling with experimental hardware decode" -#if defined(__linux__) -static enum AVPixelFormat get_vaapi_format(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) +static enum AVPixelFormat get_hw_dec_format(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) { const enum AVPixelFormat *p; for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { + //Linux formats if (*p == AV_PIX_FMT_VAAPI) { hw_de_av_pix_fmt = AV_PIX_FMT_VAAPI; hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; @@ -129,19 +129,7 @@ static enum AVPixelFormat get_vaapi_format(AVCodecContext *ctx, const enum AVPix hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA; return *p; } - } - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using VA-API.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - hw_de_supported = 0; - return AV_PIX_FMT_NONE; -} -#endif - -#if defined(_WIN32) -static enum AVPixelFormat get_dx_format(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) -{ - const enum AVPixelFormat *p; - - for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { + // Windows formats if (*p == AV_PIX_FMT_DXVA2_VLD) { hw_de_av_pix_fmt = AV_PIX_FMT_DXVA2_VLD; hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; @@ -152,68 +140,19 @@ static enum AVPixelFormat get_dx_format(AVCodecContext *ctx, const enum AVPixelF hw_de_av_device_type = AV_HWDEVICE_TYPE_D3D11VA; return *p; } - } - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using DXVA2.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - hw_de_supported = 0; - hw_de_av_pix_fmt = AV_PIX_FMT_NONE; - return AV_PIX_FMT_NONE; -} -#endif - -#if defined(__APPLE__) -static int 
get_qsv_format(AVCodecContext *avctx, const enum AVPixelFormat *pix_fmts) -{ - /* while (*pix_fmts != AV_PIX_FMT_NONE) { - if (*pix_fmts == AV_PIX_FMT_QSV) { - DecodeContext *decode = avctx->opaque; - AVHWFramesContext *frames_ctx; - AVQSVFramesContext *frames_hwctx; - int ret; - - // create a pool of surfaces to be used by the decoder - avctx->hw_frames_ctx = av_hwframe_ctx_alloc(decode->hw_device_ref); - if (!avctx->hw_frames_ctx) - return AV_PIX_FMT_NONE; - frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data; - frames_hwctx = frames_ctx->hwctx; - - frames_ctx->format = AV_PIX_FMT_QSV; - frames_ctx->sw_format = avctx->sw_pix_fmt; - frames_ctx->width = FFALIGN(avctx->coded_width, 32); - frames_ctx->height = FFALIGN(avctx->coded_height, 32); - frames_ctx->initial_pool_size = 32; - - frames_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET; - - ret = av_hwframe_ctx_init(avctx->hw_frames_ctx); - if (ret < 0) - return AV_PIX_FMT_NONE; - - return AV_PIX_FMT_QSV; - } - - pix_fmts++; - } - - fprintf(stderr, "The QSV pixel format not offered in get_format()\n"); - */ - const enum AVPixelFormat *p; - - for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { + //Mac format if (*p == AV_PIX_FMT_QSV) { hw_de_av_pix_fmt = AV_PIX_FMT_QSV; hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; return *p; } } - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using QSV.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); hw_de_supported = 0; - hw_de_av_pix_fmt = AV_PIX_FMT_NONE; return AV_PIX_FMT_NONE; } -#endif -int is_hardware_decode_supported(int codecid) +int FFmpegReader::is_hardware_decode_supported(int codecid) { int ret; switch (codecid) { @@ -317,15 +256,7 @@ void FFmpegReader::Open() dev_hw = NULL; // use default } hw_device_ctx = NULL; - #if 
defined(__linux__) - pCodecCtx->get_format = get_vaapi_format; - #endif - #if defined(_WIN32) - pCodecCtx->get_format = get_dx_format; - #endif - #if defined(__APPLE__) - pCodecCtx->get_format = get_qsv_format; - #endif + pCodecCtx->get_format = get_hw_dec_format; if (av_hwdevice_ctx_create(&hw_device_ctx, hw_de_av_device_type, dev_hw, NULL, 0) >= 0) { if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { throw InvalidCodec("Hardware device reference create failed.", path); From aff1be93b8056fdd15bd062aba71d407e6dab8af Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sun, 9 Sep 2018 10:54:31 -0700 Subject: [PATCH 019/109] Support for multiple input files --- include/FFmpegReader.h | 7 +++++-- src/FFmpegReader.cpp | 32 ++++++++++++++++++-------------- 2 files changed, 23 insertions(+), 16 deletions(-) diff --git a/include/FFmpegReader.h b/include/FFmpegReader.h index caf68e5e..f571af73 100644 --- a/include/FFmpegReader.h +++ b/include/FFmpegReader.h @@ -146,8 +146,11 @@ namespace openshot int64_t largest_frame_processed; int64_t current_video_frame; // can't reliably use PTS of video to determine this - //int hw_de_supported = 0; // Is set by FFmpegReader - //AVPixelFormat hw_de_av_pix_fmt = AV_PIX_FMT_NONE; + int hw_de_supported = 0; // Is set by FFmpegReader + #if IS_FFMPEG_3_2 + AVPixelFormat hw_de_av_pix_fmt = AV_PIX_FMT_NONE; + AVHWDeviceType hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; + #endif int is_hardware_decode_supported(int codecid); diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 083de926..4ec46191 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -33,10 +33,10 @@ using namespace openshot; int hw_de_on = 1; // Is set in UI -int hw_de_supported = 0; // Is set by FFmpegReader +//int hw_de_supported = 0; // Is set by FFmpegReader #if IS_FFMPEG_3_2 -AVPixelFormat hw_de_av_pix_fmt = AV_PIX_FMT_NONE; -AVHWDeviceType hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; 
+AVPixelFormat hw_de_av_pix_fmt_global = AV_PIX_FMT_NONE; +AVHWDeviceType hw_de_av_device_type_global = AV_HWDEVICE_TYPE_VAAPI; #endif FFmpegReader::FFmpegReader(string path) @@ -120,35 +120,35 @@ static enum AVPixelFormat get_hw_dec_format(AVCodecContext *ctx, const enum AVPi for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { //Linux formats if (*p == AV_PIX_FMT_VAAPI) { - hw_de_av_pix_fmt = AV_PIX_FMT_VAAPI; - hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; + hw_de_av_pix_fmt_global = AV_PIX_FMT_VAAPI; + hw_de_av_device_type_global = AV_HWDEVICE_TYPE_VAAPI; return *p; } if (*p == AV_PIX_FMT_CUDA) { - hw_de_av_pix_fmt = AV_PIX_FMT_CUDA; - hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA; + hw_de_av_pix_fmt_global = AV_PIX_FMT_CUDA; + hw_de_av_device_type_global = AV_HWDEVICE_TYPE_CUDA; return *p; } // Windows formats if (*p == AV_PIX_FMT_DXVA2_VLD) { - hw_de_av_pix_fmt = AV_PIX_FMT_DXVA2_VLD; - hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; + hw_de_av_pix_fmt_global = AV_PIX_FMT_DXVA2_VLD; + hw_de_av_device_type_global = AV_HWDEVICE_TYPE_DXVA2; return *p; } if (*p == AV_PIX_FMT_D3D11) { - hw_de_av_pix_fmt = AV_PIX_FMT_D3D11; - hw_de_av_device_type = AV_HWDEVICE_TYPE_D3D11VA; + hw_de_av_pix_fmt_global = AV_PIX_FMT_D3D11; + hw_de_av_device_type_global = AV_HWDEVICE_TYPE_D3D11VA; return *p; } //Mac format if (*p == AV_PIX_FMT_QSV) { - hw_de_av_pix_fmt = AV_PIX_FMT_QSV; - hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; + hw_de_av_pix_fmt_global = AV_PIX_FMT_QSV; + hw_de_av_device_type_global = AV_HWDEVICE_TYPE_QSV; return *p; } } ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - hw_de_supported = 0; + //hw_de_supported = 0; return AV_PIX_FMT_NONE; } @@ -853,6 +853,10 @@ bool FFmpegReader::GetAVFrame() ret = avcodec_send_packet(pCodecCtx, packet); + // Get the format from the variables set in get_hw_dec_format + hw_de_av_pix_fmt = hw_de_av_pix_fmt_global; + 
hw_de_av_device_type = hw_de_av_device_type_global; + if (ret < 0 || ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) { ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Packet not sent)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); } From f8fed171cef786c2dc4e572207ed4c361162d19f Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sun, 9 Sep 2018 12:57:04 -0700 Subject: [PATCH 020/109] More code cleanup (easier to read) Comment included with start of error handling --- src/FFmpegReader.cpp | 60 +++++++++++++++++++++++--------------------- 1 file changed, 32 insertions(+), 28 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 4ec46191..4dabce7e 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -118,37 +118,35 @@ static enum AVPixelFormat get_hw_dec_format(AVCodecContext *ctx, const enum AVPi const enum AVPixelFormat *p; for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { - //Linux formats - if (*p == AV_PIX_FMT_VAAPI) { - hw_de_av_pix_fmt_global = AV_PIX_FMT_VAAPI; - hw_de_av_device_type_global = AV_HWDEVICE_TYPE_VAAPI; - return *p; - } - if (*p == AV_PIX_FMT_CUDA) { - hw_de_av_pix_fmt_global = AV_PIX_FMT_CUDA; - hw_de_av_device_type_global = AV_HWDEVICE_TYPE_CUDA; - return *p; - } - // Windows formats - if (*p == AV_PIX_FMT_DXVA2_VLD) { - hw_de_av_pix_fmt_global = AV_PIX_FMT_DXVA2_VLD; - hw_de_av_device_type_global = AV_HWDEVICE_TYPE_DXVA2; - return *p; - } - if (*p == AV_PIX_FMT_D3D11) { - hw_de_av_pix_fmt_global = AV_PIX_FMT_D3D11; - hw_de_av_device_type_global = AV_HWDEVICE_TYPE_D3D11VA; - return *p; - } - //Mac format - if (*p == AV_PIX_FMT_QSV) { - hw_de_av_pix_fmt_global = AV_PIX_FMT_QSV; - hw_de_av_device_type_global = AV_HWDEVICE_TYPE_QSV; - return *p; + switch (*p) { + case AV_PIX_FMT_VAAPI: + hw_de_av_pix_fmt_global = AV_PIX_FMT_VAAPI; + hw_de_av_device_type_global = AV_HWDEVICE_TYPE_VAAPI; + return *p; + break; + case AV_PIX_FMT_CUDA: + 
hw_de_av_pix_fmt_global = AV_PIX_FMT_CUDA; + hw_de_av_device_type_global = AV_HWDEVICE_TYPE_CUDA; + return *p; + break; + case AV_PIX_FMT_DXVA2_VLD: + hw_de_av_pix_fmt_global = AV_PIX_FMT_DXVA2_VLD; + hw_de_av_device_type_global = AV_HWDEVICE_TYPE_DXVA2; + return *p; + break; + case AV_PIX_FMT_D3D11: + hw_de_av_pix_fmt_global = AV_PIX_FMT_D3D11; + hw_de_av_device_type_global = AV_HWDEVICE_TYPE_D3D11VA; + return *p; + break; + case AV_PIX_FMT_QSV: + hw_de_av_pix_fmt_global = AV_PIX_FMT_QSV; + hw_de_av_device_type_global = AV_HWDEVICE_TYPE_QSV; + return *p; + break; } } ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - //hw_de_supported = 0; return AV_PIX_FMT_NONE; } @@ -266,6 +264,12 @@ void FFmpegReader::Open() throw InvalidCodec("Hardware device create failed.", path); } } +/* // Check to see if the hardware supports that file (size!) + AVHWFramesConstraints *constraints = NULL; + constraints = av_hwdevice_get_hwframe_constraints(hw_device_ctx->device_ref,hwconfig); + if (constraints) + av_hwframe_constraints_free(&constraints); +*/ #endif // Open video codec if (avcodec_open2(pCodecCtx, pCodec, &opts) < 0) From 4db2217f0d94dfce07359d6e526efe34389c87af Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Tue, 11 Sep 2018 20:18:11 -0700 Subject: [PATCH 021/109] Fallback for hardware accelerated decode to software decode in case the GPU can noy handle the dimensions of the frame. Not yet working, va_config not yet set. 
--- src/FFmpegReader.cpp | 131 ++++++++++++++++++++++++++++--------------- src/FFmpegWriter.cpp | 6 -- 2 files changed, 86 insertions(+), 51 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 4dabce7e..caa33de1 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -29,6 +29,7 @@ */ #include "../include/FFmpegReader.h" +#include "libavutil/hwcontext_vaapi.h" using namespace openshot; @@ -111,7 +112,6 @@ bool AudioLocation::is_near(AudioLocation location, int samples_per_frame, int64 } #if IS_FFMPEG_3_2 -#pragma message "You are compiling with experimental hardware decode" static enum AVPixelFormat get_hw_dec_format(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) { @@ -226,55 +226,96 @@ void FFmpegReader::Open() // Get codec and codec context from stream AVCodec *pCodec = avcodec_find_decoder(codecId); - pCodecCtx = AV_GET_CODEC_CONTEXT(pStream, pCodec); - #if IS_FFMPEG_3_2 - if (hw_de_on) { - hw_de_supported = is_hardware_decode_supported(pCodecCtx->codec_id); - } - #endif - // Set number of threads equal to number of processors (not to exceed 16) - pCodecCtx->thread_count = min(FF_NUM_PROCESSORS, 16); - - if (pCodec == NULL) { - throw InvalidCodec("A valid video codec could not be found for this file.", path); - } - - // Init options AVDictionary *opts = NULL; - av_dict_set(&opts, "strict", "experimental", 0); + int retry_decode_open = 2; + // If hw accel is selected but hardware connot handle repeat with software decoding + do { + pCodecCtx = AV_GET_CODEC_CONTEXT(pStream, pCodec); + #if IS_FFMPEG_3_2 + if (hw_de_on && (retry_decode_open==2)) { + // Up to here no decision is made if hardware or software decode + hw_de_supported = is_hardware_decode_supported(pCodecCtx->codec_id); + } + #endif + retry_decode_open = 0; + // Set number of threads equal to number of processors (not to exceed 16) + pCodecCtx->thread_count = min(FF_NUM_PROCESSORS, 16); - #if IS_FFMPEG_3_2 - if (hw_de_on && hw_de_supported) { - // Open Hardware 
Acceleration - // Use the hw device given in the environment variable HW_DE_DEVICE_SET or the default if not set - char *dev_hw = NULL; - dev_hw = getenv( "HW_DE_DEVICE_SET" ); - // Check if it is there and writable - if( dev_hw != NULL && access( dev_hw, W_OK ) == -1 ) { - dev_hw = NULL; // use default - } - hw_device_ctx = NULL; - pCodecCtx->get_format = get_hw_dec_format; - if (av_hwdevice_ctx_create(&hw_device_ctx, hw_de_av_device_type, dev_hw, NULL, 0) >= 0) { - if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { - throw InvalidCodec("Hardware device reference create failed.", path); + if (pCodec == NULL) { + throw InvalidCodec("A valid video codec could not be found for this file.", path); + } + + // Init options + av_dict_set(&opts, "strict", "experimental", 0); + #if IS_FFMPEG_3_2 + if (hw_de_on && hw_de_supported) { + // Open Hardware Acceleration + // Use the hw device given in the environment variable HW_DE_DEVICE_SET or the default if not set + char *dev_hw = NULL; + dev_hw = getenv( "HW_DE_DEVICE_SET" ); + // Check if it is there and writable + if( dev_hw != NULL && access( dev_hw, W_OK ) == -1 ) { + dev_hw = NULL; // use default + } + hw_device_ctx = NULL; + // Here the first hardware initialisations are made + pCodecCtx->get_format = get_hw_dec_format; + if (av_hwdevice_ctx_create(&hw_device_ctx, hw_de_av_device_type, dev_hw, NULL, 0) >= 0) { + if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { + throw InvalidCodec("Hardware device reference create failed.", path); + } + } + else { + throw InvalidCodec("Hardware device create failed.", path); } } - else { - throw InvalidCodec("Hardware device create failed.", path); - } - } -/* // Check to see if the hardware supports that file (size!) 
- AVHWFramesConstraints *constraints = NULL; - constraints = av_hwdevice_get_hwframe_constraints(hw_device_ctx->device_ref,hwconfig); - if (constraints) - av_hwframe_constraints_free(&constraints); -*/ - #endif - // Open video codec - if (avcodec_open2(pCodecCtx, pCodec, &opts) < 0) - throw InvalidCodec("A video codec was found, but could not be opened.", path); + #endif + // Open video codec + if (avcodec_open2(pCodecCtx, pCodec, &opts) < 0) + throw InvalidCodec("A video codec was found, but could not be opened.", path); + #if IS_FFMPEG_3_2 + if (hw_de_on && hw_de_supported) { + AVHWFramesConstraints *constraints = NULL; + // NOT WORKING needs hwconfig config_id !!!!!!!!!!!!!!!!!!!!!!!!!!! + //AVVAAPIHWConfig *hwconfig = NULL; + // void *hwconfig = NULL; + // hwconfig = av_hwdevice_hwconfig_alloc(hw_device_ctx); + //hwconfig->config_id = ((VAAPIDecodeContext *)pCodecCtx->priv_data)->va_config; + // constraints = av_hwdevice_get_hwframe_constraints(hw_device_ctx,(void*)hwconfig); + constraints = av_hwdevice_get_hwframe_constraints(hw_device_ctx,NULL); // No usable information! 
+ if (constraints) { +// constraints->max_height = 1100; // Just for testing +// constraints->max_width = 1950; // Just for testing + if (pCodecCtx->coded_width < constraints->min_width || + pCodecCtx->coded_height < constraints->min_height || + pCodecCtx->coded_width > constraints->max_width || + pCodecCtx->coded_height > constraints->max_height) { + cerr << "DIMENSIONS ARE TOO LARGE for hardware acceleration\n"; + hw_de_supported = 0; + retry_decode_open = 1; + AV_FREE_CONTEXT(pCodecCtx); + if (hw_device_ctx) { + av_buffer_unref(&hw_device_ctx); + hw_device_ctx = NULL; + } + } + else { + // All is just peachy + cerr << "MinWidth : " << constraints->min_width << "MinHeight : " << constraints->min_height << "MaxWidth : " << constraints->max_width << "MaxHeight : " << constraints->max_height << "\n"; + cerr << "Frame width :" << pCodecCtx->coded_width << "Frame height :" << pCodecCtx->coded_height << "\n"; + retry_decode_open = 0; + } + av_hwframe_constraints_free(&constraints); + } + else { + cerr << "Constraints could not be found\n"; + } + } // if hw_de_on && hw_de_supported + #else + retry_decode_open = 0; + #endif + } while (retry_decode_open); // retry_decode_open // Free options av_dict_free(&opts); diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index 924f3490..c16ce7a8 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -71,12 +71,6 @@ static int set_hwframe_ctx(AVCodecContext *ctx, AVBufferRef *hw_device_ctx, int6 } #endif -#if IS_FFMPEG_3_2 -//#if defined(__linux__) -#pragma message "You are compiling with experimental hardware encode" -//#endif -#endif - FFmpegWriter::FFmpegWriter(string path) : path(path), fmt(NULL), oc(NULL), audio_st(NULL), video_st(NULL), audio_pts(0), video_pts(0), samples(NULL), audio_outbuf(NULL), audio_outbuf_size(0), audio_input_frame_size(0), audio_input_position(0), From a1ffa6b13202c8ab2caf85880bc36f924104a134 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> 
Date: Tue, 11 Sep 2018 20:30:56 -0700 Subject: [PATCH 022/109] Removed one include --- src/FFmpegReader.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index caa33de1..e2d5d7bc 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -29,7 +29,7 @@ */ #include "../include/FFmpegReader.h" -#include "libavutil/hwcontext_vaapi.h" +//#include "libavutil/hwcontext_vaapi.h" using namespace openshot; From cfcddd13e5333f1ef0040b26659baba4ecb8ff20 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Thu, 13 Sep 2018 12:37:32 -0700 Subject: [PATCH 023/109] Still not able to retreive the maximum dimensions supported by the hardware (line 312 FFmegReader.cpp) Now using defaults of 1950 * 1100 defined in lines 35,36 --- src/FFmpegReader.cpp | 71 ++++++++++++++++++++++++++++++++++++-------- 1 file changed, 58 insertions(+), 13 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index e2d5d7bc..c89f8cd0 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -29,7 +29,36 @@ */ #include "../include/FFmpegReader.h" -//#include "libavutil/hwcontext_vaapi.h" +#include "libavutil/hwcontext_vaapi.h" + + +#define MAX_SUPPORTED_WIDTH 1950 +#define MAX_SUPPORTED_HEIGHT 1100 + +typedef struct VAAPIDecodeContext { + VAProfile va_profile; + VAEntrypoint va_entrypoint; + VAConfigID va_config; + VAContextID va_context; + + #if FF_API_STRUCT_VAAPI_CONTEXT +// FF_DISABLE_DEPRECATION_WARNINGS + int have_old_context; + struct vaapi_context *old_context; + AVBufferRef *device_ref; +// FF_ENABLE_DEPRECATION_WARNINGS + #endif + + AVHWDeviceContext *device; + AVVAAPIDeviceContext *hwctx; + + AVHWFramesContext *frames; + AVVAAPIFramesContext *hwfc; + + enum AVPixelFormat surface_format; + int surface_count; + } VAAPIDecodeContext; + using namespace openshot; @@ -277,16 +306,12 @@ void FFmpegReader::Open() #if IS_FFMPEG_3_2 if (hw_de_on && hw_de_supported) { 
AVHWFramesConstraints *constraints = NULL; - // NOT WORKING needs hwconfig config_id !!!!!!!!!!!!!!!!!!!!!!!!!!! - //AVVAAPIHWConfig *hwconfig = NULL; - // void *hwconfig = NULL; - // hwconfig = av_hwdevice_hwconfig_alloc(hw_device_ctx); - //hwconfig->config_id = ((VAAPIDecodeContext *)pCodecCtx->priv_data)->va_config; - // constraints = av_hwdevice_get_hwframe_constraints(hw_device_ctx,(void*)hwconfig); - constraints = av_hwdevice_get_hwframe_constraints(hw_device_ctx,NULL); // No usable information! + void *hwconfig = NULL; + hwconfig = av_hwdevice_hwconfig_alloc(hw_device_ctx); + // NOT WORKING needs va_config ! + ((AVVAAPIHWConfig *)hwconfig)->config_id = ((VAAPIDecodeContext *)(pCodecCtx->priv_data))->va_config; + constraints = av_hwdevice_get_hwframe_constraints(hw_device_ctx,hwconfig); if (constraints) { -// constraints->max_height = 1100; // Just for testing -// constraints->max_width = 1950; // Just for testing if (pCodecCtx->coded_width < constraints->min_width || pCodecCtx->coded_height < constraints->min_height || pCodecCtx->coded_width > constraints->max_width || @@ -302,14 +327,34 @@ void FFmpegReader::Open() } else { // All is just peachy - cerr << "MinWidth : " << constraints->min_width << "MinHeight : " << constraints->min_height << "MaxWidth : " << constraints->max_width << "MaxHeight : " << constraints->max_height << "\n"; - cerr << "Frame width :" << pCodecCtx->coded_width << "Frame height :" << pCodecCtx->coded_height << "\n"; + cerr << "Min width : " << constraints->min_width << " MinHeight : " << constraints->min_height << "MaxWidth : " << constraints->max_width << "MaxHeight : " << constraints->max_height << "\n"; + cerr << "Frame width : " << pCodecCtx->coded_width << " Frame height : " << pCodecCtx->coded_height << "\n"; retry_decode_open = 0; } av_hwframe_constraints_free(&constraints); + if (hwconfig) { + av_freep(&hwconfig); + } } else { - cerr << "Constraints could not be found\n"; + cerr << "Constraints could not be found using 
default 1k limit\n"; + if (pCodecCtx->coded_width < 0 || + pCodecCtx->coded_height < 0 || + pCodecCtx->coded_width > MAX_SUPPORTED_WIDTH || + pCodecCtx->coded_height > MAX_SUPPORTED_HEIGHT) { + cerr << "DIMENSIONS ARE TOO LARGE for hardware acceleration\n"; + hw_de_supported = 0; + retry_decode_open = 1; + AV_FREE_CONTEXT(pCodecCtx); + if (hw_device_ctx) { + av_buffer_unref(&hw_device_ctx); + hw_device_ctx = NULL; + } + } + else { + cerr << "Frame width : " << pCodecCtx->coded_width << " Frame height : " << pCodecCtx->coded_height << "\n"; + retry_decode_open = 0; + } } } // if hw_de_on && hw_de_supported #else From 10c8d695957ab184182029d558ffe11ccc0ec9b1 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Thu, 13 Sep 2018 14:45:09 -0700 Subject: [PATCH 024/109] Maximum width and height for hardware decode can now be set in preferences --- src/FFmpegReader.cpp | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index c89f8cd0..725ba524 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -337,11 +337,15 @@ void FFmpegReader::Open() } } else { - cerr << "Constraints could not be found using default 1k limit\n"; + int max_h, max_w; + max_h = ((getenv( "LIMIT_HEIGHT_MAX" )==NULL) ? MAX_SUPPORTED_HEIGHT : atoi(getenv( "LIMIT_HEIGHT_MAX" ))); + max_w = ((getenv( "LIMIT_WIDTH_MAX" )==NULL) ? 
MAX_SUPPORTED_WIDTH : atoi(getenv( "LIMIT_WIDTH_MAX" ))); + cerr << "Constraints could not be found using default limit\n"; + cerr << " Max Width : " << max_w << " Height : " << max_h << "\n"; if (pCodecCtx->coded_width < 0 || pCodecCtx->coded_height < 0 || - pCodecCtx->coded_width > MAX_SUPPORTED_WIDTH || - pCodecCtx->coded_height > MAX_SUPPORTED_HEIGHT) { + pCodecCtx->coded_width > max_w || + pCodecCtx->coded_height > max_h ) { cerr << "DIMENSIONS ARE TOO LARGE for hardware acceleration\n"; hw_de_supported = 0; retry_decode_open = 1; From 3a2d46826c9fd6249861940181b6b5978e347312 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Thu, 13 Sep 2018 18:04:16 -0700 Subject: [PATCH 025/109] Included an if for included files not present in ffmpeg 2 --- src/FFmpegReader.cpp | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 725ba524..aa3ef4ac 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -29,6 +29,8 @@ */ #include "../include/FFmpegReader.h" + +#if IS_FFMPEG_3_2 #include "libavutil/hwcontext_vaapi.h" @@ -58,6 +60,7 @@ typedef struct VAAPIDecodeContext { enum AVPixelFormat surface_format; int surface_count; } VAAPIDecodeContext; +#endif using namespace openshot; From d97a1bc85057212e83c6e575772c2f91232082f6 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Fri, 14 Sep 2018 14:03:03 -0700 Subject: [PATCH 026/109] Commented code that isn't working yet but complicates compilation by needing extra packages. 
--- src/FFmpegReader.cpp | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index aa3ef4ac..f8c5f4af 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -31,12 +31,12 @@ #include "../include/FFmpegReader.h" #if IS_FFMPEG_3_2 -#include "libavutil/hwcontext_vaapi.h" +//#include "libavutil/hwcontext_vaapi.h" #define MAX_SUPPORTED_WIDTH 1950 #define MAX_SUPPORTED_HEIGHT 1100 - +/* typedef struct VAAPIDecodeContext { VAProfile va_profile; VAEntrypoint va_entrypoint; @@ -60,6 +60,7 @@ typedef struct VAAPIDecodeContext { enum AVPixelFormat surface_format; int surface_count; } VAAPIDecodeContext; + */ #endif @@ -312,7 +313,7 @@ void FFmpegReader::Open() void *hwconfig = NULL; hwconfig = av_hwdevice_hwconfig_alloc(hw_device_ctx); // NOT WORKING needs va_config ! - ((AVVAAPIHWConfig *)hwconfig)->config_id = ((VAAPIDecodeContext *)(pCodecCtx->priv_data))->va_config; + // ((AVVAAPIHWConfig *)hwconfig)->config_id = ((VAAPIDecodeContext *)(pCodecCtx->priv_data))->va_config; constraints = av_hwdevice_get_hwframe_constraints(hw_device_ctx,hwconfig); if (constraints) { if (pCodecCtx->coded_width < constraints->min_width || From 08c7f88376fc2e79c265a654fa36ba90a5d19f38 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Fri, 14 Sep 2018 14:40:29 -0700 Subject: [PATCH 027/109] The part of the code that should get the config that is used to get the constraints of the GPU is now inside a #if . One can enable it by setting the constant in line 33 of FFmpegReader.cpp to 1. Do not enable that part unless you want to fid a way that works as it also needs the package libva-dev (Ubuntu) to be installed. 
--- src/FFmpegReader.cpp | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index f8c5f4af..3b508f23 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -30,13 +30,15 @@ #include "../include/FFmpegReader.h" +#define PRAYFORAWONDER 0 + #if IS_FFMPEG_3_2 -//#include "libavutil/hwcontext_vaapi.h" - - #define MAX_SUPPORTED_WIDTH 1950 #define MAX_SUPPORTED_HEIGHT 1100 -/* + +#if PRAYFORAWONDER +#include "libavutil/hwcontext_vaapi.h" + typedef struct VAAPIDecodeContext { VAProfile va_profile; VAEntrypoint va_entrypoint; @@ -60,7 +62,8 @@ typedef struct VAAPIDecodeContext { enum AVPixelFormat surface_format; int surface_count; } VAAPIDecodeContext; - */ + + #endif #endif @@ -313,7 +316,9 @@ void FFmpegReader::Open() void *hwconfig = NULL; hwconfig = av_hwdevice_hwconfig_alloc(hw_device_ctx); // NOT WORKING needs va_config ! - // ((AVVAAPIHWConfig *)hwconfig)->config_id = ((VAAPIDecodeContext *)(pCodecCtx->priv_data))->va_config; + #if PRAYFORAWONDER + ((AVVAAPIHWConfig *)hwconfig)->config_id = ((VAAPIDecodeContext *)(pCodecCtx->priv_data))->va_config; + #endif constraints = av_hwdevice_get_hwframe_constraints(hw_device_ctx,hwconfig); if (constraints) { if (pCodecCtx->coded_width < constraints->min_width || From df9d1a57170e387613d30fcd4a2283d73459a450 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sun, 16 Sep 2018 18:14:31 -0700 Subject: [PATCH 028/109] Implement the use of CRF instead od kB/s or MB/s for some formats: VP8, VP9, h264, h265 0 crf with VP9 is lossless 0 crf with VP8, h264, h265 should be lossless --- src/FFmpegWriter.cpp | 29 +++++++++++++++++++++++++++-- 1 file changed, 27 insertions(+), 2 deletions(-) diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index c16ce7a8..b1d98a80 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -241,7 +241,9 @@ void FFmpegWriter::SetVideoOptions(bool has_video, 
string codec, Fraction fps, i info.pixel_ratio.num = pixel_ratio.num; info.pixel_ratio.den = pixel_ratio.den; } - if (bit_rate >= 1000) + if (bit_rate >= 1000) // bit_rate is the bitrate in b/s + info.video_bit_rate = bit_rate; + if ((bit_rate > 0) && (bit_rate <=63)) // bit_rate is the crf value info.video_bit_rate = bit_rate; info.interlaced_frame = interlaced; @@ -1040,7 +1042,30 @@ AVStream* FFmpegWriter::add_video_stream() #endif /* Init video encoder options */ - c->bit_rate = info.video_bit_rate; + if (info.video_bit_rate > 1000) { + c->bit_rate = info.video_bit_rate; + } +#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 39, 101) + else { + switch (c->codec_id) { + case AV_CODEC_ID_VP8 : + av_opt_set_int(c->priv_data, "crf", min(info.video_bit_rate,63), 0); + break; + case AV_CODEC_ID_VP9 : + av_opt_set_int(c->priv_data, "crf", min(info.video_bit_rate,63), 0); + if (info.video_bit_rate == 0) { + av_opt_set_int(c->priv_data, "lossless", 1, 0); + } + break; + case AV_CODEC_ID_H264 : + av_opt_set_int(c->priv_data, "crf", min(info.video_bit_rate,51), 0); + break; + case AV_CODEC_ID_H265 : + av_opt_set_int(c->priv_data, "crf", min(info.video_bit_rate,51), 0); + break; + } + } +#endif //TODO: Implement variable bitrate feature (which actually works). This implementation throws //invalid bitrate errors and rc buffer underflow errors, etc... 
From 38f4bc6a216a33344c3b0857dd302d1e2bbd96f2 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Tue, 18 Sep 2018 11:10:16 -0700 Subject: [PATCH 029/109] Adding aoutput if decode device is not found --- src/FFmpegReader.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 3b508f23..b9be0b54 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -292,6 +292,7 @@ void FFmpegReader::Open() // Check if it is there and writable if( dev_hw != NULL && access( dev_hw, W_OK ) == -1 ) { dev_hw = NULL; // use default + cerr << "\n\n\nDecode Device not present using default\n\n\n"; } hw_device_ctx = NULL; // Here the first hardware initialisations are made From 800dc874592e2451adde09eb39bd09416fac8236 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Tue, 18 Sep 2018 11:35:19 -0700 Subject: [PATCH 030/109] Information is printed to the console where openshot was started that shows if hardware decode or siftware decode is being used --- src/FFmpegReader.cpp | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index b9be0b54..7dbb21d9 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -292,7 +292,7 @@ void FFmpegReader::Open() // Check if it is there and writable if( dev_hw != NULL && access( dev_hw, W_OK ) == -1 ) { dev_hw = NULL; // use default - cerr << "\n\n\nDecode Device not present using default\n\n\n"; + cerr << "\n\n\nDecode Device not present using default\n\n\n"; } hw_device_ctx = NULL; // Here the first hardware initialisations are made @@ -337,6 +337,7 @@ void FFmpegReader::Open() } else { // All is just peachy + cerr << "\nDecode hardware acceleration is used\n"; cerr << "Min width : " << constraints->min_width << " MinHeight : " << constraints->min_height << "MaxWidth : " << constraints->max_width << "MaxHeight : " << 
constraints->max_height << "\n"; cerr << "Frame width : " << pCodecCtx->coded_width << " Frame height : " << pCodecCtx->coded_height << "\n"; retry_decode_open = 0; @@ -351,12 +352,13 @@ void FFmpegReader::Open() max_h = ((getenv( "LIMIT_HEIGHT_MAX" )==NULL) ? MAX_SUPPORTED_HEIGHT : atoi(getenv( "LIMIT_HEIGHT_MAX" ))); max_w = ((getenv( "LIMIT_WIDTH_MAX" )==NULL) ? MAX_SUPPORTED_WIDTH : atoi(getenv( "LIMIT_WIDTH_MAX" ))); cerr << "Constraints could not be found using default limit\n"; - cerr << " Max Width : " << max_w << " Height : " << max_h << "\n"; if (pCodecCtx->coded_width < 0 || pCodecCtx->coded_height < 0 || pCodecCtx->coded_width > max_w || pCodecCtx->coded_height > max_h ) { cerr << "DIMENSIONS ARE TOO LARGE for hardware acceleration\n"; + cerr << " Max Width : " << max_w << " Height : " << max_h << "\n"; + cerr << "Frame width : " << pCodecCtx->coded_width << " Frame height : " << pCodecCtx->coded_height << "\n"; hw_de_supported = 0; retry_decode_open = 1; AV_FREE_CONTEXT(pCodecCtx); @@ -366,11 +368,16 @@ void FFmpegReader::Open() } } else { + cerr << "\nDecode hardware acceleration is used\n"; + cerr << " Max Width : " << max_w << " Height : " << max_h << "\n"; cerr << "Frame width : " << pCodecCtx->coded_width << " Frame height : " << pCodecCtx->coded_height << "\n"; retry_decode_open = 0; } } } // if hw_de_on && hw_de_supported + else { + cerr << "\nDecode in software is used\n"; + } #else retry_decode_open = 0; #endif From 161acb3d7d5d95284f10f05287465c268bc692f3 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Tue, 18 Sep 2018 12:38:53 -0700 Subject: [PATCH 031/109] Include messages in the compile display to make sure the right ffmpeg version is used (>= 3.2) to get hardware acceleration --- src/FFmpegReader.cpp | 6 ++++++ src/FFmpegWriter.cpp | 6 ++++++ 2 files changed, 12 insertions(+) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 7dbb21d9..76c0ddd0 100644 --- 
a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -32,6 +32,12 @@ #define PRAYFORAWONDER 0 +#if IS_FFMPEG_3_2 +#pragma message "You are compiling with experimental hardware decode" +#else +#pragma message "You are compiling only with software decode" +#endif + #if IS_FFMPEG_3_2 #define MAX_SUPPORTED_WIDTH 1950 #define MAX_SUPPORTED_HEIGHT 1100 diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index b1d98a80..0b978d8a 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -32,6 +32,12 @@ using namespace openshot; +#if IS_FFMPEG_3_2 +#pragma message "You are compiling with experimental hardware encode" +#else +#pragma message "You are compiling only with software encode" +#endif + #if IS_FFMPEG_3_2 int hw_en_on = 1; // Is set in UI int hw_en_supported = 0; // Is set by FFmpegWriter From 1f36d122984390b8d0da2d0e2acc0d0adcca2a15 Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Tue, 18 Sep 2018 14:45:56 -0500 Subject: [PATCH 032/109] Moving delcaration outside of conditional compile logic (so Windows and Mac builds work) --- src/FFmpegWriter.cpp | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index b1d98a80..295c7c64 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -1235,13 +1235,14 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) #if IS_FFMPEG_3_2 if (hw_en_on && hw_en_supported) { + char *dev_hw = NULL; #if defined(__linux__) - // Use the hw device given in the environment variable HW_EN_DEVICE_SET or the default if not set - char *dev_hw = getenv( "HW_EN_DEVICE_SET" ); - // Check if it is there and writable - if( dev_hw != NULL && access( dev_hw, W_OK ) == -1 ) { - dev_hw = NULL; // use default - } + // Use the hw device given in the environment variable HW_EN_DEVICE_SET or the default if not set + dev_hw = getenv( "HW_EN_DEVICE_SET" ); + // Check if it is there and writable + if( dev_hw != NULL && access( dev_hw, W_OK ) == -1 ) { + 
dev_hw = NULL; // use default + } #else dev_hw = NULL; // use default #endif From 555eb1f3e2237caa51a7e573cea0cab18177f2f7 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Tue, 18 Sep 2018 13:08:42 -0700 Subject: [PATCH 033/109] Use logger for messages about acceleration --- src/FFmpegReader.cpp | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 76c0ddd0..aab11460 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -35,7 +35,7 @@ #if IS_FFMPEG_3_2 #pragma message "You are compiling with experimental hardware decode" #else -#pragma message "You are compiling only with software decode" +#pragma message "You are compiling only with software decode" #endif #if IS_FFMPEG_3_2 @@ -298,7 +298,8 @@ void FFmpegReader::Open() // Check if it is there and writable if( dev_hw != NULL && access( dev_hw, W_OK ) == -1 ) { dev_hw = NULL; // use default - cerr << "\n\n\nDecode Device not present using default\n\n\n"; + //cerr << "\n\n\nDecode Device not present using default\n\n\n"; + ZmqLogger::Instance()->AppendDebugMethod("\n\n\nDecode Device not present using default\n\n\n", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); } hw_device_ctx = NULL; // Here the first hardware initialisations are made @@ -332,6 +333,7 @@ void FFmpegReader::Open() pCodecCtx->coded_height < constraints->min_height || pCodecCtx->coded_width > constraints->max_width || pCodecCtx->coded_height > constraints->max_height) { + ZmqLogger::Instance()->AppendDebugMethod("DIMENSIONS ARE TOO LARGE for hardware acceleration\n", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); cerr << "DIMENSIONS ARE TOO LARGE for hardware acceleration\n"; hw_de_supported = 0; retry_decode_open = 1; @@ -343,6 +345,7 @@ void FFmpegReader::Open() } else { // All is just peachy + ZmqLogger::Instance()->AppendDebugMethod("\nDecode hardware acceleration is used\n", "", -1, "", -1, "", -1, "", -1, "", 
-1, "", -1); cerr << "\nDecode hardware acceleration is used\n"; cerr << "Min width : " << constraints->min_width << " MinHeight : " << constraints->min_height << "MaxWidth : " << constraints->max_width << "MaxHeight : " << constraints->max_height << "\n"; cerr << "Frame width : " << pCodecCtx->coded_width << " Frame height : " << pCodecCtx->coded_height << "\n"; @@ -362,6 +365,7 @@ void FFmpegReader::Open() pCodecCtx->coded_height < 0 || pCodecCtx->coded_width > max_w || pCodecCtx->coded_height > max_h ) { + ZmqLogger::Instance()->AppendDebugMethod("DIMENSIONS ARE TOO LARGE for hardware acceleration\n", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); cerr << "DIMENSIONS ARE TOO LARGE for hardware acceleration\n"; cerr << " Max Width : " << max_w << " Height : " << max_h << "\n"; cerr << "Frame width : " << pCodecCtx->coded_width << " Frame height : " << pCodecCtx->coded_height << "\n"; @@ -374,6 +378,7 @@ void FFmpegReader::Open() } } else { + ZmqLogger::Instance()->AppendDebugMethod("\nDecode hardware acceleration is used\n", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); cerr << "\nDecode hardware acceleration is used\n"; cerr << " Max Width : " << max_w << " Height : " << max_h << "\n"; cerr << "Frame width : " << pCodecCtx->coded_width << " Frame height : " << pCodecCtx->coded_height << "\n"; @@ -382,6 +387,7 @@ void FFmpegReader::Open() } } // if hw_de_on && hw_de_supported else { + ZmqLogger::Instance()->AppendDebugMethod("\nDecode in software is used\n", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); cerr << "\nDecode in software is used\n"; } #else From 0b260a90879d50a26b22bd8f6640de85bb8023db Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Tue, 18 Sep 2018 15:31:34 -0700 Subject: [PATCH 034/109] Code cleanup and move messages regarding hardware acceleration to Debug Logger --- src/FFmpegReader.cpp | 31 ++++++++++++++++--------------- src/FFmpegWriter.cpp | 15 ++++++++++----- 2 files changed, 26 
insertions(+), 20 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index aab11460..2225e72c 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -334,7 +334,7 @@ void FFmpegReader::Open() pCodecCtx->coded_width > constraints->max_width || pCodecCtx->coded_height > constraints->max_height) { ZmqLogger::Instance()->AppendDebugMethod("DIMENSIONS ARE TOO LARGE for hardware acceleration\n", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - cerr << "DIMENSIONS ARE TOO LARGE for hardware acceleration\n"; + //cerr << "DIMENSIONS ARE TOO LARGE for hardware acceleration\n"; hw_de_supported = 0; retry_decode_open = 1; AV_FREE_CONTEXT(pCodecCtx); @@ -345,10 +345,10 @@ void FFmpegReader::Open() } else { // All is just peachy - ZmqLogger::Instance()->AppendDebugMethod("\nDecode hardware acceleration is used\n", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - cerr << "\nDecode hardware acceleration is used\n"; - cerr << "Min width : " << constraints->min_width << " MinHeight : " << constraints->min_height << "MaxWidth : " << constraints->max_width << "MaxHeight : " << constraints->max_height << "\n"; - cerr << "Frame width : " << pCodecCtx->coded_width << " Frame height : " << pCodecCtx->coded_height << "\n"; + ZmqLogger::Instance()->AppendDebugMethod("\nDecode hardware acceleration is used\n", "Min width :", constraints->min_width, "Min Height :", constraints->min_height, "MaxWidth :", constraints->max_width, "MaxHeight :", constraints->max_height, "Frame width :", pCodecCtx->coded_width, "Frame height :", pCodecCtx->coded_height); + //cerr << "\nDecode hardware acceleration is used\n"; + //cerr << "Min width : " << constraints->min_width << " MinHeight : " << constraints->min_height << "MaxWidth : " << constraints->max_width << "MaxHeight : " << constraints->max_height << "\n"; + //cerr << "Frame width : " << pCodecCtx->coded_width << " Frame height : " << pCodecCtx->coded_height << "\n"; retry_decode_open = 0; } 
av_hwframe_constraints_free(&constraints); @@ -360,15 +360,16 @@ void FFmpegReader::Open() int max_h, max_w; max_h = ((getenv( "LIMIT_HEIGHT_MAX" )==NULL) ? MAX_SUPPORTED_HEIGHT : atoi(getenv( "LIMIT_HEIGHT_MAX" ))); max_w = ((getenv( "LIMIT_WIDTH_MAX" )==NULL) ? MAX_SUPPORTED_WIDTH : atoi(getenv( "LIMIT_WIDTH_MAX" ))); - cerr << "Constraints could not be found using default limit\n"; + ZmqLogger::Instance()->AppendDebugMethod("Constraints could not be found using default limit\n", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + //cerr << "Constraints could not be found using default limit\n"; if (pCodecCtx->coded_width < 0 || pCodecCtx->coded_height < 0 || pCodecCtx->coded_width > max_w || pCodecCtx->coded_height > max_h ) { - ZmqLogger::Instance()->AppendDebugMethod("DIMENSIONS ARE TOO LARGE for hardware acceleration\n", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - cerr << "DIMENSIONS ARE TOO LARGE for hardware acceleration\n"; - cerr << " Max Width : " << max_w << " Height : " << max_h << "\n"; - cerr << "Frame width : " << pCodecCtx->coded_width << " Frame height : " << pCodecCtx->coded_height << "\n"; + ZmqLogger::Instance()->AppendDebugMethod("DIMENSIONS ARE TOO LARGE for hardware acceleration\n", "Max Width :", max_w, "Max Height :", max_h, "Frame width :", pCodecCtx->coded_width, "Frame height :", pCodecCtx->coded_height, "", -1, "", -1); + //cerr << "DIMENSIONS ARE TOO LARGE for hardware acceleration\n"; + //cerr << " Max Width : " << max_w << " Height : " << max_h << "\n"; + //cerr << "Frame width : " << pCodecCtx->coded_width << " Frame height : " << pCodecCtx->coded_height << "\n"; hw_de_supported = 0; retry_decode_open = 1; AV_FREE_CONTEXT(pCodecCtx); @@ -378,17 +379,17 @@ void FFmpegReader::Open() } } else { - ZmqLogger::Instance()->AppendDebugMethod("\nDecode hardware acceleration is used\n", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - cerr << "\nDecode hardware acceleration is used\n"; - cerr << " Max Width : " << max_w << " Height : 
" << max_h << "\n"; - cerr << "Frame width : " << pCodecCtx->coded_width << " Frame height : " << pCodecCtx->coded_height << "\n"; + ZmqLogger::Instance()->AppendDebugMethod("\nDecode hardware acceleration is used\n", "Max Width :", max_w, "Max Height :", max_h, "Frame width :", pCodecCtx->coded_width, "Frame height :", pCodecCtx->coded_height, "", -1, "", -1); + //cerr << "\nDecode hardware acceleration is used\n"; + //cerr << " Max Width : " << max_w << " Height : " << max_h << "\n"; + //cerr << "Frame width : " << pCodecCtx->coded_width << " Frame height : " << pCodecCtx->coded_height << "\n"; retry_decode_open = 0; } } } // if hw_de_on && hw_de_supported else { ZmqLogger::Instance()->AppendDebugMethod("\nDecode in software is used\n", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - cerr << "\nDecode in software is used\n"; + //cerr << "\nDecode in software is used\n"; } #else retry_decode_open = 0; diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index 840acba1..9791043c 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -1254,7 +1254,8 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) #endif if (av_hwdevice_ctx_create(&hw_device_ctx, hw_en_av_device_type, dev_hw, NULL, 0) < 0) { - cerr << "FFmpegWriter::open_video : Codec name: " << info.vcodec.c_str() << " ERROR creating\n"; + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::open_video : Codec name: ", info.vcodec.c_str(), -1, " ERROR creating\n", -1, "", -1, "", -1, "", -1, "", -1); + //cerr << "FFmpegWriter::open_video : Codec name: " << info.vcodec.c_str() << " ERROR creating\n"; throw InvalidCodec("Could not create hwdevice", path); } } @@ -1860,10 +1861,12 @@ bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra error_code = ret; if (ret < 0 ) { ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet (Frame not sent)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - if (ret == AVERROR(EAGAIN) ) + if (ret == 
AVERROR(EAGAIN) ) { cerr << "Frame EAGAIN" << "\n"; - if (ret == AVERROR_EOF ) + } + if (ret == AVERROR_EOF ) { cerr << "Frame AVERROR_EOF" << "\n"; + } avcodec_send_frame(video_codec, NULL); } else { @@ -1884,10 +1887,12 @@ bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra #if LIBAVFORMAT_VERSION_MAJOR >= 54 // Write video packet (older than FFmpeg 3.2) error_code = avcodec_encode_video2(video_codec, &pkt, frame_final, &got_packet_ptr); - if (error_code != 0 ) + if (error_code != 0 ) { cerr << "Frame AVERROR_EOF" << "\n"; - if (got_packet_ptr == 0 ) + } + if (got_packet_ptr == 0 ) { cerr << "Frame gotpacket error" << "\n"; + } #else // Write video packet (even older versions of FFmpeg) int video_outbuf_size = 200000; From f2323da447b7584f8c7b91d6aca55afe1a3d0ec9 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Wed, 19 Sep 2018 17:51:21 -0700 Subject: [PATCH 035/109] Preparation to choose the graphics card not by name but by number 1, 2, 3. 
First implementation just for Linux and decode --- src/FFmpegReader.cpp | 33 +++++++++++++++++++++++++++++---- 1 file changed, 29 insertions(+), 4 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 2225e72c..547169b4 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -294,17 +294,42 @@ void FFmpegReader::Open() // Open Hardware Acceleration // Use the hw device given in the environment variable HW_DE_DEVICE_SET or the default if not set char *dev_hw = NULL; + char adapter[256]; + char *adapter_ptr = NULL; + int adapter_num; dev_hw = getenv( "HW_DE_DEVICE_SET" ); + if( dev_hw != NULL) { + adapter_num = atoi(dev_hw); + if (adapter_num < 3 && adapter_num >=0) { + #if defined(__linux__) + snprintf(adapter,sizeof(adapter),"/dev/dri/renderD%d", adapter_num+128); + adapter_ptr = adapter; + #elif defined(_WIN32) + adapter_ptr = NULL; + #elif defined(__APPLE__) + adapter_ptr = NULL; + #endif + } + else { + adapter_ptr = NULL; // Just to be sure + } + } // Check if it is there and writable - if( dev_hw != NULL && access( dev_hw, W_OK ) == -1 ) { - dev_hw = NULL; // use default + #if defined(__linux__) + if( adapter_ptr != NULL && access( adapter_ptr, W_OK ) == -1 ) { + #elif defined(_WIN32) + if( adapter_ptr != NULL ) { + #elif defined(__APPLE__) + if( adapter_ptr != NULL ) { + #endif + adapter_ptr = NULL; // use default //cerr << "\n\n\nDecode Device not present using default\n\n\n"; - ZmqLogger::Instance()->AppendDebugMethod("\n\n\nDecode Device not present using default\n\n\n", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Decode Device not present using default", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); } hw_device_ctx = NULL; // Here the first hardware initialisations are made pCodecCtx->get_format = get_hw_dec_format; - if (av_hwdevice_ctx_create(&hw_device_ctx, hw_de_av_device_type, dev_hw, NULL, 0) >= 0) { + if (av_hwdevice_ctx_create(&hw_device_ctx, hw_de_av_device_type, 
adapter_ptr, NULL, 0) >= 0) { if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { throw InvalidCodec("Hardware device reference create failed.", path); } From 02273973390311edb06cb44a886a5b26cc698d94 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Wed, 19 Sep 2018 21:37:12 -0700 Subject: [PATCH 036/109] Set the graphics card used to decode or encode by setting the environment variable HW_EN_DEVICE_SET for enncoding and HW_DE_DEVICE_SET for decoding. The first card is 0, the second 1 and so on. For now only running on Linux. --- src/FFmpegReader.cpp | 1 + src/FFmpegWriter.cpp | 42 +++++++++++++++++++++++++++++++++--------- 2 files changed, 34 insertions(+), 9 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 547169b4..912c1e2f 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -303,6 +303,7 @@ void FFmpegReader::Open() if (adapter_num < 3 && adapter_num >=0) { #if defined(__linux__) snprintf(adapter,sizeof(adapter),"/dev/dri/renderD%d", adapter_num+128); + // Maybe 127 is better because the first card would be 1?! 
adapter_ptr = adapter; #elif defined(_WIN32) adapter_ptr = NULL; diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index 9791043c..497538df 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -1242,18 +1242,42 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) #if IS_FFMPEG_3_2 if (hw_en_on && hw_en_supported) { char *dev_hw = NULL; - #if defined(__linux__) + char adapter[256]; + char *adapter_ptr = NULL; + int adapter_num; // Use the hw device given in the environment variable HW_EN_DEVICE_SET or the default if not set dev_hw = getenv( "HW_EN_DEVICE_SET" ); - // Check if it is there and writable - if( dev_hw != NULL && access( dev_hw, W_OK ) == -1 ) { - dev_hw = NULL; // use default - } - #else - dev_hw = NULL; // use default - #endif + if( dev_hw != NULL) { + adapter_num = atoi(dev_hw); + if (adapter_num < 3 && adapter_num >=0) { + #if defined(__linux__) + snprintf(adapter,sizeof(adapter),"/dev/dri/renderD%d", adapter_num+128); + // Maybe 127 is better because the first card would be 1?! 
+ adapter_ptr = adapter; + #elif defined(_WIN32) + adapter_ptr = NULL; + #elif defined(__APPLE__) + adapter_ptr = NULL; + #endif + } + else { + adapter_ptr = NULL; // Just to be sure + } + } +// Check if it is there and writable + #if defined(__linux__) + if( adapter_ptr != NULL && access( adapter_ptr, W_OK ) == -1 ) { + #elif defined(_WIN32) + if( adapter_ptr != NULL ) { + #elif defined(__APPLE__) + if( adapter_ptr != NULL ) { + #endif + adapter_ptr = NULL; // use default + //cerr << "\n\n\nEncode Device not present using default\n\n\n"; + ZmqLogger::Instance()->AppendDebugMethod("Encode Device not present using default", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + } if (av_hwdevice_ctx_create(&hw_device_ctx, hw_en_av_device_type, - dev_hw, NULL, 0) < 0) { + adapter_ptr, NULL, 0) < 0) { ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::open_video : Codec name: ", info.vcodec.c_str(), -1, " ERROR creating\n", -1, "", -1, "", -1, "", -1, "", -1); //cerr << "FFmpegWriter::open_video : Codec name: " << info.vcodec.c_str() << " ERROR creating\n"; throw InvalidCodec("Could not create hwdevice", path); From b925a9ba2582833354ff89c9b6928352229b2618 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sun, 23 Sep 2018 09:51:56 -0700 Subject: [PATCH 037/109] protect add_effect with critical --- src/Timeline.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/src/Timeline.cpp b/src/Timeline.cpp index 34dab1d8..fe7c1022 100644 --- a/src/Timeline.cpp +++ b/src/Timeline.cpp @@ -281,6 +281,7 @@ void Timeline::add_layer(std::shared_ptr new_frame, Clip* source_clip, in /* Apply effects to the source frame (if any). If multiple clips are overlapping, only process the * effects on the top clip. 
*/ if (is_top_clip && source_frame) + #pragma omp critical (T_addLayer) source_frame = apply_effects(source_frame, timeline_frame_number, source_clip->Layer()); // Declare an image to hold the source frame's image From 1cd8401a58394bd49f61eb17628e67b5f05c8088 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sun, 23 Sep 2018 10:09:20 -0700 Subject: [PATCH 038/109] Put brackets in the if statement to show that the pragma critical and the followwing command are one block. --- src/Timeline.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/Timeline.cpp b/src/Timeline.cpp index fe7c1022..71805837 100644 --- a/src/Timeline.cpp +++ b/src/Timeline.cpp @@ -280,9 +280,10 @@ void Timeline::add_layer(std::shared_ptr new_frame, Clip* source_clip, in /* Apply effects to the source frame (if any). If multiple clips are overlapping, only process the * effects on the top clip. */ - if (is_top_clip && source_frame) + if (is_top_clip && source_frame) { #pragma omp critical (T_addLayer) source_frame = apply_effects(source_frame, timeline_frame_number, source_clip->Layer()); + } // Declare an image to hold the source frame's image std::shared_ptr source_image; From 53eec32d9eb061f32a26e1c41c4aee73b67ad57c Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Tue, 25 Sep 2018 08:04:48 -0700 Subject: [PATCH 039/109] In case CRF is not supported like in hardware accelerated codecs or in mpeg2 a bitrate is calculated that should be close to the one expected with the given CRF value. 
--- src/FFmpegWriter.cpp | 59 +++++++++++++++++++++++++++++++------------- 1 file changed, 42 insertions(+), 17 deletions(-) diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index 497538df..207d545f 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -1053,23 +1053,48 @@ AVStream* FFmpegWriter::add_video_stream() } #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 39, 101) else { - switch (c->codec_id) { - case AV_CODEC_ID_VP8 : - av_opt_set_int(c->priv_data, "crf", min(info.video_bit_rate,63), 0); - break; - case AV_CODEC_ID_VP9 : - av_opt_set_int(c->priv_data, "crf", min(info.video_bit_rate,63), 0); - if (info.video_bit_rate == 0) { - av_opt_set_int(c->priv_data, "lossless", 1, 0); - } - break; - case AV_CODEC_ID_H264 : - av_opt_set_int(c->priv_data, "crf", min(info.video_bit_rate,51), 0); - break; - case AV_CODEC_ID_H265 : - av_opt_set_int(c->priv_data, "crf", min(info.video_bit_rate,51), 0); - break; - } + if (hw_en_on) { + double mbs = 15000000.0; + if (info.video_bit_rate > 0) { + if (info.video_bit_rate > 42) { + mbs = 380.0; + } + else { + mbs *= pow(0.912,info.video_bit_rate); + } + } + c->bit_rate = (int)(mbs); + } + else { + switch (c->codec_id) { + case AV_CODEC_ID_VP8 : + av_opt_set_int(c->priv_data, "crf", min(info.video_bit_rate,63), 0); + break; + case AV_CODEC_ID_VP9 : + av_opt_set_int(c->priv_data, "crf", min(info.video_bit_rate,63), 0); + if (info.video_bit_rate == 0) { + av_opt_set_int(c->priv_data, "lossless", 1, 0); + } + break; + case AV_CODEC_ID_H264 : + av_opt_set_int(c->priv_data, "crf", min(info.video_bit_rate,51), 0); + break; + case AV_CODEC_ID_H265 : + av_opt_set_int(c->priv_data, "crf", min(info.video_bit_rate,51), 0); + break; + default: + double mbs = 15000000.0; + if (info.video_bit_rate > 0) { + if (info.video_bit_rate > 42) { + mbs = 380.0; + } + else { + mbs *= pow(0.912,info.video_bit_rate); + } + } + c->bit_rate = (int)(mbs); + } + } } #endif From 325f58f7731c3c58c29f172d69f4ac6a279f97a8 Mon Sep 17 00:00:00 
2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Thu, 15 Nov 2018 22:04:20 -0800 Subject: [PATCH 040/109] Changes to use AV1 if ffmpeg >= 4.0 is used with libaom support --- src/FFmpegWriter.cpp | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index 207d545f..66befe5c 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -1067,6 +1067,12 @@ AVStream* FFmpegWriter::add_video_stream() } else { switch (c->codec_id) { +#if (LIBAVCODEC_VERSION_MAJOR >= 58) + case AV_CODEC_ID_AV1 : + av_opt_set_int(c->priv_data, "crf", min(info.video_bit_rate,63), 0); + c->bit_rate = 0; + break; +#endif case AV_CODEC_ID_VP8 : av_opt_set_int(c->priv_data, "crf", min(info.video_bit_rate,63), 0); break; From 514cb1134014d4604ec4062981e75f6d37fa9660 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sun, 25 Nov 2018 20:28:25 -0800 Subject: [PATCH 041/109] When multiple graphics cards are installed the import with hardware acceleration has to have the card number set or the opening of the device will fail. TODO check multiple formats. Right now only the first is checked which is vaapi. --- src/FFmpegReader.cpp | 33 +++++++++++++++++++-------------- 1 file changed, 19 insertions(+), 14 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index d78b7c8f..231d41c7 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -300,21 +300,24 @@ void FFmpegReader::Open() dev_hw = getenv( "HW_DE_DEVICE_SET" ); if( dev_hw != NULL) { adapter_num = atoi(dev_hw); - if (adapter_num < 3 && adapter_num >=0) { - #if defined(__linux__) - snprintf(adapter,sizeof(adapter),"/dev/dri/renderD%d", adapter_num+128); - // Maybe 127 is better because the first card would be 1?! 
- adapter_ptr = adapter; - #elif defined(_WIN32) - adapter_ptr = NULL; - #elif defined(__APPLE__) - adapter_ptr = NULL; - #endif - } - else { - adapter_ptr = NULL; // Just to be sure - } + } else { + adapter_num = 0; } + if (adapter_num < 3 && adapter_num >=0) { + #if defined(__linux__) + snprintf(adapter,sizeof(adapter),"/dev/dri/renderD%d", adapter_num+128); + // Maybe 127 is better because the first card would be 1?! + adapter_ptr = adapter; + #elif defined(_WIN32) + adapter_ptr = NULL; + #elif defined(__APPLE__) + adapter_ptr = NULL; + #endif + } + else { + adapter_ptr = NULL; // Just to be sure + } + //} // Check if it is there and writable #if defined(__linux__) if( adapter_ptr != NULL && access( adapter_ptr, W_OK ) == -1 ) { @@ -329,6 +332,8 @@ void FFmpegReader::Open() } hw_device_ctx = NULL; // Here the first hardware initialisations are made + // TODO: check for each format in an extra call + // Now only vaapi the first in the list is found pCodecCtx->get_format = get_hw_dec_format; if (av_hwdevice_ctx_create(&hw_device_ctx, hw_de_av_device_type, adapter_ptr, NULL, 0) >= 0) { if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { From e7f2494040fb0a17c5f8f0c90789a66140fe8e5d Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Mon, 26 Nov 2018 09:36:21 -0800 Subject: [PATCH 042/109] First changes to make hardware accelerated DECODE work with decoders other than vaapi. Encode is already working for nvenc; nvidia driver 396 has to be installed for nvenc to work. 
On nVidia card turn accelerated decode off in Preferences->Performance for now --- include/FFmpegReader.h | 2 +- src/FFmpegReader.cpp | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/include/FFmpegReader.h b/include/FFmpegReader.h index f571af73..62437d7a 100644 --- a/include/FFmpegReader.h +++ b/include/FFmpegReader.h @@ -149,7 +149,7 @@ namespace openshot int hw_de_supported = 0; // Is set by FFmpegReader #if IS_FFMPEG_3_2 AVPixelFormat hw_de_av_pix_fmt = AV_PIX_FMT_NONE; - AVHWDeviceType hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; + AVHWDeviceType hw_de_av_device_type = AV_HWDEVICE_TYPE_NONE; #endif int is_hardware_decode_supported(int codecid); diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 231d41c7..9ed85114 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -79,7 +79,7 @@ int hw_de_on = 1; // Is set in UI //int hw_de_supported = 0; // Is set by FFmpegReader #if IS_FFMPEG_3_2 AVPixelFormat hw_de_av_pix_fmt_global = AV_PIX_FMT_NONE; -AVHWDeviceType hw_de_av_device_type_global = AV_HWDEVICE_TYPE_VAAPI; +AVHWDeviceType hw_de_av_device_type_global = AV_HWDEVICE_TYPE_NONE; #endif FFmpegReader::FFmpegReader(string path) @@ -334,6 +334,7 @@ void FFmpegReader::Open() // Here the first hardware initialisations are made // TODO: check for each format in an extra call // Now only vaapi the first in the list is found + hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; pCodecCtx->get_format = get_hw_dec_format; if (av_hwdevice_ctx_create(&hw_device_ctx, hw_de_av_device_type, adapter_ptr, NULL, 0) >= 0) { if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { From 1713fecc9080f3b68c9b458122e4fe3610b55f59 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Mon, 26 Nov 2018 13:31:53 -0800 Subject: [PATCH 043/109] More adjustments to enable hardware decode with nvdec/cuvid --- src/FFmpegReader.cpp | 94 ++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 90 
insertions(+), 4 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 9ed85114..941435f3 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -155,7 +155,7 @@ bool AudioLocation::is_near(AudioLocation location, int samples_per_frame, int64 #if IS_FFMPEG_3_2 -static enum AVPixelFormat get_hw_dec_format(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) +static enum AVPixelFormat get_hw_dec_format_va(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) { const enum AVPixelFormat *p; @@ -166,21 +166,69 @@ static enum AVPixelFormat get_hw_dec_format(AVCodecContext *ctx, const enum AVPi hw_de_av_device_type_global = AV_HWDEVICE_TYPE_VAAPI; return *p; break; + } + } + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + return AV_PIX_FMT_NONE; + } + +static enum AVPixelFormat get_hw_dec_format_cu(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) +{ + const enum AVPixelFormat *p; + + for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { + switch (*p) { case AV_PIX_FMT_CUDA: hw_de_av_pix_fmt_global = AV_PIX_FMT_CUDA; hw_de_av_device_type_global = AV_HWDEVICE_TYPE_CUDA; return *p; break; + } + } + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + return AV_PIX_FMT_NONE; + } + +static enum AVPixelFormat get_hw_dec_format_dx(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) +{ + const enum AVPixelFormat *p; + + for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { + switch (*p) { case AV_PIX_FMT_DXVA2_VLD: hw_de_av_pix_fmt_global = AV_PIX_FMT_DXVA2_VLD; hw_de_av_device_type_global = AV_HWDEVICE_TYPE_DXVA2; return *p; break; + } + } + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, 
"", -1); + return AV_PIX_FMT_NONE; + } + +static enum AVPixelFormat get_hw_dec_format_d3(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) +{ + const enum AVPixelFormat *p; + + for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { + switch (*p) { case AV_PIX_FMT_D3D11: hw_de_av_pix_fmt_global = AV_PIX_FMT_D3D11; hw_de_av_device_type_global = AV_HWDEVICE_TYPE_D3D11VA; return *p; break; + } + } + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + return AV_PIX_FMT_NONE; + } + +static enum AVPixelFormat get_hw_dec_format_qs(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) +{ + const enum AVPixelFormat *p; + + for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { + switch (*p) { case AV_PIX_FMT_QSV: hw_de_av_pix_fmt_global = AV_PIX_FMT_QSV; hw_de_av_device_type_global = AV_HWDEVICE_TYPE_QSV; @@ -334,16 +382,54 @@ void FFmpegReader::Open() // Here the first hardware initialisations are made // TODO: check for each format in an extra call // Now only vaapi the first in the list is found + #if defined(__linux__) hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; - pCodecCtx->get_format = get_hw_dec_format; + pCodecCtx->get_format = get_hw_dec_format_va; if (av_hwdevice_ctx_create(&hw_device_ctx, hw_de_av_device_type, adapter_ptr, NULL, 0) >= 0) { if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { - throw InvalidCodec("Hardware device reference create failed.", path); + throw InvalidCodec("Hardware device reference create failed vaapi.", path); } } else { - throw InvalidCodec("Hardware device create failed.", path); + hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA; + pCodecCtx->get_format = get_hw_dec_format_cu; + hw_device_ctx = NULL; + if (av_hwdevice_ctx_create(&hw_device_ctx, hw_de_av_device_type, adapter_ptr, NULL, 0) >= 0) { + if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { + throw InvalidCodec("Hardware device 
reference create failed cuda.", path); + } + } + else { + throw InvalidCodec("Hardware device create failed.", path); + + } } + #endif + #if defined(_WIN32) + hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2_VLD; + pCodecCtx->get_format = get_hw_dec_format_dx; + if (av_hwdevice_ctx_create(&hw_device_ctx, hw_de_av_device_type, adapter_ptr, NULL, 0) >= 0) { + if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { + throw InvalidCodec("Hardware device reference create failed vaapi.", path); + } + } + else { + throw InvalidCodec("Hardware device create failed.", path); + } + #endif + #if defined(__APPLE__) + hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; + pCodecCtx->get_format = get_hw_dec_format_qs; + if (av_hwdevice_ctx_create(&hw_device_ctx, hw_de_av_device_type, adapter_ptr, NULL, 0) >= 0) { + if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { + throw InvalidCodec("Hardware device reference create failed vaapi.", path); + } + } + else { + throw InvalidCodec("Hardware device create failed.", path); + } + #endif + } #endif // Open video codec From 7cadeb364b94226f770fc077b3c759537f452a61 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Mon, 26 Nov 2018 18:08:08 -0800 Subject: [PATCH 044/109] More cleanup --- src/FFmpegReader.cpp | 30 +++++++++++++++++++++--------- 1 file changed, 21 insertions(+), 9 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 941435f3..2f641ab8 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -155,6 +155,7 @@ bool AudioLocation::is_near(AudioLocation location, int samples_per_frame, int64 #if IS_FFMPEG_3_2 +#if defined(__linux__) static enum AVPixelFormat get_hw_dec_format_va(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) { const enum AVPixelFormat *p; @@ -188,7 +189,9 @@ static enum AVPixelFormat get_hw_dec_format_cu(AVCodecContext *ctx, const enum A ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream 
(Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); return AV_PIX_FMT_NONE; } +#endif +#if defined(_WIN32) static enum AVPixelFormat get_hw_dec_format_dx(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) { const enum AVPixelFormat *p; @@ -222,7 +225,9 @@ static enum AVPixelFormat get_hw_dec_format_d3(AVCodecContext *ctx, const enum A ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); return AV_PIX_FMT_NONE; } +#endif +#if defined(__APPLE__) static enum AVPixelFormat get_hw_dec_format_qs(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) { const enum AVPixelFormat *p; @@ -239,6 +244,7 @@ static enum AVPixelFormat get_hw_dec_format_qs(AVCodecContext *ctx, const enum A ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); return AV_PIX_FMT_NONE; } +#endif int FFmpegReader::is_hardware_decode_supported(int codecid) { @@ -345,7 +351,7 @@ void FFmpegReader::Open() char adapter[256]; char *adapter_ptr = NULL; int adapter_num; - dev_hw = getenv( "HW_DE_DEVICE_SET" ); + dev_hw = getenv( "HW_DE_DEVICE_SET" ); // The first card is 0 if( dev_hw != NULL) { adapter_num = atoi(dev_hw); } else { @@ -354,7 +360,6 @@ void FFmpegReader::Open() if (adapter_num < 3 && adapter_num >=0) { #if defined(__linux__) snprintf(adapter,sizeof(adapter),"/dev/dri/renderD%d", adapter_num+128); - // Maybe 127 is better because the first card would be 1?! 
adapter_ptr = adapter; #elif defined(_WIN32) adapter_ptr = NULL; @@ -380,8 +385,6 @@ void FFmpegReader::Open() } hw_device_ctx = NULL; // Here the first hardware initialisations are made - // TODO: check for each format in an extra call - // Now only vaapi the first in the list is found #if defined(__linux__) hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; pCodecCtx->get_format = get_hw_dec_format_va; @@ -401,7 +404,6 @@ void FFmpegReader::Open() } else { throw InvalidCodec("Hardware device create failed.", path); - } } #endif @@ -410,19 +412,29 @@ void FFmpegReader::Open() pCodecCtx->get_format = get_hw_dec_format_dx; if (av_hwdevice_ctx_create(&hw_device_ctx, hw_de_av_device_type, adapter_ptr, NULL, 0) >= 0) { if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { - throw InvalidCodec("Hardware device reference create failed vaapi.", path); + throw InvalidCodec("Hardware device reference create failed dxva2.", path); } } else { - throw InvalidCodec("Hardware device create failed.", path); - } + hw_de_av_device_type = AV_HWDEVICE_TYPE_D3D11; + pCodecCtx->get_format = get_hw_dec_format_cu; + hw_device_ctx = NULL; + if (av_hwdevice_ctx_create(&hw_device_ctx, hw_de_av_device_type, adapter_ptr, NULL, 0) >= 0) { + if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { + throw InvalidCodec("Hardware device reference create failed d3d11.", path); + } + } + else { + throw InvalidCodec("Hardware device create failed.", path); + } + } #endif #if defined(__APPLE__) hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; pCodecCtx->get_format = get_hw_dec_format_qs; if (av_hwdevice_ctx_create(&hw_device_ctx, hw_de_av_device_type, adapter_ptr, NULL, 0) >= 0) { if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { - throw InvalidCodec("Hardware device reference create failed vaapi.", path); + throw InvalidCodec("Hardware device reference create failed qsv.", path); } } else { From d07e8518232f06b4b18ebc0237437670d54b56aa Mon Sep 17 00:00:00 2001 From: 
eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sat, 8 Dec 2018 10:34:24 -0800 Subject: [PATCH 045/109] Hardware decode and encode can now be configured completely in Preferences->Performance. The old enable hardware decode is disabled. Now the graphics card can be chosen (0 is the first one) that should be used for encode and/or decode. They needn't be the same! nVidia decode still not working nVidia encode is working with driver 396 Vaapi should be working. mesa-va-drivers must be installed for AMD i965-va-driver must be installed for intel GPUs. Using one card to decode and one to encode an option with laptops with an iGPU and a dedicated GPU (dGPU), as an example. --- src/FFmpegReader.cpp | 72 ++++++++++++++++++++++++++++++++++---------- 1 file changed, 56 insertions(+), 16 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 2f641ab8..2d7aaafb 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -275,13 +275,29 @@ void FFmpegReader::Open() // Initialize format context pFormatCtx = NULL; - char * val = getenv( "OS2_DECODE_HW" ); + // Old version turn hardware decode on + /*char * val = getenv( "OS2_DECODE_HW" ); if (val == NULL) { hw_de_on = 0; } else{ hw_de_on = (val[0] == '1')? 
1 : 0; - } + }*/ + + // New version turn hardware decode on + { + char *decoder_hw = NULL; + decoder_hw = getenv( "HW_DECODER" ); + if(decoder_hw != NULL) { + if( strncmp(decoder_hw,"NONE",4) == 0) { + hw_de_on = 0; + } else { + hw_de_on = 1; + } + } else { + hw_de_on = 0; + } + } // Open video file if (avformat_open_input(&pFormatCtx, path.c_str(), NULL, NULL) != 0) @@ -348,6 +364,7 @@ void FFmpegReader::Open() // Open Hardware Acceleration // Use the hw device given in the environment variable HW_DE_DEVICE_SET or the default if not set char *dev_hw = NULL; + char *decoder_hw = NULL; char adapter[256]; char *adapter_ptr = NULL; int adapter_num; @@ -361,6 +378,38 @@ void FFmpegReader::Open() #if defined(__linux__) snprintf(adapter,sizeof(adapter),"/dev/dri/renderD%d", adapter_num+128); adapter_ptr = adapter; + decoder_hw = getenv( "HW_DECODER" ); + if(decoder_hw != NULL) { + if (strncmp(decoder_hw,"NONE",4) == 0) { //Will never happen + hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; + pCodecCtx->get_format = get_hw_dec_format_va; + } + if (strncmp(decoder_hw,"HW_DE_VAAPI",11) == 0) { //Will never happen + hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; + pCodecCtx->get_format = get_hw_dec_format_va; + } + if (strncmp(decoder_hw,"HW_DE_NVDEC",11) == 0) { //Will never happen + hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA; + pCodecCtx->get_format = get_hw_dec_format_cu; + } + } else { + hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; + pCodecCtx->get_format = get_hw_dec_format_va; + } + + /* This is a hack: + my first card is AMD, my second card is nVidia + */ + switch (adapter_num) { + case 1: + hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA; + pCodecCtx->get_format = get_hw_dec_format_cu; + break; + default: + hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; + pCodecCtx->get_format = get_hw_dec_format_va; + break; + } #elif defined(_WIN32) adapter_ptr = NULL; #elif defined(__APPLE__) @@ -386,25 +435,16 @@ void FFmpegReader::Open() hw_device_ctx = NULL; // Here the first 
hardware initialisations are made #if defined(__linux__) - hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; - pCodecCtx->get_format = get_hw_dec_format_va; + //hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA; + //pCodecCtx->get_format = get_hw_dec_format_cu; if (av_hwdevice_ctx_create(&hw_device_ctx, hw_de_av_device_type, adapter_ptr, NULL, 0) >= 0) { + cerr << "\n\n**** HW device create OK ******** \n\n"; if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { - throw InvalidCodec("Hardware device reference create failed vaapi.", path); + throw InvalidCodec("Hardware device reference create failed cuda.", path); } } else { - hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA; - pCodecCtx->get_format = get_hw_dec_format_cu; - hw_device_ctx = NULL; - if (av_hwdevice_ctx_create(&hw_device_ctx, hw_de_av_device_type, adapter_ptr, NULL, 0) >= 0) { - if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { - throw InvalidCodec("Hardware device reference create failed cuda.", path); - } - } - else { - throw InvalidCodec("Hardware device create failed.", path); - } + throw InvalidCodec("Hardware device create failed.", path); } #endif #if defined(_WIN32) From 70954f800c0f87cf6199e768586ca99dce017dd2 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sat, 8 Dec 2018 15:54:29 -0800 Subject: [PATCH 046/109] Typo, plus removed hack for my hardware --- src/FFmpegReader.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 2d7aaafb..a3e247f5 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -400,7 +400,7 @@ void FFmpegReader::Open() /* This is a hack: my first card is AMD, my second card is nVidia */ - switch (adapter_num) { +/* switch (adapter_num) { case 1: hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA; pCodecCtx->get_format = get_hw_dec_format_cu; @@ -409,7 +409,7 @@ void FFmpegReader::Open() hw_de_av_device_type = 
AV_HWDEVICE_TYPE_VAAPI; pCodecCtx->get_format = get_hw_dec_format_va; break; - } + }*/ #elif defined(_WIN32) adapter_ptr = NULL; #elif defined(__APPLE__) @@ -440,7 +440,7 @@ void FFmpegReader::Open() if (av_hwdevice_ctx_create(&hw_device_ctx, hw_de_av_device_type, adapter_ptr, NULL, 0) >= 0) { cerr << "\n\n**** HW device create OK ******** \n\n"; if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { - throw InvalidCodec("Hardware device reference create failed cuda.", path); + throw InvalidCodec("Hardware device reference create failed.", path); } } else { From 23e287110d2b6950861a1460a802f310b9fb6f5a Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sat, 8 Dec 2018 18:11:06 -0800 Subject: [PATCH 047/109] Bring Windows and Mac up to date --- src/FFmpegReader.cpp | 84 +++++++++++++++++--------------------------- 1 file changed, 32 insertions(+), 52 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index a3e247f5..6d5b058a 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -397,23 +397,42 @@ void FFmpegReader::Open() pCodecCtx->get_format = get_hw_dec_format_va; } - /* This is a hack: - my first card is AMD, my second card is nVidia - */ -/* switch (adapter_num) { - case 1: - hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA; - pCodecCtx->get_format = get_hw_dec_format_cu; - break; - default: - hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; - pCodecCtx->get_format = get_hw_dec_format_va; - break; - }*/ #elif defined(_WIN32) adapter_ptr = NULL; + decoder_hw = getenv( "HW_DECODER" ); + if(decoder_hw != NULL) { + if (strncmp(decoder_hw,"NONE",4) == 0) { //Will never happen + hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2_VLD; + pCodecCtx->get_format = get_hw_dec_format_dx; + } + if (strncmp(decoder_hw,"HW_DE_WINDOWS_DXVA2",19) == 0) { //Will never happen + hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2_VLD; + pCodecCtx->get_format = get_hw_dec_format_dx; + } + if 
(strncmp(decoder_hw,"HW_DE_WINDOWS_D3D11",19) == 0) { //Will never happen + hw_de_av_device_type = AV_HWDEVICE_TYPE_D3D11; + pCodecCtx->get_format = get_hw_dec_format_d3; + } + } else { + hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2_VLD; + pCodecCtx->get_format = get_hw_dec_format_dx; + } #elif defined(__APPLE__) adapter_ptr = NULL; + decoder_hw = getenv( "HW_DECODER" ); + if(decoder_hw != NULL) { + if (strncmp(decoder_hw,"NONE",4) == 0) { //Will never happen + hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; + pCodecCtx->get_format = get_hw_dec_format_qs; + } + if (strncmp(decoder_hw,"HW_DE_MACOS",11) == 0) { //Will never happen + hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; + pCodecCtx->get_format = get_hw_dec_format_qs; + } + } else { + hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; + pCodecCtx->get_format = get_hw_dec_format_qs; + } #endif } else { @@ -429,14 +448,10 @@ void FFmpegReader::Open() if( adapter_ptr != NULL ) { #endif adapter_ptr = NULL; // use default - //cerr << "\n\n\nDecode Device not present using default\n\n\n"; ZmqLogger::Instance()->AppendDebugMethod("Decode Device not present using default", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); } hw_device_ctx = NULL; // Here the first hardware initialisations are made - #if defined(__linux__) - //hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA; - //pCodecCtx->get_format = get_hw_dec_format_cu; if (av_hwdevice_ctx_create(&hw_device_ctx, hw_de_av_device_type, adapter_ptr, NULL, 0) >= 0) { cerr << "\n\n**** HW device create OK ******** \n\n"; if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { @@ -446,41 +461,6 @@ void FFmpegReader::Open() else { throw InvalidCodec("Hardware device create failed.", path); } - #endif - #if defined(_WIN32) - hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2_VLD; - pCodecCtx->get_format = get_hw_dec_format_dx; - if (av_hwdevice_ctx_create(&hw_device_ctx, hw_de_av_device_type, adapter_ptr, NULL, 0) >= 0) { - if (!(pCodecCtx->hw_device_ctx = 
av_buffer_ref(hw_device_ctx))) { - throw InvalidCodec("Hardware device reference create failed dxva2.", path); - } - } - else { - hw_de_av_device_type = AV_HWDEVICE_TYPE_D3D11; - pCodecCtx->get_format = get_hw_dec_format_cu; - hw_device_ctx = NULL; - if (av_hwdevice_ctx_create(&hw_device_ctx, hw_de_av_device_type, adapter_ptr, NULL, 0) >= 0) { - if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { - throw InvalidCodec("Hardware device reference create failed d3d11.", path); - } - } - else { - throw InvalidCodec("Hardware device create failed.", path); - } - } - #endif - #if defined(__APPLE__) - hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; - pCodecCtx->get_format = get_hw_dec_format_qs; - if (av_hwdevice_ctx_create(&hw_device_ctx, hw_de_av_device_type, adapter_ptr, NULL, 0) >= 0) { - if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { - throw InvalidCodec("Hardware device reference create failed qsv.", path); - } - } - else { - throw InvalidCodec("Hardware device create failed.", path); - } - #endif } #endif From de1bd4f50647f2a4eb0c971419b711d3957c3654 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sun, 9 Dec 2018 09:02:46 -0800 Subject: [PATCH 048/109] Typos in Windows part --- src/FFmpegReader.cpp | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 6d5b058a..f541c295 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -402,19 +402,19 @@ void FFmpegReader::Open() decoder_hw = getenv( "HW_DECODER" ); if(decoder_hw != NULL) { if (strncmp(decoder_hw,"NONE",4) == 0) { //Will never happen - hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2_VLD; + hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; pCodecCtx->get_format = get_hw_dec_format_dx; } if (strncmp(decoder_hw,"HW_DE_WINDOWS_DXVA2",19) == 0) { //Will never happen - hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2_VLD; + hw_de_av_device_type = 
AV_HWDEVICE_TYPE_DXVA2; pCodecCtx->get_format = get_hw_dec_format_dx; } if (strncmp(decoder_hw,"HW_DE_WINDOWS_D3D11",19) == 0) { //Will never happen - hw_de_av_device_type = AV_HWDEVICE_TYPE_D3D11; + hw_de_av_device_type = AV_HWDEVICE_TYPE_D3D11VA; pCodecCtx->get_format = get_hw_dec_format_d3; } } else { - hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2_VLD; + hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; pCodecCtx->get_format = get_hw_dec_format_dx; } #elif defined(__APPLE__) @@ -457,6 +457,14 @@ void FFmpegReader::Open() if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { throw InvalidCodec("Hardware device reference create failed.", path); } + /* + ret = av_hwframe_ctx_init(pCodecCtx->hw_device_ctx); + ret = av_hwframe_ctx_init(ist->hw_frames_ctx); + if (ret < 0) { + av_log(avctx, AV_LOG_ERROR, "Error initializing a CUDA frame pool\n"); + return ret; + } + */ } else { throw InvalidCodec("Hardware device create failed.", path); From 4dcc72a769f2d2f7c790fe7b769e896c11bb15d4 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Wed, 19 Dec 2018 09:12:15 -0800 Subject: [PATCH 049/109] Fixed bug compiling for older ffmpeg versions < 3.2 --- src/FFmpegReader.cpp | 18 ++++++++++++++++++ src/FFmpegWriter.cpp | 5 ++++- 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index f541c295..2c181193 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -458,6 +458,24 @@ void FFmpegReader::Open() throw InvalidCodec("Hardware device reference create failed.", path); } /* + av_buffer_unref(&ist->hw_frames_ctx); + ist->hw_frames_ctx = av_hwframe_ctx_alloc(hw_device_ctx); + if (!ist->hw_frames_ctx) { + av_log(avctx, AV_LOG_ERROR, "Error creating a CUDA frames context\n"); + return AVERROR(ENOMEM); + } + + frames_ctx = (AVHWFramesContext*)ist->hw_frames_ctx->data; + + frames_ctx->format = AV_PIX_FMT_CUDA; + frames_ctx->sw_format = avctx->sw_pix_fmt; + 
frames_ctx->width = avctx->width; + frames_ctx->height = avctx->height; + + av_log(avctx, AV_LOG_DEBUG, "Initializing CUDA frames context: sw_format = %s, width = %d, height = %d\n", + av_get_pix_fmt_name(frames_ctx->sw_format), frames_ctx->width, frames_ctx->height); + + ret = av_hwframe_ctx_init(pCodecCtx->hw_device_ctx); ret = av_hwframe_ctx_init(ist->hw_frames_ctx); if (ret < 0) { diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index 66befe5c..c4f2ca43 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -1053,6 +1053,7 @@ AVStream* FFmpegWriter::add_video_stream() } #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 39, 101) else { +#if IS_FFMPEG_3_2 if (hw_en_on) { double mbs = 15000000.0; if (info.video_bit_rate > 0) { @@ -1065,7 +1066,9 @@ AVStream* FFmpegWriter::add_video_stream() } c->bit_rate = (int)(mbs); } - else { + else +#endif + { switch (c->codec_id) { #if (LIBAVCODEC_VERSION_MAJOR >= 58) case AV_CODEC_ID_AV1 : From e10695f9d480d917d8f25025e210ad4da025279f Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Thu, 20 Dec 2018 09:18:26 -0800 Subject: [PATCH 050/109] Fixed two memory leaks --- src/FFmpegReader.cpp | 1 + src/FFmpegWriter.cpp | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 2c181193..859f4cf6 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -2426,6 +2426,7 @@ void FFmpegReader::RemoveAVFrame(AVFrame* remove_frame) { // Free memory av_freep(&remove_frame->data[0]); + AV_FREE_FRAME(&remove_frame); } } diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index c4f2ca43..8e29dc59 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -1615,8 +1615,8 @@ void FFmpegWriter::write_audio_packets(bool final) } else { // Create a new array - final_samples = new int16_t[audio_input_position * (av_get_bytes_per_sample(audio_codec->sample_fmt) / 
av_get_bytes_per_sample(AV_SAMPLE_FMT_S16))]; - + //final_samples = new int16_t[audio_input_position * (av_get_bytes_per_sample(audio_codec->sample_fmt) / av_get_bytes_per_sample(AV_SAMPLE_FMT_S16))]; + final_samples = (int16_t*)av_malloc(sizeof(int16_t) * audio_input_position * (av_get_bytes_per_sample(audio_codec->sample_fmt) / av_get_bytes_per_sample(AV_SAMPLE_FMT_S16))); // Copy audio into buffer for frame memcpy(final_samples, samples, audio_input_position * av_get_bytes_per_sample(audio_codec->sample_fmt)); From f2db5fdb39bf1de2511857551a9c6d7916ede102 Mon Sep 17 00:00:00 2001 From: "FeRD (Frank Dana)" Date: Sat, 26 Jan 2019 11:50:21 -0500 Subject: [PATCH 051/109] FFmpegUtilities: Rename RSHIFT to FF_RSHIFT FFmpeg and Ruby have competing definitions of the RSHIFT macro, so building the Ruby bindings causes warnings to be thrown when it's redefined. This change defines FF_RSHIFT to replace FFmpeg's RSHIFT, which is undef'd --- include/FFmpegUtilities.h | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/include/FFmpegUtilities.h b/include/FFmpegUtilities.h index 346da541..e16466e3 100644 --- a/include/FFmpegUtilities.h +++ b/include/FFmpegUtilities.h @@ -114,6 +114,13 @@ #define PIX_FMT_YUV420P AV_PIX_FMT_YUV420P #endif + // FFmpeg's libavutil/common.h defines an RSHIFT incompatible with Ruby's + // definition in ruby/config.h, so we move it to FF_RSHIFT + #ifdef RSHIFT + #define FF_RSHIFT(a, b) RSHIFT(a, b) + #undef RSHIFT + #endif + #ifdef USE_SW #define SWR_CONVERT(ctx, out, linesize, out_count, in, linesize2, in_count) \ swr_convert(ctx, out, out_count, (const uint8_t **)in, in_count) From bb8efeb72b55d093349d950f533f32b1243d0852 Mon Sep 17 00:00:00 2001 From: "FeRD (Frank Dana)" Date: Sat, 26 Jan 2019 11:53:08 -0500 Subject: [PATCH 052/109] Ruby: Rename RSHIFT to RB_RSHIFT, temporarily When Ruby attempts to load the FFmpeg header files, it'll complain that RSHIFT is redefined if the Ruby definition is still in place. 
So, we define RB_RSHIFT to contain the Ruby definition, undef RSHIFT, load the FFmpeg headers, move its RSHIFT into FF_RSHIFT if necessary, and then restore Ruby's RSHIFT from RB_RSHIFT. --- src/bindings/ruby/openshot.i | 39 +++++++++++++++++++++++++++++++++++- 1 file changed, 38 insertions(+), 1 deletion(-) diff --git a/src/bindings/ruby/openshot.i b/src/bindings/ruby/openshot.i index b9a35d41..82925b71 100644 --- a/src/bindings/ruby/openshot.i +++ b/src/bindings/ruby/openshot.i @@ -57,6 +57,14 @@ namespace std { %{ +/* Ruby and FFmpeg define competing RSHIFT macros, + * so we move Ruby's out of the way for now. We'll + * restore it after dealing with FFmpeg's + */ +#ifdef RSHIFT + #define RB_RSHIFT(a, b) RSHIFT(a, b) + #undef RSHIFT +#endif #include "../../../include/Version.h" #include "../../../include/ReaderBase.h" #include "../../../include/WriterBase.h" @@ -91,7 +99,15 @@ namespace std { #include "../../../include/Settings.h" #include "../../../include/Timeline.h" #include "../../../include/ZmqLogger.h" - +/* Move FFmpeg's RSHIFT to FF_RSHIFT, if present */ +#ifdef RSHIFT + #define FF_RSHIFT(a, b) RSHIFT(a, b) + #undef RSHIFT +#endif +/* And restore Ruby's RSHIFT, if we captured it */ +#ifdef RB_RSHIFT + #define RSHIFT(a, b) RB_RSHIFT(a, b) +#endif %} #ifdef USE_BLACKMAGIC @@ -132,8 +148,29 @@ namespace std { %include "../../../include/EffectInfo.h" %include "../../../include/Enums.h" %include "../../../include/Exceptions.h" + +/* Ruby and FFmpeg define competing RSHIFT macros, + * so we move Ruby's out of the way for now. 
We'll + * restore it after dealing with FFmpeg's + */ +#ifdef RSHIFT + #define RB_RSHIFT(a, b) RSHIFT(a, b) + #undef RSHIFT +#endif + %include "../../../include/FFmpegReader.h" %include "../../../include/FFmpegWriter.h" + +/* Move FFmpeg's RSHIFT to FF_RSHIFT, if present */ +#ifdef RSHIFT + #define FF_RSHIFT(a, b) RSHIFT(a, b) + #undef RSHIFT +#endif +/* And restore Ruby's RSHIFT, if we captured it */ +#ifdef RB_RSHIFT + #define RSHIFT(a, b) RB_RSHIFT(a, b) +#endif + %include "../../../include/Fraction.h" %include "../../../include/Frame.h" %include "../../../include/FrameMapper.h" From 9aeec7d90ff172539add5c1363d8707fceabe4f1 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sat, 26 Jan 2019 12:22:09 -0800 Subject: [PATCH 053/109] Set the bitrate to 0 if no valid value was given. It is needed for the crf lossless setting --- src/FFmpegReader.cpp | 4 +++- src/FFmpegWriter.cpp | 8 ++++++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 011e8fe4..a2a605ad 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -284,6 +284,8 @@ void FFmpegReader::Open() hw_de_on = (val[0] == '1')? 
1 : 0; }*/ + //hw_de_on = Settings::Instance()->HARDWARE_DECODE; + // New version turn hardware decode on { char *decoder_hw = NULL; @@ -368,7 +370,7 @@ void FFmpegReader::Open() char adapter[256]; char *adapter_ptr = NULL; int adapter_num; - dev_hw = getenv( "HW_DE_DEVICE_SET" ); // The first card is 0 + dev_hw = getenv( "HW_DE_DEVICE_SET" ); // The first card is 0 if( dev_hw != NULL) { adapter_num = atoi(dev_hw); } else { diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index 8b5ee159..55816c23 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -247,8 +247,10 @@ void FFmpegWriter::SetVideoOptions(bool has_video, string codec, Fraction fps, i info.pixel_ratio.num = pixel_ratio.num; info.pixel_ratio.den = pixel_ratio.den; } - if (bit_rate >= 1000) // bit_rate is the bitrate in b/s + if (bit_rate >= 1000) // bit_rate is the bitrate in b/s info.video_bit_rate = bit_rate; + else + info.video_bit_rate = 0; info.interlaced_frame = interlaced; info.top_field_first = top_field_first; @@ -1083,7 +1085,9 @@ AVStream* FFmpegWriter::add_video_stream() if (info.video_bit_rate > 1000) { c->bit_rate = info.video_bit_rate; } - + else { + c->bit_rate = 0; + } //TODO: Implement variable bitrate feature (which actually works). This implementation throws //invalid bitrate errors and rc buffer underflow errors, etc... 
//c->rc_min_rate = info.video_bit_rate; From 46051fbba10416ab1c3bd59388b4957c5727f63a Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sun, 27 Jan 2019 10:07:40 -0800 Subject: [PATCH 054/109] Form follows function Moved crf back to SetVideoOptions and adjusted parameters Now h.264 and VP9 have working crf Some small changes in preparation for Settings --- src/FFmpegReader.cpp | 18 ++++----- src/FFmpegWriter.cpp | 91 +++++++++++++++++++++++++++++++++++++------- 2 files changed, 87 insertions(+), 22 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index a2a605ad..dd5f063b 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -291,7 +291,7 @@ void FFmpegReader::Open() char *decoder_hw = NULL; decoder_hw = getenv( "HW_DECODER" ); if(decoder_hw != NULL) { - if( strncmp(decoder_hw,"NONE",4) == 0) { + if( strncmp(decoder_hw,"0",4) == 0) { hw_de_on = 0; } else { hw_de_on = 1; @@ -382,15 +382,15 @@ void FFmpegReader::Open() adapter_ptr = adapter; decoder_hw = getenv( "HW_DECODER" ); if(decoder_hw != NULL) { - if (strncmp(decoder_hw,"NONE",4) == 0) { //Will never happen + if (strncmp(decoder_hw,"0",4) == 0) { //Will never happen hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; pCodecCtx->get_format = get_hw_dec_format_va; } - if (strncmp(decoder_hw,"HW_DE_VAAPI",11) == 0) { //Will never happen + if (strncmp(decoder_hw,"1",11) == 0) { hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; pCodecCtx->get_format = get_hw_dec_format_va; } - if (strncmp(decoder_hw,"HW_DE_NVDEC",11) == 0) { //Will never happen + if (strncmp(decoder_hw,"2",11) == 0) { hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA; pCodecCtx->get_format = get_hw_dec_format_cu; } @@ -403,15 +403,15 @@ void FFmpegReader::Open() adapter_ptr = NULL; decoder_hw = getenv( "HW_DECODER" ); if(decoder_hw != NULL) { - if (strncmp(decoder_hw,"NONE",4) == 0) { //Will never happen + if (strncmp(decoder_hw,"0",4) == 0) { //Will never happen hw_de_av_device_type 
= AV_HWDEVICE_TYPE_DXVA2; pCodecCtx->get_format = get_hw_dec_format_dx; } - if (strncmp(decoder_hw,"HW_DE_WINDOWS_DXVA2",19) == 0) { //Will never happen + if (strncmp(decoder_hw,"3",19) == 0) { hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; pCodecCtx->get_format = get_hw_dec_format_dx; } - if (strncmp(decoder_hw,"HW_DE_WINDOWS_D3D11",19) == 0) { //Will never happen + if (strncmp(decoder_hw,"4",19) == 0) { hw_de_av_device_type = AV_HWDEVICE_TYPE_D3D11VA; pCodecCtx->get_format = get_hw_dec_format_d3; } @@ -423,11 +423,11 @@ void FFmpegReader::Open() adapter_ptr = NULL; decoder_hw = getenv( "HW_DECODER" ); if(decoder_hw != NULL) { - if (strncmp(decoder_hw,"NONE",4) == 0) { //Will never happen + if (strncmp(decoder_hw,"0",4) == 0) { //Will never happen hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; pCodecCtx->get_format = get_hw_dec_format_qs; } - if (strncmp(decoder_hw,"HW_DE_MACOS",11) == 0) { //Will never happen + if (strncmp(decoder_hw,"5",11) == 0) { hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; pCodecCtx->get_format = get_hw_dec_format_qs; } diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index 55816c23..2c81cf96 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -249,8 +249,10 @@ void FFmpegWriter::SetVideoOptions(bool has_video, string codec, Fraction fps, i } if (bit_rate >= 1000) // bit_rate is the bitrate in b/s info.video_bit_rate = bit_rate; - else - info.video_bit_rate = 0; + if ((bit_rate >= 0) && (bit_rate < 64) ) // bit_rate is the bitrate in b/s + info.video_bit_rate = bit_rate; + //else + // info.video_bit_rate = 0; info.interlaced_frame = interlaced; info.top_field_first = top_field_first; @@ -392,29 +394,35 @@ void FFmpegWriter::SetOption(StreamType stream, string name, string value) // encode quality and special settings like lossless // This might be better in an extra methods as more options // and way to set quality are possible + int tempi = stoi(value); #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 39, 101) switch 
(c->codec_id) { case AV_CODEC_ID_VP8 : - av_opt_set_int(c->priv_data, "crf", min(stoi(value),63), 0); + av_opt_set_int(c->priv_data, "crf", min(tempi,63), 0); break; case AV_CODEC_ID_VP9 : - av_opt_set_int(c->priv_data, "crf", min(stoi(value),63), 0); - if (stoi(value) == 0) { + av_opt_set_int(c->priv_data, "crf", min(tempi,63), 0); + if (tempi == 0) { av_opt_set_int(c->priv_data, "lossless", 1, 0); + av_opt_set(c->priv_data, "preset", "veryslow", 0); } break; case AV_CODEC_ID_H264 : - av_opt_set_int(c->priv_data, "crf", min(stoi(value),51), 0); + av_opt_set_int(c->priv_data, "crf", min(tempi,51), 0); + if (tempi == 0) { + av_opt_set(c->priv_data, "preset", "veryslow", 0); + } break; case AV_CODEC_ID_H265 : - av_opt_set_int(c->priv_data, "crf", min(stoi(value),51), 0); - if (stoi(value) == 0) { + av_opt_set_int(c->priv_data, "crf", min(tempi,51), 0); + if (tempi == 0) { av_opt_set_int(c->priv_data, "lossless", 1, 0); + av_opt_set(c->priv_data, "preset", "veryslow", 0); } break; #ifdef AV_CODEC_ID_AV1 case AV_CODEC_ID_AV1 : - av_opt_set_int(c->priv_data, "crf", min(stoi(value),63), 0); + av_opt_set_int(c->priv_data, "crf", min(tempi,63), 0); break; #endif } @@ -1082,13 +1090,70 @@ AVStream* FFmpegWriter::add_video_stream() #endif /* Init video encoder options */ - if (info.video_bit_rate > 1000) { + if (info.video_bit_rate >= 1000) { c->bit_rate = info.video_bit_rate; } - else { - c->bit_rate = 0; +// else { +// c->bit_rate = 0; +// } + #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 39, 101) + else { +#if IS_FFMPEG_3_2 + if (hw_en_on) { + double mbs = 15000000.0; + if (info.video_bit_rate > 0) { + if (info.video_bit_rate > 42) { + mbs = 380.0; + } + else { + mbs *= pow(0.912,info.video_bit_rate); + } + } + c->bit_rate = (int)(mbs); + } + else +#endif + { + switch (c->codec_id) { +#if (LIBAVCODEC_VERSION_MAJOR >= 58) + case AV_CODEC_ID_AV1 : + av_opt_set_int(c->priv_data, "crf", min(info.video_bit_rate,63), 0); + c->bit_rate = 0; + break; +#endif + case 
AV_CODEC_ID_VP8 : + av_opt_set_int(c->priv_data, "crf", min(info.video_bit_rate,63), 0); + break; + case AV_CODEC_ID_VP9 : + av_opt_set_int(c->priv_data, "crf", min(info.video_bit_rate,63), 0); + c->bit_rate = 0; + if (info.video_bit_rate == 0) { + av_opt_set_int(c->priv_data, "lossless", 1, 0); + } + break; + case AV_CODEC_ID_H264 : + av_opt_set_int(c->priv_data, "crf", min(info.video_bit_rate,51), 0); + break; + case AV_CODEC_ID_H265 : + av_opt_set_int(c->priv_data, "crf", min(info.video_bit_rate,51), 0); + break; + default: + double mbs = 15000000.0; + if (info.video_bit_rate > 0) { + if (info.video_bit_rate > 42) { + mbs = 380.0; + } + else { + mbs *= pow(0.912,info.video_bit_rate); + } + } + c->bit_rate = (int)(mbs); + } + } } - //TODO: Implement variable bitrate feature (which actually works). This implementation throws +#endif + +//TODO: Implement variable bitrate feature (which actually works). This implementation throws //invalid bitrate errors and rc buffer underflow errors, etc... 
//c->rc_min_rate = info.video_bit_rate; //c->rc_max_rate = info.video_bit_rate; From 1a44bd789cf307937d712c89047251bd3a1351b5 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sun, 27 Jan 2019 10:26:20 -0800 Subject: [PATCH 055/109] Make sure that crf is not set in SetOptions --- src/FFmpegWriter.cpp | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index 2c81cf96..c5c40685 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -390,7 +390,7 @@ void FFmpegWriter::SetOption(StreamType stream, string name, string value) // Buffer size convert >> c->rc_buffer_size; - else if (name == "crf") { +/* else if (name == "crf") { // encode quality and special settings like lossless // This might be better in an extra methods as more options // and way to set quality are possible @@ -428,7 +428,7 @@ void FFmpegWriter::SetOption(StreamType stream, string name, string value) } #endif } - +*/ else // Set AVOption AV_OPTION_SET(st, c->priv_data, name.c_str(), value.c_str(), c); @@ -1128,7 +1128,8 @@ AVStream* FFmpegWriter::add_video_stream() av_opt_set_int(c->priv_data, "crf", min(info.video_bit_rate,63), 0); c->bit_rate = 0; if (info.video_bit_rate == 0) { - av_opt_set_int(c->priv_data, "lossless", 1, 0); + av_opt_set(c->priv_data, "preset", "veryslow", 0); + av_opt_set_int(c->priv_data, "lossless", 1, 0); } break; case AV_CODEC_ID_H264 : From 39bf06b3d3ab7469583064362565fb6f1be7fb48 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sun, 27 Jan 2019 10:50:23 -0800 Subject: [PATCH 056/109] Now VP8, VP9, h.264, h.265 have working crf --- src/FFmpegWriter.cpp | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index c5c40685..778c352e 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -1123,6 +1123,11 @@ AVStream* FFmpegWriter::add_video_stream() 
#endif case AV_CODEC_ID_VP8 : av_opt_set_int(c->priv_data, "crf", min(info.video_bit_rate,63), 0); + c->bit_rate = 10000000; + if (info.video_bit_rate == 0) { + av_opt_set(c->priv_data, "preset", "veryslow", 0); + c->bit_rate = 30000000; + } break; case AV_CODEC_ID_VP9 : av_opt_set_int(c->priv_data, "crf", min(info.video_bit_rate,63), 0); @@ -1137,6 +1142,10 @@ AVStream* FFmpegWriter::add_video_stream() break; case AV_CODEC_ID_H265 : av_opt_set_int(c->priv_data, "crf", min(info.video_bit_rate,51), 0); + if (info.video_bit_rate == 0) { + av_opt_set(c->priv_data, "preset", "veryslow", 0); + av_opt_set_int(c->priv_data, "lossless", 1, 0); + } break; default: double mbs = 15000000.0; From 05fb797776086103d82c9a448dc5928b18de5bab Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Tue, 29 Jan 2019 10:26:10 -0800 Subject: [PATCH 057/109] Move the check if hw accell ecoding is used with crf to the right place --- src/FFmpegWriter.cpp | 38 +++++++++++++++++++------------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index 3ae1dd24..d98d88d3 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -400,7 +400,24 @@ void FFmpegWriter::SetOption(StreamType stream, string name, string value) // encode quality and special settings like lossless // This might be better in an extra methods as more options // and way to set quality are possible - #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 39, 101) + #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 39, 101) + #if IS_FFMPEG_3_2 + if (hw_en_on) { + double mbs = 15000000.0; + if (info.video_bit_rate > 0) { + if (info.video_bit_rate > 42) { + mbs = 380000.0; + } + else { + mbs *= pow(0.912,info.video_bit_rate); + } + } + c->bit_rate = (int)(mbs); + } else + #endif +// #endif +// #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 39, 101) + { switch (c->codec_id) { #if (LIBAVCODEC_VERSION_MAJOR >= 58) case 
AV_CODEC_ID_AV1 : @@ -447,6 +464,7 @@ void FFmpegWriter::SetOption(StreamType stream, string name, string value) } c->bit_rate = (int)(mbs); } + } #endif } @@ -1108,24 +1126,6 @@ AVStream* FFmpegWriter::add_video_stream() if (info.video_bit_rate >= 1000) { c->bit_rate = info.video_bit_rate; } - #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 39, 101) - else { - #if IS_FFMPEG_3_2 - if (hw_en_on) { - double mbs = 15000000.0; - if (info.video_bit_rate > 0) { - if (info.video_bit_rate > 42) { - mbs = 380000.0; - } - else { - mbs *= pow(0.912,info.video_bit_rate); - } - } - c->bit_rate = (int)(mbs); - } - #endif - } - #endif //TODO: Implement variable bitrate feature (which actually works). This implementation throws //invalid bitrate errors and rc buffer underflow errors, etc... From 7e3669b620d96af79a98892eb9b7a11e2114853e Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Tue, 29 Jan 2019 12:38:52 -0800 Subject: [PATCH 058/109] Formating --- src/FFmpegWriter.cpp | 459 +++++++++++++++++++++---------------------- 1 file changed, 228 insertions(+), 231 deletions(-) diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index d98d88d3..6b8f240f 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -48,32 +48,32 @@ AVFrame *hw_frame = NULL; static int set_hwframe_ctx(AVCodecContext *ctx, AVBufferRef *hw_device_ctx, int64_t width, int64_t height) { - AVBufferRef *hw_frames_ref; - AVHWFramesContext *frames_ctx = NULL; - int err = 0; + AVBufferRef *hw_frames_ref; + AVHWFramesContext *frames_ctx = NULL; + int err = 0; - if (!(hw_frames_ref = av_hwframe_ctx_alloc(hw_device_ctx))) { - fprintf(stderr, "Failed to create HW frame context.\n"); - return -1; - } - frames_ctx = (AVHWFramesContext *)(hw_frames_ref->data); - frames_ctx->format = hw_en_av_pix_fmt; - frames_ctx->sw_format = AV_PIX_FMT_NV12; - frames_ctx->width = width; - frames_ctx->height = height; - frames_ctx->initial_pool_size = 20; - if ((err = 
av_hwframe_ctx_init(hw_frames_ref)) < 0) { - fprintf(stderr, "Failed to initialize HW frame context." - "Error code: %s\n",av_err2str(err)); - av_buffer_unref(&hw_frames_ref); - return err; - } - ctx->hw_frames_ctx = av_buffer_ref(hw_frames_ref); - if (!ctx->hw_frames_ctx) - err = AVERROR(ENOMEM); + if (!(hw_frames_ref = av_hwframe_ctx_alloc(hw_device_ctx))) { + fprintf(stderr, "Failed to create HW frame context.\n"); + return -1; + } + frames_ctx = (AVHWFramesContext *)(hw_frames_ref->data); + frames_ctx->format = hw_en_av_pix_fmt; + frames_ctx->sw_format = AV_PIX_FMT_NV12; + frames_ctx->width = width; + frames_ctx->height = height; + frames_ctx->initial_pool_size = 20; + if ((err = av_hwframe_ctx_init(hw_frames_ref)) < 0) { + fprintf(stderr, "Failed to initialize HW frame context." + "Error code: %s\n",av_err2str(err)); + av_buffer_unref(&hw_frames_ref); + return err; + } + ctx->hw_frames_ctx = av_buffer_ref(hw_frames_ref); + if (!ctx->hw_frames_ctx) + err = AVERROR(ENOMEM); - av_buffer_unref(&hw_frames_ref); - return err; + av_buffer_unref(&hw_frames_ref); + return err; } #endif @@ -176,49 +176,49 @@ void FFmpegWriter::SetVideoOptions(bool has_video, string codec, Fraction fps, i if ( (strcmp(codec.c_str(),"h264_vaapi") == 0)) { new_codec = avcodec_find_encoder_by_name(codec.c_str()); hw_en_on = 1; - hw_en_supported = 1; - hw_en_av_pix_fmt = AV_PIX_FMT_VAAPI; - hw_en_av_device_type = AV_HWDEVICE_TYPE_VAAPI; + hw_en_supported = 1; + hw_en_av_pix_fmt = AV_PIX_FMT_VAAPI; + hw_en_av_device_type = AV_HWDEVICE_TYPE_VAAPI; + } + else { + if ( (strcmp(codec.c_str(),"h264_nvenc") == 0)) { + new_codec = avcodec_find_encoder_by_name(codec.c_str()); + hw_en_on = 1; + hw_en_supported = 1; + hw_en_av_pix_fmt = AV_PIX_FMT_CUDA; + hw_en_av_device_type = AV_HWDEVICE_TYPE_CUDA; + } + else { + new_codec = avcodec_find_encoder_by_name(codec.c_str()); + hw_en_on = 0; + hw_en_supported = 0; + } } - else { - if ( (strcmp(codec.c_str(),"h264_nvenc") == 0)) { - new_codec = 
avcodec_find_encoder_by_name(codec.c_str()); - hw_en_on = 1; - hw_en_supported = 1; - hw_en_av_pix_fmt = AV_PIX_FMT_CUDA; - hw_en_av_device_type = AV_HWDEVICE_TYPE_CUDA; - } - else { - new_codec = avcodec_find_encoder_by_name(codec.c_str()); - hw_en_on = 0; - hw_en_supported = 0; - } - } #elif defined(_WIN32) if ( (strcmp(codec.c_str(),"h264_dxva2") == 0)) { new_codec = avcodec_find_encoder_by_name(codec.c_str()); hw_en_on = 1; - hw_en_supported = 1; - hw_en_av_pix_fmt = AV_PIX_FMT_DXVA2_VLD; - hw_en_av_device_type = AV_HWDEVICE_TYPE_DXVA2; + hw_en_supported = 1; + hw_en_av_pix_fmt = AV_PIX_FMT_DXVA2_VLD; + hw_en_av_device_type = AV_HWDEVICE_TYPE_DXVA2; } else { new_codec = avcodec_find_encoder_by_name(codec.c_str()); hw_en_on = 0; - hw_en_supported = 0; + hw_en_supported = 0; } - #elif defined(__APPLE__) + #elif defined(__APPLE__) if ( (strcmp(codec.c_str(),"h264_qsv") == 0)) { new_codec = avcodec_find_encoder_by_name(codec.c_str()); hw_en_on = 1; - hw_en_supported = 1; - hw_en_av_pix_fmt = AV_PIX_FMT_QSV; - hw_en_av_device_type = AV_HWDEVICE_TYPE_QSV; + hw_en_supported = 1; + hw_en_av_pix_fmt = AV_PIX_FMT_QSV; + hw_en_av_device_type = AV_HWDEVICE_TYPE_QSV; } else { new_codec = avcodec_find_encoder_by_name(codec.c_str()); hw_en_on = 0; - hw_en_supported = 0; + hw_en_supported = 0; } #else // is FFmpeg 3 but not linux new_codec = avcodec_find_encoder_by_name(codec.c_str()); @@ -255,7 +255,7 @@ void FFmpegWriter::SetVideoOptions(bool has_video, string codec, Fraction fps, i info.pixel_ratio.num = pixel_ratio.num; info.pixel_ratio.den = pixel_ratio.den; } - if (bit_rate >= 1000) // bit_rate is the bitrate in b/s + if (bit_rate >= 1000) // bit_rate is the bitrate in b/s info.video_bit_rate = bit_rate; if ((bit_rate >= 0) && (bit_rate < 64) ) // bit_rate is the bitrate in crf info.video_bit_rate = bit_rate; @@ -348,8 +348,8 @@ void FFmpegWriter::SetOption(StreamType stream, string name, string value) // Was option found? 
if (option || (name == "g" || name == "qmin" || name == "qmax" || name == "max_b_frames" || name == "mb_decision" || - name == "level" || name == "profile" || name == "slices" || name == "rc_min_rate" || name == "rc_max_rate" || - name == "crf")) + name == "level" || name == "profile" || name == "slices" || name == "rc_min_rate" || name == "rc_max_rate" || + name == "crf")) { // Check for specific named options if (name == "g") @@ -396,28 +396,26 @@ void FFmpegWriter::SetOption(StreamType stream, string name, string value) // Buffer size convert >> c->rc_buffer_size; - else if (name == "crf") { + else if (name == "crf") { // encode quality and special settings like lossless // This might be better in an extra methods as more options // and way to set quality are possible - #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 39, 101) - #if IS_FFMPEG_3_2 - if (hw_en_on) { - double mbs = 15000000.0; - if (info.video_bit_rate > 0) { - if (info.video_bit_rate > 42) { - mbs = 380000.0; - } - else { - mbs *= pow(0.912,info.video_bit_rate); - } - } - c->bit_rate = (int)(mbs); - } else - #endif -// #endif -// #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 39, 101) - { + #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 39, 101) + #if IS_FFMPEG_3_2 + if (hw_en_on) { + double mbs = 15000000.0; + if (info.video_bit_rate > 0) { + if (info.video_bit_rate > 42) { + mbs = 380000.0; + } + else { + mbs *= pow(0.912,info.video_bit_rate); + } + } + c->bit_rate = (int)(mbs); + } else + #endif + { switch (c->codec_id) { #if (LIBAVCODEC_VERSION_MAJOR >= 58) case AV_CODEC_ID_AV1 : @@ -519,7 +517,7 @@ void FFmpegWriter::WriteHeader() throw InvalidFile("Could not open or write file.", path); } - // Force the output filename (which doesn't always happen for some reason) + // Force the output filename (which doesn't always happen for some reason) snprintf(oc->AV_FILENAME, sizeof(oc->AV_FILENAME), "%s", path.c_str()); // Write the stream header, if any @@ -743,11 +741,11 @@ void 
FFmpegWriter::flush_encoders() return; #endif - int error_code = 0; - int stop_encoding = 1; + int error_code = 0; + int stop_encoding = 1; - // FLUSH VIDEO ENCODER - if (info.has_video) + // FLUSH VIDEO ENCODER + if (info.has_video) for (;;) { // Increment PTS (in frames and scaled to the codec's timebase) @@ -846,8 +844,8 @@ void FFmpegWriter::flush_encoders() av_freep(&video_outbuf); } - // FLUSH AUDIO ENCODER - if (info.has_audio) + // FLUSH AUDIO ENCODER + if (info.has_audio) for (;;) { // Increment PTS (in samples and scaled to the codec's timebase) @@ -914,16 +912,16 @@ void FFmpegWriter::close_video(AVFormatContext *oc, AVStream *st) { AV_FREE_CONTEXT(video_codec); video_codec = NULL; - #if IS_FFMPEG_3_2 + #if IS_FFMPEG_3_2 // #if defined(__linux__) - if (hw_en_on && hw_en_supported) { - if (hw_device_ctx) { - av_buffer_unref(&hw_device_ctx); - hw_device_ctx = NULL; - } - } + if (hw_en_on && hw_en_supported) { + if (hw_device_ctx) { + av_buffer_unref(&hw_device_ctx); + hw_device_ctx = NULL; + } + } // #endif - #endif + #endif } // Close the audio codec @@ -1042,15 +1040,15 @@ AVStream* FFmpegWriter::add_audio_stream() // Set valid sample rate (or throw error) if (codec->supported_samplerates) { int i; - for (i = 0; codec->supported_samplerates[i] != 0; i++) - if (info.sample_rate == codec->supported_samplerates[i]) - { - // Set the valid sample rate - c->sample_rate = info.sample_rate; - break; - } - if (codec->supported_samplerates[i] == 0) - throw InvalidSampleRate("An invalid sample rate was detected for this codec.", path); + for (i = 0; codec->supported_samplerates[i] != 0; i++) + if (info.sample_rate == codec->supported_samplerates[i]) + { + // Set the valid sample rate + c->sample_rate = info.sample_rate; + break; + } + if (codec->supported_samplerates[i] == 0) + throw InvalidSampleRate("An invalid sample rate was detected for this codec.", path); } else // Set sample rate c->sample_rate = info.sample_rate; @@ -1174,31 +1172,31 @@ AVStream* 
FFmpegWriter::add_video_stream() #endif // Find all supported pixel formats for this codec - const PixelFormat* supported_pixel_formats = codec->pix_fmts; - while (supported_pixel_formats != NULL && *supported_pixel_formats != PIX_FMT_NONE) { - // Assign the 1st valid pixel format (if one is missing) - if (c->pix_fmt == PIX_FMT_NONE) - c->pix_fmt = *supported_pixel_formats; - ++supported_pixel_formats; - } + const PixelFormat* supported_pixel_formats = codec->pix_fmts; + while (supported_pixel_formats != NULL && *supported_pixel_formats != PIX_FMT_NONE) { + // Assign the 1st valid pixel format (if one is missing) + if (c->pix_fmt == PIX_FMT_NONE) + c->pix_fmt = *supported_pixel_formats; + ++supported_pixel_formats; + } - // Codec doesn't have any pix formats? - if (c->pix_fmt == PIX_FMT_NONE) { - if(fmt->video_codec == AV_CODEC_ID_RAWVIDEO) { - // Raw video should use RGB24 - c->pix_fmt = PIX_FMT_RGB24; + // Codec doesn't have any pix formats? + if (c->pix_fmt == PIX_FMT_NONE) { + if(fmt->video_codec == AV_CODEC_ID_RAWVIDEO) { + // Raw video should use RGB24 + c->pix_fmt = PIX_FMT_RGB24; #if (LIBAVFORMAT_VERSION_MAJOR < 58) - if (strcmp(fmt->name, "gif") != 0) - // If not GIF format, skip the encoding process - // Set raw picture flag (so we don't encode this video) - oc->oformat->flags |= AVFMT_RAWPICTURE; + if (strcmp(fmt->name, "gif") != 0) + // If not GIF format, skip the encoding process + // Set raw picture flag (so we don't encode this video) + oc->oformat->flags |= AVFMT_RAWPICTURE; #endif - } else { - // Set the default codec - c->pix_fmt = PIX_FMT_YUV420P; - } - } + } else { + // Set the default codec + c->pix_fmt = PIX_FMT_YUV420P; + } + } AV_COPY_PARAMS_FROM_CONTEXT(st, c); #if (LIBAVFORMAT_VERSION_MAJOR < 58) @@ -1293,51 +1291,50 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) // Set number of threads equal to number of processors (not to exceed 16) video_codec->thread_count = min(FF_NUM_PROCESSORS, 16); - #if IS_FFMPEG_3_2 - if 
(hw_en_on && hw_en_supported) { - char *dev_hw = NULL; - char adapter[256]; - char *adapter_ptr = NULL; - int adapter_num; + #if IS_FFMPEG_3_2 + if (hw_en_on && hw_en_supported) { + char *dev_hw = NULL; + char adapter[256]; + char *adapter_ptr = NULL; + int adapter_num; // Use the hw device given in the environment variable HW_EN_DEVICE_SET or the default if not set dev_hw = getenv( "HW_EN_DEVICE_SET" ); - if( dev_hw != NULL) { - adapter_num = atoi(dev_hw); - if (adapter_num < 3 && adapter_num >=0) { - #if defined(__linux__) - snprintf(adapter,sizeof(adapter),"/dev/dri/renderD%d", adapter_num+128); - // Maybe 127 is better because the first card would be 1?! - adapter_ptr = adapter; - #elif defined(_WIN32) - adapter_ptr = NULL; - #elif defined(__APPLE__) - adapter_ptr = NULL; - #endif - } - else { - adapter_ptr = NULL; // Just to be sure - } - } + if( dev_hw != NULL) { + adapter_num = atoi(dev_hw); + if (adapter_num < 3 && adapter_num >=0) { + #if defined(__linux__) + snprintf(adapter,sizeof(adapter),"/dev/dri/renderD%d", adapter_num+128); + // Maybe 127 is better because the first card would be 1?! 
+ adapter_ptr = adapter; + #elif defined(_WIN32) + adapter_ptr = NULL; + #elif defined(__APPLE__) + adapter_ptr = NULL; + #endif + } + else { + adapter_ptr = NULL; // Just to be sure + } + } // Check if it is there and writable - #if defined(__linux__) - if( adapter_ptr != NULL && access( adapter_ptr, W_OK ) == -1 ) { - #elif defined(_WIN32) - if( adapter_ptr != NULL ) { - #elif defined(__APPLE__) - if( adapter_ptr != NULL ) { - #endif - adapter_ptr = NULL; // use default - //cerr << "\n\n\nEncode Device not present using default\n\n\n"; - ZmqLogger::Instance()->AppendDebugMethod("Encode Device not present using default", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - } - if (av_hwdevice_ctx_create(&hw_device_ctx, hw_en_av_device_type, - adapter_ptr, NULL, 0) < 0) { - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::open_video : Codec name: ", info.vcodec.c_str(), -1, " ERROR creating\n", -1, "", -1, "", -1, "", -1, "", -1); - //cerr << "FFmpegWriter::open_video : Codec name: " << info.vcodec.c_str() << " ERROR creating\n"; - throw InvalidCodec("Could not create hwdevice", path); - } - } - #endif + #if defined(__linux__) + if( adapter_ptr != NULL && access( adapter_ptr, W_OK ) == -1 ) { + #elif defined(_WIN32) + if( adapter_ptr != NULL ) { + #elif defined(__APPLE__) + if( adapter_ptr != NULL ) { + #endif + adapter_ptr = NULL; // use default + //cerr << "\n\n\nEncode Device not present using default\n\n\n"; + ZmqLogger::Instance()->AppendDebugMethod("Encode Device not present using default", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + } + if (av_hwdevice_ctx_create(&hw_device_ctx, hw_en_av_device_type, + adapter_ptr, NULL, 0) < 0) { + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::open_video : Codec name: ", info.vcodec.c_str(), -1, " ERROR creating\n", -1, "", -1, "", -1, "", -1, "", -1); + throw InvalidCodec("Could not create hwdevice", path); + } + } + #endif /* find the video encoder */ codec = 
avcodec_find_encoder_by_name(info.vcodec.c_str()); if (!codec) @@ -1345,29 +1342,29 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) if (!codec) throw InvalidCodec("Could not find codec", path); - /* Force max_b_frames to 0 in some cases (i.e. for mjpeg image sequences */ - if(video_codec->max_b_frames && video_codec->codec_id != AV_CODEC_ID_MPEG4 && video_codec->codec_id != AV_CODEC_ID_MPEG1VIDEO && video_codec->codec_id != AV_CODEC_ID_MPEG2VIDEO) - video_codec->max_b_frames = 0; + /* Force max_b_frames to 0 in some cases (i.e. for mjpeg image sequences */ + if(video_codec->max_b_frames && video_codec->codec_id != AV_CODEC_ID_MPEG4 && video_codec->codec_id != AV_CODEC_ID_MPEG1VIDEO && video_codec->codec_id != AV_CODEC_ID_MPEG2VIDEO) + video_codec->max_b_frames = 0; // Init options AVDictionary *opts = NULL; av_dict_set(&opts, "strict", "experimental", 0); - #if IS_FFMPEG_3_2 - if (hw_en_on && hw_en_supported) { - video_codec->max_b_frames = 0; // At least this GPU doesn't support b-frames - video_codec->pix_fmt = hw_en_av_pix_fmt; - video_codec->profile = FF_PROFILE_H264_BASELINE | FF_PROFILE_H264_CONSTRAINED; - av_opt_set(video_codec->priv_data,"preset","slow",0); - av_opt_set(video_codec->priv_data,"tune","zerolatency",0); - av_opt_set(video_codec->priv_data, "vprofile", "baseline", AV_OPT_SEARCH_CHILDREN); - // set hw_frames_ctx for encoder's AVCodecContext - int err; - if ((err = set_hwframe_ctx(video_codec, hw_device_ctx, info.width, info.height)) < 0) { - fprintf(stderr, "Failed to set hwframe context.\n"); - } - } - #endif + #if IS_FFMPEG_3_2 + if (hw_en_on && hw_en_supported) { + video_codec->max_b_frames = 0; // At least this GPU doesn't support b-frames + video_codec->pix_fmt = hw_en_av_pix_fmt; + video_codec->profile = FF_PROFILE_H264_BASELINE | FF_PROFILE_H264_CONSTRAINED; + av_opt_set(video_codec->priv_data,"preset","slow",0); + av_opt_set(video_codec->priv_data,"tune","zerolatency",0); + av_opt_set(video_codec->priv_data, 
"vprofile", "baseline", AV_OPT_SEARCH_CHILDREN); + // set hw_frames_ctx for encoder's AVCodecContext + int err; + if ((err = set_hwframe_ctx(video_codec, hw_device_ctx, info.width, info.height)) < 0) { + fprintf(stderr, "Failed to set hwframe context.\n"); + } + } + #endif /* open the codec */ if (avcodec_open2(video_codec, codec, &opts) < 0) @@ -1627,9 +1624,9 @@ void FFmpegWriter::write_audio_packets(bool final) memcpy(samples, frame_final->data[0], nb_samples * av_get_bytes_per_sample(audio_codec->sample_fmt) * info.channels); // deallocate AVFrame - av_freep(&(audio_frame->data[0])); - AV_FREE_FRAME(&audio_frame); - all_queued_samples = NULL; // this array cleared with above call + av_freep(&(audio_frame->data[0])); + AV_FREE_FRAME(&audio_frame); + all_queued_samples = NULL; // this array cleared with above call ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets (Successfully completed 2nd resampling for Planar formats)", "nb_samples", nb_samples, "", -1, "", -1, "", -1, "", -1, "", -1); @@ -1730,7 +1727,7 @@ void FFmpegWriter::write_audio_packets(bool final) // deallocate AVFrame av_freep(&(frame_final->data[0])); - AV_FREE_FRAME(&frame_final); + AV_FREE_FRAME(&frame_final); // deallocate memory for packet AV_FREE_PACKET(&pkt); @@ -1820,19 +1817,19 @@ void FFmpegWriter::process_video_packet(std::shared_ptr frame) #if IS_FFMPEG_3_2 AVFrame *frame_final; // #if defined(__linux__) - if (hw_en_on && hw_en_supported) { - frame_final = allocate_avframe(AV_PIX_FMT_NV12, info.width, info.height, &bytes_final, NULL); - } else + if (hw_en_on && hw_en_supported) { + frame_final = allocate_avframe(AV_PIX_FMT_NV12, info.width, info.height, &bytes_final, NULL); + } else // #endif - { - frame_final = allocate_avframe((AVPixelFormat)(video_st->codecpar->format), info.width, info.height, &bytes_final, NULL); - } + { + frame_final = allocate_avframe((AVPixelFormat)(video_st->codecpar->format), info.width, info.height, &bytes_final, NULL); + } #else 
AVFrame *frame_final = allocate_avframe(video_codec->pix_fmt, info.width, info.height, &bytes_final, NULL); #endif // Fill with data - AV_COPY_PICTURE_DATA(frame_source, (uint8_t*)pixels, PIX_FMT_RGBA, source_image_width, source_image_height); + AV_COPY_PICTURE_DATA(frame_source, (uint8_t*)pixels, PIX_FMT_RGBA, source_image_width, source_image_height); ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::process_video_packet", "frame->number", frame->number, "bytes_source", bytes_source, "bytes_final", bytes_final, "", -1, "", -1, "", -1); // Resize & convert pixel format @@ -1901,50 +1898,50 @@ bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra // Assign the initial AVFrame PTS from the frame counter frame_final->pts = write_video_count; - #if IS_FFMPEG_3_2 + #if IS_FFMPEG_3_2 // #if defined(__linux__) - if (hw_en_on && hw_en_supported) { - if (!(hw_frame = av_frame_alloc())) { - fprintf(stderr, "Error code: av_hwframe_alloc\n"); - } - if (av_hwframe_get_buffer(video_codec->hw_frames_ctx, hw_frame, 0) < 0) { - fprintf(stderr, "Error code: av_hwframe_get_buffer\n"); - } - if (!hw_frame->hw_frames_ctx) { - fprintf(stderr, "Error hw_frames_ctx.\n"); - } - hw_frame->format = AV_PIX_FMT_NV12; - if ( av_hwframe_transfer_data(hw_frame, frame_final, 0) < 0) { - fprintf(stderr, "Error while transferring frame data to surface.\n"); - } - av_frame_copy_props(hw_frame, frame_final); - } + if (hw_en_on && hw_en_supported) { + if (!(hw_frame = av_frame_alloc())) { + fprintf(stderr, "Error code: av_hwframe_alloc\n"); + } + if (av_hwframe_get_buffer(video_codec->hw_frames_ctx, hw_frame, 0) < 0) { + fprintf(stderr, "Error code: av_hwframe_get_buffer\n"); + } + if (!hw_frame->hw_frames_ctx) { + fprintf(stderr, "Error hw_frames_ctx.\n"); + } + hw_frame->format = AV_PIX_FMT_NV12; + if ( av_hwframe_transfer_data(hw_frame, frame_final, 0) < 0) { + fprintf(stderr, "Error while transferring frame data to surface.\n"); + } + av_frame_copy_props(hw_frame, 
frame_final); + } // #endif - #endif + #endif /* encode the image */ int got_packet_ptr = 0; int error_code = 0; #if IS_FFMPEG_3_2 // Write video packet (latest version of FFmpeg) int frameFinished = 0; - int ret; + int ret; // #if defined(__linux__) - #if IS_FFMPEG_3_2 - if (hw_en_on && hw_en_supported) { - ret = avcodec_send_frame(video_codec, hw_frame); //hw_frame!!! - } else - #endif + #if IS_FFMPEG_3_2 + if (hw_en_on && hw_en_supported) { + ret = avcodec_send_frame(video_codec, hw_frame); //hw_frame!!! + } else + #endif // #endif - ret = avcodec_send_frame(video_codec, frame_final); + ret = avcodec_send_frame(video_codec, frame_final); error_code = ret; if (ret < 0 ) { ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet (Frame not sent)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); if (ret == AVERROR(EAGAIN) ) { cerr << "Frame EAGAIN" << "\n"; - } + } if (ret == AVERROR_EOF ) { cerr << "Frame AVERROR_EOF" << "\n"; - } + } avcodec_send_frame(video_codec, NULL); } else { @@ -1967,10 +1964,10 @@ bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra error_code = avcodec_encode_video2(video_codec, &pkt, frame_final, &got_packet_ptr); if (error_code != 0 ) { cerr << "Frame AVERROR_EOF" << "\n"; - } + } if (got_packet_ptr == 0 ) { cerr << "Frame gotpacket error" << "\n"; - } + } #else // Write video packet (even older versions of FFmpeg) int video_outbuf_size = 200000; @@ -2026,14 +2023,14 @@ bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra AV_FREE_PACKET(&pkt); #if IS_FFMPEG_3_2 // #if defined(__linux__) - if (hw_en_on && hw_en_supported) { - if (hw_frame) { - av_frame_free(&hw_frame); - hw_frame = NULL; - } - } + if (hw_en_on && hw_en_supported) { + if (hw_frame) { + av_frame_free(&hw_frame); + hw_frame = NULL; + } + } // #endif - #endif + #endif } // Success @@ -2061,14 +2058,14 @@ void FFmpegWriter::InitScalers(int source_width, int source_height) // Init the software scaler from FFMpeg #if 
IS_FFMPEG_3_2 // #if defined(__linux__) - if (hw_en_on && hw_en_supported) { - img_convert_ctx = sws_getContext(source_width, source_height, PIX_FMT_RGBA, info.width, info.height, AV_PIX_FMT_NV12, SWS_BILINEAR, NULL, NULL, NULL); - } else + if (hw_en_on && hw_en_supported) { + img_convert_ctx = sws_getContext(source_width, source_height, PIX_FMT_RGBA, info.width, info.height, AV_PIX_FMT_NV12, SWS_BILINEAR, NULL, NULL, NULL); + } else // #endif - #endif - { - img_convert_ctx = sws_getContext(source_width, source_height, PIX_FMT_RGBA, info.width, info.height, AV_GET_CODEC_PIXEL_FORMAT(video_st, video_st->codec), SWS_BILINEAR, NULL, NULL, NULL); - } + #endif + { + img_convert_ctx = sws_getContext(source_width, source_height, PIX_FMT_RGBA, info.width, info.height, AV_GET_CODEC_PIXEL_FORMAT(video_st, video_st->codec), SWS_BILINEAR, NULL, NULL, NULL); + } // Add rescaler to vector image_rescalers.push_back(img_convert_ctx); From 2ca84217bc18d7746183db4c76a32819af5f34d2 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Wed, 30 Jan 2019 09:58:54 -0800 Subject: [PATCH 059/109] First changes to use Settings instead of GetEnv --- include/Settings.h | 21 +++++++++++++ src/FFmpegReader.cpp | 75 +++++++++++++++++++++++++------------------- src/Settings.cpp | 8 +++++ 3 files changed, 71 insertions(+), 33 deletions(-) diff --git a/include/Settings.h b/include/Settings.h index ec26338b..0102479a 100644 --- a/include/Settings.h +++ b/include/Settings.h @@ -79,6 +79,9 @@ namespace openshot { /// Use video card for faster video decoding (if supported) bool HARDWARE_DECODE = false; + /// Use video codec for faster video decoding (if supported) + int HARDWARE_DECODER = 0; + /// Use video card for faster video encoding (if supported) bool HARDWARE_ENCODE = false; @@ -94,6 +97,24 @@ namespace openshot { /// Wait for OpenMP task to finish before continuing (used to limit threads on slower systems) bool WAIT_FOR_VIDEO_PROCESSING_TASK = false; 
+ /// Number of threads of OpenMP + int OMP_THREADS = 6;//OPEN_MP_NUM_PROCESSORS + + /// Number of threads that ffmpeg uses + int FF_THREADS = 8;//FF_NUM_PROCESSORS + + /// Maximum rows that hardware decode can handle + int DE_LIMIT_HEIGHT_MAX = 1100; + + /// Maximum columns that hardware decode can handle + int DE_LIMIT_WIDTH_MAX = 1950; + + /// Which GPU to use to decode (0 is the first) + int HW_DE_DEVICE_SET = 0; + + /// Which GPU to use to encode (0 is the first) + int HW_EN_DEVICE_SET = 0; + /// Create or get an instance of this logger singleton (invoke the class with this method) static Settings * Instance(); }; diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index dd5f063b..03063d11 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -287,7 +287,7 @@ void FFmpegReader::Open() //hw_de_on = Settings::Instance()->HARDWARE_DECODE; // New version turn hardware decode on - { + /* { char *decoder_hw = NULL; decoder_hw = getenv( "HW_DECODER" ); if(decoder_hw != NULL) { @@ -299,7 +299,11 @@ void FFmpegReader::Open() } else { hw_de_on = 0; } - } + }*/ + // Newest versions + { + hw_de_on = (Settings::Instance()->HARDWARE_DECODER == 0 ? 
0 : 1); + } // Open video file if (avformat_open_input(&pFormatCtx, path.c_str(), NULL, NULL) != 0) @@ -366,7 +370,8 @@ void FFmpegReader::Open() // Open Hardware Acceleration // Use the hw device given in the environment variable HW_DE_DEVICE_SET or the default if not set char *dev_hw = NULL; - char *decoder_hw = NULL; + //char *decoder_hw = NULL; + int i_decoder_hw = 0; char adapter[256]; char *adapter_ptr = NULL; int adapter_num; @@ -380,60 +385,62 @@ void FFmpegReader::Open() #if defined(__linux__) snprintf(adapter,sizeof(adapter),"/dev/dri/renderD%d", adapter_num+128); adapter_ptr = adapter; - decoder_hw = getenv( "HW_DECODER" ); - if(decoder_hw != NULL) { - if (strncmp(decoder_hw,"0",4) == 0) { //Will never happen + i_decoder_hw = Settings::Instance()->HARDWARE_DECODER; + switch (i_decoder_hw) { + case 0: hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; pCodecCtx->get_format = get_hw_dec_format_va; - } - if (strncmp(decoder_hw,"1",11) == 0) { + break; + case 1: hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; pCodecCtx->get_format = get_hw_dec_format_va; - } - if (strncmp(decoder_hw,"2",11) == 0) { + break; + case 2: hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA; pCodecCtx->get_format = get_hw_dec_format_cu; - } - } else { - hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; - pCodecCtx->get_format = get_hw_dec_format_va; - } + break; + default: + hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; + pCodecCtx->get_format = get_hw_dec_format_va; + break; + } #elif defined(_WIN32) adapter_ptr = NULL; - decoder_hw = getenv( "HW_DECODER" ); - if(decoder_hw != NULL) { - if (strncmp(decoder_hw,"0",4) == 0) { //Will never happen + i_decoder_hw = Settings::Instance()->HARDWARE_DECODER; + switch (i_decoder_hw) { + case 0: hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; pCodecCtx->get_format = get_hw_dec_format_dx; - } - if (strncmp(decoder_hw,"3",19) == 0) { + break; + case 3: hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; pCodecCtx->get_format = get_hw_dec_format_dx; - } - if 
(strncmp(decoder_hw,"4",19) == 0) { + break; + case 4: hw_de_av_device_type = AV_HWDEVICE_TYPE_D3D11VA; pCodecCtx->get_format = get_hw_dec_format_d3; - } - } else { + default: hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; pCodecCtx->get_format = get_hw_dec_format_dx; + break; } #elif defined(__APPLE__) adapter_ptr = NULL; - decoder_hw = getenv( "HW_DECODER" ); - if(decoder_hw != NULL) { - if (strncmp(decoder_hw,"0",4) == 0) { //Will never happen + i_decoder_hw = Settings::Instance()->HARDWARE_DECODER; + switch (i_decoder_hw) { + case 0: hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; pCodecCtx->get_format = get_hw_dec_format_qs; - } - if (strncmp(decoder_hw,"5",11) == 0) { + break; + case 5: hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; pCodecCtx->get_format = get_hw_dec_format_qs; - } - } else { + break; + default: hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; pCodecCtx->get_format = get_hw_dec_format_qs; + break; } #endif } @@ -536,8 +543,10 @@ void FFmpegReader::Open() } else { int max_h, max_w; - max_h = ((getenv( "LIMIT_HEIGHT_MAX" )==NULL) ? MAX_SUPPORTED_HEIGHT : atoi(getenv( "LIMIT_HEIGHT_MAX" ))); - max_w = ((getenv( "LIMIT_WIDTH_MAX" )==NULL) ? MAX_SUPPORTED_WIDTH : atoi(getenv( "LIMIT_WIDTH_MAX" ))); + //max_h = ((getenv( "LIMIT_HEIGHT_MAX" )==NULL) ? MAX_SUPPORTED_HEIGHT : atoi(getenv( "LIMIT_HEIGHT_MAX" ))); + max_h = Settings::Instance()->DE_LIMIT_HEIGHT_MAX; + //max_w = ((getenv( "LIMIT_WIDTH_MAX" )==NULL) ? 
MAX_SUPPORTED_WIDTH : atoi(getenv( "LIMIT_WIDTH_MAX" ))); + max_w = Settings::Instance()->DE_LIMIT_WIDTH_MAX; ZmqLogger::Instance()->AppendDebugMethod("Constraints could not be found using default limit\n", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); //cerr << "Constraints could not be found using default limit\n"; if (pCodecCtx->coded_width < 0 || diff --git a/src/Settings.cpp b/src/Settings.cpp index b13f0f5a..961e3682 100644 --- a/src/Settings.cpp +++ b/src/Settings.cpp @@ -41,11 +41,19 @@ Settings *Settings::Instance() // Create the actual instance of logger only once m_pInstance = new Settings; m_pInstance->HARDWARE_DECODE = false; + m_pInstance->HARDWARE_DECODER = 0; m_pInstance->HARDWARE_ENCODE = false; m_pInstance->HIGH_QUALITY_SCALING = false; m_pInstance->MAX_WIDTH = 0; m_pInstance->MAX_HEIGHT = 0; m_pInstance->WAIT_FOR_VIDEO_PROCESSING_TASK = false; + m_pInstance->OMP_THREADS = 6;//OPEN_MP_NUM_PROCESSORS + m_pInstance->FF_THREADS = 8;//FF_NUM_PROCESSORS + m_pInstance->DE_LIMIT_HEIGHT_MAX = 1100; + m_pInstance->DE_LIMIT_WIDTH_MAX = 1950; + m_pInstance->HW_DE_DEVICE_SET = 0; + m_pInstance->HW_EN_DEVICE_SET = 0; + } return m_pInstance; From 596ae0efac24144aa85ceb5a8cd10b6b1e8d8a48 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Wed, 30 Jan 2019 20:44:36 -0800 Subject: [PATCH 060/109] More changes to move to Settings, still needs work --- include/OpenMPUtilities.h | 11 +++++++++-- include/Settings.h | 2 +- src/FFmpegReader.cpp | 29 +++++++++++++++++------------ src/FFmpegWriter.cpp | 17 +++++++++++------ src/Settings.cpp | 2 +- 5 files changed, 39 insertions(+), 22 deletions(-) diff --git a/include/OpenMPUtilities.h b/include/OpenMPUtilities.h index 9af58150..f0adfd4b 100644 --- a/include/OpenMPUtilities.h +++ b/include/OpenMPUtilities.h @@ -32,9 +32,16 @@ #include #include +#include "../include/Settings.h" + +using namespace std; +using namespace openshot; + // Calculate the # of OpenMP Threads to 
allow -#define OPEN_MP_NUM_PROCESSORS ((getenv( "LIMIT_OMP_THREADS" )==NULL) ? omp_get_num_procs() : (min(omp_get_num_procs(), max(2, atoi(getenv( "LIMIT_OMP_THREADS" ))) ))) -#define FF_NUM_PROCESSORS ((getenv( "LIMIT_FF_THREADS" )==NULL) ? omp_get_num_procs() : (min(omp_get_num_procs(), max(2, atoi(getenv( "LIMIT_FF_THREADS" ))) ))) +//#define OPEN_MP_NUM_PROCESSORS ((getenv( "LIMIT_OMP_THREADS" )==NULL) ? omp_get_num_procs() : (min(omp_get_num_procs(), max(2, atoi(getenv( "LIMIT_OMP_THREADS" ))) ))) +//#define FF_NUM_PROCESSORS ((getenv( "LIMIT_FF_THREADS" )==NULL) ? omp_get_num_procs() : (min(omp_get_num_procs(), max(2, atoi(getenv( "LIMIT_FF_THREADS" ))) ))) +#define OPEN_MP_NUM_PROCESSORS (min(omp_get_num_procs(), max(2, openshot::Settings::Instance()->OMP_THREADS) )) +#define FF_NUM_PROCESSORS (min(omp_get_num_procs(), max(2, openshot::Settings::Instance()->FF_THREADS) )) diff --git a/include/Settings.h b/include/Settings.h index 0102479a..15ff5fa3 100644 --- a/include/Settings.h +++ b/include/Settings.h @@ -98,7 +98,7 @@ namespace openshot { bool WAIT_FOR_VIDEO_PROCESSING_TASK = false; /// Number of threads of OpenMP - int OMP_THREADS = 6;//OPEN_MP_NUM_PROCESSORS + int OMP_THREADS = 12;//OPEN_MP_NUM_PROCESSORS /// Number of threads that ffmpeg uses int FF_THREADS = 8;//FF_NUM_PROCESSORS diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 03063d11..b45145b7 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -284,7 +284,7 @@ void FFmpegReader::Open() hw_de_on = (val[0] == '1')? 1 : 0; }*/ - //hw_de_on = Settings::Instance()->HARDWARE_DECODE; + //hw_de_on = openshot::Settings::Instance()->HARDWARE_DECODE; // New version turn hardware decode on /* { @@ -302,7 +302,7 @@ void FFmpegReader::Open() }*/ // Newest versions { - hw_de_on = (Settings::Instance()->HARDWARE_DECODER == 0 ? 0 : 1); + hw_de_on = (openshot::Settings::Instance()->HARDWARE_DECODER == 0 ? 
0 : 1); } // Open video file @@ -369,23 +369,25 @@ void FFmpegReader::Open() if (hw_de_on && hw_de_supported) { // Open Hardware Acceleration // Use the hw device given in the environment variable HW_DE_DEVICE_SET or the default if not set - char *dev_hw = NULL; + //char *dev_hw = NULL; //char *decoder_hw = NULL; int i_decoder_hw = 0; char adapter[256]; char *adapter_ptr = NULL; int adapter_num; - dev_hw = getenv( "HW_DE_DEVICE_SET" ); // The first card is 0 +/* dev_hw = getenv( "HW_DE_DEVICE_SET" ); // The first card is 0 if( dev_hw != NULL) { adapter_num = atoi(dev_hw); } else { adapter_num = 0; - } + }*/ + adapter_num = openshot::Settings::Instance()->HW_DE_DEVICE_SET; + fprintf(stderr, "\n\nDecodiing Device Nr: %d\n", adapter_num); if (adapter_num < 3 && adapter_num >=0) { #if defined(__linux__) snprintf(adapter,sizeof(adapter),"/dev/dri/renderD%d", adapter_num+128); adapter_ptr = adapter; - i_decoder_hw = Settings::Instance()->HARDWARE_DECODER; + i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER; switch (i_decoder_hw) { case 0: hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; @@ -407,7 +409,7 @@ void FFmpegReader::Open() #elif defined(_WIN32) adapter_ptr = NULL; - i_decoder_hw = Settings::Instance()->HARDWARE_DECODER; + i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER; switch (i_decoder_hw) { case 0: hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; @@ -427,7 +429,7 @@ void FFmpegReader::Open() } #elif defined(__APPLE__) adapter_ptr = NULL; - i_decoder_hw = Settings::Instance()->HARDWARE_DECODER; + i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER; switch (i_decoder_hw) { case 0: hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; @@ -456,6 +458,9 @@ void FFmpegReader::Open() #elif defined(__APPLE__) if( adapter_ptr != NULL ) { #endif + ZmqLogger::Instance()->AppendDebugMethod("Decode Device present using device", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + } + else { adapter_ptr = NULL; // use default 
ZmqLogger::Instance()->AppendDebugMethod("Decode Device not present using default", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); } @@ -544,9 +549,9 @@ void FFmpegReader::Open() else { int max_h, max_w; //max_h = ((getenv( "LIMIT_HEIGHT_MAX" )==NULL) ? MAX_SUPPORTED_HEIGHT : atoi(getenv( "LIMIT_HEIGHT_MAX" ))); - max_h = Settings::Instance()->DE_LIMIT_HEIGHT_MAX; + max_h = openshot::Settings::Instance()->DE_LIMIT_HEIGHT_MAX; //max_w = ((getenv( "LIMIT_WIDTH_MAX" )==NULL) ? MAX_SUPPORTED_WIDTH : atoi(getenv( "LIMIT_WIDTH_MAX" ))); - max_w = Settings::Instance()->DE_LIMIT_WIDTH_MAX; + max_w = openshot::Settings::Instance()->DE_LIMIT_WIDTH_MAX; ZmqLogger::Instance()->AppendDebugMethod("Constraints could not be found using default limit\n", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); //cerr << "Constraints could not be found using default limit\n"; if (pCodecCtx->coded_width < 0 || @@ -1354,10 +1359,10 @@ void FFmpegReader::ProcessVideoPacket(int64_t requested_frame) // without losing quality. NOTE: We cannot go smaller than the timeline itself, or the add_layer timeline // method will scale it back to timeline size before scaling it smaller again. This needs to be fixed in // the future. 
- int max_width = Settings::Instance()->MAX_WIDTH; + int max_width = openshot::Settings::Instance()->MAX_WIDTH; if (max_width <= 0) max_width = info.width; - int max_height = Settings::Instance()->MAX_HEIGHT; + int max_height = openshot::Settings::Instance()->MAX_HEIGHT; if (max_height <= 0) max_height = info.height; diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index 6b8f240f..6a947d4c 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -1293,15 +1293,17 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) #if IS_FFMPEG_3_2 if (hw_en_on && hw_en_supported) { - char *dev_hw = NULL; + //char *dev_hw = NULL; char adapter[256]; char *adapter_ptr = NULL; int adapter_num; // Use the hw device given in the environment variable HW_EN_DEVICE_SET or the default if not set - dev_hw = getenv( "HW_EN_DEVICE_SET" ); - if( dev_hw != NULL) { - adapter_num = atoi(dev_hw); - if (adapter_num < 3 && adapter_num >=0) { + //dev_hw = getenv( "HW_EN_DEVICE_SET" ); + //if( dev_hw != NULL) { + // adapter_num = atoi(dev_hw); + adapter_num = openshot::Settings::Instance()->HW_EN_DEVICE_SET; + fprintf(stderr, "\n\nEncodiing Device Nr: %d\n", adapter_num); + if (adapter_num < 3 && adapter_num >=0) { #if defined(__linux__) snprintf(adapter,sizeof(adapter),"/dev/dri/renderD%d", adapter_num+128); // Maybe 127 is better because the first card would be 1?! 
@@ -1315,7 +1317,7 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) else { adapter_ptr = NULL; // Just to be sure } - } +// } // Check if it is there and writable #if defined(__linux__) if( adapter_ptr != NULL && access( adapter_ptr, W_OK ) == -1 ) { @@ -1324,6 +1326,9 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) #elif defined(__APPLE__) if( adapter_ptr != NULL ) { #endif + ZmqLogger::Instance()->AppendDebugMethod("Encode Device present using device", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + } + else { adapter_ptr = NULL; // use default //cerr << "\n\n\nEncode Device not present using default\n\n\n"; ZmqLogger::Instance()->AppendDebugMethod("Encode Device not present using default", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); diff --git a/src/Settings.cpp b/src/Settings.cpp index 961e3682..4f502341 100644 --- a/src/Settings.cpp +++ b/src/Settings.cpp @@ -47,7 +47,7 @@ Settings *Settings::Instance() m_pInstance->MAX_WIDTH = 0; m_pInstance->MAX_HEIGHT = 0; m_pInstance->WAIT_FOR_VIDEO_PROCESSING_TASK = false; - m_pInstance->OMP_THREADS = 6;//OPEN_MP_NUM_PROCESSORS + m_pInstance->OMP_THREADS = 12;//OPEN_MP_NUM_PROCESSORS m_pInstance->FF_THREADS = 8;//FF_NUM_PROCESSORS m_pInstance->DE_LIMIT_HEIGHT_MAX = 1100; m_pInstance->DE_LIMIT_WIDTH_MAX = 1950; From 2e635e3d87a4ef2f902b097bd9914f77a9ce87cf Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Thu, 31 Jan 2019 09:42:26 -0800 Subject: [PATCH 061/109] Formating and Cleanup Fix forgotten break in switch --- include/OpenMPUtilities.h | 2 - include/Settings.h | 4 +- src/FFmpegReader.cpp | 317 ++++++++++++++++---------------------- src/FFmpegWriter.cpp | 27 +--- src/Settings.cpp | 4 +- 5 files changed, 144 insertions(+), 210 deletions(-) diff --git a/include/OpenMPUtilities.h b/include/OpenMPUtilities.h index f0adfd4b..0411b6ba 100644 --- a/include/OpenMPUtilities.h +++ b/include/OpenMPUtilities.h @@ -38,8 +38,6 @@ using 
namespace std; using namespace openshot; // Calculate the # of OpenMP Threads to allow -//#define OPEN_MP_NUM_PROCESSORS ((getenv( "LIMIT_OMP_THREADS" )==NULL) ? omp_get_num_procs() : (min(omp_get_num_procs(), max(2, atoi(getenv( "LIMIT_OMP_THREADS" ))) ))) -//#define FF_NUM_PROCESSORS ((getenv( "LIMIT_FF_THREADS" )==NULL) ? omp_get_num_procs() : (min(omp_get_num_procs(), max(2, atoi(getenv( "LIMIT_FF_THREADS" ))) ))) #define OPEN_MP_NUM_PROCESSORS (min(omp_get_num_procs(), max(2, openshot::Settings::Instance()->OMP_THREADS) )) #define FF_NUM_PROCESSORS (min(omp_get_num_procs(), max(2, openshot::Settings::Instance()->FF_THREADS) )) diff --git a/include/Settings.h b/include/Settings.h index 15ff5fa3..b01d9590 100644 --- a/include/Settings.h +++ b/include/Settings.h @@ -98,10 +98,10 @@ namespace openshot { bool WAIT_FOR_VIDEO_PROCESSING_TASK = false; /// Number of threads of OpenMP - int OMP_THREADS = 12;//OPEN_MP_NUM_PROCESSORS + int OMP_THREADS = 12; /// Number of threads that ffmpeg uses - int FF_THREADS = 8;//FF_NUM_PROCESSORS + int FF_THREADS = 8; /// Maximum rows that hardware decode can handle int DE_LIMIT_HEIGHT_MAX = 1100; diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index b45145b7..2e938f35 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -76,7 +76,6 @@ typedef struct VAAPIDecodeContext { using namespace openshot; int hw_de_on = 1; // Is set in UI -//int hw_de_supported = 0; // Is set by FFmpegReader #if IS_FFMPEG_3_2 AVPixelFormat hw_de_av_pix_fmt_global = AV_PIX_FMT_NONE; AVHWDeviceType hw_de_av_device_type_global = AV_HWDEVICE_TYPE_NONE; @@ -158,91 +157,91 @@ bool AudioLocation::is_near(AudioLocation location, int samples_per_frame, int64 #if defined(__linux__) static enum AVPixelFormat get_hw_dec_format_va(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) { - const enum AVPixelFormat *p; + const enum AVPixelFormat *p; - for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { + for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { 
switch (*p) { case AV_PIX_FMT_VAAPI: hw_de_av_pix_fmt_global = AV_PIX_FMT_VAAPI; hw_de_av_device_type_global = AV_HWDEVICE_TYPE_VAAPI; - return *p; + return *p; break; - } - } - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - return AV_PIX_FMT_NONE; - } + } + } + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + return AV_PIX_FMT_NONE; +} static enum AVPixelFormat get_hw_dec_format_cu(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) { - const enum AVPixelFormat *p; + const enum AVPixelFormat *p; - for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { + for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { switch (*p) { case AV_PIX_FMT_CUDA: hw_de_av_pix_fmt_global = AV_PIX_FMT_CUDA; hw_de_av_device_type_global = AV_HWDEVICE_TYPE_CUDA; - return *p; + return *p; break; - } - } - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - return AV_PIX_FMT_NONE; - } + } + } + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + return AV_PIX_FMT_NONE; +} #endif #if defined(_WIN32) static enum AVPixelFormat get_hw_dec_format_dx(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) { - const enum AVPixelFormat *p; + const enum AVPixelFormat *p; - for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { + for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { switch (*p) { case AV_PIX_FMT_DXVA2_VLD: hw_de_av_pix_fmt_global = AV_PIX_FMT_DXVA2_VLD; hw_de_av_device_type_global = AV_HWDEVICE_TYPE_DXVA2; - return *p; + return *p; break; - } - } - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable 
to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - return AV_PIX_FMT_NONE; - } + } + } + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + return AV_PIX_FMT_NONE; +} static enum AVPixelFormat get_hw_dec_format_d3(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) { - const enum AVPixelFormat *p; + const enum AVPixelFormat *p; - for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { + for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { switch (*p) { case AV_PIX_FMT_D3D11: hw_de_av_pix_fmt_global = AV_PIX_FMT_D3D11; hw_de_av_device_type_global = AV_HWDEVICE_TYPE_D3D11VA; - return *p; + return *p; break; - } - } - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - return AV_PIX_FMT_NONE; - } + } + } + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + return AV_PIX_FMT_NONE; +} #endif #if defined(__APPLE__) static enum AVPixelFormat get_hw_dec_format_qs(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) { - const enum AVPixelFormat *p; + const enum AVPixelFormat *p; - for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { + for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { switch (*p) { case AV_PIX_FMT_QSV: hw_de_av_pix_fmt_global = AV_PIX_FMT_QSV; hw_de_av_device_type_global = AV_HWDEVICE_TYPE_QSV; - return *p; + return *p; break; } - } + } ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - return AV_PIX_FMT_NONE; + return AV_PIX_FMT_NONE; } #endif @@ -274,33 +273,6 @@ void FFmpegReader::Open() { // Initialize format context pFormatCtx = NULL; - - // 
Old version turn hardware decode on - /*char * val = getenv( "OS2_DECODE_HW" ); - if (val == NULL) { - hw_de_on = 0; - } - else{ - hw_de_on = (val[0] == '1')? 1 : 0; - }*/ - - //hw_de_on = openshot::Settings::Instance()->HARDWARE_DECODE; - - // New version turn hardware decode on - /* { - char *decoder_hw = NULL; - decoder_hw = getenv( "HW_DECODER" ); - if(decoder_hw != NULL) { - if( strncmp(decoder_hw,"0",4) == 0) { - hw_de_on = 0; - } else { - hw_de_on = 1; - } - } else { - hw_de_on = 0; - } - }*/ - // Newest versions { hw_de_on = (openshot::Settings::Instance()->HARDWARE_DECODER == 0 ? 0 : 1); } @@ -368,106 +340,96 @@ void FFmpegReader::Open() #if IS_FFMPEG_3_2 if (hw_de_on && hw_de_supported) { // Open Hardware Acceleration - // Use the hw device given in the environment variable HW_DE_DEVICE_SET or the default if not set - //char *dev_hw = NULL; - //char *decoder_hw = NULL; int i_decoder_hw = 0; - char adapter[256]; - char *adapter_ptr = NULL; - int adapter_num; -/* dev_hw = getenv( "HW_DE_DEVICE_SET" ); // The first card is 0 - if( dev_hw != NULL) { - adapter_num = atoi(dev_hw); - } else { - adapter_num = 0; - }*/ + char adapter[256]; + char *adapter_ptr = NULL; + int adapter_num; adapter_num = openshot::Settings::Instance()->HW_DE_DEVICE_SET; fprintf(stderr, "\n\nDecodiing Device Nr: %d\n", adapter_num); - if (adapter_num < 3 && adapter_num >=0) { - #if defined(__linux__) - snprintf(adapter,sizeof(adapter),"/dev/dri/renderD%d", adapter_num+128); - adapter_ptr = adapter; + if (adapter_num < 3 && adapter_num >=0) { + #if defined(__linux__) + snprintf(adapter,sizeof(adapter),"/dev/dri/renderD%d", adapter_num+128); + adapter_ptr = adapter; i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER; switch (i_decoder_hw) { case 0: - hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; - pCodecCtx->get_format = get_hw_dec_format_va; + hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; + pCodecCtx->get_format = get_hw_dec_format_va; break; case 1: - hw_de_av_device_type 
= AV_HWDEVICE_TYPE_VAAPI; - pCodecCtx->get_format = get_hw_dec_format_va; + hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; + pCodecCtx->get_format = get_hw_dec_format_va; break; case 2: - hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA; - pCodecCtx->get_format = get_hw_dec_format_cu; + hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA; + pCodecCtx->get_format = get_hw_dec_format_cu; break; default: - hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; - pCodecCtx->get_format = get_hw_dec_format_va; + hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; + pCodecCtx->get_format = get_hw_dec_format_va; break; } - #elif defined(_WIN32) - adapter_ptr = NULL; + #elif defined(_WIN32) + adapter_ptr = NULL; i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER; switch (i_decoder_hw) { case 0: - hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; - pCodecCtx->get_format = get_hw_dec_format_dx; - break; + hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; + pCodecCtx->get_format = get_hw_dec_format_dx; + break; case 3: - hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; - pCodecCtx->get_format = get_hw_dec_format_dx; - break; - case 4: - hw_de_av_device_type = AV_HWDEVICE_TYPE_D3D11VA; - pCodecCtx->get_format = get_hw_dec_format_d3; - default: - hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; - pCodecCtx->get_format = get_hw_dec_format_dx; + hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; + pCodecCtx->get_format = get_hw_dec_format_dx; break; - } - #elif defined(__APPLE__) - adapter_ptr = NULL; + case 4: + hw_de_av_device_type = AV_HWDEVICE_TYPE_D3D11VA; + pCodecCtx->get_format = get_hw_dec_format_d3; + break; + default: + hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; + pCodecCtx->get_format = get_hw_dec_format_dx; + break; + } + #elif defined(__APPLE__) + adapter_ptr = NULL; i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER; switch (i_decoder_hw) { case 0: - hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; - pCodecCtx->get_format = get_hw_dec_format_qs; - break; - case 5: - 
hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; - pCodecCtx->get_format = get_hw_dec_format_qs; - break; - default: - hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; - pCodecCtx->get_format = get_hw_dec_format_qs; + hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; + pCodecCtx->get_format = get_hw_dec_format_qs; break; - } - #endif - } - else { - adapter_ptr = NULL; // Just to be sure - } - //} - // Check if it is there and writable - #if defined(__linux__) - if( adapter_ptr != NULL && access( adapter_ptr, W_OK ) == -1 ) { - #elif defined(_WIN32) - if( adapter_ptr != NULL ) { - #elif defined(__APPLE__) - if( adapter_ptr != NULL ) { - #endif + case 5: + hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; + pCodecCtx->get_format = get_hw_dec_format_qs; + break; + default: + hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; + pCodecCtx->get_format = get_hw_dec_format_qs; + break; + } + #endif + } + else { + adapter_ptr = NULL; // Just to be sure + } + // Check if it is there and writable + #if defined(__linux__) + if( adapter_ptr != NULL && access( adapter_ptr, W_OK ) == -1 ) { + #elif defined(_WIN32) + if( adapter_ptr != NULL ) { + #elif defined(__APPLE__) + if( adapter_ptr != NULL ) { + #endif ZmqLogger::Instance()->AppendDebugMethod("Decode Device present using device", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); } else { - adapter_ptr = NULL; // use default - ZmqLogger::Instance()->AppendDebugMethod("Decode Device not present using default", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - } + adapter_ptr = NULL; // use default + ZmqLogger::Instance()->AppendDebugMethod("Decode Device not present using default", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + } hw_device_ctx = NULL; // Here the first hardware initialisations are made if (av_hwdevice_ctx_create(&hw_device_ctx, hw_de_av_device_type, adapter_ptr, NULL, 0) >= 0) { - cerr << "\n\n**** HW device create OK ******** \n\n"; if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { throw InvalidCodec("Hardware 
device reference create failed.", path); } @@ -523,8 +485,7 @@ void FFmpegReader::Open() pCodecCtx->coded_height < constraints->min_height || pCodecCtx->coded_width > constraints->max_width || pCodecCtx->coded_height > constraints->max_height) { - ZmqLogger::Instance()->AppendDebugMethod("DIMENSIONS ARE TOO LARGE for hardware acceleration\n", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - //cerr << "DIMENSIONS ARE TOO LARGE for hardware acceleration\n"; + ZmqLogger::Instance()->AppendDebugMethod("DIMENSIONS ARE TOO LARGE for hardware acceleration\n", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); hw_de_supported = 0; retry_decode_open = 1; AV_FREE_CONTEXT(pCodecCtx); @@ -535,10 +496,7 @@ void FFmpegReader::Open() } else { // All is just peachy - ZmqLogger::Instance()->AppendDebugMethod("\nDecode hardware acceleration is used\n", "Min width :", constraints->min_width, "Min Height :", constraints->min_height, "MaxWidth :", constraints->max_width, "MaxHeight :", constraints->max_height, "Frame width :", pCodecCtx->coded_width, "Frame height :", pCodecCtx->coded_height); - //cerr << "\nDecode hardware acceleration is used\n"; - //cerr << "Min width : " << constraints->min_width << " MinHeight : " << constraints->min_height << "MaxWidth : " << constraints->max_width << "MaxHeight : " << constraints->max_height << "\n"; - //cerr << "Frame width : " << pCodecCtx->coded_width << " Frame height : " << pCodecCtx->coded_height << "\n"; + ZmqLogger::Instance()->AppendDebugMethod("\nDecode hardware acceleration is used\n", "Min width :", constraints->min_width, "Min Height :", constraints->min_height, "MaxWidth :", constraints->max_width, "MaxHeight :", constraints->max_height, "Frame width :", pCodecCtx->coded_width, "Frame height :", pCodecCtx->coded_height); retry_decode_open = 0; } av_hwframe_constraints_free(&constraints); @@ -552,16 +510,13 @@ void FFmpegReader::Open() max_h = openshot::Settings::Instance()->DE_LIMIT_HEIGHT_MAX; //max_w = ((getenv( "LIMIT_WIDTH_MAX" 
)==NULL) ? MAX_SUPPORTED_WIDTH : atoi(getenv( "LIMIT_WIDTH_MAX" ))); max_w = openshot::Settings::Instance()->DE_LIMIT_WIDTH_MAX; - ZmqLogger::Instance()->AppendDebugMethod("Constraints could not be found using default limit\n", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Constraints could not be found using default limit\n", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); //cerr << "Constraints could not be found using default limit\n"; if (pCodecCtx->coded_width < 0 || pCodecCtx->coded_height < 0 || pCodecCtx->coded_width > max_w || pCodecCtx->coded_height > max_h ) { - ZmqLogger::Instance()->AppendDebugMethod("DIMENSIONS ARE TOO LARGE for hardware acceleration\n", "Max Width :", max_w, "Max Height :", max_h, "Frame width :", pCodecCtx->coded_width, "Frame height :", pCodecCtx->coded_height, "", -1, "", -1); - //cerr << "DIMENSIONS ARE TOO LARGE for hardware acceleration\n"; - //cerr << " Max Width : " << max_w << " Height : " << max_h << "\n"; - //cerr << "Frame width : " << pCodecCtx->coded_width << " Frame height : " << pCodecCtx->coded_height << "\n"; + ZmqLogger::Instance()->AppendDebugMethod("DIMENSIONS ARE TOO LARGE for hardware acceleration\n", "Max Width :", max_w, "Max Height :", max_h, "Frame width :", pCodecCtx->coded_width, "Frame height :", pCodecCtx->coded_height, "", -1, "", -1); hw_de_supported = 0; retry_decode_open = 1; AV_FREE_CONTEXT(pCodecCtx); @@ -571,18 +526,14 @@ void FFmpegReader::Open() } } else { - ZmqLogger::Instance()->AppendDebugMethod("\nDecode hardware acceleration is used\n", "Max Width :", max_w, "Max Height :", max_h, "Frame width :", pCodecCtx->coded_width, "Frame height :", pCodecCtx->coded_height, "", -1, "", -1); - //cerr << "\nDecode hardware acceleration is used\n"; - //cerr << " Max Width : " << max_w << " Height : " << max_h << "\n"; - //cerr << "Frame width : " << pCodecCtx->coded_width << " Frame height : " << pCodecCtx->coded_height << "\n"; + 
ZmqLogger::Instance()->AppendDebugMethod("\nDecode hardware acceleration is used\n", "Max Width :", max_w, "Max Height :", max_h, "Frame width :", pCodecCtx->coded_width, "Frame height :", pCodecCtx->coded_height, "", -1, "", -1); retry_decode_open = 0; } } } // if hw_de_on && hw_de_supported - else { - ZmqLogger::Instance()->AppendDebugMethod("\nDecode in software is used\n", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - //cerr << "\nDecode in software is used\n"; - } + else { + ZmqLogger::Instance()->AppendDebugMethod("\nDecode in software is used\n", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + } #else retry_decode_open = 0; #endif @@ -897,8 +848,8 @@ std::shared_ptr FFmpegReader::GetFrame(int64_t requested_frame) } else { - #pragma omp critical (ReadStream) - { + #pragma omp critical (ReadStream) + { // Check the cache a 2nd time (due to a potential previous lock) if (has_missing_frames) CheckMissingFrame(requested_frame); @@ -944,8 +895,8 @@ std::shared_ptr FFmpegReader::GetFrame(int64_t requested_frame) frame = ReadStream(requested_frame); } } - } //omp critical - return frame; + } //omp critical + return frame; } } @@ -1141,11 +1092,11 @@ int FFmpegReader::GetNextPacket() found_packet = av_read_frame(pFormatCtx, next_packet); - if (packet) { - // Remove previous packet before getting next one - RemoveAVPacket(packet); - packet = NULL; - } + if (packet) { + // Remove previous packet before getting next one + RemoveAVPacket(packet); + packet = NULL; + } if (found_packet >= 0) { @@ -1191,15 +1142,15 @@ bool FFmpegReader::GetAVFrame() pFrame = new AVFrame(); while (ret >= 0) { ret = avcodec_receive_frame(pCodecCtx, next_frame2); - if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) { - break; + if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) { + break; } if (ret != 0) { ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (invalid return frame received)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); } if (hw_de_on && hw_de_supported) 
{ int err; - if (next_frame2->format == hw_de_av_pix_fmt) { + if (next_frame2->format == hw_de_av_pix_fmt) { next_frame->format = AV_PIX_FMT_YUV420P; if ((err = av_hwframe_transfer_data(next_frame,next_frame2,0)) < 0) { ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Failed to transfer data to output frame)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); @@ -1437,7 +1388,7 @@ void FFmpegReader::ProcessVideoPacket(int64_t requested_frame) // Resize / Convert to RGB sws_scale(img_convert_ctx, my_frame->data, my_frame->linesize, 0, - original_height, pFrameRGB->data, pFrameRGB->linesize); + original_height, pFrameRGB->data, pFrameRGB->linesize); // Create or get the existing frame object std::shared_ptr f = CreateFrame(current_frame); @@ -2257,8 +2208,8 @@ bool FFmpegReader::CheckMissingFrame(int64_t requested_frame) void FFmpegReader::CheckWorkingFrames(bool end_of_stream, int64_t requested_frame) { // Loop through all working queue frames - bool checked_count_tripped = false; - int max_checked_count = 80; + bool checked_count_tripped = false; + int max_checked_count = 80; while (true) { @@ -2291,11 +2242,11 @@ void FFmpegReader::CheckWorkingFrames(bool end_of_stream, int64_t requested_fram // Get check count for this frame checked_frames_size = checked_frames.size(); - if (!checked_count_tripped || f->number >= requested_frame) - checked_count = checked_frames[f->number]; - else - // Force checked count over the limit - checked_count = max_checked_count; + if (!checked_count_tripped || f->number >= requested_frame) + checked_count = checked_frames[f->number]; + else + // Force checked count over the limit + checked_count = max_checked_count; } if (previous_packet_location.frame == f->number && !end_of_stream) @@ -2311,8 +2262,8 @@ void FFmpegReader::CheckWorkingFrames(bool end_of_stream, int64_t requested_fram // Debug output ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckWorkingFrames (exceeded checked_count)", 
"requested_frame", requested_frame, "frame_number", f->number, "is_video_ready", is_video_ready, "is_audio_ready", is_audio_ready, "checked_count", checked_count, "checked_frames_size", checked_frames_size); - // Trigger checked count tripped mode (clear out all frames before requested frame) - checked_count_tripped = true; + // Trigger checked count tripped mode (clear out all frames before requested frame) + checked_count_tripped = true; if (info.has_video && !is_video_ready && last_video_frame) { // Copy image from last frame @@ -2357,8 +2308,8 @@ void FFmpegReader::CheckWorkingFrames(bool end_of_stream, int64_t requested_fram missing_frames.Add(f); } - // Remove from 'checked' count - checked_frames.erase(f->number); + // Remove from 'checked' count + checked_frames.erase(f->number); } // Remove frame from working cache @@ -2482,10 +2433,10 @@ void FFmpegReader::CheckFPS() // Remove AVFrame from cache (and deallocate it's memory) void FFmpegReader::RemoveAVFrame(AVFrame* remove_frame) { - // Remove pFrame (if exists) - if (remove_frame) - { - // Free memory + // Remove pFrame (if exists) + if (remove_frame) + { + // Free memory #pragma omp critical (packet_cache) { av_freep(&remove_frame->data[0]); @@ -2500,7 +2451,7 @@ void FFmpegReader::RemoveAVFrame(AVFrame* remove_frame) void FFmpegReader::RemoveAVPacket(AVPacket* remove_packet) { // deallocate memory for packet - AV_FREE_PACKET(remove_packet); + AV_FREE_PACKET(remove_packet); // Delete the object delete remove_packet; diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index 6a947d4c..14171894 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -462,7 +462,7 @@ void FFmpegWriter::SetOption(StreamType stream, string name, string value) } c->bit_rate = (int)(mbs); } - } + } #endif } @@ -1298,9 +1298,6 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) char *adapter_ptr = NULL; int adapter_num; // Use the hw device given in the environment variable HW_EN_DEVICE_SET or the 
default if not set - //dev_hw = getenv( "HW_EN_DEVICE_SET" ); - //if( dev_hw != NULL) { - // adapter_num = atoi(dev_hw); adapter_num = openshot::Settings::Instance()->HW_EN_DEVICE_SET; fprintf(stderr, "\n\nEncodiing Device Nr: %d\n", adapter_num); if (adapter_num < 3 && adapter_num >=0) { @@ -1317,7 +1314,6 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) else { adapter_ptr = NULL; // Just to be sure } -// } // Check if it is there and writable #if defined(__linux__) if( adapter_ptr != NULL && access( adapter_ptr, W_OK ) == -1 ) { @@ -1539,9 +1535,9 @@ void FFmpegWriter::write_audio_packets(bool final) // Remove converted audio av_freep(&(audio_frame->data[0])); - AV_FREE_FRAME(&audio_frame); + AV_FREE_FRAME(&audio_frame); av_freep(&audio_converted->data[0]); - AV_FREE_FRAME(&audio_converted); + AV_FREE_FRAME(&audio_converted); all_queued_samples = NULL; // this array cleared with above call ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets (Successfully completed 1st resampling)", "nb_samples", nb_samples, "remaining_frame_samples", remaining_frame_samples, "", -1, "", -1, "", -1, "", -1); @@ -1732,7 +1728,7 @@ void FFmpegWriter::write_audio_packets(bool final) // deallocate AVFrame av_freep(&(frame_final->data[0])); - AV_FREE_FRAME(&frame_final); + AV_FREE_FRAME(&frame_final); // deallocate memory for packet AV_FREE_PACKET(&pkt); @@ -1821,11 +1817,9 @@ void FFmpegWriter::process_video_packet(std::shared_ptr frame) frame_source = allocate_avframe(PIX_FMT_RGBA, source_image_width, source_image_height, &bytes_source, (uint8_t*) pixels); #if IS_FFMPEG_3_2 AVFrame *frame_final; -// #if defined(__linux__) if (hw_en_on && hw_en_supported) { frame_final = allocate_avframe(AV_PIX_FMT_NV12, info.width, info.height, &bytes_final, NULL); } else -// #endif { frame_final = allocate_avframe((AVPixelFormat)(video_st->codecpar->format), info.width, info.height, &bytes_final, NULL); } @@ -1887,7 +1881,7 @@ bool 
FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra } else #endif - { + { AVPacket pkt; av_init_packet(&pkt); @@ -1904,7 +1898,6 @@ bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra // Assign the initial AVFrame PTS from the frame counter frame_final->pts = write_video_count; #if IS_FFMPEG_3_2 -// #if defined(__linux__) if (hw_en_on && hw_en_supported) { if (!(hw_frame = av_frame_alloc())) { fprintf(stderr, "Error code: av_hwframe_alloc\n"); @@ -1921,7 +1914,6 @@ bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra } av_frame_copy_props(hw_frame, frame_final); } -// #endif #endif /* encode the image */ int got_packet_ptr = 0; @@ -1930,13 +1922,11 @@ bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra // Write video packet (latest version of FFmpeg) int frameFinished = 0; int ret; -// #if defined(__linux__) #if IS_FFMPEG_3_2 if (hw_en_on && hw_en_supported) { ret = avcodec_send_frame(video_codec, hw_frame); //hw_frame!!! 
} else #endif -// #endif ret = avcodec_send_frame(video_codec, frame_final); error_code = ret; if (ret < 0 ) { @@ -2002,7 +1992,6 @@ bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra //pkt.pts = pkt.dts = write_video_count; // set the timestamp -// av_packet_rescale_ts(&pkt, video_st->time_base,video_codec->time_base); if (pkt.pts != AV_NOPTS_VALUE) pkt.pts = av_rescale_q(pkt.pts, video_codec->time_base, video_st->time_base); if (pkt.dts != AV_NOPTS_VALUE) @@ -2026,15 +2015,13 @@ bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra // Deallocate packet AV_FREE_PACKET(&pkt); - #if IS_FFMPEG_3_2 -// #if defined(__linux__) + #if IS_FFMPEG_3_2 if (hw_en_on && hw_en_supported) { if (hw_frame) { av_frame_free(&hw_frame); hw_frame = NULL; } } -// #endif #endif } @@ -2062,11 +2049,9 @@ void FFmpegWriter::InitScalers(int source_width, int source_height) { // Init the software scaler from FFMpeg #if IS_FFMPEG_3_2 -// #if defined(__linux__) if (hw_en_on && hw_en_supported) { img_convert_ctx = sws_getContext(source_width, source_height, PIX_FMT_RGBA, info.width, info.height, AV_PIX_FMT_NV12, SWS_BILINEAR, NULL, NULL, NULL); } else -// #endif #endif { img_convert_ctx = sws_getContext(source_width, source_height, PIX_FMT_RGBA, info.width, info.height, AV_GET_CODEC_PIXEL_FORMAT(video_st, video_st->codec), SWS_BILINEAR, NULL, NULL, NULL); diff --git a/src/Settings.cpp b/src/Settings.cpp index 4f502341..461f9183 100644 --- a/src/Settings.cpp +++ b/src/Settings.cpp @@ -47,8 +47,8 @@ Settings *Settings::Instance() m_pInstance->MAX_WIDTH = 0; m_pInstance->MAX_HEIGHT = 0; m_pInstance->WAIT_FOR_VIDEO_PROCESSING_TASK = false; - m_pInstance->OMP_THREADS = 12;//OPEN_MP_NUM_PROCESSORS - m_pInstance->FF_THREADS = 8;//FF_NUM_PROCESSORS + m_pInstance->OMP_THREADS = 12; + m_pInstance->FF_THREADS = 8; m_pInstance->DE_LIMIT_HEIGHT_MAX = 1100; m_pInstance->DE_LIMIT_WIDTH_MAX = 1950; m_pInstance->HW_DE_DEVICE_SET = 0; From 
334a46cc5de5a0331bf376a02188d5dba50987fa Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Fri, 1 Feb 2019 03:38:44 -0800 Subject: [PATCH 062/109] Fix check if GPU can be used for encoding and decoding --- src/FFmpegReader.cpp | 2 +- src/FFmpegWriter.cpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 2e938f35..7439db4d 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -415,7 +415,7 @@ void FFmpegReader::Open() } // Check if it is there and writable #if defined(__linux__) - if( adapter_ptr != NULL && access( adapter_ptr, W_OK ) == -1 ) { + if( adapter_ptr != NULL && access( adapter_ptr, W_OK ) == 0 ) { #elif defined(_WIN32) if( adapter_ptr != NULL ) { #elif defined(__APPLE__) diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index 14171894..41285314 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -1316,7 +1316,7 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) } // Check if it is there and writable #if defined(__linux__) - if( adapter_ptr != NULL && access( adapter_ptr, W_OK ) == -1 ) { + if( adapter_ptr != NULL && access( adapter_ptr, W_OK ) == 0 ) { #elif defined(_WIN32) if( adapter_ptr != NULL ) { #elif defined(__APPLE__) From a2b8eaff37896d0b5e97a203d611ca000f2bcbee Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Fri, 15 Feb 2019 10:11:45 -0800 Subject: [PATCH 063/109] Allow using nvenc and nvdec in Windows for nVidia cards. nVidia cards don't use the DX API like intel or AMD cards. If ffmpeg and the libraries are compiled with nvenc and nvdec support on Windows this should(!) now work.
--- src/FFmpegReader.cpp | 21 +++++++++++++++++++++ src/FFmpegWriter.cpp | 15 ++++++++++++--- 2 files changed, 33 insertions(+), 3 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 7439db4d..1c80bb84 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -224,6 +224,23 @@ static enum AVPixelFormat get_hw_dec_format_d3(AVCodecContext *ctx, const enum A ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); return AV_PIX_FMT_NONE; } + +static enum AVPixelFormat get_hw_dec_format_cu(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) +{ + const enum AVPixelFormat *p; + + for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { + switch (*p) { + case AV_PIX_FMT_CUDA: + hw_de_av_pix_fmt_global = AV_PIX_FMT_CUDA; + hw_de_av_device_type_global = AV_HWDEVICE_TYPE_CUDA; + return *p; + break; + } + } + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + return AV_PIX_FMT_NONE; +} #endif #if defined(__APPLE__) @@ -378,6 +395,10 @@ void FFmpegReader::Open() hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; pCodecCtx->get_format = get_hw_dec_format_dx; break; + case 2: + hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA; + pCodecCtx->get_format = get_hw_dec_format_cu; + break; case 3: hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; pCodecCtx->get_format = get_hw_dec_format_dx; diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index 41285314..fb8040a8 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -203,9 +203,18 @@ void FFmpegWriter::SetVideoOptions(bool has_video, string codec, Fraction fps, i hw_en_av_device_type = AV_HWDEVICE_TYPE_DXVA2; } else { - new_codec = avcodec_find_encoder_by_name(codec.c_str()); - hw_en_on = 0; - hw_en_supported = 0; + if ( (strcmp(codec.c_str(),"h264_nvenc") == 0)) { + 
new_codec = avcodec_find_encoder_by_name(codec.c_str()); + hw_en_on = 1; + hw_en_supported = 1; + hw_en_av_pix_fmt = AV_PIX_FMT_CUDA; + hw_en_av_device_type = AV_HWDEVICE_TYPE_CUDA; + } + else { + new_codec = avcodec_find_encoder_by_name(codec.c_str()); + hw_en_on = 0; + hw_en_supported = 0; + } } #elif defined(__APPLE__) if ( (strcmp(codec.c_str(),"h264_qsv") == 0)) { From 48a2656080931cde020d1fbe61fb95cca25b5222 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Fri, 1 Mar 2019 16:32:52 -0800 Subject: [PATCH 064/109] Avoid crashes with mp3s that are tagged by removing AV_ALLOCATE_IMAGE(pFrame, AV_GET_CODEC_PIXEL_FORMAT( ... --- src/FFmpegReader.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index dae538f2..1b2c6b82 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -2369,7 +2369,7 @@ void FFmpegReader::CheckWorkingFrames(bool end_of_stream, int64_t requested_fram void FFmpegReader::CheckFPS() { check_fps = true; - AV_ALLOCATE_IMAGE(pFrame, AV_GET_CODEC_PIXEL_FORMAT(pStream, pCodecCtx), info.width, info.height); + int first_second_counter = 0; int second_second_counter = 0; From 16c3d53d038b72002d51a7d0605d1223c521ecdb Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Fri, 8 Mar 2019 14:26:14 -0800 Subject: [PATCH 065/109] Fix problem with q values for crf quality setting.
TODO adjust q values according to desired quality --- src/FFmpegWriter.cpp | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index fb8040a8..90266ffa 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -1132,6 +1132,12 @@ AVStream* FFmpegWriter::add_video_stream() /* Init video encoder options */ if (info.video_bit_rate >= 1000) { c->bit_rate = info.video_bit_rate; + c->qmin = 2; + c->qmax = 30; + } + else { + c->qmin = 0; + c->qmax = 63; } //TODO: Implement variable bitrate feature (which actually works). This implementation throws @@ -1141,8 +1147,8 @@ AVStream* FFmpegWriter::add_video_stream() //c->rc_buffer_size = FFMAX(c->rc_max_rate, 15000000) * 112L / 15000000 * 16384; //if ( !c->rc_initial_buffer_occupancy ) // c->rc_initial_buffer_occupancy = c->rc_buffer_size * 3/4; - c->qmin = 2; - c->qmax = 30; +// c->qmin = 2; +// c->qmax = 30; /* resolution must be a multiple of two */ // TODO: require /2 height and width From 6a21c984e9898e84b6d58a6ef936c7ce9dca7b26 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Fri, 8 Mar 2019 14:41:40 -0800 Subject: [PATCH 066/109] Fixed q values for low fixed bitrates. Low bitrates should now be produced if desired.
TODO fine tune the q values --- src/FFmpegWriter.cpp | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index 90266ffa..17290bce 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -1132,8 +1132,14 @@ AVStream* FFmpegWriter::add_video_stream() /* Init video encoder options */ if (info.video_bit_rate >= 1000) { c->bit_rate = info.video_bit_rate; - c->qmin = 2; - c->qmax = 30; + if (info.video_bit_rate >= 1500000) { + c->qmin = 2; + c->qmax = 30; + } + else { + c->qmin = 0; + c->qmax = 63; + } } else { c->qmin = 0; From 6b9a9ca6ff1022ad60956d8643ca09decc839d38 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Fri, 8 Mar 2019 16:52:34 -0800 Subject: [PATCH 067/109] Removed the branch for low fixed bitrate q values as it did not work with mpeg2 export. Now for low fixed bitrates no presets for the q values are set. TODO find the optimum q values for each codec for low and high bitrates --- src/FFmpegWriter.cpp | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index 17290bce..cf4004c4 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -1136,10 +1136,8 @@ AVStream* FFmpegWriter::add_video_stream() c->qmin = 2; c->qmax = 30; } - else { - c->qmin = 0; - c->qmax = 63; - } + // Here should be the setting for low fixed bitrate + // Defaults are used because mpeg2 otherwise had problems } else { c->qmin = 0; @@ -1153,8 +1151,6 @@ AVStream* FFmpegWriter::add_video_stream() //c->rc_buffer_size = FFMAX(c->rc_max_rate, 15000000) * 112L / 15000000 * 16384; //if ( !c->rc_initial_buffer_occupancy ) // c->rc_initial_buffer_occupancy = c->rc_buffer_size * 3/4; -// c->qmin = 2; -// c->qmax = 30; /* resolution must be a multiple of two */ // TODO: require /2 height and width From b5ebc996eefde4aace92bf287a8a3297488cdd75 Mon Sep 17 00:00:00 2001 From: eisneinechse
<42617957+eisneinechse@users.noreply.github.com> Date: Sun, 10 Mar 2019 10:42:48 -0700 Subject: [PATCH 068/109] Adjust the q values for low quality crf settings --- src/FFmpegWriter.cpp | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index cf4004c4..b1eb7ec3 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -1140,8 +1140,14 @@ AVStream* FFmpegWriter::add_video_stream() // Defaults are used because mpeg2 otherwise had problems } else { - c->qmin = 0; - c->qmax = 63; + if (info.video_bit_rate < 40) { + c->qmin = 0; + c->qmax = 63; + } + else { + c->qmin = info.video_bit_rate - 5; + c->qmax = 63; + } } //TODO: Implement variable bitrate feature (which actually works). This implementation throws From a170d7db38f6a3798bec89bb8dd77ee3c8c5f384 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sun, 10 Mar 2019 13:09:47 -0700 Subject: [PATCH 069/109] Check if the codec supports CRF when setting q values --- src/FFmpegWriter.cpp | 29 ++++++++++++++++++++++------- 1 file changed, 22 insertions(+), 7 deletions(-) diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index b1eb7ec3..12bfda54 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -1140,13 +1140,28 @@ AVStream* FFmpegWriter::add_video_stream() // Defaults are used because mpeg2 otherwise had problems } else { - if (info.video_bit_rate < 40) { - c->qmin = 0; - c->qmax = 63; - } - else { - c->qmin = info.video_bit_rate - 5; - c->qmax = 63; + // Check if codec supports crf + switch (c->codec_id) { + #if (LIBAVCODEC_VERSION_MAJOR >= 58) + case AV_CODEC_ID_AV1 : + #endif + case AV_CODEC_ID_VP8 : + case AV_CODEC_ID_VP9 : + case AV_CODEC_ID_H264 : + case AV_CODEC_ID_H265 : + if (info.video_bit_rate < 40) { + c->qmin = 0; + c->qmax = 63; + } + else { + c->qmin = info.video_bit_rate - 5; + c->qmax = 63; + } + break; + default: + // Here should be the setting for codecs that 
don't support crf + // For now defaults are used + break; } } From cd4e25ea6782748b18bbf5ba9709b727b029ac9b Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sun, 10 Mar 2019 13:28:56 -0700 Subject: [PATCH 070/109] Fix for FFmpeg 2.x --- src/FFmpegWriter.cpp | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index 12bfda54..1c9094a6 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -1142,13 +1142,15 @@ AVStream* FFmpegWriter::add_video_stream() else { // Check if codec supports crf switch (c->codec_id) { + #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 39, 101) #if (LIBAVCODEC_VERSION_MAJOR >= 58) case AV_CODEC_ID_AV1 : #endif - case AV_CODEC_ID_VP8 : case AV_CODEC_ID_VP9 : - case AV_CODEC_ID_H264 : case AV_CODEC_ID_H265 : + #endif + case AV_CODEC_ID_VP8 : + case AV_CODEC_ID_H264 : if (info.video_bit_rate < 40) { c->qmin = 0; c->qmax = 63; From 999d2021cfc88156ab99e8aa6607fcbd9587f200 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sat, 6 Apr 2019 09:54:58 -0700 Subject: [PATCH 071/109] cmake target test renamed to os_test (test is predefined in cmake 3) --- tests/CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 2d2a0122..07bb23c3 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -227,5 +227,5 @@ IF (NOT DISABLE_TESTS) #################### MAKE TEST ###################### # Hook up the 'make test' target to the 'openshot-test' executable - ADD_CUSTOM_TARGET(test ${CMAKE_CURRENT_BINARY_DIR}/openshot-test) + ADD_CUSTOM_TARGET(os_test ${CMAKE_CURRENT_BINARY_DIR}/openshot-test) ENDIF (NOT DISABLE_TESTS) From 9a7a720e3c3fe961698de779787ac83b5b71c53b Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sat, 6 Apr 2019 10:26:04 -0700 Subject: [PATCH 
072/109] change target of test to os_test in travis --- .travis.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 879a8190..4afd8467 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,7 +15,7 @@ matrix: - mkdir -p build; cd build; - cmake -D"CMAKE_BUILD_TYPE:STRING=Debug" ../ - make VERBOSE=1 - - make test + - make os_test - language: cpp name: "FFmpeg 3" @@ -30,7 +30,7 @@ matrix: - mkdir -p build; cd build; - cmake -D"CMAKE_BUILD_TYPE:STRING=Debug" ../ - make VERBOSE=1 - - make test + - make os_test - language: cpp name: "FFmpeg 4" @@ -47,4 +47,4 @@ matrix: - mkdir -p build; cd build; - cmake -D"CMAKE_BUILD_TYPE:STRING=Debug" ../ - make VERBOSE=1 - - make test + - make os_test From f61d054a74ff8124c77d263b933d5e7d24fd4245 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Thu, 11 Apr 2019 07:39:01 -0700 Subject: [PATCH 073/109] cmake hack Find the right install directory. I hope someone will come up with a more elegant way. 
--- src/bindings/python/CMakeLists.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/bindings/python/CMakeLists.txt b/src/bindings/python/CMakeLists.txt index 93ae9360..d4358b5e 100644 --- a/src/bindings/python/CMakeLists.txt +++ b/src/bindings/python/CMakeLists.txt @@ -67,8 +67,8 @@ if (PYTHONLIBS_FOUND AND PYTHONINTERP_FOUND) ### FIND THE PYTHON INTERPRETER (AND THE SITE PACKAGES FOLDER) execute_process ( COMMAND ${PYTHON_EXECUTABLE} -c "\ -from distutils.sysconfig import get_python_lib; \ -print( get_python_lib( plat_specific=True, prefix='${CMAKE_INSTALL_PREFIX}' ) )" +import site; from distutils.sysconfig import get_python_lib; \ +print( get_python_lib( plat_specific=True, standard_lib=True, prefix='${CMAKE_INSTALL_PREFIX}' ) + '/' + site.getsitepackages()[0].split('/')[-1] )" OUTPUT_VARIABLE _ABS_PYTHON_MODULE_PATH OUTPUT_STRIP_TRAILING_WHITESPACE ) From 94d4de48db00921c09bc6caa5c4cff316bed5eca Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Thu, 11 Apr 2019 08:06:11 -0700 Subject: [PATCH 074/109] 2nd attempt --- src/bindings/python/CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/bindings/python/CMakeLists.txt b/src/bindings/python/CMakeLists.txt index d4358b5e..0e2f6eee 100644 --- a/src/bindings/python/CMakeLists.txt +++ b/src/bindings/python/CMakeLists.txt @@ -68,7 +68,7 @@ if (PYTHONLIBS_FOUND AND PYTHONINTERP_FOUND) ### FIND THE PYTHON INTERPRETER (AND THE SITE PACKAGES FOLDER) execute_process ( COMMAND ${PYTHON_EXECUTABLE} -c "\ import site; from distutils.sysconfig import get_python_lib; \ -print( get_python_lib( plat_specific=True, standard_lib=True, prefix='${CMAKE_INSTALL_PREFIX}' ) + '/' + site.getsitepackages()[0].split('/')[-1] )" +print( get_python_lib( plat_specific=True, standard_lib=True, prefix='${CMAKE_INSTALL_PREFIX}' ) + '/' + get_python_lib( plat_specific=False, standard_lib=False, prefix='${CMAKE_INSTALL_PREFIX}' 
).split('/')[-1] )" OUTPUT_VARIABLE _ABS_PYTHON_MODULE_PATH OUTPUT_STRIP_TRAILING_WHITESPACE ) From 2dd19696000243ce7335dffc44f9924b97f0b280 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Thu, 11 Apr 2019 15:42:03 -0700 Subject: [PATCH 075/109] Alternate version --- src/bindings/python/CMakeLists.txt | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/src/bindings/python/CMakeLists.txt b/src/bindings/python/CMakeLists.txt index 0e2f6eee..da8c1afa 100644 --- a/src/bindings/python/CMakeLists.txt +++ b/src/bindings/python/CMakeLists.txt @@ -66,11 +66,18 @@ if (PYTHONLIBS_FOUND AND PYTHONINTERP_FOUND) ${PYTHON_LIBRARIES} openshot) ### FIND THE PYTHON INTERPRETER (AND THE SITE PACKAGES FOLDER) - execute_process ( COMMAND ${PYTHON_EXECUTABLE} -c "\ -import site; from distutils.sysconfig import get_python_lib; \ -print( get_python_lib( plat_specific=True, standard_lib=True, prefix='${CMAKE_INSTALL_PREFIX}' ) + '/' + get_python_lib( plat_specific=False, standard_lib=False, prefix='${CMAKE_INSTALL_PREFIX}' ).split('/')[-1] )" - OUTPUT_VARIABLE _ABS_PYTHON_MODULE_PATH - OUTPUT_STRIP_TRAILING_WHITESPACE ) + if (CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT) + # If not prefix found, detect python site package folder + EXECUTE_PROCESS ( COMMAND ${PYTHON_EXECUTABLE} -c "import site; print(site.getsitepackages()[0])" + OUTPUT_VARIABLE _ABS_PYTHON_MODULE_PATH + OUTPUT_STRIP_TRAILING_WHITESPACE ) + else() + execute_process ( COMMAND ${PYTHON_EXECUTABLE} -c "\ + from distutils.sysconfig import get_python_lib; \ + print( get_python_lib( plat_specific=True, prefix='${CMAKE_INSTALL_PREFIX}' ) )" + OUTPUT_VARIABLE _ABS_PYTHON_MODULE_PATH + OUTPUT_STRIP_TRAILING_WHITESPACE ) + endif() GET_FILENAME_COMPONENT(_ABS_PYTHON_MODULE_PATH "${_ABS_PYTHON_MODULE_PATH}" ABSOLUTE) From dcff7245b3a651e4e8293fb8da6e9484c387c8f9 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> 
Date: Thu, 11 Apr 2019 16:01:26 -0700 Subject: [PATCH 076/109] Revert to older version plus add slash --- src/bindings/python/CMakeLists.txt | 19 +++++++------------ 1 file changed, 7 insertions(+), 12 deletions(-) diff --git a/src/bindings/python/CMakeLists.txt b/src/bindings/python/CMakeLists.txt index da8c1afa..c8686097 100644 --- a/src/bindings/python/CMakeLists.txt +++ b/src/bindings/python/CMakeLists.txt @@ -66,18 +66,13 @@ if (PYTHONLIBS_FOUND AND PYTHONINTERP_FOUND) ${PYTHON_LIBRARIES} openshot) ### FIND THE PYTHON INTERPRETER (AND THE SITE PACKAGES FOLDER) - if (CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT) - # If not prefix found, detect python site package folder - EXECUTE_PROCESS ( COMMAND ${PYTHON_EXECUTABLE} -c "import site; print(site.getsitepackages()[0])" - OUTPUT_VARIABLE _ABS_PYTHON_MODULE_PATH - OUTPUT_STRIP_TRAILING_WHITESPACE ) - else() - execute_process ( COMMAND ${PYTHON_EXECUTABLE} -c "\ - from distutils.sysconfig import get_python_lib; \ - print( get_python_lib( plat_specific=True, prefix='${CMAKE_INSTALL_PREFIX}' ) )" - OUTPUT_VARIABLE _ABS_PYTHON_MODULE_PATH - OUTPUT_STRIP_TRAILING_WHITESPACE ) - endif() + execute_process ( COMMAND ${PYTHON_EXECUTABLE} -c "\ +import site; from distutils.sysconfig import get_python_lib; \ +print( get_python_lib( plat_specific=True, standard_lib=True, prefix='${CMAKE_INSTALL_PREFIX}' ) \ + + '/' + get_python_lib( plat_specific=False, standard_lib=False, prefix='${CMAKE_INSTALL_PREFIX}' ).split('/')[-1] \ + + '/' )" + OUTPUT_VARIABLE _ABS_PYTHON_MODULE_PATH + OUTPUT_STRIP_TRAILING_WHITESPACE ) GET_FILENAME_COMPONENT(_ABS_PYTHON_MODULE_PATH "${_ABS_PYTHON_MODULE_PATH}" ABSOLUTE) From 893b91b528b6c0447df8f3931b452a352f234b4c Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Thu, 18 Apr 2019 01:07:57 -0500 Subject: [PATCH 077/109] Adding doc/HW-ACCEL.md document, code reformatting, some variable renaming --- README.md | 7 + doc/HW-ACCEL.md | 84 +++++ include/FFmpegReader.h | 30 +- include/FFmpegWriter.h | 89 
+++-- src/FFmpegReader.cpp | 778 ++++++++++++++++++----------------- 5 files changed, 493 insertions(+), 495 deletions(-) create mode 100644 doc/HW-ACCEL.md diff --git a/README.md b/README.md index 8deb86a1..cf69c1cf 100644 --- a/README.md +++ b/README.md @@ -34,6 +34,13 @@ are also available in the /docs/ source folder. * [Mac](https://github.com/OpenShot/libopenshot/wiki/Mac-Build-Instructions) * [Windows](https://github.com/OpenShot/libopenshot/wiki/Windows-Build-Instructions) +## Hardware Acceleration + +OpenShot now supports experimental hardware acceleration, both for encoding and +decoding videos. When enabled, this can either speed up those operations or slow +them down, depending on the power and features supported by your graphics card. +Please see [doc/HW-ACCEL.md](doc/HW-ACCEL.md) for more information. + ## Documentation Beautiful HTML documentation can be generated using Doxygen. diff --git a/doc/HW-ACCEL.md b/doc/HW-ACCEL.md new file mode 100644 index 00000000..edbe85b4 --- /dev/null +++ b/doc/HW-ACCEL.md @@ -0,0 +1,84 @@ +## Hardware Acceleration + +Observations for developers wanting to make hardware acceleration work. + +*All observations are for Linux (but contributions welcome).* + +## Supported FFmpeg Versions + +* HW accel is supported from ffmpeg version 3.2 (3.3 for nVidia drivers) +* HW accel was removed for nVidia drivers in Ubuntu for ffmpeg 4+ +* I could not manage to build a version of ffmpeg 4.1 with the nVidia SDK +that worked with nVidia cards. There might be a problem in ffmpeg 4+ +that prohibits this. + +**Notice:** The ffmpeg versions of Ubuntu and PPAs for Ubuntu show the +same behaviour. ffmpeg 3 has working nVidia hardware acceleration while +ffmpeg 4+ has no support for nVidia hardware acceleration +included. + +## OpenShot Settings + +The following settings are used by libopenshot to enable, disable, and control +the various hardware acceleration features.
+ +``` +/// Use video card for faster video decoding (if supported) +bool HARDWARE_DECODE = false; + +/// Use video codec for faster video decoding (if supported) +int HARDWARE_DECODER = 0; + +/// Use video card for faster video encoding (if supported) +bool HARDWARE_ENCODE = false; + +/// Number of threads of OpenMP +int OMP_THREADS = 12; + +/// Number of threads that ffmpeg uses +int FF_THREADS = 8; + +/// Maximum rows that hardware decode can handle +int DE_LIMIT_HEIGHT_MAX = 1100; + +/// Maximum columns that hardware decode can handle +int DE_LIMIT_WIDTH_MAX = 1950; + +/// Which GPU to use to decode (0 is the first) +int HW_DE_DEVICE_SET = 0; + +/// Which GPU to use to encode (0 is the first) +int HW_EN_DEVICE_SET = 0; +``` + +## Libva / VA-API (Video Acceleration API) + +The correct version of libva is needed (libva in Ubuntu 16.04 or libva2 +in Ubuntu 18.04) for the AppImage to work with hardware acceleration. +An AppImage that works on both systems (supporting libva and libva2), +might be possible when no libva is included in the AppImage. + +* vaapi is working for intel and AMD +* vaapi is working for decode only for nouveau +* nVidia driver is working for export only + +## AMD Graphics Cards (RadeonOpenCompute/ROCm) + +Decoding and encoding on the (AMD) GPU can be done on systems where ROCm +is installed and run. Possible future use for GPU acceleration of effects (contributions +welcome). + +## Multiple Graphics Cards + +If the computer has multiple graphics cards installed, you can choose which +should be used by libopenshot. Also, you can optionally use one card for +decoding and the other for encoding (if both cards support acceleration). + +## Help Us Improve Hardware Support + +This information might be wrong, and we would love to continue improving +our support for hardware acceleration in OpenShot. Please help us update +this document if you find an error or discover some new information. 
+ +**Desperately Needed:** a way to compile ffmpeg 4.0 and up with working nVidia +hardware acceleration support on Ubuntu Linux! diff --git a/include/FFmpegReader.h b/include/FFmpegReader.h index acbec206..abf1af57 100644 --- a/include/FFmpegReader.h +++ b/include/FFmpegReader.h @@ -50,18 +50,17 @@ using namespace std; -namespace openshot -{ +namespace openshot { /** * @brief This struct holds the associated video frame and starting sample # for an audio packet. * * Because audio packets do not match up with video frames, this helps determine exactly * where the audio packet's samples belong. */ - struct AudioLocation - { + struct AudioLocation { int64_t frame; int sample_start; + bool is_near(AudioLocation location, int samples_per_frame, int64_t amount); }; @@ -91,17 +90,16 @@ namespace openshot * r.Close(); * @endcode */ - class FFmpegReader : public ReaderBase - { + class FFmpegReader : public ReaderBase { private: string path; AVFormatContext *pFormatCtx; int i, videoStream, audioStream; AVCodecContext *pCodecCtx, *aCodecCtx; - #if (LIBAVFORMAT_VERSION_MAJOR >= 57) +#if (LIBAVFORMAT_VERSION_MAJOR >= 57) AVBufferRef *hw_device_ctx = NULL; //PM - #endif +#endif AVStream *pStream, *aStream; AVPacket *packet; AVFrame *pFrame; @@ -145,15 +143,15 @@ namespace openshot int64_t video_pts_offset; int64_t last_frame; int64_t largest_frame_processed; - int64_t current_video_frame; // can't reliably use PTS of video to determine this + int64_t current_video_frame; // can't reliably use PTS of video to determine this - int hw_de_supported = 0; // Is set by FFmpegReader - #if IS_FFMPEG_3_2 + int hw_de_supported = 0; // Is set by FFmpegReader +#if IS_FFMPEG_3_2 AVPixelFormat hw_de_av_pix_fmt = AV_PIX_FMT_NONE; AVHWDeviceType hw_de_av_device_type = AV_HWDEVICE_TYPE_NONE; - #endif +#endif - int is_hardware_decode_supported(int codecid); + int IsHardwareDecodeSupported(int codecid); /// Check for the correct frames per second value by scanning the 1st few seconds of video 
packets. void CheckFPS(); @@ -210,10 +208,10 @@ namespace openshot std::shared_ptr ReadStream(int64_t requested_frame); /// Remove AVFrame from cache (and deallocate it's memory) - void RemoveAVFrame(AVFrame*); + void RemoveAVFrame(AVFrame *); /// Remove AVPacket from cache (and deallocate it's memory) - void RemoveAVPacket(AVPacket*); + void RemoveAVPacket(AVPacket *); /// Seek to a specific Frame. This is not always frame accurate, it's more of an estimation on many codecs. void Seek(int64_t requested_frame); @@ -251,7 +249,7 @@ namespace openshot void Close(); /// Get the cache object used by this reader - CacheMemory* GetCache() { return &final_cache; }; + CacheMemory *GetCache() { return &final_cache; }; /// Get a shared pointer to a openshot::Frame object for a specific frame number of this reader. /// diff --git a/include/FFmpegWriter.h b/include/FFmpegWriter.h index e219f72c..b93ef7b3 100644 --- a/include/FFmpegWriter.h +++ b/include/FFmpegWriter.h @@ -56,14 +56,12 @@ using namespace std; -namespace openshot -{ +namespace openshot { /// This enumeration designates the type of stream when encoding (video or audio) - enum StreamType - { - VIDEO_STREAM, ///< A video stream (used to determine which type of stream) - AUDIO_STREAM ///< An audio stream (used to determine which type of stream) + enum StreamType { + VIDEO_STREAM, ///< A video stream (used to determine which type of stream) + AUDIO_STREAM ///< An audio stream (used to determine which type of stream) }; /** @@ -141,8 +139,7 @@ namespace openshot * r.Close(); * @endcode */ - class FFmpegWriter : public WriterBase - { + class FFmpegWriter : public WriterBase { private: string path; int cache_size; @@ -155,56 +152,56 @@ namespace openshot bool write_header; bool write_trailer; - AVOutputFormat *fmt; - AVFormatContext *oc; - AVStream *audio_st, *video_st; - AVCodecContext *video_codec; - AVCodecContext *audio_codec; - SwsContext *img_convert_ctx; - double audio_pts, video_pts; - int16_t *samples; - 
uint8_t *audio_outbuf; - uint8_t *audio_encoder_buffer; + AVOutputFormat *fmt; + AVFormatContext *oc; + AVStream *audio_st, *video_st; + AVCodecContext *video_codec; + AVCodecContext *audio_codec; + SwsContext *img_convert_ctx; + double audio_pts, video_pts; + int16_t *samples; + uint8_t *audio_outbuf; + uint8_t *audio_encoder_buffer; - int num_of_rescalers; + int num_of_rescalers; int rescaler_position; - vector image_rescalers; + vector image_rescalers; - int audio_outbuf_size; - int audio_input_frame_size; - int initial_audio_input_frame_size; - int audio_input_position; - int audio_encoder_buffer_size; - SWRCONTEXT *avr; - SWRCONTEXT *avr_planar; + int audio_outbuf_size; + int audio_input_frame_size; + int initial_audio_input_frame_size; + int audio_input_position; + int audio_encoder_buffer_size; + SWRCONTEXT *avr; + SWRCONTEXT *avr_planar; - /* Resample options */ - int original_sample_rate; - int original_channels; + /* Resample options */ + int original_sample_rate; + int original_channels; - std::shared_ptr last_frame; - deque > spooled_audio_frames; - deque > spooled_video_frames; + std::shared_ptr last_frame; + deque > spooled_audio_frames; + deque > spooled_video_frames; - deque > queued_audio_frames; - deque > queued_video_frames; + deque > queued_audio_frames; + deque > queued_video_frames; - deque > processed_frames; - deque > deallocate_frames; + deque > processed_frames; + deque > deallocate_frames; - map, AVFrame*> av_frames; + map, AVFrame *> av_frames; - /// Add an AVFrame to the cache - void add_avframe(std::shared_ptr frame, AVFrame* av_frame); + /// Add an AVFrame to the cache + void add_avframe(std::shared_ptr frame, AVFrame *av_frame); /// Add an audio output stream - AVStream* add_audio_stream(); + AVStream *add_audio_stream(); /// Add a video output stream - AVStream* add_video_stream(); + AVStream *add_video_stream(); /// Allocate an AVFrame object - AVFrame* allocate_avframe(PixelFormat pix_fmt, int width, int height, int *buffer_size, 
uint8_t *new_buffer); + AVFrame *allocate_avframe(PixelFormat pix_fmt, int width, int height, int *buffer_size, uint8_t *new_buffer); /// Auto detect format (from path) void auto_detect_format(); @@ -239,7 +236,7 @@ namespace openshot void write_audio_packets(bool final); /// write video frame - bool write_video_packet(std::shared_ptr frame, AVFrame* frame_final); + bool write_video_packet(std::shared_ptr frame, AVFrame *frame_final); /// write all queued frames void write_queued_frames(); @@ -303,7 +300,7 @@ namespace openshot /// @param interlaced Does this video need to be interlaced? /// @param top_field_first Which frame should be used as the top field? /// @param bit_rate The video bit rate used during encoding - void SetVideoOptions(bool has_video, string codec, Fraction fps, int width, int height,Fraction pixel_ratio, bool interlaced, bool top_field_first, int bit_rate); + void SetVideoOptions(bool has_video, string codec, Fraction fps, int width, int height, Fraction pixel_ratio, bool interlaced, bool top_field_first, int bit_rate); /// @brief Set custom options (some codecs accept additional params). This must be called after the /// PrepareStreams() method, otherwise the streams have not been initialized yet. @@ -324,7 +321,7 @@ namespace openshot /// @param reader A openshot::ReaderBase object which will provide frames to be written /// @param start The starting frame number of the reader /// @param length The number of frames to write - void WriteFrame(ReaderBase* reader, int64_t start, int64_t length); + void WriteFrame(ReaderBase *reader, int64_t start, int64_t length); /// @brief Write the file trailer (after all frames are written). This is called automatically /// by the Close() method if this method has not yet been called. 
diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 1b2c6b82..3af851e1 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -30,7 +30,7 @@ #include "../include/FFmpegReader.h" -#define PRAYFORAWONDER 0 +#define ENABLE_VAAPI 0 #if IS_FFMPEG_3_2 #pragma message "You are compiling with experimental hardware decode" @@ -42,52 +42,51 @@ #define MAX_SUPPORTED_WIDTH 1950 #define MAX_SUPPORTED_HEIGHT 1100 -#if PRAYFORAWONDER +#if ENABLE_VAAPI #include "libavutil/hwcontext_vaapi.h" typedef struct VAAPIDecodeContext { - VAProfile va_profile; - VAEntrypoint va_entrypoint; - VAConfigID va_config; - VAContextID va_context; + VAProfile va_profile; + VAEntrypoint va_entrypoint; + VAConfigID va_config; + VAContextID va_context; - #if FF_API_STRUCT_VAAPI_CONTEXT -// FF_DISABLE_DEPRECATION_WARNINGS - int have_old_context; - struct vaapi_context *old_context; - AVBufferRef *device_ref; -// FF_ENABLE_DEPRECATION_WARNINGS - #endif +#if FF_API_STRUCT_VAAPI_CONTEXT + // FF_DISABLE_DEPRECATION_WARNINGS + int have_old_context; + struct vaapi_context *old_context; + AVBufferRef *device_ref; + // FF_ENABLE_DEPRECATION_WARNINGS +#endif - AVHWDeviceContext *device; - AVVAAPIDeviceContext *hwctx; + AVHWDeviceContext *device; + AVVAAPIDeviceContext *hwctx; - AVHWFramesContext *frames; - AVVAAPIFramesContext *hwfc; + AVHWFramesContext *frames; + AVVAAPIFramesContext *hwfc; - enum AVPixelFormat surface_format; - int surface_count; + enum AVPixelFormat surface_format; + int surface_count; } VAAPIDecodeContext; - - #endif +#endif #endif using namespace openshot; -int hw_de_on = 1; // Is set in UI +int hw_de_on = 0; #if IS_FFMPEG_3_2 AVPixelFormat hw_de_av_pix_fmt_global = AV_PIX_FMT_NONE; -AVHWDeviceType hw_de_av_device_type_global = AV_HWDEVICE_TYPE_NONE; + AVHWDeviceType hw_de_av_device_type_global = AV_HWDEVICE_TYPE_NONE; #endif FFmpegReader::FFmpegReader(string path) - : last_frame(0), is_seeking(0), seeking_pts(0), seeking_frame(0), seek_count(0), - 
audio_pts_offset(99999), video_pts_offset(99999), path(path), is_video_seek(true), check_interlace(false), - check_fps(false), enable_seek(true), is_open(false), seek_audio_frame_found(0), seek_video_frame_found(0), - prev_samples(0), prev_pts(0), pts_total(0), pts_counter(0), is_duration_known(false), largest_frame_processed(0), - current_video_frame(0), has_missing_frames(false), num_packets_since_video_frame(0), num_checks_since_final(0), - packet(NULL) { + : last_frame(0), is_seeking(0), seeking_pts(0), seeking_frame(0), seek_count(0), + audio_pts_offset(99999), video_pts_offset(99999), path(path), is_video_seek(true), check_interlace(false), + check_fps(false), enable_seek(true), is_open(false), seek_audio_frame_found(0), seek_video_frame_found(0), + prev_samples(0), prev_pts(0), pts_total(0), pts_counter(0), is_duration_known(false), largest_frame_processed(0), + current_video_frame(0), has_missing_frames(false), num_packets_since_video_frame(0), num_checks_since_final(0), + packet(NULL) { // Initialize FFMpeg, and register all formats and codecs AV_REGISTER_ALL @@ -134,8 +133,7 @@ FFmpegReader::~FFmpegReader() { } // This struct holds the associated video frame and starting sample # for an audio packet. -bool AudioLocation::is_near(AudioLocation location, int samples_per_frame, int64_t amount) -{ +bool AudioLocation::is_near(AudioLocation location, int samples_per_frame, int64_t amount) { // Is frame even close to this one? 
if (abs(location.frame - frame) >= 2) // This is too far away to be considered @@ -168,7 +166,7 @@ static enum AVPixelFormat get_hw_dec_format_va(AVCodecContext *ctx, const enum A break; } } - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::get_hw_dec_format_va (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); return AV_PIX_FMT_NONE; } @@ -185,7 +183,7 @@ static enum AVPixelFormat get_hw_dec_format_cu(AVCodecContext *ctx, const enum A break; } } - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::get_hw_dec_format_cu (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); return AV_PIX_FMT_NONE; } #endif @@ -204,7 +202,7 @@ static enum AVPixelFormat get_hw_dec_format_dx(AVCodecContext *ctx, const enum A break; } } - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::get_hw_dec_format_dx (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); return AV_PIX_FMT_NONE; } @@ -221,7 +219,7 @@ static enum AVPixelFormat get_hw_dec_format_d3(AVCodecContext *ctx, const enum A break; } } - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::get_hw_dec_format_d3 (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", 
-1, "", -1, "", -1, "", -1); return AV_PIX_FMT_NONE; } @@ -238,7 +236,7 @@ static enum AVPixelFormat get_hw_dec_format_cu(AVCodecContext *ctx, const enum A break; } } - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::get_hw_dec_format_cu (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); return AV_PIX_FMT_NONE; } #endif @@ -257,12 +255,12 @@ static enum AVPixelFormat get_hw_dec_format_qs(AVCodecContext *ctx, const enum A break; } } - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::get_hw_dec_format_qs (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); return AV_PIX_FMT_NONE; } #endif -int FFmpegReader::is_hardware_decode_supported(int codecid) +int FFmpegReader::IsHardwareDecodeSupported(int codecid) { int ret; switch (codecid) { @@ -283,15 +281,13 @@ int FFmpegReader::is_hardware_decode_supported(int codecid) #endif -void FFmpegReader::Open() -{ +void FFmpegReader::Open() { // Open reader if not already open - if (!is_open) - { + if (!is_open) { // Initialize format context pFormatCtx = NULL; { - hw_de_on = (openshot::Settings::Instance()->HARDWARE_DECODER == 0 ? 0 : 1); + hw_de_on = (openshot::Settings::Instance()->HARDWARE_DECODER == 0 ? 0 : 1); } // Open video file @@ -305,8 +301,7 @@ void FFmpegReader::Open() videoStream = -1; audioStream = -1; // Loop through each stream, and identify the video and audio stream index - for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++) - { + for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++) { // Is this a video stream? 
if (AV_GET_CODEC_TYPE(pFormatCtx->streams[i]) == AVMEDIA_TYPE_VIDEO && videoStream < 0) { videoStream = i; @@ -320,8 +315,7 @@ void FFmpegReader::Open() throw NoStreamsFound("No video or audio streams found in this file.", path); // Is there a video stream? - if (videoStream != -1) - { + if (videoStream != -1) { // Set the stream index info.video_stream_index = videoStream; @@ -335,16 +329,17 @@ void FFmpegReader::Open() AVCodec *pCodec = avcodec_find_decoder(codecId); AVDictionary *opts = NULL; int retry_decode_open = 2; - // If hw accel is selected but hardware connot handle repeat with software decoding + // If hw accel is selected but hardware cannot handle repeat with software decoding do { pCodecCtx = AV_GET_CODEC_CONTEXT(pStream, pCodec); - #if IS_FFMPEG_3_2 +#if IS_FFMPEG_3_2 if (hw_de_on && (retry_decode_open==2)) { // Up to here no decision is made if hardware or software decode - hw_de_supported = is_hardware_decode_supported(pCodecCtx->codec_id); + hw_de_supported = IsHardwareDecodeSupported(pCodecCtx->codec_id); } - #endif +#endif retry_decode_open = 0; + // Set number of threads equal to number of processors (not to exceed 16) pCodecCtx->thread_count = min(FF_NUM_PROCESSORS, 16); @@ -354,7 +349,7 @@ void FFmpegReader::Open() // Init options av_dict_set(&opts, "strict", "experimental", 0); - #if IS_FFMPEG_3_2 +#if IS_FFMPEG_3_2 if (hw_de_on && hw_de_supported) { // Open Hardware Acceleration int i_decoder_hw = 0; @@ -363,143 +358,148 @@ void FFmpegReader::Open() int adapter_num; adapter_num = openshot::Settings::Instance()->HW_DE_DEVICE_SET; fprintf(stderr, "\n\nDecodiing Device Nr: %d\n", adapter_num); + if (adapter_num < 3 && adapter_num >=0) { - #if defined(__linux__) - snprintf(adapter,sizeof(adapter),"/dev/dri/renderD%d", adapter_num+128); - adapter_ptr = adapter; - i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER; - switch (i_decoder_hw) { +#if defined(__linux__) + snprintf(adapter,sizeof(adapter),"/dev/dri/renderD%d", 
adapter_num+128); + adapter_ptr = adapter; + i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER; + switch (i_decoder_hw) { + case 0: + hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; + pCodecCtx->get_format = get_hw_dec_format_va; + break; + case 1: + hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; + pCodecCtx->get_format = get_hw_dec_format_va; + break; + case 2: + hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA; + pCodecCtx->get_format = get_hw_dec_format_cu; + break; + default: + hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; + pCodecCtx->get_format = get_hw_dec_format_va; + break; + } + +#elif defined(_WIN32) + adapter_ptr = NULL; + i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER; + switch (i_decoder_hw) { case 0: - hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; - pCodecCtx->get_format = get_hw_dec_format_va; - break; - case 1: - hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; - pCodecCtx->get_format = get_hw_dec_format_va; + hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; + pCodecCtx->get_format = get_hw_dec_format_dx; break; case 2: hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA; pCodecCtx->get_format = get_hw_dec_format_cu; break; + case 3: + hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; + pCodecCtx->get_format = get_hw_dec_format_dx; + break; + case 4: + hw_de_av_device_type = AV_HWDEVICE_TYPE_D3D11VA; + pCodecCtx->get_format = get_hw_dec_format_d3; + break; default: - hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; - pCodecCtx->get_format = get_hw_dec_format_va; + hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; + pCodecCtx->get_format = get_hw_dec_format_dx; break; } +#elif defined(__APPLE__) + adapter_ptr = NULL; + i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER; + switch (i_decoder_hw) { + case 0: + hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; + pCodecCtx->get_format = get_hw_dec_format_qs; + break; + case 5: + hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; + pCodecCtx->get_format = get_hw_dec_format_qs; + break; + 
default: + hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; + pCodecCtx->get_format = get_hw_dec_format_qs; + break; + } +#endif - #elif defined(_WIN32) - adapter_ptr = NULL; - i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER; - switch (i_decoder_hw) { - case 0: - hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; - pCodecCtx->get_format = get_hw_dec_format_dx; - break; - case 2: - hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA; - pCodecCtx->get_format = get_hw_dec_format_cu; - break; - case 3: - hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; - pCodecCtx->get_format = get_hw_dec_format_dx; - break; - case 4: - hw_de_av_device_type = AV_HWDEVICE_TYPE_D3D11VA; - pCodecCtx->get_format = get_hw_dec_format_d3; - break; - default: - hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; - pCodecCtx->get_format = get_hw_dec_format_dx; - break; - } - #elif defined(__APPLE__) - adapter_ptr = NULL; - i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER; - switch (i_decoder_hw) { - case 0: - hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; - pCodecCtx->get_format = get_hw_dec_format_qs; - break; - case 5: - hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; - pCodecCtx->get_format = get_hw_dec_format_qs; - break; - default: - hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; - pCodecCtx->get_format = get_hw_dec_format_qs; - break; - } - #endif - } - else { + } else { adapter_ptr = NULL; // Just to be sure } + // Check if it is there and writable - #if defined(__linux__) +#if defined(__linux__) if( adapter_ptr != NULL && access( adapter_ptr, W_OK ) == 0 ) { - #elif defined(_WIN32) +#elif defined(_WIN32) if( adapter_ptr != NULL ) { - #elif defined(__APPLE__) +#elif defined(__APPLE__) if( adapter_ptr != NULL ) { - #endif +#endif ZmqLogger::Instance()->AppendDebugMethod("Decode Device present using device", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); } else { adapter_ptr = NULL; // use default ZmqLogger::Instance()->AppendDebugMethod("Decode Device not present using default", "", 
-1, "", -1, "", -1, "", -1, "", -1, "", -1); } + hw_device_ctx = NULL; // Here the first hardware initialisations are made if (av_hwdevice_ctx_create(&hw_device_ctx, hw_de_av_device_type, adapter_ptr, NULL, 0) >= 0) { if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) { throw InvalidCodec("Hardware device reference create failed.", path); } - /* - av_buffer_unref(&ist->hw_frames_ctx); - ist->hw_frames_ctx = av_hwframe_ctx_alloc(hw_device_ctx); - if (!ist->hw_frames_ctx) { - av_log(avctx, AV_LOG_ERROR, "Error creating a CUDA frames context\n"); - return AVERROR(ENOMEM); - } - frames_ctx = (AVHWFramesContext*)ist->hw_frames_ctx->data; + /* + av_buffer_unref(&ist->hw_frames_ctx); + ist->hw_frames_ctx = av_hwframe_ctx_alloc(hw_device_ctx); + if (!ist->hw_frames_ctx) { + av_log(avctx, AV_LOG_ERROR, "Error creating a CUDA frames context\n"); + return AVERROR(ENOMEM); + } - frames_ctx->format = AV_PIX_FMT_CUDA; - frames_ctx->sw_format = avctx->sw_pix_fmt; - frames_ctx->width = avctx->width; - frames_ctx->height = avctx->height; + frames_ctx = (AVHWFramesContext*)ist->hw_frames_ctx->data; - av_log(avctx, AV_LOG_DEBUG, "Initializing CUDA frames context: sw_format = %s, width = %d, height = %d\n", - av_get_pix_fmt_name(frames_ctx->sw_format), frames_ctx->width, frames_ctx->height); + frames_ctx->format = AV_PIX_FMT_CUDA; + frames_ctx->sw_format = avctx->sw_pix_fmt; + frames_ctx->width = avctx->width; + frames_ctx->height = avctx->height; + + av_log(avctx, AV_LOG_DEBUG, "Initializing CUDA frames context: sw_format = %s, width = %d, height = %d\n", + av_get_pix_fmt_name(frames_ctx->sw_format), frames_ctx->width, frames_ctx->height); - ret = av_hwframe_ctx_init(pCodecCtx->hw_device_ctx); - ret = av_hwframe_ctx_init(ist->hw_frames_ctx); - if (ret < 0) { - av_log(avctx, AV_LOG_ERROR, "Error initializing a CUDA frame pool\n"); - return ret; - } - */ + ret = av_hwframe_ctx_init(pCodecCtx->hw_device_ctx); + ret = av_hwframe_ctx_init(ist->hw_frames_ctx); + if (ret < 0) { 
+ av_log(avctx, AV_LOG_ERROR, "Error initializing a CUDA frame pool\n"); + return ret; + } + */ } else { throw InvalidCodec("Hardware device create failed.", path); } - } - #endif +#endif + // Open video codec if (avcodec_open2(pCodecCtx, pCodec, &opts) < 0) throw InvalidCodec("A video codec was found, but could not be opened.", path); - #if IS_FFMPEG_3_2 +#if IS_FFMPEG_3_2 if (hw_de_on && hw_de_supported) { AVHWFramesConstraints *constraints = NULL; void *hwconfig = NULL; hwconfig = av_hwdevice_hwconfig_alloc(hw_device_ctx); - // NOT WORKING needs va_config ! - #if PRAYFORAWONDER + +// TODO: needs va_config! +#if ENABLE_VAAPI ((AVVAAPIHWConfig *)hwconfig)->config_id = ((VAAPIDecodeContext *)(pCodecCtx->priv_data))->va_config; - #endif +#endif constraints = av_hwdevice_get_hwframe_constraints(hw_device_ctx,hwconfig); if (constraints) { if (pCodecCtx->coded_width < constraints->min_width || @@ -555,9 +555,9 @@ void FFmpegReader::Open() else { ZmqLogger::Instance()->AppendDebugMethod("\nDecode in software is used\n", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); } - #else +#else retry_decode_open = 0; - #endif +#endif } while (retry_decode_open); // retry_decode_open // Free options av_dict_free(&opts); @@ -567,8 +567,7 @@ void FFmpegReader::Open() } // Is there an audio stream? 
- if (audioStream != -1) - { + if (audioStream != -1) { // Set the stream index info.audio_stream_index = audioStream; @@ -626,32 +625,28 @@ void FFmpegReader::Open() } } -void FFmpegReader::Close() -{ +void FFmpegReader::Close() { // Close all objects, if reader is 'open' - if (is_open) - { + if (is_open) { // Mark as "closed" is_open = false; ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::Close", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); // Close the codec - if (info.has_video) - { + if (info.has_video) { avcodec_flush_buffers(pCodecCtx); AV_FREE_CONTEXT(pCodecCtx); - #if IS_FFMPEG_3_2 +#if IS_FFMPEG_3_2 if (hw_de_on) { if (hw_device_ctx) { av_buffer_unref(&hw_device_ctx); hw_device_ctx = NULL; } } - #endif +#endif } - if (info.has_audio) - { + if (info.has_audio) { avcodec_flush_buffers(aCodecCtx); AV_FREE_CONTEXT(aCodecCtx); } @@ -663,7 +658,7 @@ void FFmpegReader::Close() // Clear processed lists { - const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); processed_video_frames.clear(); processed_audio_frames.clear(); processing_video_frames.clear(); @@ -689,15 +684,14 @@ void FFmpegReader::Close() } } -void FFmpegReader::UpdateAudioInfo() -{ +void FFmpegReader::UpdateAudioInfo() { // Set values of FileInfo struct info.has_audio = true; info.file_size = pFormatCtx->pb ? 
avio_size(pFormatCtx->pb) : -1; info.acodec = aCodecCtx->codec->name; info.channels = AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channels; if (AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout == 0) - AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout = av_get_default_channel_layout( AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channels ); + AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout = av_get_default_channel_layout(AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channels); info.channel_layout = (ChannelLayout) AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout; info.sample_rate = AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->sample_rate; info.audio_bit_rate = AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->bit_rate; @@ -711,15 +705,13 @@ void FFmpegReader::UpdateAudioInfo() info.duration = aStream->duration * info.audio_timebase.ToDouble(); // Check for an invalid video length - if (info.has_video && info.video_length <= 0) - { + if (info.has_video && info.video_length <= 0) { // Calculate the video length from the audio duration info.video_length = info.duration * info.fps.ToDouble(); } // Set video timebase (if no video stream was found) - if (!info.has_video) - { + if (!info.has_video) { // Set a few important default video settings (so audio can be divided into frames) info.fps.num = 24; info.fps.den = 1; @@ -744,8 +736,7 @@ void FFmpegReader::UpdateAudioInfo() } } -void FFmpegReader::UpdateVideoInfo() -{ +void FFmpegReader::UpdateVideoInfo() { if (check_fps) // Already initialized all the video metadata, no reason to do it again return; @@ -762,18 +753,13 @@ void FFmpegReader::UpdateVideoInfo() info.fps.num = pStream->avg_frame_rate.num; info.fps.den = pStream->avg_frame_rate.den; - if (pStream->sample_aspect_ratio.num != 0) - { + if (pStream->sample_aspect_ratio.num != 0) { info.pixel_ratio.num = pStream->sample_aspect_ratio.num; info.pixel_ratio.den = pStream->sample_aspect_ratio.den; - } - else if 
(AV_GET_CODEC_ATTRIBUTES(pStream, pCodecCtx)->sample_aspect_ratio.num != 0) - { + } else if (AV_GET_CODEC_ATTRIBUTES(pStream, pCodecCtx)->sample_aspect_ratio.num != 0) { info.pixel_ratio.num = AV_GET_CODEC_ATTRIBUTES(pStream, pCodecCtx)->sample_aspect_ratio.num; info.pixel_ratio.den = AV_GET_CODEC_ATTRIBUTES(pStream, pCodecCtx)->sample_aspect_ratio.den; - } - else - { + } else { info.pixel_ratio.num = 1; info.pixel_ratio.den = 1; } @@ -807,15 +793,12 @@ void FFmpegReader::UpdateVideoInfo() info.duration = (info.file_size / info.video_bit_rate); // No duration found in stream of file - if (info.duration <= 0.0f) - { + if (info.duration <= 0.0f) { // No duration is found in the video stream info.duration = -1; info.video_length = -1; is_duration_known = false; - } - else - { + } else { // Yes, a duration was found is_duration_known = true; @@ -840,8 +823,7 @@ void FFmpegReader::UpdateVideoInfo() } -std::shared_ptr FFmpegReader::GetFrame(int64_t requested_frame) -{ +std::shared_ptr FFmpegReader::GetFrame(int64_t requested_frame) { // Check for open reader (or throw exception) if (!is_open) throw ReaderClosed("The FFmpegReader is closed. 
Call Open() before calling this method.", path); @@ -866,10 +848,8 @@ std::shared_ptr FFmpegReader::GetFrame(int64_t requested_frame) // Return the cached frame return frame; - } - else - { - #pragma omp critical (ReadStream) + } else { +#pragma omp critical (ReadStream) { // Check the cache a 2nd time (due to a potential previous lock) frame = final_cache.GetFrame(requested_frame); @@ -878,8 +858,7 @@ std::shared_ptr FFmpegReader::GetFrame(int64_t requested_frame) ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetFrame", "returned cached frame on 2nd look", requested_frame, "", -1, "", -1, "", -1, "", -1, "", -1); // Return the cached frame - } - else { + } else { // Frame is not in cache // Reset seek count seek_count = 0; @@ -891,20 +870,16 @@ std::shared_ptr FFmpegReader::GetFrame(int64_t requested_frame) // Are we within X frames of the requested frame? int64_t diff = requested_frame - last_frame; - if (diff >= 1 && diff <= 20) - { + if (diff >= 1 && diff <= 20) { // Continue walking the stream frame = ReadStream(requested_frame); - } - else - { + } else { // Greater than 30 frames away, or backwards, we need to seek to the nearest key frame if (enable_seek) // Only seek if enabled Seek(requested_frame); - else if (!enable_seek && diff < 0) - { + else if (!enable_seek && diff < 0) { // Start over, since we can't seek, and the requested frame is smaller than our position Close(); Open(); @@ -920,8 +895,7 @@ std::shared_ptr FFmpegReader::GetFrame(int64_t requested_frame) } // Read the stream until we find the requested Frame -std::shared_ptr FFmpegReader::ReadStream(int64_t requested_frame) -{ +std::shared_ptr FFmpegReader::ReadStream(int64_t requested_frame) { // Allocate video frame bool end_of_stream = false; bool check_seek = false; @@ -931,7 +905,7 @@ std::shared_ptr FFmpegReader::ReadStream(int64_t requested_frame) // Minimum number of packets to process (for performance reasons) int packets_processed = 0; int minimum_packets = 
OPEN_MP_NUM_PROCESSORS; - int max_packets = 4096; + int max_packets = 4096; // Set the number of threads in OpenMP omp_set_num_threads(OPEN_MP_NUM_PROCESSORS); @@ -941,20 +915,19 @@ std::shared_ptr FFmpegReader::ReadStream(int64_t requested_frame) // Debug output ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream", "requested_frame", requested_frame, "OPEN_MP_NUM_PROCESSORS", OPEN_MP_NUM_PROCESSORS, "", -1, "", -1, "", -1, "", -1); - #pragma omp parallel +#pragma omp parallel { - #pragma omp single +#pragma omp single { // Loop through the stream until the correct frame is found - while (true) - { + while (true) { // Get the next packet into a local variable called packet packet_error = GetNextPacket(); int processing_video_frames_size = 0; int processing_audio_frames_size = 0; { - const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); processing_video_frames_size = processing_video_frames.size(); processing_audio_frames_size = processing_audio_frames.size(); } @@ -962,14 +935,13 @@ std::shared_ptr FFmpegReader::ReadStream(int64_t requested_frame) // Wait if too many frames are being processed while (processing_video_frames_size + processing_audio_frames_size >= minimum_packets) { usleep(2500); - const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); processing_video_frames_size = processing_video_frames.size(); processing_audio_frames_size = processing_audio_frames.size(); } // Get the next packet (if any) - if (packet_error < 0) - { + if (packet_error < 0) { // Break loop when no more packets found end_of_stream = true; break; @@ -979,29 +951,27 @@ std::shared_ptr FFmpegReader::ReadStream(int64_t requested_frame) ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (GetNextPacket)", "requested_frame", requested_frame, "processing_video_frames_size", processing_video_frames_size, "processing_audio_frames_size", 
processing_audio_frames_size, "minimum_packets", minimum_packets, "packets_processed", packets_processed, "is_seeking", is_seeking); // Video packet - if (info.has_video && packet->stream_index == videoStream) - { + if (info.has_video && packet->stream_index == videoStream) { // Reset this counter, since we have a video packet num_packets_since_video_frame = 0; // Check the status of a seek (if any) - if (is_seeking) - #pragma omp critical (openshot_seek) - check_seek = CheckSeek(true); - else - check_seek = false; + if (is_seeking) +#pragma omp critical (openshot_seek) + check_seek = CheckSeek(true); + else + check_seek = false; - if (check_seek) { - // Jump to the next iteration of this loop - continue; - } + if (check_seek) { + // Jump to the next iteration of this loop + continue; + } // Get the AVFrame from the current packet frame_finished = GetAVFrame(); // Check if the AVFrame is finished and set it - if (frame_finished) - { + if (frame_finished) { // Update PTS / Frame Offset (if any) UpdatePTSOffset(true); @@ -1011,20 +981,19 @@ std::shared_ptr FFmpegReader::ReadStream(int64_t requested_frame) if (openshot::Settings::Instance()->WAIT_FOR_VIDEO_PROCESSING_TASK) { // Wait on each OMP task to complete before moving on to the next one. This slows // down processing considerably, but might be more stable on some systems. 
- #pragma omp taskwait +#pragma omp taskwait } } } // Audio packet - else if (info.has_audio && packet->stream_index == audioStream) - { + else if (info.has_audio && packet->stream_index == audioStream) { // Increment this (to track # of packets since the last video packet) num_packets_since_video_frame++; // Check the status of a seek (if any) if (is_seeking) - #pragma omp critical (openshot_seek) +#pragma omp critical (openshot_seek) check_seek = CheckSeek(false); else check_seek = false; @@ -1086,8 +1055,7 @@ std::shared_ptr FFmpegReader::ReadStream(int64_t requested_frame) if (frame) { // return the largest processed frame (assuming it was the last in the video file) return frame; - } - else { + } else { // The largest processed frame is no longer in cache, return a blank frame std::shared_ptr f = CreateFrame(largest_frame_processed); f->AddColor(info.width, info.height, "#000"); @@ -1098,43 +1066,40 @@ std::shared_ptr FFmpegReader::ReadStream(int64_t requested_frame) } // Get the next packet (if any) -int FFmpegReader::GetNextPacket() -{ +int FFmpegReader::GetNextPacket() { int found_packet = 0; AVPacket *next_packet; - #pragma omp critical(getnextpacket) +#pragma omp critical(getnextpacket) { - next_packet = new AVPacket(); - found_packet = av_read_frame(pFormatCtx, next_packet); + next_packet = new AVPacket(); + found_packet = av_read_frame(pFormatCtx, next_packet); - if (packet) { - // Remove previous packet before getting next one - RemoveAVPacket(packet); - packet = NULL; + if (packet) { + // Remove previous packet before getting next one + RemoveAVPacket(packet); + packet = NULL; + } + + if (found_packet >= 0) { + // Update current packet pointer + packet = next_packet; + } } - - if (found_packet >= 0) - { - // Update current packet pointer - packet = next_packet; - } -} // Return if packet was found (or error number) return found_packet; } // Get an AVFrame (if any) -bool FFmpegReader::GetAVFrame() -{ +bool FFmpegReader::GetAVFrame() { int frameFinished 
= -1; int ret = 0; // Decode video frame AVFrame *next_frame = AV_ALLOCATE_FRAME(); - #pragma omp critical (packet_cache) +#pragma omp critical (packet_cache) { - #if IS_FFMPEG_3_2 +#if IS_FFMPEG_3_2 frameFinished = 0; ret = avcodec_send_packet(pCodecCtx, packet); @@ -1157,29 +1122,30 @@ bool FFmpegReader::GetAVFrame() } pFrame = new AVFrame(); while (ret >= 0) { - ret = avcodec_receive_frame(pCodecCtx, next_frame2); - if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) { - break; - } - if (ret != 0) { - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (invalid return frame received)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - } - if (hw_de_on && hw_de_supported) { - int err; - if (next_frame2->format == hw_de_av_pix_fmt) { - next_frame->format = AV_PIX_FMT_YUV420P; - if ((err = av_hwframe_transfer_data(next_frame,next_frame2,0)) < 0) { - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Failed to transfer data to output frame)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - } - if ((err = av_frame_copy_props(next_frame,next_frame2)) < 0) { - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Failed to copy props to output frame)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - } + ret = avcodec_receive_frame(pCodecCtx, next_frame2); + if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) { + break; + } + if (ret != 0) { + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (invalid return frame received)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + } + if (hw_de_on && hw_de_supported) { + int err; + if (next_frame2->format == hw_de_av_pix_fmt) { + next_frame->format = AV_PIX_FMT_YUV420P; + if ((err = av_hwframe_transfer_data(next_frame,next_frame2,0)) < 0) { + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Failed to transfer data to output frame)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + } + if ((err = av_frame_copy_props(next_frame,next_frame2)) 
< 0) { + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Failed to copy props to output frame)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); } } - else - { // No hardware acceleration used -> no copy from GPU memory needed - next_frame = next_frame2; - } + } + else + { // No hardware acceleration used -> no copy from GPU memory needed + next_frame = next_frame2; + } + // TODO also handle possible further frames // Use only the first frame like avcodec_decode_video2 if (frameFinished == 0 ) { @@ -1198,7 +1164,7 @@ bool FFmpegReader::GetAVFrame() AV_FREE_FRAME(&next_frame2); } } - #else +#else avcodec_decode_video2(pCodecCtx, next_frame, &frameFinished, packet); // is frame finished @@ -1217,7 +1183,7 @@ bool FFmpegReader::GetAVFrame() info.top_field_first = next_frame->top_field_first; } } - #endif +#endif } // deallocate the frame @@ -1228,11 +1194,9 @@ bool FFmpegReader::GetAVFrame() } // Check the current seek position and determine if we need to seek again -bool FFmpegReader::CheckSeek(bool is_video) -{ +bool FFmpegReader::CheckSeek(bool is_video) { // Are we seeking for a specific frame? - if (is_seeking) - { + if (is_seeking) { // Determine if both an audio and video packet have been decoded since the seek happened. 
// If not, allow the ReadStream method to keep looping if ((is_video_seek && !seek_video_frame_found) || (!is_video_seek && !seek_audio_frame_found)) @@ -1248,16 +1212,13 @@ bool FFmpegReader::CheckSeek(bool is_video) max_seeked_frame = seek_video_frame_found; // determine if we are "before" the requested frame - if (max_seeked_frame >= seeking_frame) - { + if (max_seeked_frame >= seeking_frame) { // SEEKED TOO FAR ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckSeek (Too far, seek again)", "is_video_seek", is_video_seek, "max_seeked_frame", max_seeked_frame, "seeking_frame", seeking_frame, "seeking_pts", seeking_pts, "seek_video_frame_found", seek_video_frame_found, "seek_audio_frame_found", seek_audio_frame_found); // Seek again... to the nearest Keyframe Seek(seeking_frame - (10 * seek_count * seek_count)); - } - else - { + } else { // SEEK WORKED ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckSeek (Successful)", "is_video_seek", is_video_seek, "current_pts", packet->pts, "seeking_pts", seeking_pts, "seeking_frame", seeking_frame, "seek_video_frame_found", seek_video_frame_found, "seek_audio_frame_found", seek_audio_frame_found); @@ -1273,8 +1234,7 @@ bool FFmpegReader::CheckSeek(bool is_video) } // Process a video packet -void FFmpegReader::ProcessVideoPacket(int64_t requested_frame) -{ +void FFmpegReader::ProcessVideoPacket(int64_t requested_frame) { // Calculate current frame # int64_t current_frame = ConvertVideoPTStoFrame(GetVideoPTS()); @@ -1283,8 +1243,7 @@ void FFmpegReader::ProcessVideoPacket(int64_t requested_frame) seek_video_frame_found = current_frame; // Are we close enough to decode the frame? and is this frame # valid? 
- if ((current_frame < (requested_frame - 20)) or (current_frame == -1)) - { + if ((current_frame < (requested_frame - 20)) or (current_frame == -1)) { // Remove frame and packet RemoveAVFrame(pFrame); @@ -1306,10 +1265,10 @@ void FFmpegReader::ProcessVideoPacket(int64_t requested_frame) AVFrame *my_frame = pFrame; // Add video frame to list of processing video frames - const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); processing_video_frames[current_frame] = current_frame; - #pragma omp task firstprivate(current_frame, my_frame, height, width, video_length, pix_fmt) +#pragma omp task firstprivate(current_frame, my_frame, height, width, video_length, pix_fmt) { // Create variables for a RGB Frame (since most videos are not in RGB, we must convert it) AVFrame *pFrameRGB = NULL; @@ -1333,7 +1292,7 @@ void FFmpegReader::ProcessVideoPacket(int64_t requested_frame) if (max_height <= 0) max_height = info.height; - Clip* parent = (Clip*) GetClip(); + Clip *parent = (Clip *) GetClip(); if (parent) { if (parent->scale == SCALE_FIT || parent->scale == SCALE_STRETCH) { // Best fit or Stretch scaling (based on max timeline size * scaling keyframes) @@ -1354,8 +1313,7 @@ void FFmpegReader::ProcessVideoPacket(int64_t requested_frame) if (width_size.width() >= max_width && width_size.height() >= max_height) { max_width = max(max_width, width_size.width()); max_height = max(max_height, width_size.height()); - } - else { + } else { max_width = max(max_width, height_size.width()); max_height = max(max_height, height_size.height()); } @@ -1389,7 +1347,7 @@ void FFmpegReader::ProcessVideoPacket(int64_t requested_frame) // Determine required buffer size and allocate buffer numBytes = AV_GET_IMAGE_SIZE(PIX_FMT_RGBA, width, height); - #pragma omp critical (video_buffer) +#pragma omp critical (video_buffer) buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t)); // Copy picture data from one AVFrame (or AVPicture) to 
another one. @@ -1404,7 +1362,7 @@ void FFmpegReader::ProcessVideoPacket(int64_t requested_frame) // Resize / Convert to RGB sws_scale(img_convert_ctx, my_frame->data, my_frame->linesize, 0, - original_height, pFrameRGB->data, pFrameRGB->linesize); + original_height, pFrameRGB->data, pFrameRGB->linesize); // Create or get the existing frame object std::shared_ptr f = CreateFrame(current_frame); @@ -1416,7 +1374,7 @@ void FFmpegReader::ProcessVideoPacket(int64_t requested_frame) working_cache.Add(f); // Keep track of last last_video_frame - #pragma omp critical (video_buffer) +#pragma omp critical (video_buffer) last_video_frame = f; // Free the RGB image @@ -1429,7 +1387,7 @@ void FFmpegReader::ProcessVideoPacket(int64_t requested_frame) // Remove video frame from list of processing video frames { - const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); processing_video_frames.erase(current_frame); processed_video_frames[current_frame] = current_frame; } @@ -1442,15 +1400,13 @@ void FFmpegReader::ProcessVideoPacket(int64_t requested_frame) } // Process an audio packet -void FFmpegReader::ProcessAudioPacket(int64_t requested_frame, int64_t target_frame, int starting_sample) -{ +void FFmpegReader::ProcessAudioPacket(int64_t requested_frame, int64_t target_frame, int starting_sample) { // Track 1st audio packet after a successful seek if (!seek_audio_frame_found && is_seeking) seek_audio_frame_found = target_frame; // Are we close enough to decode the frame's audio? 
- if (target_frame < (requested_frame - 20)) - { + if (target_frame < (requested_frame - 20)) { // Debug output ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (Skipped)", "requested_frame", requested_frame, "target_frame", target_frame, "starting_sample", starting_sample, "", -1, "", -1, "", -1); @@ -1471,9 +1427,9 @@ void FFmpegReader::ProcessAudioPacket(int64_t requested_frame, int64_t target_fr // re-initialize buffer size (it gets changed in the avcodec_decode_audio2 method call) int buf_size = AVCODEC_MAX_AUDIO_FRAME_SIZE + MY_INPUT_BUFFER_PADDING_SIZE; - #pragma omp critical (ProcessAudioPacket) +#pragma omp critical (ProcessAudioPacket) { - #if IS_FFMPEG_3_2 +#if IS_FFMPEG_3_2 int ret = 0; frame_finished = 1; while((packet->size > 0 || (!packet->data && frame_finished)) && ret >= 0) { @@ -1500,7 +1456,7 @@ void FFmpegReader::ProcessAudioPacket(int64_t requested_frame, int64_t target_fr { ret = -1; } - #else +#else int used = avcodec_decode_audio4(aCodecCtx, audio_frame, &frame_finished, packet); #endif } @@ -1508,12 +1464,12 @@ void FFmpegReader::ProcessAudioPacket(int64_t requested_frame, int64_t target_fr if (frame_finished) { // determine how many samples were decoded - int planar = av_sample_fmt_is_planar((AVSampleFormat)AV_GET_CODEC_PIXEL_FORMAT(aStream, aCodecCtx)); + int planar = av_sample_fmt_is_planar((AVSampleFormat) AV_GET_CODEC_PIXEL_FORMAT(aStream, aCodecCtx)); int plane_size = -1; data_size = av_samples_get_buffer_size(&plane_size, - AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channels, - audio_frame->nb_samples, - (AVSampleFormat)(AV_GET_SAMPLE_FORMAT(aStream, aCodecCtx)), 1); + AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channels, + audio_frame->nb_samples, + (AVSampleFormat) (AV_GET_SAMPLE_FORMAT(aStream, aCodecCtx)), 1); // Calculate total number of samples packet_samples = audio_frame->nb_samples * AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channels; @@ -1539,12 +1495,11 @@ void 
FFmpegReader::ProcessAudioPacket(int64_t requested_frame, int64_t target_fr // Add audio frame to list of processing audio frames { - const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); processing_audio_frames.insert(pair(previous_packet_location.frame, previous_packet_location.frame)); } - while (pts_remaining_samples) - { + while (pts_remaining_samples) { // Get Samples per frame (for this frame number) int samples_per_frame = Frame::GetSamplesPerFrame(previous_packet_location.frame, info.fps, info.sample_rate, info.channels); @@ -1563,7 +1518,7 @@ void FFmpegReader::ProcessAudioPacket(int64_t requested_frame, int64_t target_fr // Add audio frame to list of processing audio frames { - const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); processing_audio_frames.insert(pair(previous_packet_location.frame, previous_packet_location.frame)); } @@ -1590,24 +1545,24 @@ void FFmpegReader::ProcessAudioPacket(int64_t requested_frame, int64_t target_fr // setup resample context avr = SWR_ALLOC(); - av_opt_set_int(avr, "in_channel_layout", AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout, 0); + av_opt_set_int(avr, "in_channel_layout", AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout, 0); av_opt_set_int(avr, "out_channel_layout", AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout, 0); - av_opt_set_int(avr, "in_sample_fmt", AV_GET_SAMPLE_FORMAT(aStream, aCodecCtx), 0); - av_opt_set_int(avr, "out_sample_fmt", AV_SAMPLE_FMT_S16, 0); - av_opt_set_int(avr, "in_sample_rate", info.sample_rate, 0); - av_opt_set_int(avr, "out_sample_rate", info.sample_rate, 0); - av_opt_set_int(avr, "in_channels", info.channels, 0); - av_opt_set_int(avr, "out_channels", info.channels, 0); + av_opt_set_int(avr, "in_sample_fmt", AV_GET_SAMPLE_FORMAT(aStream, aCodecCtx), 0); + av_opt_set_int(avr, "out_sample_fmt", AV_SAMPLE_FMT_S16, 0); + 
av_opt_set_int(avr, "in_sample_rate", info.sample_rate, 0); + av_opt_set_int(avr, "out_sample_rate", info.sample_rate, 0); + av_opt_set_int(avr, "in_channels", info.channels, 0); + av_opt_set_int(avr, "out_channels", info.channels, 0); int r = SWR_INIT(avr); // Convert audio samples - nb_samples = SWR_CONVERT(avr, // audio resample context - audio_converted->data, // output data pointers - audio_converted->linesize[0], // output plane size, in bytes. (0 if unknown) - audio_converted->nb_samples, // maximum number of samples that the output buffer can hold - audio_frame->data, // input data pointers - audio_frame->linesize[0], // input plane size, in bytes (0 if unknown) - audio_frame->nb_samples); // number of input samples to convert + nb_samples = SWR_CONVERT(avr, // audio resample context + audio_converted->data, // output data pointers + audio_converted->linesize[0], // output plane size, in bytes. (0 if unknown) + audio_converted->nb_samples, // maximum number of samples that the output buffer can hold + audio_frame->data, // input data pointers + audio_frame->linesize[0], // input plane size, in bytes (0 if unknown) + audio_frame->nb_samples); // number of input samples to convert // Copy audio samples over original samples memcpy(audio_buf, audio_converted->data[0], audio_converted->nb_samples * av_get_bytes_per_sample(AV_SAMPLE_FMT_S16) * info.channels); @@ -1623,8 +1578,7 @@ void FFmpegReader::ProcessAudioPacket(int64_t requested_frame, int64_t target_fr int64_t starting_frame_number = -1; bool partial_frame = true; - for (int channel_filter = 0; channel_filter < info.channels; channel_filter++) - { + for (int channel_filter = 0; channel_filter < info.channels; channel_filter++) { // Array of floats (to hold samples for each channel) starting_frame_number = target_frame; int channel_buffer_size = packet_samples / info.channels; @@ -1638,11 +1592,9 @@ void FFmpegReader::ProcessAudioPacket(int64_t requested_frame, int64_t target_fr // Toggle through each 
channel number, since channel data is stored like (left right left right) int channel = 0; int position = 0; - for (int sample = 0; sample < packet_samples; sample++) - { + for (int sample = 0; sample < packet_samples; sample++) { // Only add samples for current channel - if (channel_filter == channel) - { + if (channel_filter == channel) { // Add sample (convert from (-32768 to 32768) to (-1.0 to 1.0)) channel_buffer[position] = audio_buf[sample] * (1.0f / (1 << 15)); @@ -1653,7 +1605,7 @@ void FFmpegReader::ProcessAudioPacket(int64_t requested_frame, int64_t target_fr // increment channel (if needed) if ((channel + 1) < info.channels) // move to next channel - channel ++; + channel++; else // reset channel channel = 0; @@ -1662,9 +1614,8 @@ void FFmpegReader::ProcessAudioPacket(int64_t requested_frame, int64_t target_fr // Loop through samples, and add them to the correct frames int start = starting_sample; int remaining_samples = channel_buffer_size; - float *iterate_channel_buffer = channel_buffer; // pointer to channel buffer - while (remaining_samples > 0) - { + float *iterate_channel_buffer = channel_buffer; // pointer to channel buffer + while (remaining_samples > 0) { // Get Samples per frame (for this frame number) int samples_per_frame = Frame::GetSamplesPerFrame(starting_frame_number, info.fps, info.sample_rate, info.channels); @@ -1718,7 +1669,7 @@ void FFmpegReader::ProcessAudioPacket(int64_t requested_frame, int64_t target_fr // Remove audio frame from list of processing audio frames { - const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); // Update all frames as completed for (int64_t f = target_frame; f < starting_frame_number; f++) { // Remove the frame # from the processing list. NOTE: If more than one thread is @@ -1747,10 +1698,8 @@ void FFmpegReader::ProcessAudioPacket(int64_t requested_frame, int64_t target_fr } - // Seek to a specific frame. 
This is not always frame accurate, it's more of an estimation on many codecs. -void FFmpegReader::Seek(int64_t requested_frame) -{ +void FFmpegReader::Seek(int64_t requested_frame) { // Adjust for a requested frame that is too small or too large if (requested_frame < 1) requested_frame = 1; @@ -1760,7 +1709,7 @@ void FFmpegReader::Seek(int64_t requested_frame) int processing_video_frames_size = 0; int processing_audio_frames_size = 0; { - const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); processing_video_frames_size = processing_video_frames.size(); processing_audio_frames_size = processing_audio_frames.size(); } @@ -1771,7 +1720,7 @@ void FFmpegReader::Seek(int64_t requested_frame) // Wait for any processing frames to complete while (processing_video_frames_size + processing_audio_frames_size > 0) { usleep(2500); - const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); processing_video_frames_size = processing_video_frames.size(); processing_audio_frames_size = processing_audio_frames.size(); } @@ -1782,7 +1731,7 @@ void FFmpegReader::Seek(int64_t requested_frame) // Clear processed lists { - const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); processing_audio_frames.clear(); processing_video_frames.clear(); processed_video_frames.clear(); @@ -1809,8 +1758,7 @@ void FFmpegReader::Seek(int64_t requested_frame) // If seeking near frame 1, we need to close and re-open the file (this is more reliable than seeking) int buffer_amount = max(OPEN_MP_NUM_PROCESSORS, 8); - if (requested_frame - buffer_amount < 20) - { + if (requested_frame - buffer_amount < 20) { // Close and re-open file (basically seeking to frame 1) Close(); Open(); @@ -1828,21 +1776,18 @@ void FFmpegReader::Seek(int64_t requested_frame) } seek_audio_frame_found = 0; // used to detect which frames to throw away after 
a seek seek_video_frame_found = 0; // used to detect which frames to throw away after a seek - } - else - { + + } else { // Seek to nearest key-frame (aka, i-frame) bool seek_worked = false; int64_t seek_target = 0; // Seek video stream (if any) - if (!seek_worked && info.has_video) - { + if (!seek_worked && info.has_video) { seek_target = ConvertFrameToVideoPTS(requested_frame - buffer_amount); if (av_seek_frame(pFormatCtx, info.video_stream_index, seek_target, AVSEEK_FLAG_BACKWARD) < 0) { fprintf(stderr, "%s: error while seeking video stream\n", pFormatCtx->AV_FILENAME); - } else - { + } else { // VIDEO SEEK is_video_seek = true; seek_worked = true; @@ -1850,13 +1795,11 @@ void FFmpegReader::Seek(int64_t requested_frame) } // Seek audio stream (if not already seeked... and if an audio stream is found) - if (!seek_worked && info.has_audio) - { + if (!seek_worked && info.has_audio) { seek_target = ConvertFrameToAudioPTS(requested_frame - buffer_amount); if (av_seek_frame(pFormatCtx, info.audio_stream_index, seek_target, AVSEEK_FLAG_BACKWARD) < 0) { fprintf(stderr, "%s: error while seeking audio stream\n", pFormatCtx->AV_FILENAME); - } else - { + } else { // AUDIO SEEK is_video_seek = false; seek_worked = true; @@ -1864,8 +1807,7 @@ void FFmpegReader::Seek(int64_t requested_frame) } // Was the seek successful? 
- if (seek_worked) - { + if (seek_worked) { // Flush audio buffer if (info.has_audio) avcodec_flush_buffers(aCodecCtx); @@ -1888,9 +1830,7 @@ void FFmpegReader::Seek(int64_t requested_frame) seek_audio_frame_found = 0; // used to detect which frames to throw away after a seek seek_video_frame_found = 0; // used to detect which frames to throw away after a seek - } - else - { + } else { // seek failed is_seeking = false; seeking_pts = 0; @@ -1912,10 +1852,9 @@ void FFmpegReader::Seek(int64_t requested_frame) } // Get the PTS for the current video packet -int64_t FFmpegReader::GetVideoPTS() -{ +int64_t FFmpegReader::GetVideoPTS() { int64_t current_pts = 0; - if(packet->dts != AV_NOPTS_VALUE) + if (packet->dts != AV_NOPTS_VALUE) current_pts = packet->dts; // Return adjusted PTS @@ -1923,11 +1862,9 @@ int64_t FFmpegReader::GetVideoPTS() } // Update PTS Offset (if any) -void FFmpegReader::UpdatePTSOffset(bool is_video) -{ +void FFmpegReader::UpdatePTSOffset(bool is_video) { // Determine the offset between the PTS and Frame number (only for 1st frame) - if (is_video) - { + if (is_video) { // VIDEO PACKET if (video_pts_offset == 99999) // Has the offset been set yet? { @@ -1937,9 +1874,7 @@ void FFmpegReader::UpdatePTSOffset(bool is_video) // debug output ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::UpdatePTSOffset (Video)", "video_pts_offset", video_pts_offset, "is_video", is_video, "", -1, "", -1, "", -1, "", -1); } - } - else - { + } else { // AUDIO PACKET if (audio_pts_offset == 99999) // Has the offset been set yet? 
{ @@ -1953,8 +1888,7 @@ void FFmpegReader::UpdatePTSOffset(bool is_video) } // Convert PTS into Frame Number -int64_t FFmpegReader::ConvertVideoPTStoFrame(int64_t pts) -{ +int64_t FFmpegReader::ConvertVideoPTStoFrame(int64_t pts) { // Apply PTS offset pts = pts + video_pts_offset; int64_t previous_video_frame = current_video_frame; @@ -1974,10 +1908,10 @@ int64_t FFmpegReader::ConvertVideoPTStoFrame(int64_t pts) if (frame == previous_video_frame) { // return -1 frame number frame = -1; - } - else + } else { // Increment expected frame current_video_frame++; + } if (current_video_frame < frame) // has missing frames @@ -1985,7 +1919,7 @@ int64_t FFmpegReader::ConvertVideoPTStoFrame(int64_t pts) // Sometimes frames are missing due to varying timestamps, or they were dropped. Determine // if we are missing a video frame. - const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); while (current_video_frame < frame) { if (!missing_video_frames.count(current_video_frame)) { ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ConvertVideoPTStoFrame (tracking missing frame)", "current_video_frame", current_video_frame, "previous_video_frame", previous_video_frame, "", -1, "", -1, "", -1, "", -1); @@ -2006,8 +1940,7 @@ int64_t FFmpegReader::ConvertVideoPTStoFrame(int64_t pts) } // Convert Frame Number into Video PTS -int64_t FFmpegReader::ConvertFrameToVideoPTS(int64_t frame_number) -{ +int64_t FFmpegReader::ConvertFrameToVideoPTS(int64_t frame_number) { // Get timestamp of this frame (in seconds) double seconds = double(frame_number) / info.fps.ToDouble(); @@ -2019,8 +1952,7 @@ int64_t FFmpegReader::ConvertFrameToVideoPTS(int64_t frame_number) } // Convert Frame Number into Video PTS -int64_t FFmpegReader::ConvertFrameToAudioPTS(int64_t frame_number) -{ +int64_t FFmpegReader::ConvertFrameToAudioPTS(int64_t frame_number) { // Get timestamp of this frame (in seconds) double seconds = double(frame_number) / 
info.fps.ToDouble(); @@ -2032,8 +1964,7 @@ int64_t FFmpegReader::ConvertFrameToAudioPTS(int64_t frame_number) } // Calculate Starting video frame and sample # for an audio PTS -AudioLocation FFmpegReader::GetAudioPTSLocation(int64_t pts) -{ +AudioLocation FFmpegReader::GetAudioPTSLocation(int64_t pts) { // Apply PTS offset pts = pts + audio_pts_offset; @@ -2066,8 +1997,7 @@ AudioLocation FFmpegReader::GetAudioPTSLocation(int64_t pts) // Compare to previous audio packet (and fix small gaps due to varying PTS timestamps) if (previous_packet_location.frame != -1) { - if (location.is_near(previous_packet_location, samples_per_frame, samples_per_frame)) - { + if (location.is_near(previous_packet_location, samples_per_frame, samples_per_frame)) { int64_t orig_frame = location.frame; int orig_start = location.sample_start; @@ -2082,7 +2012,7 @@ AudioLocation FFmpegReader::GetAudioPTSLocation(int64_t pts) // Debug output ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAudioPTSLocation (Audio Gap Ignored - too big)", "Previous location frame", previous_packet_location.frame, "Target Frame", location.frame, "Target Audio Sample", location.sample_start, "pts", pts, "", -1, "", -1); - const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); for (int64_t audio_frame = previous_packet_location.frame; audio_frame < location.frame; audio_frame++) { if (!missing_audio_frames.count(audio_frame)) { ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAudioPTSLocation (tracking missing frame)", "missing_audio_frame", audio_frame, "previous_audio_frame", previous_packet_location.frame, "new location frame", location.frame, "", -1, "", -1, "", -1); @@ -2100,12 +2030,10 @@ AudioLocation FFmpegReader::GetAudioPTSLocation(int64_t pts) } // Create a new Frame (or return an existing one) and add it to the working queue. 
-std::shared_ptr FFmpegReader::CreateFrame(int64_t requested_frame) -{ +std::shared_ptr FFmpegReader::CreateFrame(int64_t requested_frame) { // Check working cache std::shared_ptr output = working_cache.GetFrame(requested_frame); - if (!output) - { + if (!output) { // Create a new frame on the working cache output = std::make_shared(requested_frame, info.width, info.height, "#000000", Frame::GetSamplesPerFrame(requested_frame, info.fps, info.sample_rate, info.channels), info.channels); output->SetPixelRatio(info.pixel_ratio.num, info.pixel_ratio.den); // update pixel ratio @@ -2129,20 +2057,21 @@ bool FFmpegReader::IsPartialFrame(int64_t requested_frame) { // Sometimes a seek gets partial frames, and we need to remove them bool seek_trash = false; int64_t max_seeked_frame = seek_audio_frame_found; // determine max seeked frame - if (seek_video_frame_found > max_seeked_frame) + if (seek_video_frame_found > max_seeked_frame) { max_seeked_frame = seek_video_frame_found; + } if ((info.has_audio && seek_audio_frame_found && max_seeked_frame >= requested_frame) || - (info.has_video && seek_video_frame_found && max_seeked_frame >= requested_frame)) - seek_trash = true; + (info.has_video && seek_video_frame_found && max_seeked_frame >= requested_frame)) { + seek_trash = true; + } return seek_trash; } // Check if a frame is missing and attempt to replace it's frame image (and -bool FFmpegReader::CheckMissingFrame(int64_t requested_frame) -{ +bool FFmpegReader::CheckMissingFrame(int64_t requested_frame) { // Lock - const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); // Init # of times this frame has been checked so far int checked_count = 0; @@ -2171,9 +2100,9 @@ bool FFmpegReader::CheckMissingFrame(int64_t requested_frame) if (checked_count > 8 && !missing_video_frames.count(requested_frame) && !processing_audio_frames.count(requested_frame) && processed_audio_frames.count(requested_frame) && last_frame && 
last_video_frame->has_image_data && aCodecId == AV_CODEC_ID_MP3 && (vCodecId == AV_CODEC_ID_MJPEGB || vCodecId == AV_CODEC_ID_MJPEG)) { - missing_video_frames.insert(pair(requested_frame, last_video_frame->number)); - missing_video_frames_source.insert(pair(last_video_frame->number, requested_frame)); - missing_frames.Add(last_video_frame); + missing_video_frames.insert(pair(requested_frame, last_video_frame->number)); + missing_video_frames_source.insert(pair(last_video_frame->number, requested_frame)); + missing_frames.Add(last_video_frame); } } @@ -2238,8 +2167,7 @@ bool FFmpegReader::CheckMissingFrame(int64_t requested_frame) } // Check the working queue, and move finished frames to the finished queue -void FFmpegReader::CheckWorkingFrames(bool end_of_stream, int64_t requested_frame) -{ +void FFmpegReader::CheckWorkingFrames(bool end_of_stream, int64_t requested_frame) { // Loop through all working queue frames bool checked_count_tripped = false; int max_checked_count = 80; @@ -2247,8 +2175,7 @@ void FFmpegReader::CheckWorkingFrames(bool end_of_stream, int64_t requested_fram // Check if requested frame is 'missing' CheckMissingFrame(requested_frame); - while (true) - { + while (true) { // Get the front frame of working cache std::shared_ptr f(working_cache.GetSmallestFrame()); @@ -2272,7 +2199,7 @@ void FFmpegReader::CheckWorkingFrames(bool end_of_stream, int64_t requested_fram bool is_video_ready = false; bool is_audio_ready = false; { // limit scope of next few lines - const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); is_video_ready = processed_video_frames.count(f->number); is_audio_ready = processed_audio_frames.count(f->number); @@ -2317,13 +2244,11 @@ void FFmpegReader::CheckWorkingFrames(bool end_of_stream, int64_t requested_fram ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckWorkingFrames", "requested_frame", requested_frame, "frame_number", f->number, "is_video_ready", 
is_video_ready, "is_audio_ready", is_audio_ready, "checked_count", checked_count, "checked_frames_size", checked_frames_size); // Check if working frame is final - if ((!end_of_stream && is_video_ready && is_audio_ready) || end_of_stream || is_seek_trash) - { + if ((!end_of_stream && is_video_ready && is_audio_ready) || end_of_stream || is_seek_trash) { // Debug output ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckWorkingFrames (mark frame as final)", "requested_frame", requested_frame, "f->number", f->number, "is_seek_trash", is_seek_trash, "Working Cache Count", working_cache.Count(), "Final Cache Count", final_cache.Count(), "end_of_stream", end_of_stream); - if (!is_seek_trash) - { + if (!is_seek_trash) { // Add missing image (if needed - sometimes end_of_stream causes frames with only audio) if (info.has_video && !is_video_ready && last_video_frame) // Copy image from last frame @@ -2337,7 +2262,7 @@ void FFmpegReader::CheckWorkingFrames(bool end_of_stream, int64_t requested_fram // Add to missing cache (if another frame depends on it) { - const GenericScopedLock lock(processingCriticalSection); + const GenericScopedLock lock(processingCriticalSection); if (missing_video_frames_source.count(f->number)) { // Debug output ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckWorkingFrames (add frame to missing cache)", "f->number", f->number, "is_seek_trash", is_seek_trash, "Missing Cache Count", missing_frames.Count(), "Working Cache Count", working_cache.Count(), "Final Cache Count", final_cache.Count(), "", -1); @@ -2358,16 +2283,16 @@ void FFmpegReader::CheckWorkingFrames(bool end_of_stream, int64_t requested_fram // Seek trash, so delete the frame from the working cache, and never add it to the final cache. working_cache.Remove(f->number); } - } - else + + } else { // Stop looping break; + } } } // Check for the correct frames per second (FPS) value by scanning the 1st few seconds of video packets. 
-void FFmpegReader::CheckFPS() -{ +void FFmpegReader::CheckFPS() { check_fps = true; @@ -2380,19 +2305,16 @@ void FFmpegReader::CheckFPS() int64_t pts = 0; // Loop through the stream - while (true) - { + while (true) { // Get the next packet (if any) if (GetNextPacket() < 0) // Break loop when no more packets found break; // Video packet - if (packet->stream_index == videoStream) - { + if (packet->stream_index == videoStream) { // Check if the AVFrame is finished and set it - if (GetAVFrame()) - { + if (GetAVFrame()) { // Update PTS / Frame Offset (if any) UpdatePTSOffset(true); @@ -2467,13 +2389,11 @@ void FFmpegReader::CheckFPS() } // Remove AVFrame from cache (and deallocate it's memory) -void FFmpegReader::RemoveAVFrame(AVFrame* remove_frame) -{ +void FFmpegReader::RemoveAVFrame(AVFrame *remove_frame) { // Remove pFrame (if exists) - if (remove_frame) - { + if (remove_frame) { // Free memory - #pragma omp critical (packet_cache) +#pragma omp critical (packet_cache) { av_freep(&remove_frame->data[0]); #ifndef WIN32 @@ -2484,8 +2404,7 @@ void FFmpegReader::RemoveAVFrame(AVFrame* remove_frame) } // Remove AVPacket from cache (and deallocate it's memory) -void FFmpegReader::RemoveAVPacket(AVPacket* remove_packet) -{ +void FFmpegReader::RemoveAVPacket(AVPacket *remove_packet) { // deallocate memory for packet AV_FREE_PACKET(remove_packet); @@ -2494,14 +2413,12 @@ void FFmpegReader::RemoveAVPacket(AVPacket* remove_packet) } /// Get the smallest video frame that is still being processed -int64_t FFmpegReader::GetSmallestVideoFrame() -{ +int64_t FFmpegReader::GetSmallestVideoFrame() { // Loop through frame numbers map::iterator itr; int64_t smallest_frame = -1; - const GenericScopedLock lock(processingCriticalSection); - for(itr = processing_video_frames.begin(); itr != processing_video_frames.end(); ++itr) - { + const GenericScopedLock lock(processingCriticalSection); + for (itr = processing_video_frames.begin(); itr != processing_video_frames.end(); ++itr) { if 
(itr->first < smallest_frame || smallest_frame == -1) smallest_frame = itr->first; } @@ -2511,14 +2428,12 @@ int64_t FFmpegReader::GetSmallestVideoFrame() } /// Get the smallest audio frame that is still being processed -int64_t FFmpegReader::GetSmallestAudioFrame() -{ +int64_t FFmpegReader::GetSmallestAudioFrame() { // Loop through frame numbers map::iterator itr; int64_t smallest_frame = -1; - const GenericScopedLock lock(processingCriticalSection); - for(itr = processing_audio_frames.begin(); itr != processing_audio_frames.end(); ++itr) - { + const GenericScopedLock lock(processingCriticalSection); + for (itr = processing_audio_frames.begin(); itr != processing_audio_frames.end(); ++itr) { if (itr->first < smallest_frame || smallest_frame == -1) smallest_frame = itr->first; } @@ -2552,18 +2467,16 @@ void FFmpegReader::SetJson(string value) { // Parse JSON string into JSON objects Json::Value root; Json::Reader reader; - bool success = reader.parse( value, root ); + bool success = reader.parse(value, root); if (!success) // Raise exception throw InvalidJSON("JSON could not be parsed (or is invalid)", ""); - try - { + try { // Set all values that match SetJsonValue(root); } - catch (exception e) - { + catch (exception e) { // Error parsing JSON (or missing keys) throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", ""); } @@ -2580,8 +2493,7 @@ void FFmpegReader::SetJsonValue(Json::Value root) { path = root["path"].asString(); // Re-Open path, and re-init everything (if needed) - if (is_open) - { + if (is_open) { Close(); Open(); } From 7930b289dcda88fd078ba01fa1ea9cf14d02ab1a Mon Sep 17 00:00:00 2001 From: "FeRD (Frank Dana)" Date: Thu, 18 Apr 2019 07:51:03 -0400 Subject: [PATCH 078/109] Update Python install path detection This changes the `PYTHON_MODULE_PATH` handling in two ways: * Makes it a cache variable, so detection is only run once and only if it's not already set. This allows it to be overridden on the command line, e.g. 
`cmake -DPYTHON_MODULE_PATH:PATH=lib/python3.6/dist-packages` * Uses the presence of the `pybuild` executable to sense for a Debian-derived system (only on UNIX AND NOT APPLE), and if found it falls back to the old `getsitepackages()[0]` path extraction method. --- src/bindings/python/CMakeLists.txt | 37 ++++++++++++++++++++++-------- 1 file changed, 28 insertions(+), 9 deletions(-) diff --git a/src/bindings/python/CMakeLists.txt b/src/bindings/python/CMakeLists.txt index 3418d2de..08182d95 100644 --- a/src/bindings/python/CMakeLists.txt +++ b/src/bindings/python/CMakeLists.txt @@ -66,17 +66,36 @@ if (PYTHONLIBS_FOUND AND PYTHONINTERP_FOUND) ${PYTHON_LIBRARIES} openshot) ### FIND THE PYTHON INTERPRETER (AND THE SITE PACKAGES FOLDER) - execute_process ( COMMAND ${PYTHON_EXECUTABLE} -c "\ + if (UNIX AND NOT APPLE) + ### Special-case for Debian's crazy, by checking to see if pybuild + ### is available. We don't use it, except as a canary in a coal mine + find_program(PYBUILD_EXECUTABLE pybuild + DOC "Path to Debian's pybuild utility") + if (PYBUILD_EXECUTABLE) + # We're on a Debian derivative, fall back to old path detection + set(py_detection "import site; print(site.getsitepackages()[0])") + else() + # Use distutils to detect install path + set (py_detection "\ from distutils.sysconfig import get_python_lib; \ -print( get_python_lib( plat_specific=True, prefix='${CMAKE_INSTALL_PREFIX}' ) )" - OUTPUT_VARIABLE _ABS_PYTHON_MODULE_PATH - OUTPUT_STRIP_TRAILING_WHITESPACE ) +print( get_python_lib( plat_specific=True, prefix='${CMAKE_INSTALL_PREFIX}' ) )") + endif() + endif() - GET_FILENAME_COMPONENT(_ABS_PYTHON_MODULE_PATH - "${_ABS_PYTHON_MODULE_PATH}" ABSOLUTE) - FILE(RELATIVE_PATH _REL_PYTHON_MODULE_PATH - ${CMAKE_INSTALL_PREFIX} ${_ABS_PYTHON_MODULE_PATH}) - SET(PYTHON_MODULE_PATH ${_REL_PYTHON_MODULE_PATH}) + if (NOT PYTHON_MODULE_PATH) + execute_process ( COMMAND ${PYTHON_EXECUTABLE} -c "${py_detection}" + OUTPUT_VARIABLE _ABS_PYTHON_MODULE_PATH + 
OUTPUT_STRIP_TRAILING_WHITESPACE ) + + GET_FILENAME_COMPONENT(_ABS_PYTHON_MODULE_PATH + "${_ABS_PYTHON_MODULE_PATH}" ABSOLUTE) + FILE(RELATIVE_PATH _REL_PYTHON_MODULE_PATH + ${CMAKE_INSTALL_PREFIX} ${_ABS_PYTHON_MODULE_PATH}) + SET(PYTHON_MODULE_PATH ${_REL_PYTHON_MODULE_PATH} + CACHE PATH "Install path for Python modules (relative to prefix)") + endif() + + message(STATUS "Will install Python module to: ${PYTHON_MODULE_PATH}") ############### INSTALL HEADERS & LIBRARY ################ ### Install Python bindings From b3f5406db38c25f3bfff4c9bb408df9880810c6e Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Thu, 18 Apr 2019 14:04:37 -0500 Subject: [PATCH 079/109] More code reformatting on FFmpegWriter.h/.cpp --- include/FFmpegWriter.h | 2 +- include/OpenMPUtilities.h | 1 - src/FFmpegWriter.cpp | 753 +++++++++++++++++--------------------- 3 files changed, 344 insertions(+), 412 deletions(-) diff --git a/include/FFmpegWriter.h b/include/FFmpegWriter.h index b93ef7b3..35dd1ed9 100644 --- a/include/FFmpegWriter.h +++ b/include/FFmpegWriter.h @@ -233,7 +233,7 @@ namespace openshot { void process_video_packet(std::shared_ptr frame); /// write all queued frames' audio to the video file - void write_audio_packets(bool final); + void write_audio_packets(bool is_final); /// write video frame bool write_video_packet(std::shared_ptr frame, AVFrame *frame_final); diff --git a/include/OpenMPUtilities.h b/include/OpenMPUtilities.h index 0411b6ba..7c198a76 100644 --- a/include/OpenMPUtilities.h +++ b/include/OpenMPUtilities.h @@ -42,5 +42,4 @@ using namespace openshot; #define FF_NUM_PROCESSORS (min(omp_get_num_procs(), max(2, openshot::Settings::Instance()->FF_THREADS) )) - #endif diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index 1c9094a6..e12ff094 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -83,8 +83,7 @@ FFmpegWriter::FFmpegWriter(string path) : initial_audio_input_frame_size(0), img_convert_ctx(NULL), cache_size(8), num_of_rescalers(32), 
rescaler_position(0), video_codec(NULL), audio_codec(NULL), is_writing(false), write_video_count(0), write_audio_count(0), original_sample_rate(0), original_channels(0), avr(NULL), avr_planar(NULL), is_open(false), prepare_streams(false), - write_header(false), write_trailer(false), audio_encoder_buffer_size(0), audio_encoder_buffer(NULL) -{ + write_header(false), write_trailer(false), audio_encoder_buffer_size(0), audio_encoder_buffer(NULL) { // Disable audio & video (so they can be independently enabled) info.has_audio = false; @@ -98,9 +97,8 @@ FFmpegWriter::FFmpegWriter(string path) : } // Open the writer -void FFmpegWriter::Open() -{ - if (!is_open) { +void FFmpegWriter::Open() { + if (!is_open) { // Open the writer is_open = true; @@ -121,8 +119,7 @@ void FFmpegWriter::Open() } // auto detect format (from path) -void FFmpegWriter::auto_detect_format() -{ +void FFmpegWriter::auto_detect_format() { // Auto detect the output format from the name. default is mpeg. fmt = av_guess_format(NULL, path.c_str(), NULL); if (!fmt) @@ -147,8 +144,7 @@ void FFmpegWriter::auto_detect_format() } // initialize streams -void FFmpegWriter::initialize_streams() -{ +void FFmpegWriter::initialize_streams() { ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::initialize_streams", "fmt->video_codec", fmt->video_codec, "fmt->audio_codec", fmt->audio_codec, "AV_CODEC_ID_NONE", AV_CODEC_ID_NONE, "", -1, "", -1, "", -1); // Add the audio and video streams using the default format codecs and initialize the codecs @@ -164,14 +160,12 @@ void FFmpegWriter::initialize_streams() } // Set video export options -void FFmpegWriter::SetVideoOptions(bool has_video, string codec, Fraction fps, int width, int height, Fraction pixel_ratio, bool interlaced, bool top_field_first, int bit_rate) -{ +void FFmpegWriter::SetVideoOptions(bool has_video, string codec, Fraction fps, int width, int height, Fraction pixel_ratio, bool interlaced, bool top_field_first, int bit_rate) { // Set the video options - 
if (codec.length() > 0) - { + if (codec.length() > 0) { AVCodec *new_codec; // Check if the codec selected is a hardware accelerated codec - #if IS_FFMPEG_3_2 +#if IS_FFMPEG_3_2 #if defined(__linux__) if ( (strcmp(codec.c_str(),"h264_vaapi") == 0)) { new_codec = avcodec_find_encoder_by_name(codec.c_str()); @@ -194,7 +188,7 @@ void FFmpegWriter::SetVideoOptions(bool has_video, string codec, Fraction fps, i hw_en_supported = 0; } } - #elif defined(_WIN32) +#elif defined(_WIN32) if ( (strcmp(codec.c_str(),"h264_dxva2") == 0)) { new_codec = avcodec_find_encoder_by_name(codec.c_str()); hw_en_on = 1; @@ -216,7 +210,7 @@ void FFmpegWriter::SetVideoOptions(bool has_video, string codec, Fraction fps, i hw_en_supported = 0; } } - #elif defined(__APPLE__) +#elif defined(__APPLE__) if ( (strcmp(codec.c_str(),"h264_qsv") == 0)) { new_codec = avcodec_find_encoder_by_name(codec.c_str()); hw_en_on = 1; @@ -229,12 +223,12 @@ void FFmpegWriter::SetVideoOptions(bool has_video, string codec, Fraction fps, i hw_en_on = 0; hw_en_supported = 0; } - #else // is FFmpeg 3 but not linux +#else // is FFmpeg 3 but not linux new_codec = avcodec_find_encoder_by_name(codec.c_str()); - #endif //__linux__ - #else // not ffmpeg 3 +#endif //__linux__ +#else // not ffmpeg 3 new_codec = avcodec_find_encoder_by_name(codec.c_str()); - #endif //IS_FFMPEG_3_2 +#endif //IS_FFMPEG_3_2 if (new_codec == NULL) throw InvalidCodec("A valid video codec could not be found for this file.", path); else { @@ -245,8 +239,7 @@ void FFmpegWriter::SetVideoOptions(bool has_video, string codec, Fraction fps, i fmt->video_codec = new_codec->id; } } - if (fps.num > 0) - { + if (fps.num > 0) { // Set frames per second (if provided) info.fps.num = fps.num; info.fps.den = fps.den; @@ -259,14 +252,13 @@ void FFmpegWriter::SetVideoOptions(bool has_video, string codec, Fraction fps, i info.width = width; if (height >= 1) info.height = height; - if (pixel_ratio.num > 0) - { + if (pixel_ratio.num > 0) { info.pixel_ratio.num = 
pixel_ratio.num; info.pixel_ratio.den = pixel_ratio.den; } - if (bit_rate >= 1000) // bit_rate is the bitrate in b/s + if (bit_rate >= 1000) // bit_rate is the bitrate in b/s info.video_bit_rate = bit_rate; - if ((bit_rate >= 0) && (bit_rate < 64) ) // bit_rate is the bitrate in crf + if ((bit_rate >= 0) && (bit_rate < 64)) // bit_rate is the bitrate in crf info.video_bit_rate = bit_rate; info.interlaced_frame = interlaced; @@ -289,16 +281,13 @@ void FFmpegWriter::SetVideoOptions(bool has_video, string codec, Fraction fps, i } // Set audio export options -void FFmpegWriter::SetAudioOptions(bool has_audio, string codec, int sample_rate, int channels, ChannelLayout channel_layout, int bit_rate) -{ +void FFmpegWriter::SetAudioOptions(bool has_audio, string codec, int sample_rate, int channels, ChannelLayout channel_layout, int bit_rate) { // Set audio options - if (codec.length() > 0) - { + if (codec.length() > 0) { AVCodec *new_codec = avcodec_find_encoder_by_name(codec.c_str()); if (new_codec == NULL) throw InvalidCodec("A valid audio codec could not be found for this file.", path); - else - { + else { // Set audio codec info.acodec = new_codec->name; @@ -327,8 +316,7 @@ void FFmpegWriter::SetAudioOptions(bool has_audio, string codec, int sample_rate } // Set custom options (some codecs accept additional params) -void FFmpegWriter::SetOption(StreamType stream, string name, string value) -{ +void FFmpegWriter::SetOption(StreamType stream, string name, string value) { // Declare codec context AVCodecContext *c = NULL; AVStream *st = NULL; @@ -338,13 +326,11 @@ void FFmpegWriter::SetOption(StreamType stream, string name, string value) st = video_st; // Get codec context c = AV_GET_CODEC_PAR_CONTEXT(st, video_codec); - } - else if (info.has_audio && stream == AUDIO_STREAM && audio_st) { + } else if (info.has_audio && stream == AUDIO_STREAM && audio_st) { st = audio_st; // Get codec context c = AV_GET_CODEC_PAR_CONTEXT(st, audio_codec); - } - else + } else throw 
NoStreamsFound("The stream was not found. Be sure to call PrepareStreams() first.", path); // Init AVOption @@ -357,9 +343,8 @@ void FFmpegWriter::SetOption(StreamType stream, string name, string value) // Was option found? if (option || (name == "g" || name == "qmin" || name == "qmax" || name == "max_b_frames" || name == "mb_decision" || - name == "level" || name == "profile" || name == "slices" || name == "rc_min_rate" || name == "rc_max_rate" || - name == "crf")) - { + name == "level" || name == "profile" || name == "slices" || name == "rc_min_rate" || name == "rc_max_rate" || + name == "crf")) { // Check for specific named options if (name == "g") // Set gop_size @@ -409,81 +394,79 @@ void FFmpegWriter::SetOption(StreamType stream, string name, string value) // encode quality and special settings like lossless // This might be better in an extra methods as more options // and way to set quality are possible - #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 39, 101) - #if IS_FFMPEG_3_2 - if (hw_en_on) { - double mbs = 15000000.0; - if (info.video_bit_rate > 0) { - if (info.video_bit_rate > 42) { - mbs = 380000.0; - } - else { - mbs *= pow(0.912,info.video_bit_rate); - } +#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 39, 101) +#if IS_FFMPEG_3_2 + if (hw_en_on) { + double mbs = 15000000.0; + if (info.video_bit_rate > 0) { + if (info.video_bit_rate > 42) { + mbs = 380000.0; } - c->bit_rate = (int)(mbs); - } else - #endif - { - switch (c->codec_id) { - #if (LIBAVCODEC_VERSION_MAJOR >= 58) - case AV_CODEC_ID_AV1 : - c->bit_rate = 0; - av_opt_set_int(c->priv_data, "crf", min(stoi(value),63), 0); - break; - #endif - case AV_CODEC_ID_VP8 : - c->bit_rate = 10000000; - av_opt_set_int(c->priv_data, "crf", max(min(stoi(value),63),4), 0); // 4-63 - break; - case AV_CODEC_ID_VP9 : - c->bit_rate = 0; // Must be zero! 
- av_opt_set_int(c->priv_data, "crf", min(stoi(value),63), 0); // 0-63 - if (stoi(value) == 0) { - av_opt_set(c->priv_data, "preset", "veryslow", 0); - av_opt_set_int(c->priv_data, "lossless", 1, 0); - } - break; - case AV_CODEC_ID_H264 : - av_opt_set_int(c->priv_data, "crf", min(stoi(value),51), 0); // 0-51 - if (stoi(value) == 0) { - av_opt_set(c->priv_data, "preset", "veryslow", 0); - } - break; - case AV_CODEC_ID_H265 : - av_opt_set_int(c->priv_data, "crf", min(stoi(value),51), 0); // 0-51 - if (stoi(value) == 0) { - av_opt_set(c->priv_data, "preset", "veryslow", 0); - av_opt_set_int(c->priv_data, "lossless", 1, 0); - } - break; - default: - // If this codec doesn't support crf calculate a bitrate - // TODO: find better formula - double mbs = 15000000.0; - if (info.video_bit_rate > 0) { - if (info.video_bit_rate > 42) { - mbs = 380000.0; - } - else { - mbs *= pow(0.912,info.video_bit_rate); - } - } - c->bit_rate = (int)(mbs); + else { + mbs *= pow(0.912,info.video_bit_rate); } } - #endif - } - - else + c->bit_rate = (int)(mbs); + } else +#endif + { + switch (c->codec_id) { +#if (LIBAVCODEC_VERSION_MAJOR >= 58) + case AV_CODEC_ID_AV1 : + c->bit_rate = 0; + av_opt_set_int(c->priv_data, "crf", min(stoi(value),63), 0); + break; +#endif + case AV_CODEC_ID_VP8 : + c->bit_rate = 10000000; + av_opt_set_int(c->priv_data, "crf", max(min(stoi(value), 63), 4), 0); // 4-63 + break; + case AV_CODEC_ID_VP9 : + c->bit_rate = 0; // Must be zero! 
+ av_opt_set_int(c->priv_data, "crf", min(stoi(value), 63), 0); // 0-63 + if (stoi(value) == 0) { + av_opt_set(c->priv_data, "preset", "veryslow", 0); + av_opt_set_int(c->priv_data, "lossless", 1, 0); + } + break; + case AV_CODEC_ID_H264 : + av_opt_set_int(c->priv_data, "crf", min(stoi(value), 51), 0); // 0-51 + if (stoi(value) == 0) { + av_opt_set(c->priv_data, "preset", "veryslow", 0); + } + break; + case AV_CODEC_ID_H265 : + av_opt_set_int(c->priv_data, "crf", min(stoi(value), 51), 0); // 0-51 + if (stoi(value) == 0) { + av_opt_set(c->priv_data, "preset", "veryslow", 0); + av_opt_set_int(c->priv_data, "lossless", 1, 0); + } + break; + default: + // If this codec doesn't support crf calculate a bitrate + // TODO: find better formula + double mbs = 15000000.0; + if (info.video_bit_rate > 0) { + if (info.video_bit_rate > 42) { + mbs = 380000.0; + } else { + mbs *= pow(0.912, info.video_bit_rate); + } + } + c->bit_rate = (int) (mbs); + } + } +#endif + } else { // Set AVOption AV_OPTION_SET(st, c->priv_data, name.c_str(), value.c_str(), c); + } ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::SetOption (" + (string)name + ")", "stream == VIDEO_STREAM", stream == VIDEO_STREAM, "", -1, "", -1, "", -1, "", -1, "", -1); - } - else + } else { throw InvalidOptions("The option is not valid for this codec.", path); + } } @@ -500,8 +483,7 @@ bool FFmpegWriter::IsValidCodec(string codec_name) { } // Prepare & initialize streams and open codecs -void FFmpegWriter::PrepareStreams() -{ +void FFmpegWriter::PrepareStreams() { if (!info.has_audio && !info.has_video) throw InvalidOptions("No video or audio options have been set. You must set has_video or has_audio (or both).", path); @@ -515,8 +497,7 @@ void FFmpegWriter::PrepareStreams() } // Write the file header (after the options are set) -void FFmpegWriter::WriteHeader() -{ +void FFmpegWriter::WriteHeader() { if (!info.has_audio && !info.has_video) throw InvalidOptions("No video or audio options have been set. 
You must set has_video or has_audio (or both).", path); @@ -526,20 +507,19 @@ void FFmpegWriter::WriteHeader() throw InvalidFile("Could not open or write file.", path); } - // Force the output filename (which doesn't always happen for some reason) - snprintf(oc->AV_FILENAME, sizeof(oc->AV_FILENAME), "%s", path.c_str()); + // Force the output filename (which doesn't always happen for some reason) + snprintf(oc->AV_FILENAME, sizeof(oc->AV_FILENAME), "%s", path.c_str()); // Write the stream header, if any // TODO: add avoptions / parameters instead of NULL // Add general metadata (if any) - for(std::map::iterator iter = info.metadata.begin(); iter != info.metadata.end(); ++iter) - { + for (std::map::iterator iter = info.metadata.begin(); iter != info.metadata.end(); ++iter) { av_dict_set(&oc->metadata, iter->first.c_str(), iter->second.c_str(), 0); } if (avformat_write_header(oc, NULL) != 0) { - throw InvalidFile("Could not write header to file.", path); + throw InvalidFile("Could not write header to file.", path); }; // Mark as 'written' @@ -549,8 +529,7 @@ void FFmpegWriter::WriteHeader() } // Add a frame to the queue waiting to be encoded. -void FFmpegWriter::WriteFrame(std::shared_ptr frame) -{ +void FFmpegWriter::WriteFrame(std::shared_ptr frame) { // Check for open reader (or throw exception) if (!is_open) throw WriterClosed("The FFmpegWriter is closed. 
Call Open() before calling this method.", path); @@ -566,15 +545,13 @@ void FFmpegWriter::WriteFrame(std::shared_ptr frame) ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::WriteFrame", "frame->number", frame->number, "spooled_video_frames.size()", spooled_video_frames.size(), "spooled_audio_frames.size()", spooled_audio_frames.size(), "cache_size", cache_size, "is_writing", is_writing, "", -1); // Write the frames once it reaches the correct cache size - if (spooled_video_frames.size() == cache_size || spooled_audio_frames.size() == cache_size) - { + if (spooled_video_frames.size() == cache_size || spooled_audio_frames.size() == cache_size) { // Is writer currently writing? if (!is_writing) // Write frames to video file write_queued_frames(); - else - { + else { // Write frames to video file write_queued_frames(); } @@ -585,8 +562,7 @@ void FFmpegWriter::WriteFrame(std::shared_ptr frame) } // Write all frames in the queue to the video file. -void FFmpegWriter::write_queued_frames() -{ +void FFmpegWriter::write_queued_frames() { ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_queued_frames", "spooled_video_frames.size()", spooled_video_frames.size(), "spooled_audio_frames.size()", spooled_audio_frames.size(), "", -1, "", -1, "", -1, "", -1); // Flip writing flag @@ -608,17 +584,16 @@ void FFmpegWriter::write_queued_frames() // Create blank exception bool has_error_encoding_video = false; - #pragma omp parallel +#pragma omp parallel { - #pragma omp single +#pragma omp single { // Process all audio frames (in a separate thread) if (info.has_audio && audio_st && !queued_audio_frames.empty()) write_audio_packets(false); // Loop through each queued image frame - while (!queued_video_frames.empty()) - { + while (!queued_video_frames.empty()) { // Get front frame (from the queue) std::shared_ptr frame = queued_video_frames.front(); @@ -635,22 +610,19 @@ void FFmpegWriter::write_queued_frames() } // end while } // end omp single - #pragma omp single 
+#pragma omp single { // Loop back through the frames (in order), and write them to the video file - while (!processed_frames.empty()) - { + while (!processed_frames.empty()) { // Get front frame (from the queue) std::shared_ptr frame = processed_frames.front(); - if (info.has_video && video_st) - { + if (info.has_video && video_st) { // Add to deallocate queue (so we can remove the AVFrames when we are done) deallocate_frames.push_back(frame); // Does this frame's AVFrame still exist - if (av_frames.count(frame)) - { + if (av_frames.count(frame)) { // Get AVFrame AVFrame *frame_final = av_frames[frame]; @@ -666,14 +638,12 @@ void FFmpegWriter::write_queued_frames() } // Loop through, and deallocate AVFrames - while (!deallocate_frames.empty()) - { + while (!deallocate_frames.empty()) { // Get front frame (from the queue) std::shared_ptr frame = deallocate_frames.front(); // Does this frame's AVFrame still exist - if (av_frames.count(frame)) - { + if (av_frames.count(frame)) { // Get AVFrame AVFrame *av_frame = av_frames[frame]; @@ -700,13 +670,11 @@ void FFmpegWriter::write_queued_frames() } // Write a block of frames from a reader -void FFmpegWriter::WriteFrame(ReaderBase* reader, int64_t start, int64_t length) -{ +void FFmpegWriter::WriteFrame(ReaderBase *reader, int64_t start, int64_t length) { ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::WriteFrame (from Reader)", "start", start, "length", length, "", -1, "", -1, "", -1, "", -1); // Loop through each frame (and encoded it) - for (int64_t number = start; number <= length; number++) - { + for (int64_t number = start; number <= length; number++) { // Get the frame std::shared_ptr f = reader->GetFrame(number); @@ -716,8 +684,7 @@ void FFmpegWriter::WriteFrame(ReaderBase* reader, int64_t start, int64_t length) } // Write the file trailer (after all frames are written) -void FFmpegWriter::WriteTrailer() -{ +void FFmpegWriter::WriteTrailer() { // Write any remaining queued frames to video file 
write_queued_frames(); @@ -741,8 +708,7 @@ void FFmpegWriter::WriteTrailer() } // Flush encoders -void FFmpegWriter::flush_encoders() -{ +void FFmpegWriter::flush_encoders() { if (info.has_audio && audio_codec && AV_GET_CODEC_TYPE(audio_st) == AVMEDIA_TYPE_AUDIO && AV_GET_CODEC_ATTRIBUTES(audio_st, audio_codec)->frame_size <= 1) return; #if (LIBAVFORMAT_VERSION_MAJOR < 58) @@ -750,15 +716,15 @@ void FFmpegWriter::flush_encoders() return; #endif - int error_code = 0; - int stop_encoding = 1; + int error_code = 0; + int stop_encoding = 1; - // FLUSH VIDEO ENCODER - if (info.has_video) + // FLUSH VIDEO ENCODER + if (info.has_video) for (;;) { // Increment PTS (in frames and scaled to the codec's timebase) - write_video_count += av_rescale_q(1, (AVRational){info.fps.den, info.fps.num}, video_codec->time_base); + write_video_count += av_rescale_q(1, (AVRational) {info.fps.den, info.fps.num}, video_codec->time_base); AVPacket pkt; av_init_packet(&pkt); @@ -772,7 +738,7 @@ void FFmpegWriter::flush_encoders() int got_packet = 0; int error_code = 0; - #if IS_FFMPEG_3_2 +#if IS_FFMPEG_3_2 #pragma omp critical (write_video_packet) { // Encode video packet (latest version of FFmpeg) @@ -796,34 +762,35 @@ void FFmpegWriter::flush_encoders() error_code = av_interleaved_write_frame(oc, &pkt); } } - #else +#else // IS_FFMPEG_3_2 - #if LIBAVFORMAT_VERSION_MAJOR >= 54 - // Encode video packet (older than FFmpeg 3.2) - error_code = avcodec_encode_video2(video_codec, &pkt, NULL, &got_packet); +#if LIBAVFORMAT_VERSION_MAJOR >= 54 + // Encode video packet (older than FFmpeg 3.2) + error_code = avcodec_encode_video2(video_codec, &pkt, NULL, &got_packet); - #else - // Encode video packet (even older version of FFmpeg) - int video_outbuf_size = 0; +#else + // Encode video packet (even older version of FFmpeg) + int video_outbuf_size = 0; - /* encode the image */ - int out_size = avcodec_encode_video(video_codec, NULL, video_outbuf_size, NULL); + /* encode the image */ + int out_size = 
avcodec_encode_video(video_codec, NULL, video_outbuf_size, NULL); - /* if zero size, it means the image was buffered */ - if (out_size > 0) { - if(video_codec->coded_frame->key_frame) - pkt.flags |= AV_PKT_FLAG_KEY; - pkt.data= video_outbuf; - pkt.size= out_size; + /* if zero size, it means the image was buffered */ + if (out_size > 0) { + if(video_codec->coded_frame->key_frame) + pkt.flags |= AV_PKT_FLAG_KEY; + pkt.data= video_outbuf; + pkt.size= out_size; - // got data back (so encode this frame) - got_packet = 1; - } - #endif - #endif + // got data back (so encode this frame) + got_packet = 1; + } +#endif // LIBAVFORMAT_VERSION_MAJOR >= 54 +#endif // IS_FFMPEG_3_2 if (error_code < 0) { - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::flush_encoders ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::flush_encoders ERROR [" + (string) av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, + "", -1, "", -1, "", -1); } if (!got_packet) { stop_encoding = 1; @@ -853,8 +820,8 @@ void FFmpegWriter::flush_encoders() av_freep(&video_outbuf); } - // FLUSH AUDIO ENCODER - if (info.has_audio) + // FLUSH AUDIO ENCODER + if (info.has_audio) for (;;) { // Increment PTS (in samples and scaled to the codec's timebase) @@ -873,12 +840,12 @@ void FFmpegWriter::flush_encoders() /* encode the image */ int got_packet = 0; - #if IS_FFMPEG_3_2 +#if IS_FFMPEG_3_2 avcodec_send_frame(audio_codec, NULL); got_packet = 0; - #else +#else error_code = avcodec_encode_audio2(audio_codec, &pkt, NULL, &got_packet); - #endif +#endif if (error_code < 0) { ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::flush_encoders ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); } @@ -917,25 +884,23 @@ void FFmpegWriter::flush_encoders() } // Close the video codec -void 
FFmpegWriter::close_video(AVFormatContext *oc, AVStream *st) -{ +void FFmpegWriter::close_video(AVFormatContext *oc, AVStream *st) { AV_FREE_CONTEXT(video_codec); video_codec = NULL; - #if IS_FFMPEG_3_2 -// #if defined(__linux__) - if (hw_en_on && hw_en_supported) { - if (hw_device_ctx) { - av_buffer_unref(&hw_device_ctx); - hw_device_ctx = NULL; +#if IS_FFMPEG_3_2 + // #if defined(__linux__) + if (hw_en_on && hw_en_supported) { + if (hw_device_ctx) { + av_buffer_unref(&hw_device_ctx); + hw_device_ctx = NULL; + } } - } -// #endif - #endif + // #endif +#endif } // Close the audio codec -void FFmpegWriter::close_audio(AVFormatContext *oc, AVStream *st) -{ +void FFmpegWriter::close_audio(AVFormatContext *oc, AVStream *st) { AV_FREE_CONTEXT(audio_codec); audio_codec = NULL; @@ -962,8 +927,7 @@ void FFmpegWriter::close_audio(AVFormatContext *oc, AVStream *st) } // Close the writer -void FFmpegWriter::Close() -{ +void FFmpegWriter::Close() { // Write trailer (if needed) if (!write_trailer) WriteTrailer(); @@ -1006,24 +970,19 @@ void FFmpegWriter::Close() } // Add an AVFrame to the cache -void FFmpegWriter::add_avframe(std::shared_ptr frame, AVFrame* av_frame) -{ +void FFmpegWriter::add_avframe(std::shared_ptr frame, AVFrame *av_frame) { // Add AVFrame to map (if it does not already exist) - if (!av_frames.count(frame)) - { + if (!av_frames.count(frame)) { // Add av_frame av_frames[frame] = av_frame; - } - else - { + } else { // Do not add, and deallocate this AVFrame AV_FREE_FRAME(&av_frame); } } // Add an audio output stream -AVStream* FFmpegWriter::add_audio_stream() -{ +AVStream *FFmpegWriter::add_audio_stream() { AVCodecContext *c; AVStream *st; @@ -1050,14 +1009,13 @@ AVStream* FFmpegWriter::add_audio_stream() if (codec->supported_samplerates) { int i; for (i = 0; codec->supported_samplerates[i] != 0; i++) - if (info.sample_rate == codec->supported_samplerates[i]) - { + if (info.sample_rate == codec->supported_samplerates[i]) { // Set the valid sample rate 
c->sample_rate = info.sample_rate; break; } - if (codec->supported_samplerates[i] == 0) - throw InvalidSampleRate("An invalid sample rate was detected for this codec.", path); + if (codec->supported_samplerates[i] == 0) + throw InvalidSampleRate("An invalid sample rate was detected for this codec.", path); } else // Set sample rate c->sample_rate = info.sample_rate; @@ -1068,8 +1026,7 @@ AVStream* FFmpegWriter::add_audio_stream() if (codec->channel_layouts) { int i; for (i = 0; codec->channel_layouts[i] != 0; i++) - if (channel_layout == codec->channel_layouts[i]) - { + if (channel_layout == codec->channel_layouts[i]) { // Set valid channel layout c->channel_layout = channel_layout; break; @@ -1082,8 +1039,7 @@ AVStream* FFmpegWriter::add_audio_stream() // Choose a valid sample_fmt if (codec->sample_fmts) { - for (int i = 0; codec->sample_fmts[i] != AV_SAMPLE_FMT_NONE; i++) - { + for (int i = 0; codec->sample_fmts[i] != AV_SAMPLE_FMT_NONE; i++) { // Set sample format to 1st valid format (and then exit loop) c->sample_fmt = codec->sample_fmts[i]; break; @@ -1109,8 +1065,7 @@ AVStream* FFmpegWriter::add_audio_stream() } // Add a video output stream -AVStream* FFmpegWriter::add_video_stream() -{ +AVStream *FFmpegWriter::add_video_stream() { AVCodecContext *c; AVStream *st; @@ -1138,24 +1093,22 @@ AVStream* FFmpegWriter::add_video_stream() } // Here should be the setting for low fixed bitrate // Defaults are used because mpeg2 otherwise had problems - } - else { + } else { // Check if codec supports crf switch (c->codec_id) { - #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 39, 101) - #if (LIBAVCODEC_VERSION_MAJOR >= 58) +#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 39, 101) +#if (LIBAVCODEC_VERSION_MAJOR >= 58) case AV_CODEC_ID_AV1 : - #endif +#endif case AV_CODEC_ID_VP9 : case AV_CODEC_ID_H265 : - #endif +#endif case AV_CODEC_ID_VP8 : case AV_CODEC_ID_H264 : if (info.video_bit_rate < 40) { c->qmin = 0; c->qmax = 63; - } - else { + } else { c->qmin = 
info.video_bit_rate - 5; c->qmax = 63; } @@ -1186,9 +1139,9 @@ AVStream* FFmpegWriter::add_video_stream() identically 1. */ c->time_base.num = info.video_timebase.num; c->time_base.den = info.video_timebase.den; - #if LIBAVFORMAT_VERSION_MAJOR >= 56 +#if LIBAVFORMAT_VERSION_MAJOR >= 56 c->framerate = av_inv_q(c->time_base); - #endif +#endif st->avg_frame_rate = av_inv_q(c->time_base); st->time_base.num = info.video_timebase.num; st->time_base.den = info.video_timebase.den; @@ -1212,7 +1165,7 @@ AVStream* FFmpegWriter::add_video_stream() #endif // Find all supported pixel formats for this codec - const PixelFormat* supported_pixel_formats = codec->pix_fmts; + const PixelFormat *supported_pixel_formats = codec->pix_fmts; while (supported_pixel_formats != NULL && *supported_pixel_formats != PIX_FMT_NONE) { // Assign the 1st valid pixel format (if one is missing) if (c->pix_fmt == PIX_FMT_NONE) @@ -1222,7 +1175,7 @@ AVStream* FFmpegWriter::add_video_stream() // Codec doesn't have any pix formats? 
if (c->pix_fmt == PIX_FMT_NONE) { - if(fmt->video_codec == AV_CODEC_ID_RAWVIDEO) { + if (fmt->video_codec == AV_CODEC_ID_RAWVIDEO) { // Raw video should use RGB24 c->pix_fmt = PIX_FMT_RGB24; @@ -1249,8 +1202,7 @@ AVStream* FFmpegWriter::add_video_stream() } // open audio codec -void FFmpegWriter::open_audio(AVFormatContext *oc, AVStream *st) -{ +void FFmpegWriter::open_audio(AVFormatContext *oc, AVStream *st) { AVCodec *codec; AV_GET_CODEC_FROM_STREAM(st, audio_codec) @@ -1284,14 +1236,14 @@ void FFmpegWriter::open_audio(AVFormatContext *oc, AVStream *st) int s = AV_FIND_DECODER_CODEC_ID(st); switch (s) { - case AV_CODEC_ID_PCM_S16LE: - case AV_CODEC_ID_PCM_S16BE: - case AV_CODEC_ID_PCM_U16LE: - case AV_CODEC_ID_PCM_U16BE: - audio_input_frame_size >>= 1; - break; - default: - break; + case AV_CODEC_ID_PCM_S16LE: + case AV_CODEC_ID_PCM_S16BE: + case AV_CODEC_ID_PCM_U16LE: + case AV_CODEC_ID_PCM_U16BE: + audio_input_frame_size >>= 1; + break; + default: + break; } } else { // Set frame size based on the codec @@ -1313,25 +1265,22 @@ void FFmpegWriter::open_audio(AVFormatContext *oc, AVStream *st) audio_encoder_buffer = new uint8_t[audio_encoder_buffer_size]; // Add audio metadata (if any) - for(std::map::iterator iter = info.metadata.begin(); iter != info.metadata.end(); ++iter) - { + for (std::map::iterator iter = info.metadata.begin(); iter != info.metadata.end(); ++iter) { av_dict_set(&st->metadata, iter->first.c_str(), iter->second.c_str(), 0); } ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::open_audio", "audio_codec->thread_count", audio_codec->thread_count, "audio_input_frame_size", audio_input_frame_size, "buffer_size", AVCODEC_MAX_AUDIO_FRAME_SIZE + MY_INPUT_BUFFER_PADDING_SIZE, "", -1, "", -1, "", -1); - } // open video codec -void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) -{ +void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) { AVCodec *codec; AV_GET_CODEC_FROM_STREAM(st, video_codec) // Set number of threads equal 
to number of processors (not to exceed 16) video_codec->thread_count = min(FF_NUM_PROCESSORS, 16); - #if IS_FFMPEG_3_2 +#if IS_FFMPEG_3_2 if (hw_en_on && hw_en_supported) { //char *dev_hw = NULL; char adapter[256]; @@ -1341,32 +1290,31 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) adapter_num = openshot::Settings::Instance()->HW_EN_DEVICE_SET; fprintf(stderr, "\n\nEncodiing Device Nr: %d\n", adapter_num); if (adapter_num < 3 && adapter_num >=0) { - #if defined(__linux__) +#if defined(__linux__) snprintf(adapter,sizeof(adapter),"/dev/dri/renderD%d", adapter_num+128); // Maybe 127 is better because the first card would be 1?! adapter_ptr = adapter; - #elif defined(_WIN32) +#elif defined(_WIN32) adapter_ptr = NULL; - #elif defined(__APPLE__) +#elif defined(__APPLE__) adapter_ptr = NULL; - #endif +#endif } else { adapter_ptr = NULL; // Just to be sure } // Check if it is there and writable - #if defined(__linux__) +#if defined(__linux__) if( adapter_ptr != NULL && access( adapter_ptr, W_OK ) == 0 ) { - #elif defined(_WIN32) +#elif defined(_WIN32) if( adapter_ptr != NULL ) { - #elif defined(__APPLE__) +#elif defined(__APPLE__) if( adapter_ptr != NULL ) { - #endif +#endif ZmqLogger::Instance()->AppendDebugMethod("Encode Device present using device", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); } else { adapter_ptr = NULL; // use default - //cerr << "\n\n\nEncode Device not present using default\n\n\n"; ZmqLogger::Instance()->AppendDebugMethod("Encode Device not present using default", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); } if (av_hwdevice_ctx_create(&hw_device_ctx, hw_en_av_device_type, @@ -1375,7 +1323,8 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) throw InvalidCodec("Could not create hwdevice", path); } } - #endif +#endif + /* find the video encoder */ codec = avcodec_find_encoder_by_name(info.vcodec.c_str()); if (!codec) @@ -1383,15 +1332,15 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) 
if (!codec) throw InvalidCodec("Could not find codec", path); - /* Force max_b_frames to 0 in some cases (i.e. for mjpeg image sequences */ - if(video_codec->max_b_frames && video_codec->codec_id != AV_CODEC_ID_MPEG4 && video_codec->codec_id != AV_CODEC_ID_MPEG1VIDEO && video_codec->codec_id != AV_CODEC_ID_MPEG2VIDEO) - video_codec->max_b_frames = 0; + /* Force max_b_frames to 0 in some cases (i.e. for mjpeg image sequences */ + if (video_codec->max_b_frames && video_codec->codec_id != AV_CODEC_ID_MPEG4 && video_codec->codec_id != AV_CODEC_ID_MPEG1VIDEO && video_codec->codec_id != AV_CODEC_ID_MPEG2VIDEO) + video_codec->max_b_frames = 0; // Init options AVDictionary *opts = NULL; av_dict_set(&opts, "strict", "experimental", 0); - #if IS_FFMPEG_3_2 +#if IS_FFMPEG_3_2 if (hw_en_on && hw_en_supported) { video_codec->max_b_frames = 0; // At least this GPU doesn't support b-frames video_codec->pix_fmt = hw_en_av_pix_fmt; @@ -1405,7 +1354,7 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) fprintf(stderr, "Failed to set hwframe context.\n"); } } - #endif +#endif /* open the codec */ if (avcodec_open2(video_codec, codec, &opts) < 0) @@ -1416,8 +1365,7 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) av_dict_free(&opts); // Add video metadata (if any) - for(std::map::iterator iter = info.metadata.begin(); iter != info.metadata.end(); ++iter) - { + for (std::map::iterator iter = info.metadata.begin(); iter != info.metadata.end(); ++iter) { av_dict_set(&st->metadata, iter->first.c_str(), iter->second.c_str(), 0); } @@ -1426,9 +1374,8 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) } // write all queued frames' audio to the video file -void FFmpegWriter::write_audio_packets(bool final) -{ - #pragma omp task firstprivate(final) +void FFmpegWriter::write_audio_packets(bool is_final) { +#pragma omp task firstprivate(is_final) { // Init audio buffers / variables int total_frame_samples = 0; @@ -1439,14 +1386,13 @@ void 
FFmpegWriter::write_audio_packets(bool final) ChannelLayout channel_layout_in_frame = LAYOUT_MONO; // default channel layout // Create a new array (to hold all S16 audio samples, for the current queued frames - int16_t* all_queued_samples = (int16_t*)av_malloc((sizeof(int16_t)*(queued_audio_frames.size() * AVCODEC_MAX_AUDIO_FRAME_SIZE))); - int16_t* all_resampled_samples = NULL; - int16_t* final_samples_planar = NULL; - int16_t* final_samples = NULL; + int16_t *all_queued_samples = (int16_t *) av_malloc((sizeof(int16_t) * (queued_audio_frames.size() * AVCODEC_MAX_AUDIO_FRAME_SIZE))); + int16_t *all_resampled_samples = NULL; + int16_t *final_samples_planar = NULL; + int16_t *final_samples = NULL; // Loop through each queued audio frame - while (!queued_audio_frames.empty()) - { + while (!queued_audio_frames.empty()) { // Get front frame (from the queue) std::shared_ptr frame = queued_audio_frames.front(); @@ -1458,7 +1404,7 @@ void FFmpegWriter::write_audio_packets(bool final) // Get audio sample array - float* frame_samples_float = NULL; + float *frame_samples_float = NULL; // Get samples interleaved together (c1 c2 c1 c2 c1 c2) frame_samples_float = frame->GetInterleavedAudioSamples(sample_rate_in_frame, NULL, &samples_in_frame); @@ -1487,13 +1433,13 @@ void FFmpegWriter::write_audio_packets(bool final) int samples_position = 0; - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets", "final", final, "total_frame_samples", total_frame_samples, "channel_layout_in_frame", channel_layout_in_frame, "channels_in_frame", channels_in_frame, "samples_in_frame", samples_in_frame, "LAYOUT_MONO", LAYOUT_MONO); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets", "is_final", is_final, "total_frame_samples", total_frame_samples, "channel_layout_in_frame", channel_layout_in_frame, "channels_in_frame", channels_in_frame, "samples_in_frame", samples_in_frame, "LAYOUT_MONO", LAYOUT_MONO); // Keep track of the original sample format 
AVSampleFormat output_sample_fmt = audio_codec->sample_fmt; AVFrame *audio_frame = NULL; - if (!final) { + if (!is_final) { // Create input frame (and allocate arrays) audio_frame = AV_ALLOCATE_FRAME(); AV_RESET_FRAME(audio_frame); @@ -1501,28 +1447,23 @@ void FFmpegWriter::write_audio_packets(bool final) // Fill input frame with sample data avcodec_fill_audio_frame(audio_frame, channels_in_frame, AV_SAMPLE_FMT_S16, (uint8_t *) all_queued_samples, - audio_encoder_buffer_size, 0); + audio_encoder_buffer_size, 0); // Do not convert audio to planar format (yet). We need to keep everything interleaved at this point. - switch (audio_codec->sample_fmt) - { - case AV_SAMPLE_FMT_FLTP: - { + switch (audio_codec->sample_fmt) { + case AV_SAMPLE_FMT_FLTP: { output_sample_fmt = AV_SAMPLE_FMT_FLT; break; } - case AV_SAMPLE_FMT_S32P: - { + case AV_SAMPLE_FMT_S32P: { output_sample_fmt = AV_SAMPLE_FMT_S32; break; } - case AV_SAMPLE_FMT_S16P: - { + case AV_SAMPLE_FMT_S16P: { output_sample_fmt = AV_SAMPLE_FMT_S16; break; } - case AV_SAMPLE_FMT_U8P: - { + case AV_SAMPLE_FMT_U8P: { output_sample_fmt = AV_SAMPLE_FMT_U8; break; } @@ -1546,29 +1487,30 @@ void FFmpegWriter::write_audio_packets(bool final) // setup resample context if (!avr) { avr = SWR_ALLOC(); - av_opt_set_int(avr, "in_channel_layout", channel_layout_in_frame, 0); + av_opt_set_int(avr, "in_channel_layout", channel_layout_in_frame, 0); av_opt_set_int(avr, "out_channel_layout", info.channel_layout, 0); - av_opt_set_int(avr, "in_sample_fmt", AV_SAMPLE_FMT_S16, 0); - av_opt_set_int(avr, "out_sample_fmt", output_sample_fmt, 0); // planar not allowed here - av_opt_set_int(avr, "in_sample_rate", sample_rate_in_frame, 0); - av_opt_set_int(avr, "out_sample_rate", info.sample_rate, 0); - av_opt_set_int(avr, "in_channels", channels_in_frame, 0); - av_opt_set_int(avr, "out_channels", info.channels, 0); + av_opt_set_int(avr, "in_sample_fmt", AV_SAMPLE_FMT_S16, 0); + av_opt_set_int(avr, "out_sample_fmt", output_sample_fmt, 0); // 
planar not allowed here + av_opt_set_int(avr, "in_sample_rate", sample_rate_in_frame, 0); + av_opt_set_int(avr, "out_sample_rate", info.sample_rate, 0); + av_opt_set_int(avr, "in_channels", channels_in_frame, 0); + av_opt_set_int(avr, "out_channels", info.channels, 0); SWR_INIT(avr); } int nb_samples = 0; // Convert audio samples - nb_samples = SWR_CONVERT(avr, // audio resample context - audio_converted->data, // output data pointers - audio_converted->linesize[0], // output plane size, in bytes. (0 if unknown) - audio_converted->nb_samples, // maximum number of samples that the output buffer can hold - audio_frame->data, // input data pointers - audio_frame->linesize[0], // input plane size, in bytes (0 if unknown) - audio_frame->nb_samples); // number of input samples to convert + nb_samples = SWR_CONVERT(avr, // audio resample context + audio_converted->data, // output data pointers + audio_converted->linesize[0], // output plane size, in bytes. (0 if unknown) + audio_converted->nb_samples, // maximum number of samples that the output buffer can hold + audio_frame->data, // input data pointers + audio_frame->linesize[0], // input plane size, in bytes (0 if unknown) + audio_frame->nb_samples); // number of input samples to convert // Create a new array (to hold all resampled S16 audio samples) - all_resampled_samples = (int16_t*)av_malloc(sizeof(int16_t) * nb_samples * info.channels * (av_get_bytes_per_sample(output_sample_fmt) / av_get_bytes_per_sample(AV_SAMPLE_FMT_S16))); + all_resampled_samples = (int16_t *) av_malloc( + sizeof(int16_t) * nb_samples * info.channels * (av_get_bytes_per_sample(output_sample_fmt) / av_get_bytes_per_sample(AV_SAMPLE_FMT_S16))); // Copy audio samples over original samples memcpy(all_resampled_samples, audio_converted->data[0], nb_samples * info.channels * av_get_bytes_per_sample(output_sample_fmt)); @@ -1584,7 +1526,7 @@ void FFmpegWriter::write_audio_packets(bool final) } // Loop until no more samples - while 
(remaining_frame_samples > 0 || final) { + while (remaining_frame_samples > 0 || is_final) { // Get remaining samples needed for this packet int remaining_packet_samples = (audio_input_frame_size * info.channels) - audio_input_position; @@ -1596,9 +1538,10 @@ void FFmpegWriter::write_audio_packets(bool final) diff = remaining_frame_samples; // Copy frame samples into the packet samples array - if (!final) + if (!is_final) //TODO: Make this more sane - memcpy(samples + (audio_input_position * (av_get_bytes_per_sample(output_sample_fmt) / av_get_bytes_per_sample(AV_SAMPLE_FMT_S16))), all_resampled_samples + samples_position, diff * av_get_bytes_per_sample(output_sample_fmt)); + memcpy(samples + (audio_input_position * (av_get_bytes_per_sample(output_sample_fmt) / av_get_bytes_per_sample(AV_SAMPLE_FMT_S16))), + all_resampled_samples + samples_position, diff * av_get_bytes_per_sample(output_sample_fmt)); // Increment counters audio_input_position += diff; @@ -1607,28 +1550,27 @@ void FFmpegWriter::write_audio_packets(bool final) remaining_packet_samples -= diff; // Do we have enough samples to proceed? - if (audio_input_position < (audio_input_frame_size * info.channels) && !final) + if (audio_input_position < (audio_input_frame_size * info.channels) && !is_final) // Not enough samples to encode... 
so wait until the next frame break; // Convert to planar (if needed by audio codec) AVFrame *frame_final = AV_ALLOCATE_FRAME(); AV_RESET_FRAME(frame_final); - if (av_sample_fmt_is_planar(audio_codec->sample_fmt)) - { + if (av_sample_fmt_is_planar(audio_codec->sample_fmt)) { ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets (2nd resampling for Planar formats)", "in_sample_fmt", output_sample_fmt, "out_sample_fmt", audio_codec->sample_fmt, "in_sample_rate", info.sample_rate, "out_sample_rate", info.sample_rate, "in_channels", info.channels, "out_channels", info.channels); // setup resample context if (!avr_planar) { avr_planar = SWR_ALLOC(); - av_opt_set_int(avr_planar, "in_channel_layout", info.channel_layout, 0); + av_opt_set_int(avr_planar, "in_channel_layout", info.channel_layout, 0); av_opt_set_int(avr_planar, "out_channel_layout", info.channel_layout, 0); - av_opt_set_int(avr_planar, "in_sample_fmt", output_sample_fmt, 0); - av_opt_set_int(avr_planar, "out_sample_fmt", audio_codec->sample_fmt, 0); // planar not allowed here - av_opt_set_int(avr_planar, "in_sample_rate", info.sample_rate, 0); - av_opt_set_int(avr_planar, "out_sample_rate", info.sample_rate, 0); - av_opt_set_int(avr_planar, "in_channels", info.channels, 0); - av_opt_set_int(avr_planar, "out_channels", info.channels, 0); + av_opt_set_int(avr_planar, "in_sample_fmt", output_sample_fmt, 0); + av_opt_set_int(avr_planar, "out_sample_fmt", audio_codec->sample_fmt, 0); // planar not allowed here + av_opt_set_int(avr_planar, "in_sample_rate", info.sample_rate, 0); + av_opt_set_int(avr_planar, "out_sample_rate", info.sample_rate, 0); + av_opt_set_int(avr_planar, "in_channels", info.channels, 0); + av_opt_set_int(avr_planar, "out_channels", info.channels, 0); SWR_INIT(avr_planar); } @@ -1638,27 +1580,28 @@ void FFmpegWriter::write_audio_packets(bool final) audio_frame->nb_samples = audio_input_position / info.channels; // Create a new array - final_samples_planar = 
(int16_t*)av_malloc(sizeof(int16_t) * audio_frame->nb_samples * info.channels * (av_get_bytes_per_sample(output_sample_fmt) / av_get_bytes_per_sample(AV_SAMPLE_FMT_S16))); + final_samples_planar = (int16_t *) av_malloc( + sizeof(int16_t) * audio_frame->nb_samples * info.channels * (av_get_bytes_per_sample(output_sample_fmt) / av_get_bytes_per_sample(AV_SAMPLE_FMT_S16))); // Copy audio into buffer for frame memcpy(final_samples_planar, samples, audio_frame->nb_samples * info.channels * av_get_bytes_per_sample(output_sample_fmt)); // Fill input frame with sample data avcodec_fill_audio_frame(audio_frame, info.channels, output_sample_fmt, (uint8_t *) final_samples_planar, - audio_encoder_buffer_size, 0); + audio_encoder_buffer_size, 0); // Create output frame (and allocate arrays) frame_final->nb_samples = audio_input_frame_size; av_samples_alloc(frame_final->data, frame_final->linesize, info.channels, frame_final->nb_samples, audio_codec->sample_fmt, 0); // Convert audio samples - int nb_samples = SWR_CONVERT(avr_planar, // audio resample context - frame_final->data, // output data pointers - frame_final->linesize[0], // output plane size, in bytes. (0 if unknown) - frame_final->nb_samples, // maximum number of samples that the output buffer can hold - audio_frame->data, // input data pointers - audio_frame->linesize[0], // input plane size, in bytes (0 if unknown) - audio_frame->nb_samples); // number of input samples to convert + int nb_samples = SWR_CONVERT(avr_planar, // audio resample context + frame_final->data, // output data pointers + frame_final->linesize[0], // output plane size, in bytes. 
(0 if unknown) + frame_final->nb_samples, // maximum number of samples that the output buffer can hold + audio_frame->data, // input data pointers + audio_frame->linesize[0], // input plane size, in bytes (0 if unknown) + audio_frame->nb_samples); // number of input samples to convert // Copy audio samples over original samples if (nb_samples > 0) @@ -1673,7 +1616,8 @@ void FFmpegWriter::write_audio_packets(bool final) } else { // Create a new array - final_samples = (int16_t*)av_malloc(sizeof(int16_t) * audio_input_position * (av_get_bytes_per_sample(audio_codec->sample_fmt) / av_get_bytes_per_sample(AV_SAMPLE_FMT_S16))); + final_samples = (int16_t *) av_malloc( + sizeof(int16_t) * audio_input_position * (av_get_bytes_per_sample(audio_codec->sample_fmt) / av_get_bytes_per_sample(AV_SAMPLE_FMT_S16))); // Copy audio into buffer for frame memcpy(final_samples, samples, audio_input_position * av_get_bytes_per_sample(audio_codec->sample_fmt)); @@ -1683,7 +1627,7 @@ void FFmpegWriter::write_audio_packets(bool final) // Fill the final_frame AVFrame with audio (non planar) avcodec_fill_audio_frame(frame_final, audio_codec->channels, audio_codec->sample_fmt, (uint8_t *) final_samples, - audio_encoder_buffer_size, 0); + audio_encoder_buffer_size, 0); } // Increment PTS (in samples) @@ -1702,7 +1646,7 @@ void FFmpegWriter::write_audio_packets(bool final) /* encode the audio samples */ int got_packet_ptr = 0; - #if IS_FFMPEG_3_2 +#if IS_FFMPEG_3_2 // Encode audio (latest version of FFmpeg) int error_code; int ret = 0; @@ -1730,10 +1674,10 @@ void FFmpegWriter::write_audio_packets(bool final) ret = -1; } got_packet_ptr = ret; - #else +#else // Encode audio (older versions of FFmpeg) int error_code = avcodec_encode_audio2(audio_codec, &pkt, frame_final, &got_packet_ptr); - #endif +#endif /* if zero size, it means the image was buffered */ if (error_code == 0 && got_packet_ptr) { @@ -1755,15 +1699,13 @@ void FFmpegWriter::write_audio_packets(bool final) /* write the compressed 
frame in the media file */ int error_code = av_interleaved_write_frame(oc, &pkt); - if (error_code < 0) - { - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); + if (error_code < 0) { + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets ERROR [" + (string) av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); } } - if (error_code < 0) - { - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); + if (error_code < 0) { + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets ERROR [" + (string) av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); } // deallocate AVFrame @@ -1775,7 +1717,7 @@ void FFmpegWriter::write_audio_packets(bool final) // Reset position audio_input_position = 0; - final = false; + is_final = false; } // Delete arrays (if needed) @@ -1792,8 +1734,7 @@ void FFmpegWriter::write_audio_packets(bool final) } // Allocate an AVFrame object -AVFrame* FFmpegWriter::allocate_avframe(PixelFormat pix_fmt, int width, int height, int *buffer_size, uint8_t *new_buffer) -{ +AVFrame *FFmpegWriter::allocate_avframe(PixelFormat pix_fmt, int width, int height, int *buffer_size, uint8_t *new_buffer) { // Create an RGB AVFrame AVFrame *new_av_frame = NULL; @@ -1806,10 +1747,9 @@ AVFrame* FFmpegWriter::allocate_avframe(PixelFormat pix_fmt, int width, int heig *buffer_size = AV_GET_IMAGE_SIZE(pix_fmt, width, height); // Create buffer (if not provided) - if (!new_buffer) - { + if (!new_buffer) { // New Buffer - new_buffer = (uint8_t*)av_malloc(*buffer_size * sizeof(uint8_t)); + new_buffer = (uint8_t *) av_malloc(*buffer_size * sizeof(uint8_t)); // Attach buffer to 
AVFrame AV_COPY_PICTURE_DATA(new_av_frame, new_buffer, pix_fmt, width, height); new_av_frame->width = width; @@ -1822,8 +1762,7 @@ AVFrame* FFmpegWriter::allocate_avframe(PixelFormat pix_fmt, int width, int heig } // process video frame -void FFmpegWriter::process_video_packet(std::shared_ptr frame) -{ +void FFmpegWriter::process_video_packet(std::shared_ptr frame) { // Determine the height & width of the source image int source_image_width = frame->GetWidth(); int source_image_height = frame->GetHeight(); @@ -1842,7 +1781,7 @@ void FFmpegWriter::process_video_packet(std::shared_ptr frame) if (rescaler_position == num_of_rescalers) rescaler_position = 0; - #pragma omp task firstprivate(frame, scaler, source_image_width, source_image_height) +#pragma omp task firstprivate(frame, scaler, source_image_width, source_image_height) { // Allocate an RGB frame & final output frame int bytes_source = 0; @@ -1854,29 +1793,28 @@ void FFmpegWriter::process_video_packet(std::shared_ptr frame) pixels = frame->GetPixels(); // Init AVFrame for source image & final (converted image) - frame_source = allocate_avframe(PIX_FMT_RGBA, source_image_width, source_image_height, &bytes_source, (uint8_t*) pixels); - #if IS_FFMPEG_3_2 - AVFrame *frame_final; + frame_source = allocate_avframe(PIX_FMT_RGBA, source_image_width, source_image_height, &bytes_source, (uint8_t *) pixels); +#if IS_FFMPEG_3_2 + AVFrame *frame_final; if (hw_en_on && hw_en_supported) { frame_final = allocate_avframe(AV_PIX_FMT_NV12, info.width, info.height, &bytes_final, NULL); - } else - { + } else { frame_final = allocate_avframe((AVPixelFormat)(video_st->codecpar->format), info.width, info.height, &bytes_final, NULL); } - #else +#else AVFrame *frame_final = allocate_avframe(video_codec->pix_fmt, info.width, info.height, &bytes_final, NULL); - #endif +#endif // Fill with data - AV_COPY_PICTURE_DATA(frame_source, (uint8_t*)pixels, PIX_FMT_RGBA, source_image_width, source_image_height); + 
AV_COPY_PICTURE_DATA(frame_source, (uint8_t *) pixels, PIX_FMT_RGBA, source_image_width, source_image_height); ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::process_video_packet", "frame->number", frame->number, "bytes_source", bytes_source, "bytes_final", bytes_final, "", -1, "", -1, "", -1); // Resize & convert pixel format sws_scale(scaler, frame_source->data, frame_source->linesize, 0, - source_image_height, frame_final->data, frame_final->linesize); + source_image_height, frame_final->data, frame_final->linesize); // Add resized AVFrame to av_frames map - #pragma omp critical (av_frames_section) +#pragma omp critical (av_frames_section) add_avframe(frame, frame_final); // Deallocate memory @@ -1887,8 +1825,7 @@ void FFmpegWriter::process_video_packet(std::shared_ptr frame) } // write video frame -bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* frame_final) -{ +bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame *frame_final) { #if (LIBAVFORMAT_VERSION_MAJOR >= 58) ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet", "frame->number", frame->number, "oc->oformat->flags", oc->oformat->flags, "", -1, "", -1, "", -1, "", -1); #else @@ -1900,19 +1837,18 @@ bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra av_init_packet(&pkt); pkt.flags |= AV_PKT_FLAG_KEY; - pkt.stream_index= video_st->index; - pkt.data= (uint8_t*)frame_final->data; - pkt.size= sizeof(AVPicture); + pkt.stream_index = video_st->index; + pkt.data = (uint8_t *) frame_final->data; + pkt.size = sizeof(AVPicture); // Increment PTS (in frames and scaled to the codec's timebase) - write_video_count += av_rescale_q(1, (AVRational){info.fps.den, info.fps.num}, video_codec->time_base); + write_video_count += av_rescale_q(1, (AVRational) {info.fps.den, info.fps.num}, video_codec->time_base); pkt.pts = write_video_count; /* write the compressed frame in the media file */ int error_code = 
av_interleaved_write_frame(oc, &pkt); - if (error_code < 0) - { - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); + if (error_code < 0) { + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet ERROR [" + (string) av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); return false; } @@ -1933,11 +1869,11 @@ bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra uint8_t *video_outbuf = NULL; // Increment PTS (in frames and scaled to the codec's timebase) - write_video_count += av_rescale_q(1, (AVRational){info.fps.den, info.fps.num}, video_codec->time_base); + write_video_count += av_rescale_q(1, (AVRational) {info.fps.den, info.fps.num}, video_codec->time_base); // Assign the initial AVFrame PTS from the frame counter frame_final->pts = write_video_count; - #if IS_FFMPEG_3_2 +#if IS_FFMPEG_3_2 if (hw_en_on && hw_en_supported) { if (!(hw_frame = av_frame_alloc())) { fprintf(stderr, "Error code: av_hwframe_alloc\n"); @@ -1954,20 +1890,20 @@ bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra } av_frame_copy_props(hw_frame, frame_final); } - #endif +#endif /* encode the image */ int got_packet_ptr = 0; int error_code = 0; - #if IS_FFMPEG_3_2 +#if IS_FFMPEG_3_2 // Write video packet (latest version of FFmpeg) int frameFinished = 0; int ret; - #if IS_FFMPEG_3_2 + if (hw_en_on && hw_en_supported) { ret = avcodec_send_frame(video_codec, hw_frame); //hw_frame!!! 
- } else - #endif - ret = avcodec_send_frame(video_codec, frame_final); + } else { + ret = avcodec_send_frame(video_codec, frame_final); + } error_code = ret; if (ret < 0 ) { ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet (Frame not sent)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); @@ -1982,10 +1918,11 @@ bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra else { while (ret >= 0) { ret = avcodec_receive_packet(video_codec, &pkt); - if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) { + + if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) { avcodec_flush_buffers(video_codec); got_packet_ptr = 0; - break; + break; } if (ret == 0) { got_packet_ptr = 1; @@ -1993,36 +1930,36 @@ bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra } } } - #else - #if LIBAVFORMAT_VERSION_MAJOR >= 54 - // Write video packet (older than FFmpeg 3.2) - error_code = avcodec_encode_video2(video_codec, &pkt, frame_final, &got_packet_ptr); - if (error_code != 0 ) { - cerr << "Frame AVERROR_EOF" << "\n"; - } - if (got_packet_ptr == 0 ) { - cerr << "Frame gotpacket error" << "\n"; - } - #else - // Write video packet (even older versions of FFmpeg) - int video_outbuf_size = 200000; - video_outbuf = (uint8_t*) av_malloc(200000); +#else +#if LIBAVFORMAT_VERSION_MAJOR >= 54 + // Write video packet (older than FFmpeg 3.2) + error_code = avcodec_encode_video2(video_codec, &pkt, frame_final, &got_packet_ptr); + if (error_code != 0) { + cerr << "Frame AVERROR_EOF" << "\n"; + } + if (got_packet_ptr == 0) { + cerr << "Frame gotpacket error" << "\n"; + } +#else + // Write video packet (even older versions of FFmpeg) + int video_outbuf_size = 200000; + video_outbuf = (uint8_t*) av_malloc(200000); - /* encode the image */ - int out_size = avcodec_encode_video(video_codec, video_outbuf, video_outbuf_size, frame_final); + /* encode the image */ + int out_size = avcodec_encode_video(video_codec, video_outbuf, video_outbuf_size, 
frame_final); - /* if zero size, it means the image was buffered */ - if (out_size > 0) { - if(video_codec->coded_frame->key_frame) - pkt.flags |= AV_PKT_FLAG_KEY; - pkt.data= video_outbuf; - pkt.size= out_size; + /* if zero size, it means the image was buffered */ + if (out_size > 0) { + if(video_codec->coded_frame->key_frame) + pkt.flags |= AV_PKT_FLAG_KEY; + pkt.data= video_outbuf; + pkt.size= out_size; - // got data back (so encode this frame) - got_packet_ptr = 1; - } - #endif - #endif + // got data back (so encode this frame) + got_packet_ptr = 1; + } +#endif +#endif /* if zero size, it means the image was buffered */ if (error_code == 0 && got_packet_ptr) { @@ -2042,9 +1979,8 @@ bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra /* write the compressed frame in the media file */ int error_code = av_interleaved_write_frame(oc, &pkt); - if (error_code < 0) - { - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); + if (error_code < 0) { + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet ERROR [" + (string) av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); return false; } } @@ -2055,14 +1991,14 @@ bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra // Deallocate packet AV_FREE_PACKET(&pkt); - #if IS_FFMPEG_3_2 +#if IS_FFMPEG_3_2 if (hw_en_on && hw_en_supported) { if (hw_frame) { av_frame_free(&hw_frame); hw_frame = NULL; } } - #endif +#endif } // Success @@ -2070,31 +2006,29 @@ bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra } // Output the ffmpeg info about this format, streams, and codecs (i.e. 
dump format) -void FFmpegWriter::OutputStreamInfo() -{ +void FFmpegWriter::OutputStreamInfo() { // output debug info av_dump_format(oc, 0, path.c_str(), 1); } // Init a collection of software rescalers (thread safe) -void FFmpegWriter::InitScalers(int source_width, int source_height) -{ +void FFmpegWriter::InitScalers(int source_width, int source_height) { int scale_mode = SWS_FAST_BILINEAR; if (openshot::Settings::Instance()->HIGH_QUALITY_SCALING) { scale_mode = SWS_LANCZOS; } // Init software rescalers vector (many of them, one for each thread) - for (int x = 0; x < num_of_rescalers; x++) - { + for (int x = 0; x < num_of_rescalers; x++) { // Init the software scaler from FFMpeg - #if IS_FFMPEG_3_2 +#if IS_FFMPEG_3_2 if (hw_en_on && hw_en_supported) { img_convert_ctx = sws_getContext(source_width, source_height, PIX_FMT_RGBA, info.width, info.height, AV_PIX_FMT_NV12, SWS_BILINEAR, NULL, NULL, NULL); } else - #endif +#endif { - img_convert_ctx = sws_getContext(source_width, source_height, PIX_FMT_RGBA, info.width, info.height, AV_GET_CODEC_PIXEL_FORMAT(video_st, video_st->codec), SWS_BILINEAR, NULL, NULL, NULL); + img_convert_ctx = sws_getContext(source_width, source_height, PIX_FMT_RGBA, info.width, info.height, AV_GET_CODEC_PIXEL_FORMAT(video_st, video_st->codec), SWS_BILINEAR, + NULL, NULL, NULL); } // Add rescaler to vector @@ -2109,8 +2043,7 @@ void FFmpegWriter::ResampleAudio(int sample_rate, int channels) { } // Remove & deallocate all software scalers -void FFmpegWriter::RemoveScalers() -{ +void FFmpegWriter::RemoveScalers() { // Close all rescalers for (int x = 0; x < num_of_rescalers; x++) sws_freeContext(image_rescalers[x]); From 19f5fa37f2c915be6ad4bb96323802d3c508f04b Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Thu, 18 Apr 2019 16:41:11 -0700 Subject: [PATCH 080/109] Replace qsv with videotoolbox for MacOS codec library. Windows and MacOS is not tested! We need users who test it. 
--- src/FFmpegReader.cpp | 12 ++++++------ src/FFmpegWriter.cpp | 6 +++--- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 3af851e1..2ea6fcbb 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -248,9 +248,9 @@ static enum AVPixelFormat get_hw_dec_format_qs(AVCodecContext *ctx, const enum A for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { switch (*p) { - case AV_PIX_FMT_QSV: - hw_de_av_pix_fmt_global = AV_PIX_FMT_QSV; - hw_de_av_device_type_global = AV_HWDEVICE_TYPE_QSV; + case AV_PIX_FMT_VIDEOTOOLBOX: + hw_de_av_pix_fmt_global = AV_PIX_FMT_VIDEOTOOLBOX; + hw_de_av_device_type_global = AV_HWDEVICE_TYPE_VIDEOTOOLBOX; return *p; break; } @@ -413,15 +413,15 @@ void FFmpegReader::Open() { i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER; switch (i_decoder_hw) { case 0: - hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; + hw_de_av_device_type = AV_HWDEVICE_TYPE_VIDEOTOOLBOX; pCodecCtx->get_format = get_hw_dec_format_qs; break; case 5: - hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; + hw_de_av_device_type = AV_HWDEVICE_TYPE_VIDEOTOOLBOX; pCodecCtx->get_format = get_hw_dec_format_qs; break; default: - hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; + hw_de_av_device_type = AV_HWDEVICE_TYPE_VIDEOTOOLBOX; pCodecCtx->get_format = get_hw_dec_format_qs; break; } diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index e12ff094..072ac6d7 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -211,12 +211,12 @@ void FFmpegWriter::SetVideoOptions(bool has_video, string codec, Fraction fps, i } } #elif defined(__APPLE__) - if ( (strcmp(codec.c_str(),"h264_qsv") == 0)) { + if ( (strcmp(codec.c_str(),"h264_videotoolbox") == 0)) { new_codec = avcodec_find_encoder_by_name(codec.c_str()); hw_en_on = 1; hw_en_supported = 1; - hw_en_av_pix_fmt = AV_PIX_FMT_QSV; - hw_en_av_device_type = AV_HWDEVICE_TYPE_QSV; + hw_en_av_pix_fmt = AV_PIX_FMT_VIDEOTOOLBOX; + hw_en_av_device_type = 
AV_HWDEVICE_TYPE_VIDEOTOOLBOX; } else { new_codec = avcodec_find_encoder_by_name(codec.c_str()); From 8d3263f2faacad1235624a9e0acb2327342b034c Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Thu, 18 Apr 2019 17:09:20 -0700 Subject: [PATCH 081/109] Some information --- doc/HW-ACCEL.md | 28 +++++++++++++++++++++------- 1 file changed, 21 insertions(+), 7 deletions(-) diff --git a/doc/HW-ACCEL.md b/doc/HW-ACCEL.md index edbe85b4..c11d2adc 100644 --- a/doc/HW-ACCEL.md +++ b/doc/HW-ACCEL.md @@ -8,8 +8,8 @@ Observations for developers wanting to make hardware acceleration work. * HW accel is supported from ffmpeg version 3.2 (3.3 for nVidia drivers) * HW accel was removed for nVidia drivers in Ubuntu for ffmpeg 4+ -* I could not manage to build a version of ffmpeg 4.1 with the nVidia SDK -that worked with nVidia cards. There might be a problem in ffmpeg 4+ +* I could not manage to build a version of ffmpeg 4.1 with the nVidia SDK +that worked with nVidia cards. There might be a problem in ffmpeg 4+ that prohibits this. **Notice:** The ffmpeg versions of Ubuntu and PPAs for Ubuntu show the @@ -55,7 +55,7 @@ int HW_EN_DEVICE_SET = 0; The correct version of libva is needed (libva in Ubuntu 16.04 or libva2 in Ubuntu 18.04) for the AppImage to work with hardware acceleration. -An AppImage that works on both systems (supporting libva and libva2), +An AppImage that works on both systems (supporting libva and libva2), might be possible when no libva is included in the AppImage. * vaapi is working for intel and AMD @@ -64,14 +64,14 @@ might be possible when no libva is included in the AppImage. ## AMD Graphics Cards (RadeonOpenCompute/ROCm) -Decoding and encoding on the (AMD) GPU can be done on systems where ROCm -is installed and run. Possible future use for GPU acceleration of effects (contributions -welcome). +Decoding and encoding on the (AMD) GPU is possible with the default drivers. 
+On systems where ROCm is installed and run a future use for GPU acceleration +of effects could be implemented (contributions welcome). ## Multiple Graphics Cards If the computer has multiple graphics cards installed, you can choose which -should be used by libopenshot. Also, you can optionally use one card for +should be used by libopenshot. Also, you can optionally use one card for decoding and the other for encoding (if both cards support acceleration). ## Help Us Improve Hardware Support @@ -82,3 +82,17 @@ this document if you find an error or discover some new information. **Desperately Needed:** a way to compile ffmpeg 4.0 and up with working nVidia hardware acceleration support on Ubuntu Linux! + +**Needed:** a way to get the options and limits of the GPU, like +supported codecs and the supported dimensions (width and height). + +**Would be nice:** a way in python to only have some source for the desired +plattform. Example: VAAPI is not supported in Windows or Mac and should not +be displayed as an option for encoder libraries. + +**Further improvement:** Right now the frame can be decoded on the GPU but the +frame is then copied to CPU memory. Before encoding the frame the frame is then +copied to GPU memory for encoding. That is necessary because the modifications +are done by the CPU. Using the GPU for that too will make it possible to do +away with these two copies. A possible solution would be to use Vulkan compute +which would be available on Linux and Windows natively and on MacOS via MoltenVK. 
From f6465e3a786f6796210b2d7db66b890d810847a6 Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Thu, 18 Apr 2019 21:05:31 -0500 Subject: [PATCH 082/109] Experimental Python install path logic --- src/bindings/python/CMakeLists.txt | 39 ++++++++++++++++++------------ 1 file changed, 24 insertions(+), 15 deletions(-) diff --git a/src/bindings/python/CMakeLists.txt b/src/bindings/python/CMakeLists.txt index c8686097..2a481aa7 100644 --- a/src/bindings/python/CMakeLists.txt +++ b/src/bindings/python/CMakeLists.txt @@ -30,10 +30,10 @@ FIND_PACKAGE(SWIG 2.0 REQUIRED) INCLUDE(${SWIG_USE_FILE}) ### Enable some legacy SWIG behaviors, in newer CMAKEs -if (CMAKE_VERSION VERSION_GREATER 3.13) +if (POLICY CMP0078) cmake_policy(SET CMP0078 OLD) endif() -if (CMAKE_VERSION VERSION_GREATER 3.14) +if (POLICY CMP0086) cmake_policy(SET CMP0086 OLD) endif() @@ -65,20 +65,29 @@ if (PYTHONLIBS_FOUND AND PYTHONINTERP_FOUND) target_link_libraries(${SWIG_MODULE_pyopenshot_REAL_NAME} ${PYTHON_LIBRARIES} openshot) - ### FIND THE PYTHON INTERPRETER (AND THE SITE PACKAGES FOLDER) - execute_process ( COMMAND ${PYTHON_EXECUTABLE} -c "\ -import site; from distutils.sysconfig import get_python_lib; \ -print( get_python_lib( plat_specific=True, standard_lib=True, prefix='${CMAKE_INSTALL_PREFIX}' ) \ - + '/' + get_python_lib( plat_specific=False, standard_lib=False, prefix='${CMAKE_INSTALL_PREFIX}' ).split('/')[-1] \ - + '/' )" - OUTPUT_VARIABLE _ABS_PYTHON_MODULE_PATH - OUTPUT_STRIP_TRAILING_WHITESPACE ) + ### Check if the following Debian-friendly python module path exists + SET(PYTHON_MODULE_PATH "${CMAKE_INSTALL_PREFIX}/lib/python${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}/site-packages") + if (NOT EXISTS ${PYTHON_MODULE_PATH}) - GET_FILENAME_COMPONENT(_ABS_PYTHON_MODULE_PATH - "${_ABS_PYTHON_MODULE_PATH}" ABSOLUTE) - FILE(RELATIVE_PATH _REL_PYTHON_MODULE_PATH - ${CMAKE_INSTALL_PREFIX} ${_ABS_PYTHON_MODULE_PATH}) - SET(PYTHON_MODULE_PATH ${_REL_PYTHON_MODULE_PATH}) + ### Check if another 
Debian-friendly python module path exists + SET(PYTHON_MODULE_PATH "${CMAKE_INSTALL_PREFIX}/lib/python${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}/dist-packages") + if (NOT EXISTS ${PYTHON_MODULE_PATH}) + + ### Calculate the python module path (using distutils) + execute_process ( COMMAND ${PYTHON_EXECUTABLE} -c "\ +from distutils.sysconfig import get_python_lib; \ +print( get_python_lib( plat_specific=True, prefix='${CMAKE_INSTALL_PREFIX}' ) )" + OUTPUT_VARIABLE _ABS_PYTHON_MODULE_PATH + OUTPUT_STRIP_TRAILING_WHITESPACE ) + + GET_FILENAME_COMPONENT(_ABS_PYTHON_MODULE_PATH + "${_ABS_PYTHON_MODULE_PATH}" ABSOLUTE) + FILE(RELATIVE_PATH _REL_PYTHON_MODULE_PATH + ${CMAKE_INSTALL_PREFIX} ${_ABS_PYTHON_MODULE_PATH}) + SET(PYTHON_MODULE_PATH ${_ABS_PYTHON_MODULE_PATH}) + endif() + endif() + message("PYTHON_MODULE_PATH: ${PYTHON_MODULE_PATH}") ############### INSTALL HEADERS & LIBRARY ################ ### Install Python bindings From 4c65804d4547cb61343116ea5c349a3653bec4bb Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Fri, 19 Apr 2019 15:47:29 -0700 Subject: [PATCH 083/109] User interface is now usable --- doc/HW-ACCEL.md | 4 ---- 1 file changed, 4 deletions(-) diff --git a/doc/HW-ACCEL.md b/doc/HW-ACCEL.md index c11d2adc..3f750e6e 100644 --- a/doc/HW-ACCEL.md +++ b/doc/HW-ACCEL.md @@ -86,10 +86,6 @@ hardware acceleration support on Ubuntu Linux! **Needed:** a way to get the options and limits of the GPU, like supported codecs and the supported dimensions (width and height). -**Would be nice:** a way in python to only have some source for the desired -plattform. Example: VAAPI is not supported in Windows or Mac and should not -be displayed as an option for encoder libraries. - **Further improvement:** Right now the frame can be decoded on the GPU but the frame is then copied to CPU memory. Before encoding the frame the frame is then copied to GPU memory for encoding. 
That is necessary because the modifications From 825e38ac9d8f80eaeb3fd90cd6a2d9cfa1c55400 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sun, 21 Apr 2019 10:04:24 -0700 Subject: [PATCH 084/109] Removing old way to select hardware support Removing the decode setting makes hardware supported decode break. There must be some hidden dependency on that variable somewhere which might also be responsible for the problems with nVidia on Linux. TODO Remove the dependency --- include/Settings.h | 6 +++++- src/Settings.cpp | 6 +++++- tests/Settings_Tests.cpp | 22 +++++++++++++--------- 3 files changed, 23 insertions(+), 11 deletions(-) diff --git a/include/Settings.h b/include/Settings.h index 3e18bc9a..fc98ab4f 100644 --- a/include/Settings.h +++ b/include/Settings.h @@ -77,13 +77,17 @@ namespace openshot { public: /// Use video card for faster video decoding (if supported) + // REMOVE_HW_OLD + // Removing this breaks decode completely + // Find bug in libopenshot bool HARDWARE_DECODE = false; /// Use video codec for faster video decoding (if supported) int HARDWARE_DECODER = 0; /// Use video card for faster video encoding (if supported) - bool HARDWARE_ENCODE = false; + // REMOVE_HW_OLD + //bool HARDWARE_ENCODE = false; /// Scale mode used in FFmpeg decoding and encoding (used as an optimization for faster previews) bool HIGH_QUALITY_SCALING = false; diff --git a/src/Settings.cpp b/src/Settings.cpp index 461f9183..e0f8e693 100644 --- a/src/Settings.cpp +++ b/src/Settings.cpp @@ -40,9 +40,13 @@ Settings *Settings::Instance() if (!m_pInstance) { // Create the actual instance of logger only once m_pInstance = new Settings; + // REMOVE_HW_OLD + // Removing this breaks decode completely + // Find bug in libopenshot m_pInstance->HARDWARE_DECODE = false; m_pInstance->HARDWARE_DECODER = 0; - m_pInstance->HARDWARE_ENCODE = false; + // REMOVE_HW_OLD + //m_pInstance->HARDWARE_ENCODE = false; m_pInstance->HIGH_QUALITY_SCALING = false; 
m_pInstance->MAX_WIDTH = 0; m_pInstance->MAX_HEIGHT = 0; diff --git a/tests/Settings_Tests.cpp b/tests/Settings_Tests.cpp index 86790653..f76d60ba 100644 --- a/tests/Settings_Tests.cpp +++ b/tests/Settings_Tests.cpp @@ -36,8 +36,9 @@ TEST(Settings_Default_Constructor) // Create an empty color Settings *s = Settings::Instance(); - CHECK_EQUAL(false, s->HARDWARE_DECODE); - CHECK_EQUAL(false, s->HARDWARE_ENCODE); + CHECK_EQUAL(1, s->HARDWARE_DECODER); + // REMOVE_HW_OLD + //CHECK_EQUAL(false, s->HARDWARE_ENCODE); CHECK_EQUAL(false, s->HIGH_QUALITY_SCALING); CHECK_EQUAL(false, s->WAIT_FOR_VIDEO_PROCESSING_TASK); } @@ -46,18 +47,21 @@ TEST(Settings_Change_Settings) { // Create an empty color Settings *s = Settings::Instance(); - s->HARDWARE_DECODE = true; - s->HARDWARE_ENCODE = true; + s->HARDWARE_DECODER = 1; + // REMOVE_HW_OLD + //s->HARDWARE_ENCODE = true; s->HIGH_QUALITY_SCALING = true; s->WAIT_FOR_VIDEO_PROCESSING_TASK = true; - CHECK_EQUAL(true, s->HARDWARE_DECODE); - CHECK_EQUAL(true, s->HARDWARE_ENCODE); + CHECK_EQUAL(1, s->HARDWARE_DECODER); + // REMOVE_HW_OLD + //CHECK_EQUAL(true, s->HARDWARE_ENCODE); CHECK_EQUAL(true, s->HIGH_QUALITY_SCALING); CHECK_EQUAL(true, s->WAIT_FOR_VIDEO_PROCESSING_TASK); - CHECK_EQUAL(true, s->HARDWARE_DECODE); - CHECK_EQUAL(true, s->HARDWARE_ENCODE); + CHECK_EQUAL(1, s->HARDWARE_DECODER); + // REMOVE_HW_OLD + //CHECK_EQUAL(true, s->HARDWARE_ENCODE); CHECK_EQUAL(true, Settings::Instance()->HIGH_QUALITY_SCALING); CHECK_EQUAL(true, Settings::Instance()->WAIT_FOR_VIDEO_PROCESSING_TASK); -} \ No newline at end of file +} From bb561ae4e2102505890760f23f80eee19d54590f Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sun, 21 Apr 2019 10:17:31 -0700 Subject: [PATCH 085/109] Temporarily disable test for DECODER --- tests/Settings_Tests.cpp | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/Settings_Tests.cpp b/tests/Settings_Tests.cpp index f76d60ba..bc127805 
100644 --- a/tests/Settings_Tests.cpp +++ b/tests/Settings_Tests.cpp @@ -36,7 +36,7 @@ TEST(Settings_Default_Constructor) // Create an empty color Settings *s = Settings::Instance(); - CHECK_EQUAL(1, s->HARDWARE_DECODER); + //CHECK_EQUAL(1, s->HARDWARE_DECODER); // REMOVE_HW_OLD //CHECK_EQUAL(false, s->HARDWARE_ENCODE); CHECK_EQUAL(false, s->HIGH_QUALITY_SCALING); @@ -47,19 +47,19 @@ TEST(Settings_Change_Settings) { // Create an empty color Settings *s = Settings::Instance(); - s->HARDWARE_DECODER = 1; + //s->HARDWARE_DECODER = 1; // REMOVE_HW_OLD //s->HARDWARE_ENCODE = true; s->HIGH_QUALITY_SCALING = true; s->WAIT_FOR_VIDEO_PROCESSING_TASK = true; - CHECK_EQUAL(1, s->HARDWARE_DECODER); + //CHECK_EQUAL(1, s->HARDWARE_DECODER); // REMOVE_HW_OLD //CHECK_EQUAL(true, s->HARDWARE_ENCODE); CHECK_EQUAL(true, s->HIGH_QUALITY_SCALING); CHECK_EQUAL(true, s->WAIT_FOR_VIDEO_PROCESSING_TASK); - CHECK_EQUAL(1, s->HARDWARE_DECODER); + //CHECK_EQUAL(1, s->HARDWARE_DECODER); // REMOVE_HW_OLD //CHECK_EQUAL(true, s->HARDWARE_ENCODE); CHECK_EQUAL(true, Settings::Instance()->HIGH_QUALITY_SCALING); From 0e77fbdc3b967b97bed8454cda4fa0e97185e232 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sun, 21 Apr 2019 10:53:53 -0700 Subject: [PATCH 086/109] Re-anable the DECODER test --- tests/Settings_Tests.cpp | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/Settings_Tests.cpp b/tests/Settings_Tests.cpp index bc127805..82690bbe 100644 --- a/tests/Settings_Tests.cpp +++ b/tests/Settings_Tests.cpp @@ -36,7 +36,7 @@ TEST(Settings_Default_Constructor) // Create an empty color Settings *s = Settings::Instance(); - //CHECK_EQUAL(1, s->HARDWARE_DECODER); + CHECK_EQUAL(0, s->HARDWARE_DECODER); // REMOVE_HW_OLD //CHECK_EQUAL(false, s->HARDWARE_ENCODE); CHECK_EQUAL(false, s->HIGH_QUALITY_SCALING); @@ -47,19 +47,19 @@ TEST(Settings_Change_Settings) { // Create an empty color Settings *s = Settings::Instance(); - 
//s->HARDWARE_DECODER = 1; + s->HARDWARE_DECODER = 1; // REMOVE_HW_OLD //s->HARDWARE_ENCODE = true; s->HIGH_QUALITY_SCALING = true; s->WAIT_FOR_VIDEO_PROCESSING_TASK = true; - //CHECK_EQUAL(1, s->HARDWARE_DECODER); + CHECK_EQUAL(1, s->HARDWARE_DECODER); // REMOVE_HW_OLD //CHECK_EQUAL(true, s->HARDWARE_ENCODE); CHECK_EQUAL(true, s->HIGH_QUALITY_SCALING); CHECK_EQUAL(true, s->WAIT_FOR_VIDEO_PROCESSING_TASK); - //CHECK_EQUAL(1, s->HARDWARE_DECODER); + CHECK_EQUAL(1, s->HARDWARE_DECODER); // REMOVE_HW_OLD //CHECK_EQUAL(true, s->HARDWARE_ENCODE); CHECK_EQUAL(true, Settings::Instance()->HIGH_QUALITY_SCALING); From e6efea7a929e21c13e016cef0ba569df4fb653fa Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sun, 21 Apr 2019 11:31:58 -0700 Subject: [PATCH 087/109] Update documentation --- doc/HW-ACCEL.md | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/doc/HW-ACCEL.md b/doc/HW-ACCEL.md index 3f750e6e..ed886a5f 100644 --- a/doc/HW-ACCEL.md +++ b/doc/HW-ACCEL.md @@ -23,15 +23,12 @@ The following settings are use by libopenshot to enable, disable, and control the various hardware acceleration features. ``` -/// Use video card for faster video decoding (if supported) +/// DEPRECATED Use video card for faster video decoding (if supported) bool HARDWARE_DECODE = false; /// Use video codec for faster video decoding (if supported) int HARDWARE_DECODER = 0; -/// Use video card for faster video encoding (if supported) -bool HARDWARE_ENCODE = false; - /// Number of threads of OpenMP int OMP_THREADS = 12; @@ -83,6 +80,11 @@ this document if you find an error or discover some new information. **Desperately Needed:** a way to compile ffmpeg 4.0 and up with working nVidia hardware acceleration support on Ubuntu Linux! +**BUG:** the use of HARDWARE_DECODE is somehow still needed, otherwise hardware +supported decoding breaks. TODO remove the hidden dependency on this variable. 
+The variable HARDWARE_DECODER should and does select if and which hardware +decoder is used but somehow HARDWARE_DECODE has to be there too. + **Needed:** a way to get the options and limits of the GPU, like supported codecs and the supported dimensions (width and height). From 65d9134722440d1812e095fb25910046a0dd9677 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sun, 21 Apr 2019 12:18:44 -0700 Subject: [PATCH 088/109] Remove DECODE varaible. Turn out that a buggy graphic driver was the problem. --- doc/HW-ACCEL.md | 12 +++++------- include/Settings.h | 4 +--- src/Settings.cpp | 4 +--- 3 files changed, 7 insertions(+), 13 deletions(-) diff --git a/doc/HW-ACCEL.md b/doc/HW-ACCEL.md index ed886a5f..9a8b78b9 100644 --- a/doc/HW-ACCEL.md +++ b/doc/HW-ACCEL.md @@ -23,9 +23,6 @@ The following settings are use by libopenshot to enable, disable, and control the various hardware acceleration features. ``` -/// DEPRECATED Use video card for faster video decoding (if supported) -bool HARDWARE_DECODE = false; - /// Use video codec for faster video decoding (if supported) int HARDWARE_DECODER = 0; @@ -80,10 +77,11 @@ this document if you find an error or discover some new information. **Desperately Needed:** a way to compile ffmpeg 4.0 and up with working nVidia hardware acceleration support on Ubuntu Linux! -**BUG:** the use of HARDWARE_DECODE is somehow still needed, otherwise hardware -supported decoding breaks. TODO remove the hidden dependency on this variable. -The variable HARDWARE_DECODER should and does select if and which hardware -decoder is used but somehow HARDWARE_DECODE has to be there too. +**BUG:** hardware supported decoding still has a bug. The speed gains with +decoding are by far not as great as with encoding. In case hardware accelerated +decoding does not work disable it. +Hardware acceleration might also break because of graphics drivers that have +bugs. 
**Needed:** a way to get the options and limits of the GPU, like supported codecs and the supported dimensions (width and height). diff --git a/include/Settings.h b/include/Settings.h index fc98ab4f..1ea61335 100644 --- a/include/Settings.h +++ b/include/Settings.h @@ -78,9 +78,7 @@ namespace openshot { public: /// Use video card for faster video decoding (if supported) // REMOVE_HW_OLD - // Removing this breaks decode completely - // Find bug in libopenshot - bool HARDWARE_DECODE = false; + //bool HARDWARE_DECODE = false; /// Use video codec for faster video decoding (if supported) int HARDWARE_DECODER = 0; diff --git a/src/Settings.cpp b/src/Settings.cpp index e0f8e693..e838cc51 100644 --- a/src/Settings.cpp +++ b/src/Settings.cpp @@ -41,9 +41,7 @@ Settings *Settings::Instance() // Create the actual instance of logger only once m_pInstance = new Settings; // REMOVE_HW_OLD - // Removing this breaks decode completely - // Find bug in libopenshot - m_pInstance->HARDWARE_DECODE = false; + //m_pInstance->HARDWARE_DECODE = false; m_pInstance->HARDWARE_DECODER = 0; // REMOVE_HW_OLD //m_pInstance->HARDWARE_ENCODE = false; From 79335277cba0d001faf1904b1e9ca907d52c4f4e Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Sun, 21 Apr 2019 14:37:06 -0500 Subject: [PATCH 089/109] Removing old commented out code --- doc/HW-ACCEL.md | 13 ++++++------- include/Settings.h | 8 -------- src/Settings.cpp | 4 ---- tests/Settings_Tests.cpp | 8 -------- 4 files changed, 6 insertions(+), 27 deletions(-) diff --git a/doc/HW-ACCEL.md b/doc/HW-ACCEL.md index 9a8b78b9..7ed4c637 100644 --- a/doc/HW-ACCEL.md +++ b/doc/HW-ACCEL.md @@ -74,20 +74,19 @@ This information might be wrong, and we would love to continue improving our support for hardware acceleration in OpenShot. Please help us update this document if you find an error or discover some new information. 
-**Desperately Needed:** a way to compile ffmpeg 4.0 and up with working nVidia +**Desperately Needed:** A way to compile ffmpeg 4.0 and up with working nVidia hardware acceleration support on Ubuntu Linux! -**BUG:** hardware supported decoding still has a bug. The speed gains with +**BUG:** Hardware supported decoding still has a bug. The speed gains with decoding are by far not as great as with encoding. In case hardware accelerated -decoding does not work disable it. -Hardware acceleration might also break because of graphics drivers that have -bugs. +decoding does not work disable it. Hardware acceleration might also break +because of graphics drivers that have bugs. -**Needed:** a way to get the options and limits of the GPU, like +**Needed:** A way to get the options and limits of the GPU, like supported codecs and the supported dimensions (width and height). **Further improvement:** Right now the frame can be decoded on the GPU but the -frame is then copied to CPU memory. Before encoding the frame the frame is then +frame is then copied to CPU memory. Before encoding the frame is then copied to GPU memory for encoding. That is necessary because the modifications are done by the CPU. Using the GPU for that too will make it possible to do away with these two copies. 
A possible solution would be to use Vulkan compute diff --git a/include/Settings.h b/include/Settings.h index 1ea61335..26edf464 100644 --- a/include/Settings.h +++ b/include/Settings.h @@ -76,17 +76,9 @@ namespace openshot { static Settings * m_pInstance; public: - /// Use video card for faster video decoding (if supported) - // REMOVE_HW_OLD - //bool HARDWARE_DECODE = false; - /// Use video codec for faster video decoding (if supported) int HARDWARE_DECODER = 0; - /// Use video card for faster video encoding (if supported) - // REMOVE_HW_OLD - //bool HARDWARE_ENCODE = false; - /// Scale mode used in FFmpeg decoding and encoding (used as an optimization for faster previews) bool HIGH_QUALITY_SCALING = false; diff --git a/src/Settings.cpp b/src/Settings.cpp index e838cc51..99b059e8 100644 --- a/src/Settings.cpp +++ b/src/Settings.cpp @@ -40,11 +40,7 @@ Settings *Settings::Instance() if (!m_pInstance) { // Create the actual instance of logger only once m_pInstance = new Settings; - // REMOVE_HW_OLD - //m_pInstance->HARDWARE_DECODE = false; m_pInstance->HARDWARE_DECODER = 0; - // REMOVE_HW_OLD - //m_pInstance->HARDWARE_ENCODE = false; m_pInstance->HIGH_QUALITY_SCALING = false; m_pInstance->MAX_WIDTH = 0; m_pInstance->MAX_HEIGHT = 0; diff --git a/tests/Settings_Tests.cpp b/tests/Settings_Tests.cpp index 82690bbe..1b8180c9 100644 --- a/tests/Settings_Tests.cpp +++ b/tests/Settings_Tests.cpp @@ -37,8 +37,6 @@ TEST(Settings_Default_Constructor) Settings *s = Settings::Instance(); CHECK_EQUAL(0, s->HARDWARE_DECODER); - // REMOVE_HW_OLD - //CHECK_EQUAL(false, s->HARDWARE_ENCODE); CHECK_EQUAL(false, s->HIGH_QUALITY_SCALING); CHECK_EQUAL(false, s->WAIT_FOR_VIDEO_PROCESSING_TASK); } @@ -48,20 +46,14 @@ TEST(Settings_Change_Settings) // Create an empty color Settings *s = Settings::Instance(); s->HARDWARE_DECODER = 1; - // REMOVE_HW_OLD - //s->HARDWARE_ENCODE = true; s->HIGH_QUALITY_SCALING = true; s->WAIT_FOR_VIDEO_PROCESSING_TASK = true; CHECK_EQUAL(1, s->HARDWARE_DECODER); 
- // REMOVE_HW_OLD - //CHECK_EQUAL(true, s->HARDWARE_ENCODE); CHECK_EQUAL(true, s->HIGH_QUALITY_SCALING); CHECK_EQUAL(true, s->WAIT_FOR_VIDEO_PROCESSING_TASK); CHECK_EQUAL(1, s->HARDWARE_DECODER); - // REMOVE_HW_OLD - //CHECK_EQUAL(true, s->HARDWARE_ENCODE); CHECK_EQUAL(true, Settings::Instance()->HIGH_QUALITY_SCALING); CHECK_EQUAL(true, Settings::Instance()->WAIT_FOR_VIDEO_PROCESSING_TASK); } From 140fbaddff7e26e472474631765a5956b0b4075c Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Tue, 23 Apr 2019 16:45:02 -0500 Subject: [PATCH 090/109] Added new AudioDeviceInfo struct, and populate a vector of them on QtPlayer initialization. This allows a user to overwrite the preferred audio device by using the setting PLAYBACK_AUDIO_DEVICE_NAME. --- include/AudioDeviceInfo.h | 43 ++++++++++++++++++++++++++++++++ include/Qt/AudioPlaybackThread.h | 14 ++++++++--- include/QtPlayer.h | 3 +++ include/Settings.h | 3 +++ src/Qt/AudioPlaybackThread.cpp | 25 +++++++++++++++---- src/QtPlayer.cpp | 11 +++++++- src/Settings.cpp | 2 +- src/bindings/python/openshot.i | 3 +++ src/bindings/ruby/openshot.i | 3 +++ 9 files changed, 97 insertions(+), 10 deletions(-) create mode 100644 include/AudioDeviceInfo.h diff --git a/include/AudioDeviceInfo.h b/include/AudioDeviceInfo.h new file mode 100644 index 00000000..29a89139 --- /dev/null +++ b/include/AudioDeviceInfo.h @@ -0,0 +1,43 @@ +/** + * @file + * @brief Header file for Audio Device Info struct + * @author Jonathan Thomas + * + * @section LICENSE + * + * Copyright (c) 2008-2014 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . 
+ * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#ifndef OPENSHOT_AUDIODEVICEINFO_H +#define OPENSHOT_AUDIODEVICEINFO_H + + +/** + * @brief This struct hold information about Audio Devices + * + * The type and name of the audio device. + */ +struct AudioDeviceInfo +{ + string name; + string type; +}; + +#endif \ No newline at end of file diff --git a/include/Qt/AudioPlaybackThread.h b/include/Qt/AudioPlaybackThread.h index 1d654756..be26c4e8 100644 --- a/include/Qt/AudioPlaybackThread.h +++ b/include/Qt/AudioPlaybackThread.h @@ -32,6 +32,8 @@ #include "../ReaderBase.h" #include "../RendererBase.h" #include "../AudioReaderSource.h" +#include "../AudioDeviceInfo.h" +#include "../Settings.h" namespace openshot { @@ -66,8 +68,11 @@ namespace openshot /// Error found during JUCE initialise method string initialise_error; - /// Create or get an instance of this singleton (invoke the class with this method) - static AudioDeviceManagerSingleton * Instance(int numChannels); + /// List of valid audio device names + vector audio_device_names; + + /// Override with no channels and no preferred audio device + static AudioDeviceManagerSingleton * Instance(); /// Public device manager property AudioDeviceManager audioDeviceManager; @@ -126,7 +131,10 @@ namespace openshot int getSpeed() const { if (source) return source->getSpeed(); else 
return 1; } /// Get Audio Error (if any) - string getError() { return AudioDeviceManagerSingleton::Instance(numChannels)->initialise_error; } + string getError() { return AudioDeviceManagerSingleton::Instance()->initialise_error; } + + /// Get Audio Device Names (if any) + vector getAudioDeviceNames() { return AudioDeviceManagerSingleton::Instance()->audio_device_names; }; friend class PlayerPrivate; friend class QtPlayer; diff --git a/include/QtPlayer.h b/include/QtPlayer.h index a1a7ee0c..c9137f5e 100644 --- a/include/QtPlayer.h +++ b/include/QtPlayer.h @@ -62,6 +62,9 @@ namespace openshot /// Get Error (if any) string GetError(); + /// Get Audio Devices from JUCE + vector GetAudioDeviceNames(); + /// Play the video void Play(); diff --git a/include/Settings.h b/include/Settings.h index 26edf464..859b2fab 100644 --- a/include/Settings.h +++ b/include/Settings.h @@ -109,6 +109,9 @@ namespace openshot { /// Which GPU to use to encode (0 is the first) int HW_EN_DEVICE_SET = 0; + /// The audio device name to use during playback + string PLAYBACK_AUDIO_DEVICE_NAME = ""; + /// Create or get an instance of this logger singleton (invoke the class with this method) static Settings * Instance(); }; diff --git a/src/Qt/AudioPlaybackThread.cpp b/src/Qt/AudioPlaybackThread.cpp index c64bd688..28db8df6 100644 --- a/src/Qt/AudioPlaybackThread.cpp +++ b/src/Qt/AudioPlaybackThread.cpp @@ -35,7 +35,7 @@ namespace openshot AudioDeviceManagerSingleton *AudioDeviceManagerSingleton::m_pInstance = NULL; // Create or Get an instance of the device manager singleton - AudioDeviceManagerSingleton *AudioDeviceManagerSingleton::Instance(int numChannels) + AudioDeviceManagerSingleton *AudioDeviceManagerSingleton::Instance() { if (!m_pInstance) { // Create the actual instance of device manager only once @@ -44,9 +44,10 @@ namespace openshot // Initialize audio device only 1 time String error = m_pInstance->audioDeviceManager.initialise ( 0, /* number of input channels */ - numChannels, /* 
number of output channels */ + 2, /* number of output channels */ 0, /* no XML settings.. */ - true /* select default device on failure */); + true, /* select default device on failure */ + Settings::Instance()->PLAYBACK_AUDIO_DEVICE_NAME /* preferredDefaultDeviceName */); // Persist any errors detected if (error.isNotEmpty()) { @@ -54,6 +55,20 @@ namespace openshot } else { m_pInstance->initialise_error = ""; } + + // Get all audio device names + for (int i = 0; i < m_pInstance->audioDeviceManager.getAvailableDeviceTypes().size(); ++i) + { + const AudioIODeviceType* t = m_pInstance->audioDeviceManager.getAvailableDeviceTypes()[i]; + const StringArray deviceNames = t->getDeviceNames (); + + for (int j = 0; j < deviceNames.size (); ++j ) + { + const String deviceName = deviceNames[j]; + AudioDeviceInfo deviceInfo = {deviceName.toStdString(), t->getTypeName().toStdString()}; + m_pInstance->audio_device_names.push_back(deviceInfo); + } + } } return m_pInstance; @@ -149,7 +164,7 @@ namespace openshot // Start new audio device (or get existing one) // Add callback - AudioDeviceManagerSingleton::Instance(numChannels)->audioDeviceManager.addAudioCallback(&player); + AudioDeviceManagerSingleton::Instance()->audioDeviceManager.addAudioCallback(&player); // Create TimeSliceThread for audio buffering time_thread.startThread(); @@ -182,7 +197,7 @@ namespace openshot transport.setSource(NULL); player.setSource(NULL); - AudioDeviceManagerSingleton::Instance(0)->audioDeviceManager.removeAudioCallback(&player); + AudioDeviceManagerSingleton::Instance()->audioDeviceManager.removeAudioCallback(&player); // Remove source delete source; diff --git a/src/QtPlayer.cpp b/src/QtPlayer.cpp index 4f53c7ca..3287c19d 100644 --- a/src/QtPlayer.cpp +++ b/src/QtPlayer.cpp @@ -56,7 +56,7 @@ QtPlayer::~QtPlayer() void QtPlayer::CloseAudioDevice() { // Close audio device (only do this once, when all audio playback is finished) - AudioDeviceManagerSingleton::Instance(0)->CloseAudioDevice(); + 
AudioDeviceManagerSingleton::Instance()->CloseAudioDevice(); } // Return any error string during initialization @@ -69,6 +69,15 @@ string QtPlayer::GetError() { } } +/// Get Audio Devices from JUCE +vector QtPlayer::GetAudioDeviceNames() { + if (reader && threads_started) { + return p->audioPlayback->getAudioDeviceNames(); + } else { + return vector(); + } +} + void QtPlayer::SetSource(const std::string &source) { FFmpegReader *ffreader = new FFmpegReader(source); diff --git a/src/Settings.cpp b/src/Settings.cpp index 99b059e8..8193ec6b 100644 --- a/src/Settings.cpp +++ b/src/Settings.cpp @@ -51,7 +51,7 @@ Settings *Settings::Instance() m_pInstance->DE_LIMIT_WIDTH_MAX = 1950; m_pInstance->HW_DE_DEVICE_SET = 0; m_pInstance->HW_EN_DEVICE_SET = 0; - + m_pInstance->PLAYBACK_AUDIO_DEVICE_NAME = ""; } return m_pInstance; diff --git a/src/bindings/python/openshot.i b/src/bindings/python/openshot.i index de1f020c..ed34b658 100644 --- a/src/bindings/python/openshot.i +++ b/src/bindings/python/openshot.i @@ -87,6 +87,7 @@ #include "../../../include/Settings.h" #include "../../../include/Timeline.h" #include "../../../include/ZmqLogger.h" +#include "../../../include/AudioDeviceInfo.h" %} @@ -154,6 +155,7 @@ %include "../../../include/Settings.h" %include "../../../include/Timeline.h" %include "../../../include/ZmqLogger.h" +%include "../../../include/AudioDeviceInfo.h" #ifdef USE_IMAGEMAGICK %include "../../../include/ImageReader.h" @@ -187,4 +189,5 @@ namespace std { %template(FieldVector) vector; %template(MappedFrameVector) vector; %template(MappedMetadata) map; + %template(AudioDeviceInfoVector) vector; } diff --git a/src/bindings/ruby/openshot.i b/src/bindings/ruby/openshot.i index b9a35d41..1cd9bb75 100644 --- a/src/bindings/ruby/openshot.i +++ b/src/bindings/ruby/openshot.i @@ -91,6 +91,7 @@ namespace std { #include "../../../include/Settings.h" #include "../../../include/Timeline.h" #include "../../../include/ZmqLogger.h" +#include "../../../include/AudioDeviceInfo.h" 
%} @@ -147,6 +148,7 @@ namespace std { %include "../../../include/Settings.h" %include "../../../include/Timeline.h" %include "../../../include/ZmqLogger.h" +%include "../../../include/AudioDeviceInfo.h" #ifdef USE_IMAGEMAGICK %include "../../../include/ImageReader.h" @@ -181,4 +183,5 @@ namespace std { %template(FieldVector) vector; %template(MappedFrameVector) vector; %template(MappedMetadata) map; + %template(AudioDeviceInfoVector) vector; } From a69c34ffbb49c5f2a79a5f124e8bf9b2b41df8ff Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Wed, 24 Apr 2019 09:41:52 -0500 Subject: [PATCH 091/109] Small refactor to audio device manager initialise (to prevent compile breakage on Mac) --- src/Qt/AudioPlaybackThread.cpp | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/Qt/AudioPlaybackThread.cpp b/src/Qt/AudioPlaybackThread.cpp index 28db8df6..d31f719e 100644 --- a/src/Qt/AudioPlaybackThread.cpp +++ b/src/Qt/AudioPlaybackThread.cpp @@ -41,13 +41,16 @@ namespace openshot // Create the actual instance of device manager only once m_pInstance = new AudioDeviceManagerSingleton; + // Get preferred audio device name (if any) + string preferred_audio_device = Settings::Instance()->PLAYBACK_AUDIO_DEVICE_NAME; + // Initialize audio device only 1 time String error = m_pInstance->audioDeviceManager.initialise ( 0, /* number of input channels */ 2, /* number of output channels */ 0, /* no XML settings.. 
*/ true, /* select default device on failure */ - Settings::Instance()->PLAYBACK_AUDIO_DEVICE_NAME /* preferredDefaultDeviceName */); + preferred_audio_device /* preferredDefaultDeviceName */); // Persist any errors detected if (error.isNotEmpty()) { From ef2ed569065beaf28049b2b9767fcbc6dfa84aa6 Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Wed, 24 Apr 2019 10:08:22 -0500 Subject: [PATCH 092/109] More refactoring for Mac compile breakage --- src/Qt/AudioPlaybackThread.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/Qt/AudioPlaybackThread.cpp b/src/Qt/AudioPlaybackThread.cpp index d31f719e..678a9c09 100644 --- a/src/Qt/AudioPlaybackThread.cpp +++ b/src/Qt/AudioPlaybackThread.cpp @@ -42,10 +42,10 @@ namespace openshot m_pInstance = new AudioDeviceManagerSingleton; // Get preferred audio device name (if any) - string preferred_audio_device = Settings::Instance()->PLAYBACK_AUDIO_DEVICE_NAME; + juce::String preferred_audio_device = juce::String(Settings::Instance()->PLAYBACK_AUDIO_DEVICE_NAME.c_str()); // Initialize audio device only 1 time - String error = m_pInstance->audioDeviceManager.initialise ( + juce::String error = m_pInstance->audioDeviceManager.initialise ( 0, /* number of input channels */ 2, /* number of output channels */ 0, /* no XML settings.. */ @@ -67,7 +67,7 @@ namespace openshot for (int j = 0; j < deviceNames.size (); ++j ) { - const String deviceName = deviceNames[j]; + juce::String deviceName = deviceNames[j]; AudioDeviceInfo deviceInfo = {deviceName.toStdString(), t->getTypeName().toStdString()}; m_pInstance->audio_device_names.push_back(deviceInfo); } From eea67ad97296e6c5ca475086cebd99ccc61c0496 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sat, 27 Apr 2019 13:36:21 -0700 Subject: [PATCH 093/109] Link to instruction to produce ffmpeg 4 plus the libraries on Ubuntu that support nVidia GPU acceleration. Tested on Mint 19.1. 
--- doc/HW-ACCEL.md | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/doc/HW-ACCEL.md b/doc/HW-ACCEL.md index 7ed4c637..34e05e4f 100644 --- a/doc/HW-ACCEL.md +++ b/doc/HW-ACCEL.md @@ -74,8 +74,13 @@ This information might be wrong, and we would love to continue improving our support for hardware acceleration in OpenShot. Please help us update this document if you find an error or discover some new information. -**Desperately Needed:** A way to compile ffmpeg 4.0 and up with working nVidia -hardware acceleration support on Ubuntu Linux! +**Desperately Needed:** The manual at: +https://www.tal.org/tutorials/ffmpeg_nvidia_encode +works pretty well. I could compile and install a version of ffmpeg 4.1.3 +on Mint 19.1 that supports the GPU on nVidia cards. A version of openshot +with hardware support using these libraries could use the nVidia GPU. +(A way to compile ffmpeg 4.0 and up with working nVidia +hardware acceleration support on Ubuntu Linux!) **BUG:** Hardware supported decoding still has a bug. The speed gains with decoding are by far not as great as with encoding. In case hardware accelerated From 3bd2ae5f2394dd725363d66bf85979fa7c02c341 Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Sun, 28 Apr 2019 14:03:45 -0500 Subject: [PATCH 094/109] Integrating VDPAU decoding into libopenshot --- doc/HW-ACCEL.md | 6 ++++++ src/FFmpegReader.cpp | 21 +++++++++++++++++++++ src/examples/Example.cpp | 4 ++++ 3 files changed, 31 insertions(+) diff --git a/doc/HW-ACCEL.md b/doc/HW-ACCEL.md index 34e05e4f..b785a9ce 100644 --- a/doc/HW-ACCEL.md +++ b/doc/HW-ACCEL.md @@ -96,3 +96,9 @@ copied to GPU memory for encoding. That is necessary because the modifications are done by the CPU. Using the GPU for that too will make it possible to do away with these two copies. A possible solution would be to use Vulkan compute which would be available on Linux and Windows natively and on MacOS via MoltenVK. 
+ +## Credit + +A big thanks to Peter M (https://github.com/eisneinechse) for all his work +on integrating hardware accelleration into libopenshot! The community thanks +you for this major contribution! diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 2ea6fcbb..929ddf72 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -186,6 +186,23 @@ static enum AVPixelFormat get_hw_dec_format_cu(AVCodecContext *ctx, const enum A ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::get_hw_dec_format_cu (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); return AV_PIX_FMT_NONE; } + +static enum AVPixelFormat get_hw_dec_format_vd(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) +{ + const enum AVPixelFormat *p; + + for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { + switch (*p) { + case AV_PIX_FMT_VDPAU: + hw_de_av_pix_fmt_global = AV_PIX_FMT_VDPAU; + hw_de_av_device_type_global = AV_HWDEVICE_TYPE_VDPAU; + return *p; + break; + } + } + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::get_hw_dec_format_vd (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + return AV_PIX_FMT_NONE; +} #endif #if defined(_WIN32) @@ -377,6 +394,10 @@ void FFmpegReader::Open() { hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA; pCodecCtx->get_format = get_hw_dec_format_cu; break; + case 6: + hw_de_av_device_type = AV_HWDEVICE_TYPE_VDPAU; + pCodecCtx->get_format = get_hw_dec_format_vd; + break; default: hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; pCodecCtx->get_format = get_hw_dec_format_va; diff --git a/src/examples/Example.cpp b/src/examples/Example.cpp index 411abdda..80339684 100644 --- a/src/examples/Example.cpp +++ b/src/examples/Example.cpp @@ -36,6 +36,10 @@ using namespace openshot; int main(int argc, char* argv[]) { + Settings *s = Settings::Instance(); + s->HARDWARE_DECODER = 2; // 1 VA-API, 2 NVDEC + s->HW_DE_DEVICE_SET = 1; + FFmpegReader 
r9("/home/jonathan/Videos/sintel_trailer-720p.mp4"); r9.Open(); r9.DisplayInfo(); From 2bafe60448eb72a82665b96a164558d61f4033fa Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Sun, 28 Apr 2019 17:18:43 -0500 Subject: [PATCH 095/109] Removing 0 cases, and adding new QSV decoder support (experimental) --- src/FFmpegReader.cpp | 47 ++++++++++++++++++++++++++++++-------------- 1 file changed, 32 insertions(+), 15 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 929ddf72..480e835b 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -259,7 +259,7 @@ static enum AVPixelFormat get_hw_dec_format_cu(AVCodecContext *ctx, const enum A #endif #if defined(__APPLE__) -static enum AVPixelFormat get_hw_dec_format_qs(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) +static enum AVPixelFormat get_hw_dec_format_vt(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) { const enum AVPixelFormat *p; @@ -272,11 +272,28 @@ static enum AVPixelFormat get_hw_dec_format_qs(AVCodecContext *ctx, const enum A break; } } - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::get_hw_dec_format_qs (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::get_hw_dec_format_vt (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); return AV_PIX_FMT_NONE; } #endif +static enum AVPixelFormat get_hw_dec_format_qs(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) +{ + const enum AVPixelFormat *p; + + for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { + switch (*p) { + case AV_PIX_FMT_QSV: + hw_de_av_pix_fmt_global = AV_PIX_FMT_QSV; + hw_de_av_device_type_global = AV_HWDEVICE_TYPE_QSV; + return *p; + break; + } + } + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::get_hw_dec_format_qs (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + 
return AV_PIX_FMT_NONE; +} + int FFmpegReader::IsHardwareDecodeSupported(int codecid) { int ret; @@ -382,10 +399,6 @@ void FFmpegReader::Open() { adapter_ptr = adapter; i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER; switch (i_decoder_hw) { - case 0: - hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; - pCodecCtx->get_format = get_hw_dec_format_va; - break; case 1: hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; pCodecCtx->get_format = get_hw_dec_format_va; @@ -398,6 +411,10 @@ void FFmpegReader::Open() { hw_de_av_device_type = AV_HWDEVICE_TYPE_VDPAU; pCodecCtx->get_format = get_hw_dec_format_vd; break; + case 7: + hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; + pCodecCtx->get_format = get_hw_dec_format_qs; + break; default: hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; pCodecCtx->get_format = get_hw_dec_format_va; @@ -408,10 +425,6 @@ void FFmpegReader::Open() { adapter_ptr = NULL; i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER; switch (i_decoder_hw) { - case 0: - hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; - pCodecCtx->get_format = get_hw_dec_format_dx; - break; case 2: hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA; pCodecCtx->get_format = get_hw_dec_format_cu; @@ -424,6 +437,10 @@ void FFmpegReader::Open() { hw_de_av_device_type = AV_HWDEVICE_TYPE_D3D11VA; pCodecCtx->get_format = get_hw_dec_format_d3; break; + case 7: + hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; + pCodecCtx->get_format = get_hw_dec_format_qs; + break; default: hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; pCodecCtx->get_format = get_hw_dec_format_dx; @@ -433,17 +450,17 @@ void FFmpegReader::Open() { adapter_ptr = NULL; i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER; switch (i_decoder_hw) { - case 0: - hw_de_av_device_type = AV_HWDEVICE_TYPE_VIDEOTOOLBOX; - pCodecCtx->get_format = get_hw_dec_format_qs; - break; case 5: hw_de_av_device_type = AV_HWDEVICE_TYPE_VIDEOTOOLBOX; + pCodecCtx->get_format = get_hw_dec_format_vt; + break; + case 7: + 
hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; pCodecCtx->get_format = get_hw_dec_format_qs; break; default: hw_de_av_device_type = AV_HWDEVICE_TYPE_VIDEOTOOLBOX; - pCodecCtx->get_format = get_hw_dec_format_qs; + pCodecCtx->get_format = get_hw_dec_format_vt; break; } #endif From cdb4ae5483d43748d5af8ecbc6a7e5b112b5c850 Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Mon, 29 Apr 2019 17:05:13 -0500 Subject: [PATCH 096/109] Fixing crash on Mac due to juce::String again --- src/FFmpegReader.cpp | 2 +- src/Qt/AudioPlaybackThread.cpp | 9 +++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 480e835b..690af140 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -76,7 +76,7 @@ using namespace openshot; int hw_de_on = 0; #if IS_FFMPEG_3_2 -AVPixelFormat hw_de_av_pix_fmt_global = AV_PIX_FMT_NONE; + AVPixelFormat hw_de_av_pix_fmt_global = AV_PIX_FMT_NONE; AVHWDeviceType hw_de_av_device_type_global = AV_HWDEVICE_TYPE_NONE; #endif diff --git a/src/Qt/AudioPlaybackThread.cpp b/src/Qt/AudioPlaybackThread.cpp index 678a9c09..7bad4649 100644 --- a/src/Qt/AudioPlaybackThread.cpp +++ b/src/Qt/AudioPlaybackThread.cpp @@ -45,7 +45,7 @@ namespace openshot juce::String preferred_audio_device = juce::String(Settings::Instance()->PLAYBACK_AUDIO_DEVICE_NAME.c_str()); // Initialize audio device only 1 time - juce::String error = m_pInstance->audioDeviceManager.initialise ( + juce::String audio_error = m_pInstance->audioDeviceManager.initialise ( 0, /* number of input channels */ 2, /* number of output channels */ 0, /* no XML settings.. 
*/ @@ -53,8 +53,8 @@ namespace openshot preferred_audio_device /* preferredDefaultDeviceName */); // Persist any errors detected - if (error.isNotEmpty()) { - m_pInstance->initialise_error = error.toStdString(); + if (audio_error.isNotEmpty()) { + m_pInstance->initialise_error = audio_error.toRawUTF8(); } else { m_pInstance->initialise_error = ""; } @@ -68,7 +68,8 @@ namespace openshot for (int j = 0; j < deviceNames.size (); ++j ) { juce::String deviceName = deviceNames[j]; - AudioDeviceInfo deviceInfo = {deviceName.toStdString(), t->getTypeName().toStdString()}; + juce::String typeName = t->getTypeName(); + AudioDeviceInfo deviceInfo = {deviceName.toRawUTF8(), typeName.toRawUTF8()}; m_pInstance->audio_device_names.push_back(deviceInfo); } } From 70f07ca4f81196d6e919858a4395fc72184f2a70 Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Tue, 30 Apr 2019 13:17:43 -0500 Subject: [PATCH 097/109] Improving HW-ACCEL documentation --- doc/HW-ACCEL.md | 88 ++++++++++++++++++++++++++++++---------------- include/Settings.h | 13 ++++++- 2 files changed, 70 insertions(+), 31 deletions(-) diff --git a/doc/HW-ACCEL.md b/doc/HW-ACCEL.md index b785a9ce..af5bb091 100644 --- a/doc/HW-ACCEL.md +++ b/doc/HW-ACCEL.md @@ -1,20 +1,33 @@ ## Hardware Acceleration -Observations for developers wanting to make hardware acceleration work. +OpenShot now has experimental support for hardware acceleration, which uses 1 (or more) +graphics cards to offload some of the work for both decoding and encoding. This is +very new and experimental (as of May 2019), but we look forward to "accelerating" +our support for this in the future! 
-*All observations are for Linux (but contributions welcome).* +The following table summarizes our current level of support: + +| | Linux Decode | Linux Encode | Mac Decode | Mac Encode | Windows Decode | Windows Encode | Notes | +|--------------------|--------------|--------------|------------|------------|----------------------|----------------|----------------| +| VA-API | Verified | Verified | - | - | - | - | Linux Only | +| VDPAU | Verified | Verified | - | - | - | - | Linux Only | +| CUDA (NVDEC/NVENC) | Verified | Verified | - | - | - | Verified | Cross Platform | +| VideoToolBox | - | - | Verified | Crashes | - | - | Mac Only | +| DXVA2 | - | - | - | - | Fails (green frames) | Verified | Windows Only | +| D3D11VA | - | - | - | - | Fails (green frames) | - | Windows Only | +| QSV | Fails | Fails | Fails | Fails | Fails | Fails | Cross Platform | ## Supported FFmpeg Versions -* HW accel is supported from ffmpeg version 3.2 (3.3 for nVidia drivers) -* HW accel was removed for nVidia drivers in Ubuntu for ffmpeg 4+ -* I could not manage to build a version of ffmpeg 4.1 with the nVidia SDK -that worked with nVidia cards. There might be a problem in ffmpeg 4+ +* HW accel is supported from FFmpeg version 3.2 (3.3 for nVidia drivers) +* HW accel was removed for nVidia drivers in Ubuntu for FFmpeg 4+ +* We could not manage to build a version of FFmpeg 4.1 with the nVidia SDK +that worked with nVidia cards. There might be a problem in FFmpeg 4+ that prohibits this. -**Notice:** The ffmpeg versions of Ubuntu and PPAs for Ubuntu show the -same behaviour. ffmpeg 3 has working nVidia hardware acceleration while -ffmpeg 4+ has no support for nVidia hardware acceleration +**Notice:** The FFmpeg versions of Ubuntu and PPAs for Ubuntu show the +same behaviour. FFmpeg 3 has working nVidia hardware acceleration while +FFmpeg 4+ has no support for nVidia hardware acceleration included. ## OpenShot Settings @@ -26,10 +39,19 @@ the various hardware acceleration features. 
/// Use video codec for faster video decoding (if supported) int HARDWARE_DECODER = 0; +/* 0 - No acceleration + 1 - Linux VA-API + 2 - nVidia NVDEC + 3 - Windows D3D9 + 4 - Windows D3D11 + 5 - MacOS / VideoToolBox + 6 - Linux VDPAU + 7 - Intel QSV */ + /// Number of threads of OpenMP int OMP_THREADS = 12; -/// Number of threads that ffmpeg uses +/// Number of threads that FFmpeg uses int FF_THREADS = 8; /// Maximum rows that hardware decode can handle @@ -38,10 +60,10 @@ int DE_LIMIT_HEIGHT_MAX = 1100; /// Maximum columns that hardware decode can handle int DE_LIMIT_WIDTH_MAX = 1950; -/// Which GPU to use to decode (0 is the first) +/// Which GPU to use to decode (0 is the first, LINUX ONLY) int HW_DE_DEVICE_SET = 0; -/// Which GPU to use to encode (0 is the first) +/// Which GPU to use to encode (0 is the first, LINUX ONLY) int HW_EN_DEVICE_SET = 0; ``` @@ -67,38 +89,44 @@ of effects could be implemented (contributions welcome). If the computer has multiple graphics cards installed, you can choose which should be used by libopenshot. Also, you can optionally use one card for decoding and the other for encoding (if both cards support acceleration). +This is currently only supported on Linux, due to the device name FFmpeg +expects (i.e. **/dev/dri/render128**). Contributions welcome if anyone can +determine what string format to pass for Windows and Mac. ## Help Us Improve Hardware Support This information might be wrong, and we would love to continue improving our support for hardware acceleration in OpenShot. Please help us update -this document if you find an error or discover some new information. +this document if you find an error or discover new and/or useful information. -**Desperately Needed:** The manual at: +**FFmpeg 4 + nVidia** The manual at: https://www.tal.org/tutorials/ffmpeg_nvidia_encode -works pretty well. I could compile and install a version of ffmpeg 4.1.3 +works pretty well. 
We could compile and install a version of FFmpeg 4.1.3 on Mint 19.1 that supports the GPU on nVidia cards. A version of openshot with hardware support using these libraries could use the nVidia GPU. -(A way to compile ffmpeg 4.0 and up with working nVidia -hardware acceleration support on Ubuntu Linux!) -**BUG:** Hardware supported decoding still has a bug. The speed gains with -decoding are by far not as great as with encoding. In case hardware accelerated -decoding does not work disable it. Hardware acceleration might also break -because of graphics drivers that have bugs. +**BUG:** Hardware supported decoding still has some bugs (as you can see from +the chart above). Also, the speed gains with decoding are not as great +as with encoding. Currently, if hardware decoding fails, there is no +fallback (you either get green frames or an "invalid file" error in OpenShot). +This needs to be improved to successfully fall-back to software decoding. -**Needed:** A way to get the options and limits of the GPU, like -supported codecs and the supported dimensions (width and height). +**Needed:** + * A way to get options and limits of the GPU, such as + supported dimensions (width and height). + * A way to list the actual Graphic Cards available to FFmpeg (for the + user to choose which card for decoding and encoding, as opposed + to "Graphics Card X") -**Further improvement:** Right now the frame can be decoded on the GPU but the -frame is then copied to CPU memory. Before encoding the frame is then -copied to GPU memory for encoding. That is necessary because the modifications -are done by the CPU. Using the GPU for that too will make it possible to do -away with these two copies. A possible solution would be to use Vulkan compute -which would be available on Linux and Windows natively and on MacOS via MoltenVK. +**Further improvement:** Right now the frame can be decoded on the GPU, but the +frame is then copied to CPU memory for modifications. 
It is then copied back to +GPU memory for encoding. Using the GPU for both decoding and modifications +will make it possible to do away with these two copies. A possible solution would +be to use Vulkan compute which would be available on Linux and Windows natively +and on MacOS via MoltenVK. ## Credit A big thanks to Peter M (https://github.com/eisneinechse) for all his work -on integrating hardware accelleration into libopenshot! The community thanks +on integrating hardware acceleration into libopenshot! The community thanks you for this major contribution! diff --git a/include/Settings.h b/include/Settings.h index 859b2fab..89561034 100644 --- a/include/Settings.h +++ b/include/Settings.h @@ -76,7 +76,18 @@ namespace openshot { static Settings * m_pInstance; public: - /// Use video codec for faster video decoding (if supported) + /** + * @brief Use video codec for faster video decoding (if supported) + * + * 0 - No acceleration, + * 1 - Linux VA-API, + * 2 - nVidia NVDEC, + * 3 - Windows D3D9, + * 4 - Windows D3D11, + * 5 - MacOS / VideoToolBox, + * 6 - Linux VDPAU, + * 7 - Intel QSV + */ int HARDWARE_DECODER = 0; /// Scale mode used in FFmpeg decoding and encoding (used as an optimization for faster previews) From 27450a8a869ccd400db115b16b8c6c3122810b23 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Tue, 30 Apr 2019 14:05:52 -0700 Subject: [PATCH 098/109] Clarify table --- doc/HW-ACCEL.md | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/doc/HW-ACCEL.md b/doc/HW-ACCEL.md index af5bb091..1dd8c92c 100644 --- a/doc/HW-ACCEL.md +++ b/doc/HW-ACCEL.md @@ -1,8 +1,8 @@ ## Hardware Acceleration -OpenShot now has experimental support for hardware acceleration, which uses 1 (or more) +OpenShot now has experimental support for hardware acceleration, which uses 1 (or more) graphics cards to offload some of the work for both decoding and encoding. 
This is -very new and experimental (as of May 2019), but we look forward to "accelerating" +very new and experimental (as of May 2019), but we look forward to "accelerating" our support for this in the future! The following table summarizes our current level of support: @@ -10,8 +10,8 @@ The following table summarizes our current level of support: | | Linux Decode | Linux Encode | Mac Decode | Mac Encode | Windows Decode | Windows Encode | Notes | |--------------------|--------------|--------------|------------|------------|----------------------|----------------|----------------| | VA-API | Verified | Verified | - | - | - | - | Linux Only | -| VDPAU | Verified | Verified | - | - | - | - | Linux Only | -| CUDA (NVDEC/NVENC) | Verified | Verified | - | - | - | Verified | Cross Platform | +| VDPAU | Verified(+) | N/A(++) | - | - | - | - | Linux Only | +| CUDA (NVDEC/NVENC) | Fails(+++) | Verified | - | - | - | Verified | Cross Platform | | VideoToolBox | - | - | Verified | Crashes | - | - | Mac Only | | DXVA2 | - | - | - | - | Fails (green frames) | Verified | Windows Only | | D3D11VA | - | - | - | - | Fails (green frames) | - | Windows Only | @@ -21,9 +21,9 @@ The following table summarizes our current level of support: * HW accel is supported from FFmpeg version 3.2 (3.3 for nVidia drivers) * HW accel was removed for nVidia drivers in Ubuntu for FFmpeg 4+ -* We could not manage to build a version of FFmpeg 4.1 with the nVidia SDK -that worked with nVidia cards. There might be a problem in FFmpeg 4+ -that prohibits this. +* (+) VDPAU for some reason needs a card number one higher than it really is +* (++) VDPAU is a decoder only. +* (+++) Green frames **Notice:** The FFmpeg versions of Ubuntu and PPAs for Ubuntu show the same behaviour. FFmpeg 3 has working nVidia hardware acceleration while @@ -106,23 +106,23 @@ on Mint 19.1 that supports the GPU on nVidia cards. A version of openshot with hardware support using these libraries could use the nVidia GPU. 
**BUG:** Hardware supported decoding still has some bugs (as you can see from -the chart above). Also, the speed gains with decoding are not as great +the chart above). Also, the speed gains with decoding are not as great as with encoding. Currently, if hardware decoding fails, there is no fallback (you either get green frames or an "invalid file" error in OpenShot). This needs to be improved to successfully fall-back to software decoding. -**Needed:** - * A way to get options and limits of the GPU, such as +**Needed:** + * A way to get options and limits of the GPU, such as supported dimensions (width and height). - * A way to list the actual Graphic Cards available to FFmpeg (for the - user to choose which card for decoding and encoding, as opposed + * A way to list the actual Graphic Cards available to FFmpeg (for the + user to choose which card for decoding and encoding, as opposed to "Graphics Card X") **Further improvement:** Right now the frame can be decoded on the GPU, but the -frame is then copied to CPU memory for modifications. It is then copied back to +frame is then copied to CPU memory for modifications. It is then copied back to GPU memory for encoding. Using the GPU for both decoding and modifications -will make it possible to do away with these two copies. A possible solution would -be to use Vulkan compute which would be available on Linux and Windows natively +will make it possible to do away with these two copies. A possible solution would +be to use Vulkan compute which would be available on Linux and Windows natively and on MacOS via MoltenVK. 
## Credit From 9324b691e9a2152ad1dfb6fdfed1ccd44b4c750e Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Tue, 30 Apr 2019 17:11:04 -0500 Subject: [PATCH 099/109] Improving HW-ACCEL documentation --- doc/HW-ACCEL.md | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/doc/HW-ACCEL.md b/doc/HW-ACCEL.md index 1dd8c92c..c04c3b88 100644 --- a/doc/HW-ACCEL.md +++ b/doc/HW-ACCEL.md @@ -7,23 +7,24 @@ our support for this in the future! The following table summarizes our current level of support: -| | Linux Decode | Linux Encode | Mac Decode | Mac Encode | Windows Decode | Windows Encode | Notes | -|--------------------|--------------|--------------|------------|------------|----------------------|----------------|----------------| -| VA-API | Verified | Verified | - | - | - | - | Linux Only | -| VDPAU | Verified(+) | N/A(++) | - | - | - | - | Linux Only | -| CUDA (NVDEC/NVENC) | Fails(+++) | Verified | - | - | - | Verified | Cross Platform | -| VideoToolBox | - | - | Verified | Crashes | - | - | Mac Only | -| DXVA2 | - | - | - | - | Fails (green frames) | Verified | Windows Only | -| D3D11VA | - | - | - | - | Fails (green frames) | - | Windows Only | -| QSV | Fails | Fails | Fails | Fails | Fails | Fails | Cross Platform | +| | Linux Decode | Linux Encode | Mac Decode | Mac Encode | Windows Decode | Windows Encode | Notes | +|--------------------|--------------|--------------|------------|------------|----------------|----------------|----------------| +| VA-API | Verified | Verified | - | - | - | - | Linux Only | +| VDPAU | Verified(+) | N/A(++) | - | - | - | - | Linux Only | +| CUDA (NVDEC/NVENC) | Fails(+++) | Verified | - | - | - | Verified | Cross Platform | +| VideoToolBox | - | - | Verified | Crashes | - | - | Mac Only | +| DXVA2 | - | - | - | - | Fails(+++) | - | Windows Only | +| D3D11VA | - | - | - | - | Fails(+++) | - | Windows Only | +| QSV | Fails | Fails | Fails | Fails | Fails | Fails | Cross Platform | + +* *(+) 
VDPAU for some reason needs a card number one higher than it really is* +* *(++) VDPAU is a decoder only.* +* *(+++) Green frames (pixel data not correctly tranferred back to system memory)* ## Supported FFmpeg Versions * HW accel is supported from FFmpeg version 3.2 (3.3 for nVidia drivers) * HW accel was removed for nVidia drivers in Ubuntu for FFmpeg 4+ -* (+) VDPAU for some reason needs a card number one higher than it really is -* (++) VDPAU is a decoder only. -* (+++) Green frames **Notice:** The FFmpeg versions of Ubuntu and PPAs for Ubuntu show the same behaviour. FFmpeg 3 has working nVidia hardware acceleration while From fad8f40cf577e314b9890e8db7a76480c6b98959 Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Tue, 30 Apr 2019 17:43:15 -0500 Subject: [PATCH 100/109] Simplifying hardware decoder logic (when looking for pixmap) --- src/FFmpegReader.cpp | 285 +++++++++++++++---------------------------- 1 file changed, 95 insertions(+), 190 deletions(-) diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 690af140..acd3b55f 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -152,137 +152,53 @@ bool AudioLocation::is_near(AudioLocation location, int samples_per_frame, int64 #if IS_FFMPEG_3_2 -#if defined(__linux__) -static enum AVPixelFormat get_hw_dec_format_va(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) -{ - const enum AVPixelFormat *p; - - for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { - switch (*p) { - case AV_PIX_FMT_VAAPI: - hw_de_av_pix_fmt_global = AV_PIX_FMT_VAAPI; - hw_de_av_device_type_global = AV_HWDEVICE_TYPE_VAAPI; - return *p; - break; - } - } - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::get_hw_dec_format_va (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - return AV_PIX_FMT_NONE; -} - -static enum AVPixelFormat get_hw_dec_format_cu(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) -{ - const enum AVPixelFormat *p; - - for (p = 
pix_fmts; *p != AV_PIX_FMT_NONE; p++) { - switch (*p) { - case AV_PIX_FMT_CUDA: - hw_de_av_pix_fmt_global = AV_PIX_FMT_CUDA; - hw_de_av_device_type_global = AV_HWDEVICE_TYPE_CUDA; - return *p; - break; - } - } - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::get_hw_dec_format_cu (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - return AV_PIX_FMT_NONE; -} - -static enum AVPixelFormat get_hw_dec_format_vd(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) -{ - const enum AVPixelFormat *p; - - for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { - switch (*p) { - case AV_PIX_FMT_VDPAU: - hw_de_av_pix_fmt_global = AV_PIX_FMT_VDPAU; - hw_de_av_device_type_global = AV_HWDEVICE_TYPE_VDPAU; - return *p; - break; - } - } - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::get_hw_dec_format_vd (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - return AV_PIX_FMT_NONE; -} -#endif - -#if defined(_WIN32) -static enum AVPixelFormat get_hw_dec_format_dx(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) -{ - const enum AVPixelFormat *p; - - for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { - switch (*p) { - case AV_PIX_FMT_DXVA2_VLD: - hw_de_av_pix_fmt_global = AV_PIX_FMT_DXVA2_VLD; - hw_de_av_device_type_global = AV_HWDEVICE_TYPE_DXVA2; - return *p; - break; - } - } - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::get_hw_dec_format_dx (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - return AV_PIX_FMT_NONE; -} - -static enum AVPixelFormat get_hw_dec_format_d3(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) -{ - const enum AVPixelFormat *p; - - for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { - switch (*p) { - case AV_PIX_FMT_D3D11: - hw_de_av_pix_fmt_global = AV_PIX_FMT_D3D11; - hw_de_av_device_type_global = AV_HWDEVICE_TYPE_D3D11VA; - return *p; - break; - } - } - 
ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::get_hw_dec_format_d3 (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - return AV_PIX_FMT_NONE; -} - -static enum AVPixelFormat get_hw_dec_format_cu(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) -{ - const enum AVPixelFormat *p; - - for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { - switch (*p) { - case AV_PIX_FMT_CUDA: - hw_de_av_pix_fmt_global = AV_PIX_FMT_CUDA; - hw_de_av_device_type_global = AV_HWDEVICE_TYPE_CUDA; - return *p; - break; - } - } - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::get_hw_dec_format_cu (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - return AV_PIX_FMT_NONE; -} -#endif - -#if defined(__APPLE__) -static enum AVPixelFormat get_hw_dec_format_vt(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) -{ - const enum AVPixelFormat *p; - - for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { - switch (*p) { - case AV_PIX_FMT_VIDEOTOOLBOX: - hw_de_av_pix_fmt_global = AV_PIX_FMT_VIDEOTOOLBOX; - hw_de_av_device_type_global = AV_HWDEVICE_TYPE_VIDEOTOOLBOX; - return *p; - break; - } - } - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::get_hw_dec_format_vt (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); - return AV_PIX_FMT_NONE; -} -#endif - -static enum AVPixelFormat get_hw_dec_format_qs(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) +// Get hardware pix format +static enum AVPixelFormat get_hw_dec_format(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) { const enum AVPixelFormat *p; for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { switch (*p) { +#if defined(__linux__) + // Linux pix formats + case AV_PIX_FMT_VAAPI: + hw_de_av_pix_fmt_global = AV_PIX_FMT_VAAPI; + hw_de_av_device_type_global = AV_HWDEVICE_TYPE_VAAPI; + return *p; + break; + case AV_PIX_FMT_VDPAU: + 
hw_de_av_pix_fmt_global = AV_PIX_FMT_VDPAU; + hw_de_av_device_type_global = AV_HWDEVICE_TYPE_VDPAU; + return *p; + break; +#endif +#if defined(_WIN32) + // Windows pix formats + case AV_PIX_FMT_DXVA2_VLD: + hw_de_av_pix_fmt_global = AV_PIX_FMT_DXVA2_VLD; + hw_de_av_device_type_global = AV_HWDEVICE_TYPE_DXVA2; + return *p; + break; + case AV_PIX_FMT_D3D11: + hw_de_av_pix_fmt_global = AV_PIX_FMT_D3D11; + hw_de_av_device_type_global = AV_HWDEVICE_TYPE_D3D11VA; + return *p; + break; +#endif +#if defined(__APPLE__) + // Apple pix formats + case AV_PIX_FMT_VIDEOTOOLBOX: + hw_de_av_pix_fmt_global = AV_PIX_FMT_VIDEOTOOLBOX; + hw_de_av_device_type_global = AV_HWDEVICE_TYPE_VIDEOTOOLBOX; + return *p; + break; +#endif + // Cross-platform pix formats + case AV_PIX_FMT_CUDA: + hw_de_av_pix_fmt_global = AV_PIX_FMT_CUDA; + hw_de_av_device_type_global = AV_HWDEVICE_TYPE_CUDA; + return *p; + break; case AV_PIX_FMT_QSV: hw_de_av_pix_fmt_global = AV_PIX_FMT_QSV; hw_de_av_device_type_global = AV_HWDEVICE_TYPE_QSV; @@ -290,7 +206,7 @@ static enum AVPixelFormat get_hw_dec_format_qs(AVCodecContext *ctx, const enum A break; } } - ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::get_hw_dec_format_qs (Unable to decode this file using hardware decode.)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::get_hw_dec_format (Unable to decode this file using hardware decode)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); return AV_PIX_FMT_NONE; } @@ -304,15 +220,14 @@ int FFmpegReader::IsHardwareDecodeSupported(int codecid) case AV_CODEC_ID_WMV1: case AV_CODEC_ID_WMV2: case AV_CODEC_ID_WMV3: - ret = 1; - break; - default : - ret = 0; - break; + ret = 1; + break; + default : + ret = 0; + break; } return ret; } - #endif void FFmpegReader::Open() { @@ -393,76 +308,66 @@ void FFmpegReader::Open() { adapter_num = openshot::Settings::Instance()->HW_DE_DEVICE_SET; fprintf(stderr, "\n\nDecodiing Device Nr: %d\n", adapter_num); + // Set 
hardware pix format (callback) + pCodecCtx->get_format = get_hw_dec_format; + if (adapter_num < 3 && adapter_num >=0) { #if defined(__linux__) - snprintf(adapter,sizeof(adapter),"/dev/dri/renderD%d", adapter_num+128); - adapter_ptr = adapter; - i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER; - switch (i_decoder_hw) { - case 1: - hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; - pCodecCtx->get_format = get_hw_dec_format_va; - break; - case 2: - hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA; - pCodecCtx->get_format = get_hw_dec_format_cu; - break; - case 6: - hw_de_av_device_type = AV_HWDEVICE_TYPE_VDPAU; - pCodecCtx->get_format = get_hw_dec_format_vd; - break; - case 7: - hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; - pCodecCtx->get_format = get_hw_dec_format_qs; - break; - default: - hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; - pCodecCtx->get_format = get_hw_dec_format_va; - break; - } - -#elif defined(_WIN32) - adapter_ptr = NULL; - i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER; - switch (i_decoder_hw) { + snprintf(adapter,sizeof(adapter),"/dev/dri/renderD%d", adapter_num+128); + adapter_ptr = adapter; + i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER; + switch (i_decoder_hw) { + case 1: + hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; + break; case 2: hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA; - pCodecCtx->get_format = get_hw_dec_format_cu; break; - case 3: - hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; - pCodecCtx->get_format = get_hw_dec_format_dx; - break; - case 4: - hw_de_av_device_type = AV_HWDEVICE_TYPE_D3D11VA; - pCodecCtx->get_format = get_hw_dec_format_d3; + case 6: + hw_de_av_device_type = AV_HWDEVICE_TYPE_VDPAU; break; case 7: hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; - pCodecCtx->get_format = get_hw_dec_format_qs; break; default: - hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; - pCodecCtx->get_format = get_hw_dec_format_dx; + hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI; break; } + +#elif 
defined(_WIN32) + adapter_ptr = NULL; + i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER; + switch (i_decoder_hw) { + case 2: + hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA; + break; + case 3: + hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; + break; + case 4: + hw_de_av_device_type = AV_HWDEVICE_TYPE_D3D11VA; + break; + case 7: + hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; + break; + default: + hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2; + break; + } #elif defined(__APPLE__) - adapter_ptr = NULL; - i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER; - switch (i_decoder_hw) { - case 5: - hw_de_av_device_type = AV_HWDEVICE_TYPE_VIDEOTOOLBOX; - pCodecCtx->get_format = get_hw_dec_format_vt; - break; - case 7: - hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; - pCodecCtx->get_format = get_hw_dec_format_qs; - break; - default: - hw_de_av_device_type = AV_HWDEVICE_TYPE_VIDEOTOOLBOX; - pCodecCtx->get_format = get_hw_dec_format_vt; - break; - } + adapter_ptr = NULL; + i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER; + switch (i_decoder_hw) { + case 5: + hw_de_av_device_type = AV_HWDEVICE_TYPE_VIDEOTOOLBOX; + break; + case 7: + hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV; + break; + default: + hw_de_av_device_type = AV_HWDEVICE_TYPE_VIDEOTOOLBOX; + break; + } #endif } else { From 2b42574ffdbbb1207601f306e03b842da37eb854 Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Wed, 1 May 2019 18:02:25 -0500 Subject: [PATCH 101/109] Adding SetJson support for display_ratio and pixel_ratio updates, and improving SetMaxSize to maintain aspect ratio correctly, regardless of what is passed in. This helps support things like square aspect ratios. 
--- src/Timeline.cpp | 42 +++++++++++++++++++++++++++++++++++++----- 1 file changed, 37 insertions(+), 5 deletions(-) diff --git a/src/Timeline.cpp b/src/Timeline.cpp index 5cb9ff5e..b3a24461 100644 --- a/src/Timeline.cpp +++ b/src/Timeline.cpp @@ -1371,6 +1371,33 @@ void Timeline::apply_json_to_timeline(Json::Value change) { else if (root_key == "fps" && sub_key == "den") // Set fps.den info.fps.den = change["value"].asInt(); + else if (root_key == "display_ratio" && sub_key == "" && change["value"].isObject()) { + // Set display_ratio fraction + if (!change["value"]["num"].isNull()) + info.display_ratio.num = change["value"]["num"].asInt(); + if (!change["value"]["den"].isNull()) + info.display_ratio.den = change["value"]["den"].asInt(); + } + else if (root_key == "display_ratio" && sub_key == "num") + // Set display_ratio.num + info.display_ratio.num = change["value"].asInt(); + else if (root_key == "display_ratio" && sub_key == "den") + // Set display_ratio.den + info.display_ratio.den = change["value"].asInt(); + else if (root_key == "pixel_ratio" && sub_key == "" && change["value"].isObject()) { + // Set pixel_ratio fraction + if (!change["value"]["num"].isNull()) + info.pixel_ratio.num = change["value"]["num"].asInt(); + if (!change["value"]["den"].isNull()) + info.pixel_ratio.den = change["value"]["den"].asInt(); + } + else if (root_key == "pixel_ratio" && sub_key == "num") + // Set pixel_ratio.num + info.pixel_ratio.num = change["value"].asInt(); + else if (root_key == "pixel_ratio" && sub_key == "den") + // Set pixel_ratio.den + info.pixel_ratio.den = change["value"].asInt(); + else if (root_key == "sample_rate") // Set sample rate info.sample_rate = change["value"].asInt(); @@ -1380,9 +1407,7 @@ void Timeline::apply_json_to_timeline(Json::Value change) { else if (root_key == "channel_layout") // Set channel layout info.channel_layout = (ChannelLayout) change["value"].asInt(); - else - // Error parsing JSON (or missing keys) throw InvalidJSONKey("JSON 
change key is invalid", change.toStyledString()); @@ -1443,7 +1468,14 @@ void Timeline::ClearAllCache() { // Set Max Image Size (used for performance optimization). Convenience function for setting // Settings::Instance()->MAX_WIDTH and Settings::Instance()->MAX_HEIGHT. void Timeline::SetMaxSize(int width, int height) { - // Init max image size (choose the smallest one) - Settings::Instance()->MAX_WIDTH = min(width, info.width); - Settings::Instance()->MAX_HEIGHT = min(height, info.height); + // Maintain aspect ratio regardless of what size is passed in + QSize display_ratio_size = QSize(info.display_ratio.num * info.pixel_ratio.ToFloat(), info.display_ratio.den * info.pixel_ratio.ToFloat()); + QSize proposed_size = QSize(min(width, info.width), min(height, info.height)); + + // Scale QSize up to proposed size + display_ratio_size.scale(proposed_size, Qt::KeepAspectRatio); + + // Set max size + Settings::Instance()->MAX_WIDTH = display_ratio_size.width(); + Settings::Instance()->MAX_HEIGHT = display_ratio_size.height(); } \ No newline at end of file From 6f00062b7b2526b23e51adb3b5f55f8be1a00d63 Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Thu, 2 May 2019 11:43:34 -0500 Subject: [PATCH 102/109] Fixing small regression with SetMaxSize and missing display_ratio and pixel_ratio --- src/Timeline.cpp | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/Timeline.cpp b/src/Timeline.cpp index b3a24461..b229a3de 100644 --- a/src/Timeline.cpp +++ b/src/Timeline.cpp @@ -58,6 +58,9 @@ Timeline::Timeline(int width, int height, Fraction fps, int sample_rate, int cha info.has_audio = true; info.has_video = true; info.video_length = info.fps.ToFloat() * info.duration; + info.display_ratio = openshot::Fraction(width, height); + info.display_ratio.Reduce(); + info.pixel_ratio = openshot::Fraction(1, 1); // Init max image size SetMaxSize(info.width, info.height); From da07ab250be91ce07267568f51330e3a36dc1ba4 Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Thu, 2 May 
2019 12:02:56 -0500 Subject: [PATCH 103/109] Updating hwaccel table to use emojis (instead of words) --- doc/HW-ACCEL.md | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/doc/HW-ACCEL.md b/doc/HW-ACCEL.md index c04c3b88..8d29a11f 100644 --- a/doc/HW-ACCEL.md +++ b/doc/HW-ACCEL.md @@ -7,19 +7,20 @@ our support for this in the future! The following table summarizes our current level of support: -| | Linux Decode | Linux Encode | Mac Decode | Mac Encode | Windows Decode | Windows Encode | Notes | -|--------------------|--------------|--------------|------------|------------|----------------|----------------|----------------| -| VA-API | Verified | Verified | - | - | - | - | Linux Only | -| VDPAU | Verified(+) | N/A(++) | - | - | - | - | Linux Only | -| CUDA (NVDEC/NVENC) | Fails(+++) | Verified | - | - | - | Verified | Cross Platform | -| VideoToolBox | - | - | Verified | Crashes | - | - | Mac Only | -| DXVA2 | - | - | - | - | Fails(+++) | - | Windows Only | -| D3D11VA | - | - | - | - | Fails(+++) | - | Windows Only | -| QSV | Fails | Fails | Fails | Fails | Fails | Fails | Cross Platform | +| | Linux Decode | Linux Encode | Mac Decode | Mac Encode |Windows Decode| Windows Encode | Notes | +|--------------------|------------------------|----------------------|------------------|---------------|--------------|------------------|------------------| +| VA-API | :heavy_check_mark: | :heavy_check_mark: | - | - | - | - | *Linux Only* | +| VDPAU | :heavy_check_mark:(+) |:white_check_mark:(++)| - | - | - | - | *Linux Only* | +| CUDA (NVDEC/NVENC) | :x:(+++) | :heavy_check_mark: | - | - | - |:heavy_check_mark:| *Cross Platform* | +| VideoToolBox | - | - |:heavy_check_mark:| :x:(++++) | - | - | *Mac Only* | +| DXVA2 | - | - | - | - | :x:(+++) | - | *Windows Only* | +| D3D11VA | - | - | - | - | :x:(+++) | - | *Windows Only* | +| QSV | :x:(+++) | :x: | :x: | :x: | :x: | :x: | *Cross Platform* | * *(+) VDPAU for some reason needs a card number 
one higher than it really is* * *(++) VDPAU is a decoder only.* * *(+++) Green frames (pixel data not correctly tranferred back to system memory)* +* *(++++) Crashes and burns ## Supported FFmpeg Versions From 10ef8838b18326be51c68e2e90640e6882d1ef16 Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Thu, 2 May 2019 12:03:58 -0500 Subject: [PATCH 104/109] Updating hwaccel table to use emojis (instead of words) --- doc/HW-ACCEL.md | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/doc/HW-ACCEL.md b/doc/HW-ACCEL.md index 8d29a11f..f78fcd3a 100644 --- a/doc/HW-ACCEL.md +++ b/doc/HW-ACCEL.md @@ -7,15 +7,15 @@ our support for this in the future! The following table summarizes our current level of support: -| | Linux Decode | Linux Encode | Mac Decode | Mac Encode |Windows Decode| Windows Encode | Notes | -|--------------------|------------------------|----------------------|------------------|---------------|--------------|------------------|------------------| -| VA-API | :heavy_check_mark: | :heavy_check_mark: | - | - | - | - | *Linux Only* | -| VDPAU | :heavy_check_mark:(+) |:white_check_mark:(++)| - | - | - | - | *Linux Only* | -| CUDA (NVDEC/NVENC) | :x:(+++) | :heavy_check_mark: | - | - | - |:heavy_check_mark:| *Cross Platform* | -| VideoToolBox | - | - |:heavy_check_mark:| :x:(++++) | - | - | *Mac Only* | -| DXVA2 | - | - | - | - | :x:(+++) | - | *Windows Only* | -| D3D11VA | - | - | - | - | :x:(+++) | - | *Windows Only* | -| QSV | :x:(+++) | :x: | :x: | :x: | :x: | :x: | *Cross Platform* | +| | Linux Decode | Linux Encode | Mac Decode | Mac Encode |Windows Decode| Windows Encode | Notes | +|--------------------|------------------------|----------------------|------------------|----------------|--------------|------------------|------------------| +| VA-API | :heavy_check_mark: | :heavy_check_mark: | - | - | - | - | *Linux Only* | +| VDPAU | :heavy_check_mark:(+) |:white_check_mark:(++)| - | - | - | - | *Linux Only* | +| CUDA 
(NVDEC/NVENC) | :x:(+++) | :heavy_check_mark: | - | - | - |:heavy_check_mark:| *Cross Platform* | +| VideoToolBox | - | - |:heavy_check_mark:| :x:(++++) | - | - | *Mac Only* | +| DXVA2 | - | - | - | - | :x:(+++) | - | *Windows Only* | +| D3D11VA | - | - | - | - | :x:(+++) | - | *Windows Only* | +| QSV | :x:(+++) | :x: | :x: | :x: | :x: | :x: | *Cross Platform* | * *(+) VDPAU for some reason needs a card number one higher than it really is* * *(++) VDPAU is a decoder only.* From 4a0f0fa1c6297e4fba2c76968f1586c521a820a8 Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Thu, 2 May 2019 12:04:51 -0500 Subject: [PATCH 105/109] Updating hwaccel table to use emojis (instead of words) take 3 --- doc/HW-ACCEL.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/HW-ACCEL.md b/doc/HW-ACCEL.md index f78fcd3a..b8ee7b4e 100644 --- a/doc/HW-ACCEL.md +++ b/doc/HW-ACCEL.md @@ -20,7 +20,7 @@ The following table summarizes our current level of support: * *(+) VDPAU for some reason needs a card number one higher than it really is* * *(++) VDPAU is a decoder only.* * *(+++) Green frames (pixel data not correctly tranferred back to system memory)* -* *(++++) Crashes and burns +* *(++++) Crashes and burns* ## Supported FFmpeg Versions From eab0bbbe18eb77f84c7cea17eb4fce79f14eaa03 Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Thu, 2 May 2019 14:19:14 -0500 Subject: [PATCH 106/109] Revert "Update Python install path detection" --- src/bindings/python/CMakeLists.txt | 48 +++++++++++++----------------- 1 file changed, 20 insertions(+), 28 deletions(-) diff --git a/src/bindings/python/CMakeLists.txt b/src/bindings/python/CMakeLists.txt index 08182d95..2a481aa7 100644 --- a/src/bindings/python/CMakeLists.txt +++ b/src/bindings/python/CMakeLists.txt @@ -65,37 +65,29 @@ if (PYTHONLIBS_FOUND AND PYTHONINTERP_FOUND) target_link_libraries(${SWIG_MODULE_pyopenshot_REAL_NAME} ${PYTHON_LIBRARIES} openshot) - ### FIND THE PYTHON INTERPRETER (AND THE SITE PACKAGES FOLDER) - 
if (UNIX AND NOT APPLE) - ### Special-case for Debian's crazy, by checking to see if pybuild - ### is available. We don't use it, except as a canary in a coal mine - find_program(PYBUILD_EXECUTABLE pybuild - DOC "Path to Debian's pybuild utility") - if (PYBUILD_EXECUTABLE) - # We're on a Debian derivative, fall back to old path detection - set(py_detection "import site; print(site.getsitepackages()[0])") - else() - # Use distutils to detect install path - set (py_detection "\ + ### Check if the following Debian-friendly python module path exists + SET(PYTHON_MODULE_PATH "${CMAKE_INSTALL_PREFIX}/lib/python${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}/site-packages") + if (NOT EXISTS ${PYTHON_MODULE_PATH}) + + ### Check if another Debian-friendly python module path exists + SET(PYTHON_MODULE_PATH "${CMAKE_INSTALL_PREFIX}/lib/python${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}/dist-packages") + if (NOT EXISTS ${PYTHON_MODULE_PATH}) + + ### Calculate the python module path (using distutils) + execute_process ( COMMAND ${PYTHON_EXECUTABLE} -c "\ from distutils.sysconfig import get_python_lib; \ -print( get_python_lib( plat_specific=True, prefix='${CMAKE_INSTALL_PREFIX}' ) )") +print( get_python_lib( plat_specific=True, prefix='${CMAKE_INSTALL_PREFIX}' ) )" + OUTPUT_VARIABLE _ABS_PYTHON_MODULE_PATH + OUTPUT_STRIP_TRAILING_WHITESPACE ) + + GET_FILENAME_COMPONENT(_ABS_PYTHON_MODULE_PATH + "${_ABS_PYTHON_MODULE_PATH}" ABSOLUTE) + FILE(RELATIVE_PATH _REL_PYTHON_MODULE_PATH + ${CMAKE_INSTALL_PREFIX} ${_ABS_PYTHON_MODULE_PATH}) + SET(PYTHON_MODULE_PATH ${_ABS_PYTHON_MODULE_PATH}) endif() endif() - - if (NOT PYTHON_MODULE_PATH) - execute_process ( COMMAND ${PYTHON_EXECUTABLE} -c "${py_detection}" - OUTPUT_VARIABLE _ABS_PYTHON_MODULE_PATH - OUTPUT_STRIP_TRAILING_WHITESPACE ) - - GET_FILENAME_COMPONENT(_ABS_PYTHON_MODULE_PATH - "${_ABS_PYTHON_MODULE_PATH}" ABSOLUTE) - FILE(RELATIVE_PATH _REL_PYTHON_MODULE_PATH - ${CMAKE_INSTALL_PREFIX} ${_ABS_PYTHON_MODULE_PATH}) - 
SET(PYTHON_MODULE_PATH ${_REL_PYTHON_MODULE_PATH} - CACHE PATH "Install path for Python modules (relative to prefix)") - endif() - - message(STATUS "Will install Python module to: ${PYTHON_MODULE_PATH}") + message("PYTHON_MODULE_PATH: ${PYTHON_MODULE_PATH}") ############### INSTALL HEADERS & LIBRARY ################ ### Install Python bindings From bfa8a838643989316ede715b731e2998d893fbff Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Fri, 3 May 2019 13:13:45 -0700 Subject: [PATCH 107/109] The default return value is present Remove else so that the default return value is used if no other return was used."else if" in line 334 had no else and therefore in some cases no return value was present. --- src/KeyFrame.cpp | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/KeyFrame.cpp b/src/KeyFrame.cpp index 025484a3..d83adc7f 100644 --- a/src/KeyFrame.cpp +++ b/src/KeyFrame.cpp @@ -336,9 +336,8 @@ bool Keyframe::IsIncreasing(int index) return false; } } - else - // return default true (since most curves increase) - return true; + // return default true (since most curves increase) + return true; } // Generate JSON string of this object From c55d8551c15b2f2c913627765476015d6343def1 Mon Sep 17 00:00:00 2001 From: eisneinechse <42617957+eisneinechse@users.noreply.github.com> Date: Sun, 5 May 2019 18:18:14 -0700 Subject: [PATCH 108/109] Simplification Further simplify the else branches. Thanks to SuslikV for pointing this out. 
--- src/KeyFrame.cpp | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/KeyFrame.cpp b/src/KeyFrame.cpp index d83adc7f..2b0389de 100644 --- a/src/KeyFrame.cpp +++ b/src/KeyFrame.cpp @@ -327,11 +327,7 @@ bool Keyframe::IsIncreasing(int index) } } - if (current_value < next_value) { - // Increasing - return true; - } - else if (current_value >= next_value) { + if (current_value >= next_value) { // Decreasing return false; } From d23197c9b64b183826ee0aa1d7b304ff78f8bc95 Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Wed, 8 May 2019 14:00:55 -0500 Subject: [PATCH 109/109] Updating hwaccel table to use emojis (instead of words) take 3 --- include/QtImageReader.h | 7 ++++--- src/QtImageReader.cpp | 11 +++++++++-- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/include/QtImageReader.h b/include/QtImageReader.h index 6b260f15..e4d14f9b 100644 --- a/include/QtImageReader.h +++ b/include/QtImageReader.h @@ -65,9 +65,10 @@ namespace openshot { private: string path; - std::shared_ptr image; ///> Original image (full quality) - std::shared_ptr cached_image; ///> Scaled for performance - bool is_open; + std::shared_ptr image; ///> Original image (full quality) + std::shared_ptr cached_image; ///> Scaled for performance + bool is_open; ///> Is Reader opened + QSize max_size; ///> Current max_size as calculated with Clip properties public: diff --git a/src/QtImageReader.cpp b/src/QtImageReader.cpp index c500d221..a9682bd9 100644 --- a/src/QtImageReader.cpp +++ b/src/QtImageReader.cpp @@ -130,6 +130,10 @@ void QtImageReader::Open() info.display_ratio.num = size.num; info.display_ratio.den = size.den; + // Set current max size + max_size.setWidth(info.width); + max_size.setHeight(info.height); + // Mark as "open" is_open = true; } @@ -209,8 +213,7 @@ std::shared_ptr QtImageReader::GetFrame(int64_t requested_frame) } // Scale image smaller (or use a previous scaled image) - if (!cached_image || (cached_image && cached_image->width() != 
max_width || cached_image->height() != max_height)) { - + if (!cached_image || (cached_image && max_size.width() != max_width || max_size.height() != max_height)) { #if USE_RESVG == 1 // If defined and found in CMake, utilize the libresvg for parsing // SVG files and rasterizing them to QImages. @@ -239,6 +242,10 @@ std::shared_ptr QtImageReader::GetFrame(int64_t requested_frame) cached_image = std::shared_ptr(new QImage(image->scaled(max_width, max_height, Qt::KeepAspectRatio, Qt::SmoothTransformation))); cached_image = std::shared_ptr(new QImage(cached_image->convertToFormat(QImage::Format_RGBA8888))); #endif + + // Set max size (to later determine if max_size is changed) + max_size.setWidth(max_width); + max_size.setHeight(max_height); } // Create or get frame object