/**
 * @file
 * @brief Source file for FFmpegReader class
 * @author Jonathan Thomas <jonathan@openshot.org>, Fabrice Bellard
 *
 * This file is originally based on the Libavformat API example, and then modified
 * by the libopenshot project.
 *
 * @ref License
 */

// Copyright (c) 2008-2019 OpenShot Studios, LLC, Fabrice Bellard
//
// SPDX-License-Identifier: LGPL-3.0-or-later
2022-07-21 15:00:08 -05:00
# include <thread> // for std::this_thread::sleep_for
# include <chrono> // for std::chrono::milliseconds
2021-10-27 14:34:05 -04:00
# include <unistd.h>
2021-10-27 06:49:27 -04:00
# include "FFmpegUtilities.h"
2020-10-18 07:43:37 -04:00
# include "FFmpegReader.h"
2021-01-26 10:52:04 -05:00
# include "Exceptions.h"
2021-08-11 12:10:46 -05:00
# include "Timeline.h"
2021-11-01 11:04:31 -04:00
# include "ZmqLogger.h"
2011-10-11 08:44:27 -05:00
2019-04-18 01:07:57 -05:00
# define ENABLE_VAAPI 0
2018-09-14 14:40:29 -07:00
2021-06-04 21:32:29 -04:00
# if USE_HW_ACCEL
2018-09-13 12:37:32 -07:00
# define MAX_SUPPORTED_WIDTH 1950
# define MAX_SUPPORTED_HEIGHT 1100
2018-09-14 14:40:29 -07:00
2019-04-18 01:07:57 -05:00
# if ENABLE_VAAPI
2018-09-14 14:40:29 -07:00
# include "libavutil/hwcontext_vaapi.h"
2018-09-13 12:37:32 -07:00
// Mirror of FFmpeg's *internal* VAAPI decode context (not part of the public
// API). Declared here so va_config can be read out of pCodecCtx->priv_data
// when querying hardware frame constraints. The member order and types must
// match the FFmpeg build being linked against exactly — do not reorder or
// remove fields. NOTE(review): this is fragile across FFmpeg versions; verify
// against the headers of the FFmpeg release actually in use.
typedef struct VAAPIDecodeContext {
	VAProfile va_profile;        // Codec profile negotiated with the VA driver
	VAEntrypoint va_entrypoint;  // Decode entry point
	VAConfigID va_config;        // Config handle — the field Open() actually reads
	VAContextID va_context;      // Decode context handle

#if FF_API_STRUCT_VAAPI_CONTEXT
	// FF_DISABLE_DEPRECATION_WARNINGS
	// Legacy struct-based VAAPI API support (deprecated upstream)
	int have_old_context;
	struct vaapi_context *old_context;
	AVBufferRef *device_ref;
	// FF_ENABLE_DEPRECATION_WARNINGS
#endif

	AVHWDeviceContext *device;       // Generic hardware device context
	AVVAAPIDeviceContext *hwctx;     // VAAPI-specific device context

	AVHWFramesContext *frames;       // Generic hardware frames pool
	AVVAAPIFramesContext *hwfc;      // VAAPI-specific frames pool

	enum AVPixelFormat surface_format;  // Pixel format of the decode surfaces
	int surface_count;                  // Number of allocated surfaces
} VAAPIDecodeContext;
2020-02-10 01:50:31 -05:00
# endif // ENABLE_VAAPI
2021-06-04 21:32:29 -04:00
# endif // USE_HW_ACCEL
2018-09-13 12:37:32 -07:00
2011-10-11 08:44:27 -05:00
using namespace openshot;

// Global hardware-decode state shared by all FFmpegReader instances.
// hw_de_on: non-zero when the user's settings request hardware decoding.
int hw_de_on = 0;
#if USE_HW_ACCEL
// Pixel format / device type chosen by the get_hw_dec_format() callback;
// AV_PIX_FMT_NONE / AV_HWDEVICE_TYPE_NONE until a supported format is found.
AVPixelFormat hw_de_av_pix_fmt_global = AV_PIX_FMT_NONE;
AVHWDeviceType hw_de_av_device_type_global = AV_HWDEVICE_TYPE_NONE;
#endif
2018-08-31 21:36:23 -07:00
2022-07-21 13:56:29 -05:00
// Construct a reader for the given file path. When inspect_reader is true
// (the default in the header — TODO confirm), the file is opened once and
// closed again immediately so info (width, height, fps, duration, ...) is
// populated without keeping decoder resources alive.
FFmpegReader::FFmpegReader(const std::string &path, bool inspect_reader)
		: last_frame(0), is_seeking(0), seeking_pts(0), seeking_frame(0), seek_count(0), NO_PTS_OFFSET(-99999),
		  path(path), is_video_seek(true), check_interlace(false), check_fps(false), enable_seek(true), is_open(false),
		  seek_audio_frame_found(0), seek_video_frame_found(0), prev_samples(0), prev_pts(0), pts_total(0),
		  pts_counter(0), is_duration_known(false), largest_frame_processed(0), current_video_frame(0), packet(NULL),
		  max_concurrent_frames(OPEN_MP_NUM_PROCESSORS), audio_pts(0), video_pts(0), pFormatCtx(NULL), packets_read(0),
		  packets_decoded(0), videoStream(-1), audioStream(-1), pCodecCtx(NULL), aCodecCtx(NULL), pStream(NULL),
		  aStream(NULL), pFrame(NULL), previous_packet_location{-1, 0}, video_eof(false), audio_eof(false),
		  packets_eof(false), end_of_file(false) {

	// Initialize FFMpeg, and register all formats and codecs
	// (no-op macros on FFmpeg >= 4.0 where registration was removed)
	AV_REGISTER_ALL
	AVCODEC_REGISTER_ALL

	// Init timestamp offsets to the "unknown" sentinel until UpdatePTSOffset() runs
	pts_offset_seconds = NO_PTS_OFFSET;
	video_pts_seconds = NO_PTS_OFFSET;
	audio_pts_seconds = NO_PTS_OFFSET;

	// Init cache sizes from the (still default) info struct; Open() re-sizes
	// them once the real stream attributes are known
	working_cache.SetMaxBytesFromInfo(max_concurrent_frames * info.fps.ToDouble() * 2, info.width, info.height, info.sample_rate, info.channels);
	final_cache.SetMaxBytesFromInfo(max_concurrent_frames * 2, info.width, info.height, info.sample_rate, info.channels);

	// Open and Close the reader, to populate its attributes (such as height, width, etc...)
	if (inspect_reader) {
		Open();
		Close();
	}
}
2015-12-24 16:44:45 -06:00
// Destructor: releases all FFmpeg resources if the caller never
// called Close() explicitly.
FFmpegReader::~FFmpegReader() {
	if (is_open) {
		Close();
	}
}
2013-09-08 23:09:54 -05:00
// This struct holds the associated video frame and starting sample # for an audio packet.
2019-04-18 01:07:57 -05:00
bool AudioLocation : : is_near ( AudioLocation location , int samples_per_frame , int64_t amount ) {
2013-09-08 23:09:54 -05:00
// Is frame even close to this one?
if ( abs ( location . frame - frame ) > = 2 )
// This is too far away to be considered
return false ;
2017-01-07 17:34:11 -05:00
// Note that samples_per_frame can vary slightly frame to frame when the
// audio sampling rate is not an integer multiple of the video fps.
2017-09-28 16:03:01 -05:00
int64_t diff = samples_per_frame * ( location . frame - frame ) + location . sample_start - sample_start ;
2017-01-07 17:34:11 -05:00
if ( abs ( diff ) < = amount )
2013-09-08 23:09:54 -05:00
// close
return true ;
// not close
return false ;
}
2021-06-04 21:32:29 -04:00
# if USE_HW_ACCEL
2018-08-31 21:36:23 -07:00
2019-04-30 17:43:15 -05:00
// Get hardware pix format
// Get hardware pix format.
// Callback installed on AVCodecContext::get_format: scans the decoder's list
// of supported pixel formats and picks the first hardware format this build
// and platform can use. As a side effect it records the chosen pixel format
// and device type in the hw_de_*_global variables, which Open() reads later.
// Returns AV_PIX_FMT_NONE when no usable hardware format is offered.
static enum AVPixelFormat get_hw_dec_format(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts)
{
	const enum AVPixelFormat *p;
	for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) {
		switch (*p) {
#if defined(__linux__)
			// Linux pix formats
			case AV_PIX_FMT_VAAPI:
				hw_de_av_pix_fmt_global = AV_PIX_FMT_VAAPI;
				hw_de_av_device_type_global = AV_HWDEVICE_TYPE_VAAPI;
				return *p;
				break;
			case AV_PIX_FMT_VDPAU:
				hw_de_av_pix_fmt_global = AV_PIX_FMT_VDPAU;
				hw_de_av_device_type_global = AV_HWDEVICE_TYPE_VDPAU;
				return *p;
				break;
#endif
#if defined(_WIN32)
			// Windows pix formats
			case AV_PIX_FMT_DXVA2_VLD:
				hw_de_av_pix_fmt_global = AV_PIX_FMT_DXVA2_VLD;
				hw_de_av_device_type_global = AV_HWDEVICE_TYPE_DXVA2;
				return *p;
				break;
			case AV_PIX_FMT_D3D11:
				hw_de_av_pix_fmt_global = AV_PIX_FMT_D3D11;
				hw_de_av_device_type_global = AV_HWDEVICE_TYPE_D3D11VA;
				return *p;
				break;
#endif
#if defined(__APPLE__)
			// Apple pix formats
			case AV_PIX_FMT_VIDEOTOOLBOX:
				hw_de_av_pix_fmt_global = AV_PIX_FMT_VIDEOTOOLBOX;
				hw_de_av_device_type_global = AV_HWDEVICE_TYPE_VIDEOTOOLBOX;
				return *p;
				break;
#endif
			// Cross-platform pix formats
			case AV_PIX_FMT_CUDA:
				hw_de_av_pix_fmt_global = AV_PIX_FMT_CUDA;
				hw_de_av_device_type_global = AV_HWDEVICE_TYPE_CUDA;
				return *p;
				break;
			case AV_PIX_FMT_QSV:
				hw_de_av_pix_fmt_global = AV_PIX_FMT_QSV;
				hw_de_av_device_type_global = AV_HWDEVICE_TYPE_QSV;
				return *p;
				break;
			default:
				// This is only here to silence unused-enum warnings
				break;
		}
	}
	ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::get_hw_dec_format (Unable to decode this file using hardware decode)");
	return AV_PIX_FMT_NONE;
}
2019-04-18 01:07:57 -05:00
int FFmpegReader : : IsHardwareDecodeSupported ( int codecid )
2018-09-08 21:17:24 -07:00
{
int ret ;
switch ( codecid ) {
case AV_CODEC_ID_H264 :
case AV_CODEC_ID_MPEG2VIDEO :
case AV_CODEC_ID_VC1 :
case AV_CODEC_ID_WMV1 :
case AV_CODEC_ID_WMV2 :
case AV_CODEC_ID_WMV3 :
2019-04-30 17:43:15 -05:00
ret = 1 ;
break ;
default :
ret = 0 ;
break ;
2018-09-08 21:17:24 -07:00
}
return ret ;
}
2021-06-04 21:32:29 -04:00
# endif // USE_HW_ACCEL
2018-08-31 21:36:23 -07:00
2019-04-18 01:07:57 -05:00
// Open the media file, pick the first video and audio streams, open their
// decoders (with an optional hardware-acceleration attempt that falls back to
// software), populate the info struct, and seek back to frame 1.
// Throws InvalidFile / NoStreamsFound / InvalidCodec on failure.
void FFmpegReader::Open() {
	// Open reader if not already open
	if (!is_open) {
		// Initialize format context
		pFormatCtx = NULL;
		{
			// Snapshot the user's hardware-decoder preference (0 = software only)
			hw_de_on = (openshot::Settings::Instance()->HARDWARE_DECODER == 0 ? 0 : 1);
		}

		// Open video file
		if (avformat_open_input(&pFormatCtx, path.c_str(), NULL, NULL) != 0)
			throw InvalidFile("File could not be opened.", path);

		// Retrieve stream information
		if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
			throw NoStreamsFound("No streams found in file.", path);

		videoStream = -1;
		audioStream = -1;

		// Init end-of-file detection variables: assume EOF for every stream
		// type until a matching stream is actually found below
		video_eof = true;
		audio_eof = true;
		packets_eof = true;
		end_of_file = true;
		packets_read = 0;
		packets_decoded = 0;

		// Loop through each stream, and identify the video and audio stream index
		// (only the FIRST stream of each type is used)
		for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++) {
			// Is this a video stream?
			if (AV_GET_CODEC_TYPE(pFormatCtx->streams[i]) == AVMEDIA_TYPE_VIDEO && videoStream < 0) {
				videoStream = i;
				video_eof = false;
				packets_eof = false;
				end_of_file = false;
			}
			// Is this an audio stream?
			if (AV_GET_CODEC_TYPE(pFormatCtx->streams[i]) == AVMEDIA_TYPE_AUDIO && audioStream < 0) {
				audioStream = i;
				audio_eof = false;
				packets_eof = false;
				end_of_file = false;
			}
		}
		if (videoStream == -1 && audioStream == -1)
			throw NoStreamsFound("No video or audio streams found in this file.", path);

		// Is there a video stream?
		if (videoStream != -1) {
			// Set the stream index
			info.video_stream_index = videoStream;

			// Set the codec and codec context pointers
			pStream = pFormatCtx->streams[videoStream];

			// Find the codec ID from stream
			const AVCodecID codecId = AV_FIND_DECODER_CODEC_ID(pStream);

			// Get codec and codec context from stream
			const AVCodec *pCodec = avcodec_find_decoder(codecId);
			AVDictionary *opts = NULL;
			int retry_decode_open = 2;

			// If hw accel is selected but hardware cannot handle it, this loop
			// repeats once with software decoding (retry_decode_open is set to
			// 1 inside the constraint checks below)
			do {
				pCodecCtx = AV_GET_CODEC_CONTEXT(pStream, pCodec);
#if USE_HW_ACCEL
				if (hw_de_on && (retry_decode_open == 2)) {
					// Up to here no decision is made if hardware or software decode
					hw_de_supported = IsHardwareDecodeSupported(pCodecCtx->codec_id);
				}
#endif
				retry_decode_open = 0;

				// Set number of threads equal to number of processors (not to exceed 16)
				pCodecCtx->thread_count = std::min(FF_NUM_PROCESSORS, 16);

				if (pCodec == NULL) {
					throw InvalidCodec("A valid video codec could not be found for this file.", path);
				}

				// Init options
				av_dict_set(&opts, "strict", "experimental", 0);
#if USE_HW_ACCEL
				if (hw_de_on && hw_de_supported) {
					// Open Hardware Acceleration
					int i_decoder_hw = 0;
					char adapter[256];
					char *adapter_ptr = NULL;
					int adapter_num;
					adapter_num = openshot::Settings::Instance()->HW_DE_DEVICE_SET;
					fprintf(stderr, "Hardware decoding device number: %d\n", adapter_num);

					// Set hardware pix format (callback)
					pCodecCtx->get_format = get_hw_dec_format;

					// Only device indices 0..2 map to a concrete adapter
					if (adapter_num < 3 && adapter_num >= 0) {
#if defined(__linux__)
						// DRM render nodes start at /dev/dri/renderD128
						snprintf(adapter, sizeof(adapter), "/dev/dri/renderD%d", adapter_num + 128);
						adapter_ptr = adapter;
						i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER;
						switch (i_decoder_hw) {
							case 1:
								hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI;
								break;
							case 2:
								hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA;
								break;
							case 6:
								hw_de_av_device_type = AV_HWDEVICE_TYPE_VDPAU;
								break;
							case 7:
								hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV;
								break;
							default:
								hw_de_av_device_type = AV_HWDEVICE_TYPE_VAAPI;
								break;
						}
#elif defined(_WIN32)
						adapter_ptr = NULL;
						i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER;
						switch (i_decoder_hw) {
							case 2:
								hw_de_av_device_type = AV_HWDEVICE_TYPE_CUDA;
								break;
							case 3:
								hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2;
								break;
							case 4:
								hw_de_av_device_type = AV_HWDEVICE_TYPE_D3D11VA;
								break;
							case 7:
								hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV;
								break;
							default:
								hw_de_av_device_type = AV_HWDEVICE_TYPE_DXVA2;
								break;
						}
#elif defined(__APPLE__)
						adapter_ptr = NULL;
						i_decoder_hw = openshot::Settings::Instance()->HARDWARE_DECODER;
						switch (i_decoder_hw) {
							case 5:
								hw_de_av_device_type = AV_HWDEVICE_TYPE_VIDEOTOOLBOX;
								break;
							case 7:
								hw_de_av_device_type = AV_HWDEVICE_TYPE_QSV;
								break;
							default:
								hw_de_av_device_type = AV_HWDEVICE_TYPE_VIDEOTOOLBOX;
								break;
						}
#endif

					} else {
						adapter_ptr = NULL;  // Just to be sure
					}

					// Check if it is there and writable
					// (only Linux can actually probe the device node)
#if defined(__linux__)
					if (adapter_ptr != NULL && access(adapter_ptr, W_OK) == 0) {
#elif defined(_WIN32)
					if (adapter_ptr != NULL) {
#elif defined(__APPLE__)
					if (adapter_ptr != NULL) {
#endif
						ZmqLogger::Instance()->AppendDebugMethod("Decode Device present using device");
					}
					else {
						adapter_ptr = NULL;  // use default
						ZmqLogger::Instance()->AppendDebugMethod("Decode Device not present using default");
					}

					hw_device_ctx = NULL;
					// Here the first hardware initialisations are made
					if (av_hwdevice_ctx_create(&hw_device_ctx, hw_de_av_device_type, adapter_ptr, NULL, 0) >= 0) {
						if (!(pCodecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx))) {
							throw InvalidCodec("Hardware device reference create failed.", path);
						}

						/*
						av_buffer_unref(&ist->hw_frames_ctx);
						ist->hw_frames_ctx = av_hwframe_ctx_alloc(hw_device_ctx);
						if (!ist->hw_frames_ctx) {
							av_log(avctx, AV_LOG_ERROR, "Error creating a CUDA frames context\n");
							return AVERROR(ENOMEM);
						}

						frames_ctx = (AVHWFramesContext*)ist->hw_frames_ctx->data;

						frames_ctx->format = AV_PIX_FMT_CUDA;
						frames_ctx->sw_format = avctx->sw_pix_fmt;
						frames_ctx->width = avctx->width;
						frames_ctx->height = avctx->height;
						av_log(avctx, AV_LOG_DEBUG, "Initializing CUDA frames context: sw_format = %s, width = %d, height = %d\n",
							av_get_pix_fmt_name(frames_ctx->sw_format), frames_ctx->width, frames_ctx->height);

						ret = av_hwframe_ctx_init(pCodecCtx->hw_device_ctx);
						ret = av_hwframe_ctx_init(ist->hw_frames_ctx);
						if (ret < 0) {
							av_log(avctx, AV_LOG_ERROR, "Error initializing a CUDA frame pool\n");
							return ret;
						}
						*/
					}
					else {
						throw InvalidCodec("Hardware device create failed.", path);
					}
				}
#endif // USE_HW_ACCEL

				// Disable per-frame threading for album arts
				// Using FF_THREAD_FRAME adds one frame decoding delay per thread,
				// but there's only one frame in this case.
				if (HasAlbumArt())
				{
					pCodecCtx->thread_type &= ~FF_THREAD_FRAME;
				}

				// Open video codec
				if (avcodec_open2(pCodecCtx, pCodec, &opts) < 0)
					throw InvalidCodec("A video codec was found, but could not be opened.", path);

#if USE_HW_ACCEL
				if (hw_de_on && hw_de_supported) {
					// Verify the coded frame size fits within the hardware's
					// frame constraints; otherwise fall back to software
					AVHWFramesConstraints *constraints = NULL;
					void *hwconfig = NULL;
					hwconfig = av_hwdevice_hwconfig_alloc(hw_device_ctx);

					// TODO: needs va_config!
#if ENABLE_VAAPI
					((AVVAAPIHWConfig *)hwconfig)->config_id = ((VAAPIDecodeContext *)(pCodecCtx->priv_data))->va_config;
					constraints = av_hwdevice_get_hwframe_constraints(hw_device_ctx, hwconfig);
#endif // ENABLE_VAAPI
					if (constraints) {
						if (pCodecCtx->coded_width < constraints->min_width ||
							pCodecCtx->coded_height < constraints->min_height ||
							pCodecCtx->coded_width > constraints->max_width ||
							pCodecCtx->coded_height > constraints->max_height) {
							ZmqLogger::Instance()->AppendDebugMethod("DIMENSIONS ARE TOO LARGE for hardware acceleration\n");
							hw_de_supported = 0;
							retry_decode_open = 1;  // repeat the do/while loop in software mode
							AV_FREE_CONTEXT(pCodecCtx);
							if (hw_device_ctx) {
								av_buffer_unref(&hw_device_ctx);
								hw_device_ctx = NULL;
							}
						}
						else {
							// All is just peachy
							ZmqLogger::Instance()->AppendDebugMethod("\nDecode hardware acceleration is used\n", "Min width :", constraints->min_width, "Min Height :", constraints->min_height, "MaxWidth :", constraints->max_width, "MaxHeight :", constraints->max_height, "Frame width :", pCodecCtx->coded_width, "Frame height :", pCodecCtx->coded_height);
							retry_decode_open = 0;
						}
						av_hwframe_constraints_free(&constraints);
						if (hwconfig) {
							av_freep(&hwconfig);
						}
					}
					else {
						// No constraints available: fall back to configured size limits
						int max_h, max_w;
						//max_h = ((getenv( "LIMIT_HEIGHT_MAX" )==NULL) ? MAX_SUPPORTED_HEIGHT : atoi(getenv( "LIMIT_HEIGHT_MAX" )));
						max_h = openshot::Settings::Instance()->DE_LIMIT_HEIGHT_MAX;
						//max_w = ((getenv( "LIMIT_WIDTH_MAX" )==NULL) ? MAX_SUPPORTED_WIDTH : atoi(getenv( "LIMIT_WIDTH_MAX" )));
						max_w = openshot::Settings::Instance()->DE_LIMIT_WIDTH_MAX;
						ZmqLogger::Instance()->AppendDebugMethod("Constraints could not be found using default limit\n");
						//cerr << "Constraints could not be found using default limit\n";
						if (pCodecCtx->coded_width < 0 ||
							pCodecCtx->coded_height < 0 ||
							pCodecCtx->coded_width > max_w ||
							pCodecCtx->coded_height > max_h) {
							ZmqLogger::Instance()->AppendDebugMethod("DIMENSIONS ARE TOO LARGE for hardware acceleration\n", "Max Width :", max_w, "Max Height :", max_h, "Frame width :", pCodecCtx->coded_width, "Frame height :", pCodecCtx->coded_height);
							hw_de_supported = 0;
							retry_decode_open = 1;  // repeat the do/while loop in software mode
							AV_FREE_CONTEXT(pCodecCtx);
							if (hw_device_ctx) {
								av_buffer_unref(&hw_device_ctx);
								hw_device_ctx = NULL;
							}
						}
						else {
							ZmqLogger::Instance()->AppendDebugMethod("\nDecode hardware acceleration is used\n", "Max Width :", max_w, "Max Height :", max_h, "Frame width :", pCodecCtx->coded_width, "Frame height :", pCodecCtx->coded_height);
							retry_decode_open = 0;
						}
					}
				} // if hw_de_on && hw_de_supported
				else {
					ZmqLogger::Instance()->AppendDebugMethod("\nDecode in software is used\n");
				}
#else
				retry_decode_open = 0;
#endif // USE_HW_ACCEL
			} while (retry_decode_open); // retry_decode_open

			// Free options
			av_dict_free(&opts);

			// Update the File Info struct with video details (if a video stream is found)
			UpdateVideoInfo();
		}

		// Is there an audio stream?
		if (audioStream != -1) {
			// Set the stream index
			info.audio_stream_index = audioStream;

			// Get a pointer to the codec context for the audio stream
			aStream = pFormatCtx->streams[audioStream];

			// Find the codec ID from stream
			AVCodecID codecId = AV_FIND_DECODER_CODEC_ID(aStream);

			// Get codec and codec context from stream
			const AVCodec *aCodec = avcodec_find_decoder(codecId);
			aCodecCtx = AV_GET_CODEC_CONTEXT(aStream, aCodec);

			// Set number of threads equal to number of processors (not to exceed 16)
			aCodecCtx->thread_count = std::min(FF_NUM_PROCESSORS, 16);

			if (aCodec == NULL) {
				throw InvalidCodec("A valid audio codec could not be found for this file.", path);
			}

			// Init options
			AVDictionary *opts = NULL;
			av_dict_set(&opts, "strict", "experimental", 0);

			// Open audio codec
			if (avcodec_open2(aCodecCtx, aCodec, &opts) < 0)
				throw InvalidCodec("An audio codec was found, but could not be opened.", path);

			// Free options
			av_dict_free(&opts);

			// Update the File Info struct with audio details (if an audio stream is found)
			UpdateAudioInfo();
		}

		// Add format metadata (if any)
		AVDictionaryEntry *tag = NULL;
		while ((tag = av_dict_get(pFormatCtx->metadata, "", tag, AV_DICT_IGNORE_SUFFIX))) {
			QString str_key = tag->key;
			QString str_value = tag->value;
			info.metadata[str_key.toStdString()] = str_value.trimmed().toStdString();
		}

		// Init previous audio location to zero
		previous_packet_location.frame = -1;
		previous_packet_location.sample_start = 0;

		// Adjust cache size based on size of frame and audio
		working_cache.SetMaxBytesFromInfo(max_concurrent_frames * info.fps.ToDouble() * 2, info.width, info.height, info.sample_rate, info.channels);
		final_cache.SetMaxBytesFromInfo(max_concurrent_frames * 2, info.width, info.height, info.sample_rate, info.channels);

		// Scan PTS for any offsets (i.e. non-zero starting streams). At least 1 stream must start at zero timestamp.
		// This method allows us to shift timestamps to ensure at least 1 stream is starting at zero.
		UpdatePTSOffset();

		// Override an invalid framerate
		if (info.fps.ToFloat() > 240.0f || (info.fps.num <= 0 || info.fps.den <= 0) || info.video_length <= 0) {
			// Calculate FPS, duration, video bit rate, and video length manually
			// by scanning through all the video stream packets
			CheckFPS();
		}

		// Seek back to beginning of file (if not already seeking)
		if (!is_seeking) {
			Seek(1);
		}

		// Mark as "open"
		is_open = true;
	}
}
2019-04-18 01:07:57 -05:00
// Close the reader: flush and free both codec contexts (and any hardware
// device context), release the pending packet, clear both caches, close the
// format context, and reset bookkeeping state. Safe to call repeatedly;
// does nothing when the reader is not open.
void FFmpegReader::Close() {
	// Close all objects, if reader is 'open'
	if (is_open) {
		// Mark as "closed" first, so concurrent readers stop using this instance
		is_open = false;

		ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::Close (Start)");

		// Close the codec
		if (info.has_video) {
			ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::Close (Flush video context)");
			avcodec_flush_buffers(pCodecCtx);

			ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::Close (Free video context)");
			AV_FREE_CONTEXT(pCodecCtx);
#if USE_HW_ACCEL
			if (hw_de_on) {
				if (hw_device_ctx) {
					ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::Close (Free hw context)");
					av_buffer_unref(&hw_device_ctx);
					hw_device_ctx = NULL;
				}
			}
#endif // USE_HW_ACCEL
		}
		if (info.has_audio) {
			ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::Close (Flush audio context)");
			avcodec_flush_buffers(aCodecCtx);

			ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::Close (Free audio context)");
			AV_FREE_CONTEXT(aCodecCtx);
		}

		// Release any packet still held from the last decode loop
		if (packet) {
			ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::Close (Remove packet)");
			RemoveAVPacket(packet);
			packet = NULL;
		}

		// Clear final cache
		ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::Close (Clear cache)");
		final_cache.Clear();
		working_cache.Clear();

		// Close the video file
		ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::Close (Close format context)");
		avformat_close_input(&pFormatCtx);
		av_freep(&pFormatCtx);

		// Reset some variables
		ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::Close (Clear variables)");
		last_frame = 0;
		largest_frame_processed = 0;
		seek_audio_frame_found = 0;
		seek_video_frame_found = 0;
		current_video_frame = 0;
		last_video_frame.reset();
	}

	ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::Close (End)");
}
2019-12-17 11:18:36 +09:00
bool FFmpegReader : : HasAlbumArt ( ) {
2020-10-20 11:01:42 +09:00
// Check if the video stream we use is an attached picture
// This won't return true if the file has a cover image as a secondary stream
// like an MKV file with an attached image file
2019-12-17 11:18:36 +09:00
return pFormatCtx & & videoStream > = 0 & & pFormatCtx - > streams [ videoStream ]
& & ( pFormatCtx - > streams [ videoStream ] - > disposition & AV_DISPOSITION_ATTACHED_PIC ) ;
}
2019-04-18 01:07:57 -05:00
// Populate the audio-related fields of the info struct from the opened audio
// stream/codec, derive a duration when the stream does not report one, and
// fill in default video attributes for audio-only files so frames can still
// be generated.
void FFmpegReader::UpdateAudioInfo() {
	// Set values of FileInfo struct
	info.has_audio = true;
	info.file_size = pFormatCtx->pb ? avio_size(pFormatCtx->pb) : -1;
	info.acodec = aCodecCtx->codec->name;
	info.channels = AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channels;
	// If the stream reports no channel layout, derive the default one from
	// the channel count
	if (AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout == 0)
		AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout = av_get_default_channel_layout(AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channels);
	info.channel_layout = (ChannelLayout) AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout;
	info.sample_rate = AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->sample_rate;
	info.audio_bit_rate = AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->bit_rate;
	if (info.audio_bit_rate <= 0) {
		// Get bitrate from format
		info.audio_bit_rate = pFormatCtx->bit_rate;
	}

	// Set audio timebase
	info.audio_timebase.num = aStream->time_base.num;
	info.audio_timebase.den = aStream->time_base.den;

	// Get timebase of audio stream (if valid) and greater than the current duration
	if (aStream->duration > 0 && aStream->duration > info.duration) {
		// Get duration from audio stream
		info.duration = aStream->duration * info.audio_timebase.ToDouble();
	} else if (pFormatCtx->duration > 0 && info.duration <= 0.0f) {
		// Use the format's duration
		info.duration = float(pFormatCtx->duration) / AV_TIME_BASE;
	}

	// Calculate duration from filesize and bitrate (if any)
	// NOTE(review): this divides by video_bit_rate, not audio_bit_rate —
	// presumably intentional for files with a video stream; verify.
	if (info.duration <= 0.0f && info.video_bit_rate > 0 && info.file_size > 0) {
		// Estimate from bitrate, total bytes, and framerate
		info.duration = float(info.file_size) / info.video_bit_rate;
	}

	// Check for an invalid video length
	if (info.has_video && info.video_length <= 0) {
		// Calculate the video length from the audio duration
		info.video_length = info.duration * info.fps.ToDouble();
	}

	// Set video timebase (if no video stream was found)
	if (!info.has_video) {
		// Set a few important default video settings (so audio can be divided into frames)
		info.fps.num = 24;
		info.fps.den = 1;
		info.video_timebase.num = 1;
		info.video_timebase.den = 24;
		info.video_length = info.duration * info.fps.ToDouble();
		info.width = 720;
		info.height = 480;
	}

	// Fix invalid video lengths for certain types of files (MP3 for example):
	// when the audio duration implies far more frames (>60) than currently
	// recorded, trust the duration
	if (info.has_video && ((info.duration * info.fps.ToDouble()) - info.video_length > 60)) {
		info.video_length = info.duration * info.fps.ToDouble();
	}

	// Add audio metadata (if any found)
	AVDictionaryEntry *tag = NULL;
	while ((tag = av_dict_get(aStream->metadata, "", tag, AV_DICT_IGNORE_SUFFIX))) {
		QString str_key = tag->key;
		QString str_value = tag->value;
		info.metadata[str_key.toStdString()] = str_value.trimmed().toStdString();
	}
}
2019-04-18 01:07:57 -05:00
void FFmpegReader : : UpdateVideoInfo ( ) {
2011-10-11 08:44:27 -05:00
// Set values of FileInfo struct
info . has_video = true ;
2012-06-16 02:12:48 -05:00
info . file_size = pFormatCtx - > pb ? avio_size ( pFormatCtx - > pb ) : - 1 ;
2018-03-21 02:10:46 -05:00
info . height = AV_GET_CODEC_ATTRIBUTES ( pStream , pCodecCtx ) - > height ;
info . width = AV_GET_CODEC_ATTRIBUTES ( pStream , pCodecCtx ) - > width ;
2011-10-11 08:44:27 -05:00
info . vcodec = pCodecCtx - > codec - > name ;
2018-07-25 02:24:01 -05:00
info . video_bit_rate = ( pFormatCtx - > bit_rate / 8 ) ;
2020-02-24 09:35:13 +02:00
// Frame rate from the container and codec
AVRational framerate = av_guess_frame_rate ( pFormatCtx , pStream , NULL ) ;
2022-07-21 13:56:29 -05:00
if ( ! check_fps ) {
2022-07-21 15:00:08 -05:00
info . fps . num = framerate . num ;
info . fps . den = framerate . den ;
}
2020-02-24 09:35:13 +02:00
ZmqLogger : : Instance ( ) - > AppendDebugMethod ( " FFmpegReader::UpdateVideoInfo " , " info.fps.num " , info . fps . num , " info.fps.den " , info . fps . den ) ;
// TODO: remove excessive debug info in the next releases
// The debug info below is just for comparison and troubleshooting on users side during the transition period
ZmqLogger : : Instance ( ) - > AppendDebugMethod ( " FFmpegReader::UpdateVideoInfo (pStream->avg_frame_rate) " , " num " , pStream - > avg_frame_rate . num , " den " , pStream - > avg_frame_rate . den ) ;
2015-06-01 02:05:17 -07:00
2019-04-18 01:07:57 -05:00
if ( pStream - > sample_aspect_ratio . num ! = 0 ) {
2011-10-11 08:44:27 -05:00
info . pixel_ratio . num = pStream - > sample_aspect_ratio . num ;
2011-12-11 20:42:50 -06:00
info . pixel_ratio . den = pStream - > sample_aspect_ratio . den ;
2019-04-18 01:07:57 -05:00
} else if ( AV_GET_CODEC_ATTRIBUTES ( pStream , pCodecCtx ) - > sample_aspect_ratio . num ! = 0 ) {
2018-03-21 02:10:46 -05:00
info . pixel_ratio . num = AV_GET_CODEC_ATTRIBUTES ( pStream , pCodecCtx ) - > sample_aspect_ratio . num ;
info . pixel_ratio . den = AV_GET_CODEC_ATTRIBUTES ( pStream , pCodecCtx ) - > sample_aspect_ratio . den ;
2019-04-18 01:07:57 -05:00
} else {
2011-10-11 08:44:27 -05:00
info . pixel_ratio . num = 1 ;
2011-12-11 20:42:50 -06:00
info . pixel_ratio . den = 1 ;
}
2018-03-21 02:10:46 -05:00
info . pixel_format = AV_GET_CODEC_PIXEL_FORMAT ( pStream , pCodecCtx ) ;
2011-10-11 08:44:27 -05:00
// Calculate the DAR (display aspect ratio)
2011-12-11 20:42:50 -06:00
Fraction size ( info . width * info . pixel_ratio . num , info . height * info . pixel_ratio . den ) ;
2011-10-11 08:44:27 -05:00
// Reduce size fraction
size . Reduce ( ) ;
// Set the ratio based on the reduced fraction
info . display_ratio . num = size . num ;
info . display_ratio . den = size . den ;
2019-08-20 04:32:47 -04:00
// Get scan type and order from codec context/params
if ( ! check_interlace ) {
check_interlace = true ;
AVFieldOrder field_order = AV_GET_CODEC_ATTRIBUTES ( pStream , pCodecCtx ) - > field_order ;
switch ( field_order ) {
case AV_FIELD_PROGRESSIVE :
info . interlaced_frame = false ;
break ;
case AV_FIELD_TT :
case AV_FIELD_TB :
info . interlaced_frame = true ;
info . top_field_first = true ;
break ;
case AV_FIELD_BT :
case AV_FIELD_BB :
info . interlaced_frame = true ;
info . top_field_first = false ;
break ;
case AV_FIELD_UNKNOWN :
// Check again later?
check_interlace = false ;
break ;
}
2019-09-22 01:37:32 -04:00
// check_interlace will prevent these checks being repeated,
// unless it was cleared because we got an AV_FIELD_UNKNOWN response.
2019-08-20 04:32:47 -04:00
}
2011-12-11 20:42:50 -06:00
// Set the video timebase
2011-10-11 08:44:27 -05:00
info . video_timebase . num = pStream - > time_base . num ;
info . video_timebase . den = pStream - > time_base . den ;
2011-12-11 20:42:50 -06:00
// Set the duration in seconds, and video length (# of frames)
info . duration = pStream - > duration * info . video_timebase . ToDouble ( ) ;
2012-08-12 02:14:15 -05:00
2013-09-08 16:08:56 -05:00
// Check for valid duration (if found)
2021-10-07 13:40:31 -05:00
if ( info . duration < = 0.0f & & pFormatCtx - > duration > = 0 ) {
2022-07-21 15:00:08 -05:00
// Use the format's duration
info . duration = float ( pFormatCtx - > duration ) / AV_TIME_BASE ;
}
2012-08-12 02:14:15 -05:00
2013-09-08 16:08:56 -05:00
// Calculate duration from filesize and bitrate (if any)
2021-10-07 13:40:31 -05:00
if ( info . duration < = 0.0f & & info . video_bit_rate > 0 & & info . file_size > 0 ) {
2022-07-21 15:00:08 -05:00
// Estimate from bitrate, total bytes, and framerate
info . duration = float ( info . file_size ) / info . video_bit_rate ;
}
2013-09-08 16:08:56 -05:00
// No duration found in stream of file
2019-04-18 01:07:57 -05:00
if ( info . duration < = 0.0f ) {
2013-09-08 16:08:56 -05:00
// No duration is found in the video stream
info . duration = - 1 ;
info . video_length = - 1 ;
is_duration_known = false ;
2019-04-18 01:07:57 -05:00
} else {
2013-09-08 16:08:56 -05:00
// Yes, a duration was found
is_duration_known = true ;
// Calculate number of frames
info . video_length = round ( info . duration * info . fps . ToDouble ( ) ) ;
}
2011-12-11 20:42:50 -06:00
2018-02-03 01:57:18 -06:00
// Add video metadata (if any)
AVDictionaryEntry * tag = NULL ;
while ( ( tag = av_dict_get ( pStream - > metadata , " " , tag , AV_DICT_IGNORE_SUFFIX ) ) ) {
QString str_key = tag - > key ;
QString str_value = tag - > value ;
info . metadata [ str_key . toStdString ( ) ] = str_value . trimmed ( ) . toStdString ( ) ;
}
2011-10-11 08:44:27 -05:00
}
2020-07-02 19:09:04 -03:00
bool FFmpegReader : : GetIsDurationKnown ( ) {
return this - > is_duration_known ;
}
2012-10-31 01:17:12 -05:00
2019-04-18 01:07:57 -05:00
std : : shared_ptr < Frame > FFmpegReader : : GetFrame ( int64_t requested_frame ) {
2012-10-09 01:45:34 -05:00
// Check for open reader (or throw exception)
if ( ! is_open )
throw ReaderClosed ( " The FFmpegReader is closed. Call Open() before calling this method . " , path) ;
2014-03-21 01:25:17 -05:00
// Adjust for a requested frame that is too small or too large
if ( requested_frame < 1 )
requested_frame = 1 ;
if ( requested_frame > info . video_length & & is_duration_known )
requested_frame = info . video_length ;
if ( info . has_video & & info . video_length = = 0 )
// Invalid duration of video file
throw InvalidFile ( " Could not detect the duration of the video or audio stream. " , path ) ;
2014-08-27 09:44:27 -05:00
// Debug output
2019-07-03 14:14:02 -04:00
ZmqLogger : : Instance ( ) - > AppendDebugMethod ( " FFmpegReader::GetFrame " , " requested_frame " , requested_frame , " last_frame " , last_frame ) ;
2014-08-27 09:44:27 -05:00
2011-10-11 08:44:27 -05:00
// Check the cache for this frame
2017-08-20 17:37:39 -05:00
std : : shared_ptr < Frame > frame = final_cache . GetFrame ( requested_frame ) ;
2015-08-05 23:40:58 -05:00
if ( frame ) {
2014-08-27 09:44:27 -05:00
// Debug output
2019-07-03 14:14:02 -04:00
ZmqLogger : : Instance ( ) - > AppendDebugMethod ( " FFmpegReader::GetFrame " , " returned cached frame " , requested_frame ) ;
2014-08-27 09:44:27 -05:00
2011-10-11 08:44:27 -05:00
// Return the cached frame
2015-08-05 23:40:58 -05:00
return frame ;
2019-04-18 01:07:57 -05:00
} else {
2022-07-21 15:00:08 -05:00
// Check the cache a 2nd time (due to a potential previous lock)
frame = final_cache . GetFrame ( requested_frame ) ;
if ( frame ) {
// Debug output
ZmqLogger : : Instance ( ) - > AppendDebugMethod ( " FFmpegReader::GetFrame " , " returned cached frame on 2nd look " , requested_frame ) ;
2015-06-01 00:20:14 -07:00
2022-07-21 15:00:08 -05:00
// Return the cached frame
} else {
// Frame is not in cache
// Reset seek count
seek_count = 0 ;
2012-10-10 17:27:46 -05:00
2022-07-21 15:00:08 -05:00
// Are we within X frames of the requested frame?
int64_t diff = requested_frame - last_frame ;
if ( diff > = 1 & & diff < = 20 ) {
// Continue walking the stream
frame = ReadStream ( requested_frame ) ;
} else {
// Greater than 30 frames away, or backwards, we need to seek to the nearest key frame
if ( enable_seek ) {
// Only seek if enabled
Seek ( requested_frame ) ;
2018-06-21 02:44:08 -05:00
2022-07-21 15:00:08 -05:00
} else if ( ! enable_seek & & diff < 0 ) {
// Start over, since we can't seek, and the requested frame is smaller than our position
// Since we are seeking to frame 1, this actually just closes/re-opens the reader
Seek ( 1 ) ;
}
2018-06-21 02:44:08 -05:00
2022-07-21 15:00:08 -05:00
// Then continue walking the stream
frame = ReadStream ( requested_frame ) ;
}
}
2019-01-31 09:42:26 -08:00
return frame ;
2011-10-11 08:44:27 -05:00
}
}
// Read the stream until we find the requested Frame.
// Drives the main decode loop: pulls packets, dispatches them to the video/audio
// processors, checks seek status, and promotes finished frames into final_cache.
// Returns the requested frame, or the largest processed frame (or a blank frame)
// if the requested one could not be produced.
std::shared_ptr<Frame> FFmpegReader::ReadStream(int64_t requested_frame) {
	// Seek bookkeeping and last packet-read status
	bool check_seek = false;
	int packet_error = -1;

	// Debug output
	ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream", "requested_frame", requested_frame, "max_concurrent_frames", max_concurrent_frames);

	// Loop through the stream until the correct frame is found
	while (true) {
		// Check if working frames are 'finished' (skip while a seek is in flight)
		if (!is_seeking) {
			// Check for final frames
			CheckWorkingFrames(requested_frame);
		}

		// Check if requested 'final' frame is available (and break out of loop if found)
		bool is_cache_found = (final_cache.GetFrame(requested_frame) != NULL);
		if (is_cache_found) {
			break;
		}

		// Get the next packet
		packet_error = GetNextPacket();
		if (packet_error < 0 && !packet) {
			// No more packets to be found
			packets_eof = true;
		}

		// Debug output
		ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (GetNextPacket)", "requested_frame", requested_frame, "packets_read", packets_read, "packets_decoded", packets_decoded, "is_seeking", is_seeking);

		// Check the status of a seek (if any)
		if (is_seeking) {
			check_seek = CheckSeek(false);
		} else {
			check_seek = false;
		}

		if (check_seek) {
			// Packet may become NULL on Close inside Seek if CheckSeek returns false
			// Jump to the next iteration of this loop
			continue;
		}

		// Video packet (a NULL packet with video not yet at EOF flushes the decoder)
		if ((info.has_video && packet && packet->stream_index == videoStream) ||
			(info.has_video && !packet && !video_eof)) {
			// Process Video Packet
			ProcessVideoPacket(requested_frame);
		}
		// Audio packet (a NULL packet with audio not yet at EOF flushes the decoder)
		else if ((info.has_audio && packet && packet->stream_index == audioStream) ||
				 (info.has_audio && !packet && !audio_eof)) {
			// Process Audio Packet
			ProcessAudioPacket(requested_frame);
		}

		// Determine end-of-stream (waiting until final decoder threads finish)
		// Force end-of-stream in some situations
		end_of_file = packets_eof && video_eof && audio_eof;
		if ((packets_eof && packets_read == packets_decoded) || end_of_file) {
			// Force EOF (end of file) variables to true, if decoder does not support EOF detection.
			// If we have no more packets, and all known packets have been decoded
			ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (force EOF)", "packets_read", packets_read, "packets_decoded", packets_decoded, "packets_eof", packets_eof, "video_eof", video_eof, "audio_eof", audio_eof, "end_of_file", end_of_file);
			if (!video_eof) {
				video_eof = true;
			}
			if (!audio_eof) {
				audio_eof = true;
			}
			end_of_file = true;
			break;
		}
	} // end while

	// Debug output
	ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Completed)", "packets_read", packets_read, "packets_decoded", packets_decoded, "end_of_file", end_of_file, "largest_frame_processed", largest_frame_processed, "Working Cache Count", working_cache.Count());

	// Have we reached end-of-stream (or the final frame)?
	if (!end_of_file && requested_frame >= info.video_length) {
		// Force end-of-stream
		end_of_file = true;
	}
	if (end_of_file) {
		// Mark any other working frames as 'finished'
		CheckWorkingFrames(requested_frame);
	}

	// Return requested frame (if found)
	std::shared_ptr<Frame> frame = final_cache.GetFrame(requested_frame);
	if (frame)
		// Return prepared frame
		return frame;
	else {
		// Check if largest frame is still cached
		frame = final_cache.GetFrame(largest_frame_processed);
		if (frame) {
			// return the largest processed frame (assuming it was the last in the video file)
			return frame;
		} else {
			// The largest processed frame is no longer in cache, return a blank frame
			std::shared_ptr<Frame> f = CreateFrame(largest_frame_processed);
			f->AddColor(info.width, info.height, "#000");
			return f;
		}
	}

}
// Get the next packet (if any)
2019-04-18 01:07:57 -05:00
int FFmpegReader : : GetNextPacket ( ) {
2015-06-01 00:20:14 -07:00
int found_packet = 0 ;
2018-08-31 21:36:23 -07:00
AVPacket * next_packet ;
2021-02-17 19:44:44 -06:00
next_packet = new AVPacket ( ) ;
found_packet = av_read_frame ( pFormatCtx , next_packet ) ;
2012-07-02 00:51:10 -05:00
2021-02-17 19:44:44 -06:00
if ( packet ) {
// Remove previous packet before getting next one
RemoveAVPacket ( packet ) ;
packet = NULL ;
}
if ( found_packet > = 0 ) {
// Update current packet pointer
packet = next_packet ;
2022-07-21 13:56:29 -05:00
packets_read + + ;
2021-02-17 19:44:44 -06:00
} else {
2022-07-21 15:00:08 -05:00
// No more packets found
2021-02-17 19:44:44 -06:00
delete next_packet ;
2022-07-21 15:00:08 -05:00
packet = NULL ;
2019-01-31 09:42:26 -08:00
}
2012-07-02 00:51:10 -05:00
// Return if packet was found (or error number)
return found_packet ;
2011-10-11 08:44:27 -05:00
}
// Get an AVFrame (if any).
// Decodes the current 'packet' into 'pFrame' using the send/receive API
// (FFmpeg >= 3.2) or avcodec_decode_video2 (older), optionally transferring
// data back from a hardware decoder. Updates video_pts from the decoded
// frame and returns whether a complete frame was produced.
bool FFmpegReader::GetAVFrame() {
	int frameFinished = 0;

	// Decode video frame
	AVFrame *next_frame = AV_ALLOCATE_FRAME();

#if IS_FFMPEG_3_2
	// Feed the current packet to the decoder (a NULL packet flushes it)
	int send_packet_err = avcodec_send_packet(pCodecCtx, packet);

#if USE_HW_ACCEL
	// Get the format from the variables set in get_hw_dec_format
	hw_de_av_pix_fmt = hw_de_av_pix_fmt_global;
	hw_de_av_device_type = hw_de_av_device_type_global;
#endif // USE_HW_ACCEL

	if (send_packet_err < 0 && send_packet_err != AVERROR_EOF) {
		// Decoder refused the packet (AVERROR_EOF during flush is expected, not logged)
		ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Packet not sent)", "send_packet_err", send_packet_err);
	}
	else {
		int receive_frame_err = 0;
		// When hardware decoding, frames land in next_frame2 (GPU) and are
		// transferred into next_frame (CPU); otherwise they alias the same frame.
		AVFrame *next_frame2;
#if USE_HW_ACCEL
		if (hw_de_on && hw_de_supported) {
			next_frame2 = AV_ALLOCATE_FRAME();
		}
		else
#endif // USE_HW_ACCEL
		{
			next_frame2 = next_frame;
		}
		pFrame = AV_ALLOCATE_FRAME();
		while (receive_frame_err >= 0) {
			receive_frame_err = avcodec_receive_frame(pCodecCtx, next_frame2);

			if (receive_frame_err == AVERROR_EOF) {
				// Decoder fully drained
				ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (EOF - end of file detected from decoder)");
				video_eof = true;
			}
			if (receive_frame_err == AVERROR(EINVAL) || receive_frame_err == AVERROR_EOF) {
				ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (invalid frame received or EOF from decoder)");
				avcodec_flush_buffers(pCodecCtx);
			}
			if (receive_frame_err != 0) {
				// EAGAIN and other non-zero codes: no frame available this call
				ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (frame not ready yet from decoder)");
				break;
			}

#if USE_HW_ACCEL
			if (hw_de_on && hw_de_supported) {
				int err;
				if (next_frame2->format == hw_de_av_pix_fmt) {
					// Copy the decoded frame from GPU memory to system memory
					next_frame->format = AV_PIX_FMT_YUV420P;
					if ((err = av_hwframe_transfer_data(next_frame, next_frame2, 0)) < 0) {
						ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Failed to transfer data to output frame)");
					}
					if ((err = av_frame_copy_props(next_frame, next_frame2)) < 0) {
						ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Failed to copy props to output frame)");
					}
				}
			}
			else
#endif // USE_HW_ACCEL
			{ // No hardware acceleration used -> no copy from GPU memory needed
				next_frame = next_frame2;
			}

			// TODO also handle possible further frames
			// Use only the first frame like avcodec_decode_video2
			frameFinished = 1;
			packets_decoded++;

			// Deep-copy the image into pFrame, since next_frame is freed below
			av_image_alloc(pFrame->data, pFrame->linesize, info.width, info.height, (AVPixelFormat)(pStream->codecpar->format), 1);
			av_image_copy(pFrame->data, pFrame->linesize, (const uint8_t **) next_frame->data, next_frame->linesize,
						  (AVPixelFormat)(pStream->codecpar->format), info.width, info.height);

			// Get display PTS from video frame, often different than packet->pts.
			// Sending packets to the decoder (i.e. packet->pts) is async,
			// and retrieving packets from the decoder (frame->pts) is async. In most decoders
			// sending and retrieving are separated by multiple calls to this method.
			if (next_frame->pts != AV_NOPTS_VALUE) {
				// This is the current decoded frame (and should be the pts used) for
				// processing this data
				video_pts = next_frame->pts;
			}

			// break out of loop after each successful image returned
			break;
		}
#if USE_HW_ACCEL
		if (hw_de_on && hw_de_supported) {
			AV_FREE_FRAME(&next_frame2);
		}
#endif // USE_HW_ACCEL
	}
#else
	// Legacy (pre-3.2) decode API
	avcodec_decode_video2(pCodecCtx, next_frame, &frameFinished, packet);

	// always allocate pFrame (because we do that in the ffmpeg >= 3.2 as well); it will always be freed later
	pFrame = AV_ALLOCATE_FRAME();

	// is frame finished
	if (frameFinished) {
		// AVFrames are clobbered on the each call to avcodec_decode_video, so we
		// must make a copy of the image data before this method is called again.
		avpicture_alloc((AVPicture *) pFrame, pCodecCtx->pix_fmt, info.width, info.height);
		av_picture_copy((AVPicture *) pFrame, (AVPicture *) next_frame, pCodecCtx->pix_fmt, info.width,
						info.height);
	}
#endif // IS_FFMPEG_3_2

	// deallocate the frame
	AV_FREE_FRAME(&next_frame);

	// Did we get a video frame?
	return frameFinished;
}
// Check the current seek position and determine if we need to seek again
2019-04-18 01:07:57 -05:00
bool FFmpegReader : : CheckSeek ( bool is_video ) {
2011-10-11 08:44:27 -05:00
// Are we seeking for a specific frame?
2019-04-18 01:07:57 -05:00
if ( is_seeking ) {
2014-08-27 09:44:27 -05:00
// Determine if both an audio and video packet have been decoded since the seek happened.
// If not, allow the ReadStream method to keep looping
2014-09-13 16:35:11 -05:00
if ( ( is_video_seek & & ! seek_video_frame_found ) | | ( ! is_video_seek & & ! seek_audio_frame_found ) )
2014-08-27 09:44:27 -05:00
return false ;
2016-01-05 01:59:50 -06:00
// Check for both streams
if ( ( info . has_video & & ! seek_video_frame_found ) | | ( info . has_audio & & ! seek_audio_frame_found ) )
return false ;
2014-09-26 09:35:38 -05:00
// Determine max seeked frame
2022-07-21 13:56:29 -05:00
int64_t max_seeked_frame = std : : max ( seek_audio_frame_found , seek_video_frame_found ) ;
2011-10-11 08:44:27 -05:00
// determine if we are "before" the requested frame
2019-04-18 01:07:57 -05:00
if ( max_seeked_frame > = seeking_frame ) {
2012-10-12 16:41:23 -05:00
// SEEKED TOO FAR
2016-04-21 01:39:17 -05:00
ZmqLogger : : Instance ( ) - > AppendDebugMethod ( " FFmpegReader::CheckSeek (Too far, seek again) " , " is_video_seek " , is_video_seek , " max_seeked_frame " , max_seeked_frame , " seeking_frame " , seeking_frame , " seeking_pts " , seeking_pts , " seek_video_frame_found " , seek_video_frame_found , " seek_audio_frame_found " , seek_audio_frame_found ) ;
2011-10-11 08:44:27 -05:00
2012-10-12 16:41:23 -05:00
// Seek again... to the nearest Keyframe
2018-04-14 16:25:13 -05:00
Seek ( seeking_frame - ( 10 * seek_count * seek_count ) ) ;
2019-04-18 01:07:57 -05:00
} else {
2014-09-13 16:35:11 -05:00
// SEEK WORKED
2022-07-21 13:56:29 -05:00
ZmqLogger : : Instance ( ) - > AppendDebugMethod ( " FFmpegReader::CheckSeek (Successful) " , " is_video_seek " , is_video_seek , " packet->pts " , GetPacketPTS ( ) , " seeking_pts " , seeking_pts , " seeking_frame " , seeking_frame , " seek_video_frame_found " , seek_video_frame_found , " seek_audio_frame_found " , seek_audio_frame_found ) ;
2014-08-27 09:44:27 -05:00
2012-10-12 16:41:23 -05:00
// Seek worked, and we are "before" the requested frame
is_seeking = false ;
seeking_frame = 0 ;
2014-04-05 10:19:20 -05:00
seeking_pts = - 1 ;
2011-10-11 08:44:27 -05:00
}
}
// return the pts to seek to (if any)
return is_seeking ;
}
// Process a video packet
2019-04-18 01:07:57 -05:00
void FFmpegReader : : ProcessVideoPacket ( int64_t requested_frame ) {
2022-07-21 15:00:08 -05:00
// Get the AVFrame from the current packet
// This sets the video_pts to the correct timestamp
int frame_finished = GetAVFrame ( ) ;
2022-07-21 13:56:29 -05:00
2022-07-21 15:00:08 -05:00
// Check if the AVFrame is finished and set it
if ( ! frame_finished ) {
// No AVFrame decoded yet, bail out
return ;
}
2022-07-21 13:56:29 -05:00
2011-10-24 08:22:21 -05:00
// Calculate current frame #
2022-07-21 13:56:29 -05:00
int64_t current_frame = ConvertVideoPTStoFrame ( video_pts ) ;
2011-10-11 08:44:27 -05:00
2016-01-05 01:59:50 -06:00
// Track 1st video packet after a successful seek
if ( ! seek_video_frame_found & & is_seeking )
seek_video_frame_found = current_frame ;
2022-07-21 15:00:08 -05:00
// Create or get the existing frame object. Requested frame needs to be created
// in working_cache at least once. Seek can clear the working_cache, so we must
// add the requested frame back to the working_cache here. If it already exists,
// it will be moved to the top of the working_cache.
working_cache . Add ( CreateFrame ( requested_frame ) ) ;
2011-10-11 08:44:27 -05:00
2014-08-27 09:44:27 -05:00
// Debug output
2019-07-03 14:14:02 -04:00
ZmqLogger : : Instance ( ) - > AppendDebugMethod ( " FFmpegReader::ProcessVideoPacket (Before) " , " requested_frame " , requested_frame , " current_frame " , current_frame ) ;
2014-08-27 09:44:27 -05:00
2012-07-01 01:43:06 -05:00
// Init some things local (for OpenMP)
2018-03-21 02:10:46 -05:00
PixelFormat pix_fmt = AV_GET_CODEC_PIXEL_FORMAT ( pStream , pCodecCtx ) ;
2012-06-18 09:26:14 -05:00
int height = info . height ;
int width = info . width ;
2017-09-28 16:03:01 -05:00
int64_t video_length = info . video_length ;
2018-03-21 02:10:46 -05:00
AVFrame * my_frame = pFrame ;
2019-05-08 14:53:23 -07:00
pFrame = NULL ;
2012-06-18 09:26:14 -05:00
2021-02-17 19:44:44 -06:00
// Create variables for a RGB Frame (since most videos are not in RGB, we must convert it)
2021-03-31 19:44:48 -04:00
AVFrame * pFrameRGB = nullptr ;
uint8_t * buffer = nullptr ;
2012-06-18 09:26:14 -05:00
2021-02-17 19:44:44 -06:00
// Allocate an AVFrame structure
pFrameRGB = AV_ALLOCATE_FRAME ( ) ;
2021-03-31 19:44:48 -04:00
if ( pFrameRGB = = nullptr )
throw OutOfMemory ( " Failed to allocate frame buffer " , path ) ;
2012-06-18 09:26:14 -05:00
2021-02-17 19:44:44 -06:00
// Determine the max size of this source image (based on the timeline's size, the scaling mode,
// and the scaling keyframes). This is a performance improvement, to keep the images as small as possible,
// without losing quality. NOTE: We cannot go smaller than the timeline itself, or the add_layer timeline
// method will scale it back to timeline size before scaling it smaller again. This needs to be fixed in
// the future.
int max_width = info . width ;
int max_height = info . height ;
2019-01-19 02:18:52 -06:00
2021-02-17 19:44:44 -06:00
Clip * parent = ( Clip * ) ParentClip ( ) ;
if ( parent ) {
if ( parent - > ParentTimeline ( ) ) {
// Set max width/height based on parent clip's timeline (if attached to a timeline)
max_width = parent - > ParentTimeline ( ) - > preview_width ;
max_height = parent - > ParentTimeline ( ) - > preview_height ;
}
if ( parent - > scale = = SCALE_FIT | | parent - > scale = = SCALE_STRETCH ) {
// Best fit or Stretch scaling (based on max timeline size * scaling keyframes)
float max_scale_x = parent - > scale_x . GetMaxPoint ( ) . co . Y ;
float max_scale_y = parent - > scale_y . GetMaxPoint ( ) . co . Y ;
max_width = std : : max ( float ( max_width ) , max_width * max_scale_x ) ;
max_height = std : : max ( float ( max_height ) , max_height * max_scale_y ) ;
2019-01-19 02:18:52 -06:00
2022-07-21 15:00:08 -05:00
} else if ( parent - > scale = = SCALE_CROP ) {
2021-02-17 19:44:44 -06:00
// Cropping scale mode (based on max timeline size * cropped size * scaling keyframes)
float max_scale_x = parent - > scale_x . GetMaxPoint ( ) . co . Y ;
float max_scale_y = parent - > scale_y . GetMaxPoint ( ) . co . Y ;
QSize width_size ( max_width * max_scale_x ,
round ( max_width / ( float ( info . width ) / float ( info . height ) ) ) ) ;
QSize height_size ( round ( max_height / ( float ( info . height ) / float ( info . width ) ) ) ,
max_height * max_scale_y ) ;
// respect aspect ratio
if ( width_size . width ( ) > = max_width & & width_size . height ( ) > = max_height ) {
max_width = std : : max ( max_width , width_size . width ( ) ) ;
max_height = std : : max ( max_height , width_size . height ( ) ) ;
2019-01-19 02:18:52 -06:00
} else {
2021-02-17 19:44:44 -06:00
max_width = std : : max ( max_width , height_size . width ( ) ) ;
max_height = std : : max ( max_height , height_size . height ( ) ) ;
2019-01-19 02:18:52 -06:00
}
2020-12-31 17:35:49 -06:00
} else {
2022-07-21 15:00:08 -05:00
// Scale video to equivalent unscaled size
// Since the preview window can change sizes, we want to always
// scale against the ratio of original video size to timeline size
float preview_ratio = 1.0 ;
if ( parent - > ParentTimeline ( ) ) {
Timeline * t = ( Timeline * ) parent - > ParentTimeline ( ) ;
preview_ratio = t - > preview_width / float ( t - > info . width ) ;
}
float max_scale_x = parent - > scale_x . GetMaxPoint ( ) . co . Y ;
float max_scale_y = parent - > scale_y . GetMaxPoint ( ) . co . Y ;
max_width = info . width * max_scale_x * preview_ratio ;
max_height = info . height * max_scale_y * preview_ratio ;
2020-12-31 17:35:49 -06:00
}
2021-02-17 19:44:44 -06:00
}
2012-06-18 09:26:14 -05:00
2021-02-17 19:44:44 -06:00
// Determine if image needs to be scaled (for performance reasons)
int original_height = height ;
if ( max_width ! = 0 & & max_height ! = 0 & & max_width < width & & max_height < height ) {
// Override width and height (but maintain aspect ratio)
float ratio = float ( width ) / float ( height ) ;
int possible_width = round ( max_height * ratio ) ;
int possible_height = round ( max_width / ratio ) ;
2012-06-18 09:26:14 -05:00
2021-02-17 19:44:44 -06:00
if ( possible_width < = max_width ) {
// use calculated width, and max_height
width = possible_width ;
height = max_height ;
} else {
// use max_width, and calculated height
width = max_width ;
height = possible_height ;
2014-08-27 09:44:27 -05:00
}
2021-02-17 19:44:44 -06:00
}
2014-08-27 09:44:27 -05:00
2021-02-17 19:44:44 -06:00
// Determine required buffer size and allocate buffer
const int bytes_per_pixel = 4 ;
int buffer_size = width * height * bytes_per_pixel ;
buffer = new unsigned char [ buffer_size ] ( ) ;
2012-07-06 02:34:18 -05:00
2021-02-17 19:44:44 -06:00
// Copy picture data from one AVFrame (or AVPicture) to another one.
AV_COPY_PICTURE_DATA ( pFrameRGB , buffer , PIX_FMT_RGBA , width , height ) ;
2012-06-18 09:26:14 -05:00
2021-02-17 19:44:44 -06:00
int scale_mode = SWS_FAST_BILINEAR ;
if ( openshot : : Settings : : Instance ( ) - > HIGH_QUALITY_SCALING ) {
scale_mode = SWS_BICUBIC ;
}
SwsContext * img_convert_ctx = sws_getContext ( info . width , info . height , AV_GET_CODEC_PIXEL_FORMAT ( pStream , pCodecCtx ) , width ,
height , PIX_FMT_RGBA , scale_mode , NULL , NULL , NULL ) ;
// Resize / Convert to RGB
sws_scale ( img_convert_ctx , my_frame - > data , my_frame - > linesize , 0 ,
original_height , pFrameRGB - > data , pFrameRGB - > linesize ) ;
// Create or get the existing frame object
std : : shared_ptr < Frame > f = CreateFrame ( current_frame ) ;
// Add Image data to frame
if ( ! ffmpeg_has_alpha ( AV_GET_CODEC_PIXEL_FORMAT ( pStream , pCodecCtx ) ) ) {
// Add image with no alpha channel, Speed optimization
f - > AddImage ( width , height , bytes_per_pixel , QImage : : Format_RGBA8888_Premultiplied , buffer ) ;
} else {
// Add image with alpha channel (this will be converted to premultipled when needed, but is slower)
f - > AddImage ( width , height , bytes_per_pixel , QImage : : Format_RGBA8888 , buffer ) ;
}
// Update working cache
working_cache . Add ( f ) ;
// Keep track of last last_video_frame
last_video_frame = f ;
// Free the RGB image
AV_FREE_FRAME ( & pFrameRGB ) ;
// Remove frame and packet
RemoveAVFrame ( my_frame ) ;
sws_freeContext ( img_convert_ctx ) ;
2022-07-21 15:00:08 -05:00
// Get video PTS in seconds
video_pts_seconds = ( double ( video_pts ) * info . video_timebase . ToDouble ( ) ) + pts_offset_seconds ;
2021-02-17 19:44:44 -06:00
2022-07-21 15:00:08 -05:00
// Debug output
2022-07-21 13:56:29 -05:00
ZmqLogger : : Instance ( ) - > AppendDebugMethod ( " FFmpegReader::ProcessVideoPacket (After) " , " requested_frame " , requested_frame , " current_frame " , current_frame , " f->number " , f - > number , " video_pts_seconds " , video_pts_seconds ) ;
2011-10-11 08:44:27 -05:00
}
// Process an audio packet
// Decodes the current audio packet (member 'packet'), resamples the decoded
// samples to interleaved 16-bit PCM at the same sample rate, then splits the
// samples per channel and distributes them into the correct Frame objects in
// the working cache (a single packet can span several frames).
// @param requested_frame The frame # requested by the caller; it is always
//        (re-)added to the working cache, since a seek may have cleared it.
void FFmpegReader::ProcessAudioPacket(int64_t requested_frame) {
	AudioLocation location;
	// Calculate location of current audio packet
	if (packet && packet->pts != AV_NOPTS_VALUE) {
		// Determine related video frame and starting sample # from audio PTS
		location = GetAudioPTSLocation(packet->pts);

		// Track 1st audio packet after a successful seek
		if (!seek_audio_frame_found && is_seeking)
			seek_audio_frame_found = location.frame;
	}

	// Create or get the existing frame object. Requested frame needs to be created
	// in working_cache at least once. Seek can clear the working_cache, so we must
	// add the requested frame back to the working_cache here. If it already exists,
	// it will be moved to the top of the working_cache.
	working_cache.Add(CreateFrame(requested_frame));

	// Debug output
	ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (Before)", "requested_frame", requested_frame, "target_frame", location.frame, "starting_sample", location.sample_start);

	// Init an AVFrame to hold the decoded audio samples
	int frame_finished = 0;
	AVFrame *audio_frame = AV_ALLOCATE_FRAME();
	AV_RESET_FRAME(audio_frame);

	int packet_samples = 0;
	int data_size = 0;

#if IS_FFMPEG_3_2
	// Modern send/receive decode API (FFmpeg >= 3.2)
	int send_packet_err = avcodec_send_packet(aCodecCtx, packet);
	if (send_packet_err < 0 && send_packet_err != AVERROR_EOF) {
		ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (Packet not sent)");
	}
	else {
		int receive_frame_err = avcodec_receive_frame(aCodecCtx, audio_frame);
		if (receive_frame_err >= 0) {
			frame_finished = 1;
		}
		if (receive_frame_err == AVERROR_EOF) {
			ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (EOF - end of file detected from decoder)");
			audio_eof = true;
		}
		if (receive_frame_err == AVERROR(EINVAL) || receive_frame_err == AVERROR_EOF) {
			ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (invalid frame received or EOF from decoder)");
			// Reset decoder state so later packets can still decode
			avcodec_flush_buffers(aCodecCtx);
		}
		if (receive_frame_err != 0) {
			ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (frame not ready yet from decoder)");
		}
	}
#else
	// Legacy decode API (FFmpeg < 3.2)
	int used = avcodec_decode_audio4(aCodecCtx, audio_frame, &frame_finished, packet);
#endif

	if (frame_finished) {
		packets_decoded++;

		// This can be different than the current packet, so we need to look
		// at the current AVFrame from the audio decoder. This timestamp should
		// be used for the remainder of this function
		audio_pts = audio_frame->pts;

		// Determine related video frame and starting sample # from audio PTS
		location = GetAudioPTSLocation(audio_pts);

		// determine how many samples were decoded
		int plane_size = -1;
		data_size = av_samples_get_buffer_size(&plane_size,
			AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channels,
			audio_frame->nb_samples,
			(AVSampleFormat) (AV_GET_SAMPLE_FORMAT(aStream, aCodecCtx)), 1);

		// Calculate total number of samples
		packet_samples = audio_frame->nb_samples * AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channels;
	}
	// NOTE(review): when frame_finished is 0, packet_samples stays 0 and the
	// resample/copy below still run on the (empty) audio_frame — appears to be
	// deliberate best-effort handling; confirm before changing.

	// Estimate the # of samples and the end of this packet's location (to prevent GAPS for the next timestamp)
	int pts_remaining_samples = packet_samples / info.channels; // Adjust for zero based array

	// DEBUG (FOR AUDIO ISSUES) - Get the audio packet start time (in seconds)
	int64_t adjusted_pts = audio_pts;
	double audio_seconds = (double(adjusted_pts) * info.audio_timebase.ToDouble()) + pts_offset_seconds;
	double sample_seconds = (double(pts_total) / info.sample_rate) + pts_offset_seconds;

	// Debug output
	ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (Decode Info A)", "pts_counter", pts_counter, "PTS", adjusted_pts, "PTS Diff", adjusted_pts - prev_pts, "Samples", pts_remaining_samples, "Sample PTS ratio", float(adjusted_pts - prev_pts) / pts_remaining_samples);
	ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (Decode Info B)", "Sample Diff", pts_remaining_samples - prev_samples - prev_pts, "Total", pts_total, "PTS Seconds", audio_seconds, "Sample Seconds", sample_seconds, "Seconds Diff", audio_seconds - sample_seconds, "raw samples", packet_samples);

	// DEBUG (FOR AUDIO ISSUES)
	prev_pts = adjusted_pts;
	pts_total += pts_remaining_samples;
	pts_counter++;
	prev_samples = pts_remaining_samples;

	// Advance previous_packet_location to the expected end of this packet,
	// so the next packet's GetAudioPTSLocation() can snap small gaps shut.
	while (pts_remaining_samples) {
		// Get Samples per frame (for this frame number)
		int samples_per_frame = Frame::GetSamplesPerFrame(previous_packet_location.frame, info.fps, info.sample_rate, info.channels);

		// Calculate # of samples to add to this frame
		int samples = samples_per_frame - previous_packet_location.sample_start;
		if (samples > pts_remaining_samples)
			samples = pts_remaining_samples;

		// Decrement remaining samples
		pts_remaining_samples -= samples;

		if (pts_remaining_samples > 0) {
			// next frame
			previous_packet_location.frame++;
			previous_packet_location.sample_start = 0;
		} else {
			// Increment sample start
			previous_packet_location.sample_start += samples;
		}
	}

	// Allocate audio buffer
	// NOTE(review): fixed-size buffer; assumes a decoded packet never exceeds
	// AVCODEC_MAX_AUDIO_FRAME_SIZE samples — TODO confirm for exotic codecs.
	int16_t *audio_buf = new int16_t[AVCODEC_MAX_AUDIO_FRAME_SIZE + MY_INPUT_BUFFER_PADDING_SIZE];

	ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (ReSample)", "packet_samples", packet_samples, "info.channels", info.channels, "info.sample_rate", info.sample_rate, "aCodecCtx->sample_fmt", AV_GET_SAMPLE_FORMAT(aStream, aCodecCtx), "AV_SAMPLE_FMT_S16", AV_SAMPLE_FMT_S16);

	// Create output frame (interleaved signed 16-bit, same rate/layout)
	AVFrame *audio_converted = AV_ALLOCATE_FRAME();
	AV_RESET_FRAME(audio_converted);
	audio_converted->nb_samples = audio_frame->nb_samples;
	av_samples_alloc(audio_converted->data, audio_converted->linesize, info.channels, audio_frame->nb_samples, AV_SAMPLE_FMT_S16, 0);

	SWRCONTEXT *avr = NULL;
	int nb_samples = 0;

	// setup resample context (only the sample format changes; rate, channels,
	// and layout are passed through unchanged)
	avr = SWR_ALLOC();
	av_opt_set_int(avr, "in_channel_layout", AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout, 0);
	av_opt_set_int(avr, "out_channel_layout", AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout, 0);
	av_opt_set_int(avr, "in_sample_fmt", AV_GET_SAMPLE_FORMAT(aStream, aCodecCtx), 0);
	av_opt_set_int(avr, "out_sample_fmt", AV_SAMPLE_FMT_S16, 0);
	av_opt_set_int(avr, "in_sample_rate", info.sample_rate, 0);
	av_opt_set_int(avr, "out_sample_rate", info.sample_rate, 0);
	av_opt_set_int(avr, "in_channels", info.channels, 0);
	av_opt_set_int(avr, "out_channels", info.channels, 0);
	SWR_INIT(avr);

	// Convert audio samples
	nb_samples = SWR_CONVERT(avr,	// audio resample context
		audio_converted->data,	// output data pointers
		audio_converted->linesize[0],	// output plane size, in bytes. (0 if unknown)
		audio_converted->nb_samples,	// maximum number of samples that the output buffer can hold
		audio_frame->data,	// input data pointers
		audio_frame->linesize[0],	// input plane size, in bytes (0 if unknown)
		audio_frame->nb_samples);	// number of input samples to convert

	// Copy audio samples over original samples
	memcpy(audio_buf,
		   audio_converted->data[0],
		   static_cast<size_t>(audio_converted->nb_samples)
		   * av_get_bytes_per_sample(AV_SAMPLE_FMT_S16)
		   * info.channels);

	// Deallocate resample buffer
	SWR_CLOSE(avr);
	SWR_FREE(&avr);
	avr = NULL;

	// Free AVFrames
	av_free(audio_converted->data[0]);
	AV_FREE_FRAME(&audio_converted);

	int64_t starting_frame_number = -1;
	bool partial_frame = true;
	// De-interleave: one pass over the packet per output channel
	for (int channel_filter = 0; channel_filter < info.channels; channel_filter++) {
		// Array of floats (to hold samples for each channel)
		starting_frame_number = location.frame;
		int channel_buffer_size = packet_samples / info.channels;
		float *channel_buffer = new float[channel_buffer_size];

		// Init buffer array
		for (int z = 0; z < channel_buffer_size; z++)
			channel_buffer[z] = 0.0f;

		// Loop through all samples and add them to our Frame based on channel.
		// Toggle through each channel number, since channel data is stored like (left right left right)
		int channel = 0;
		int position = 0;
		for (int sample = 0; sample < packet_samples; sample++) {
			// Only add samples for current channel
			if (channel_filter == channel) {
				// Add sample (convert from (-32768 to 32768) to (-1.0 to 1.0))
				channel_buffer[position] = audio_buf[sample] * (1.0f / (1 << 15));

				// Increment audio position
				position++;
			}

			// increment channel (if needed)
			if ((channel + 1) < info.channels)
				// move to next channel
				channel++;
			else
				// reset channel
				channel = 0;
		}

		// Loop through samples, and add them to the correct frames
		int start = location.sample_start;
		int remaining_samples = channel_buffer_size;
		float *iterate_channel_buffer = channel_buffer;	// pointer to channel buffer
		while (remaining_samples > 0) {
			// Get Samples per frame (for this frame number)
			int samples_per_frame = Frame::GetSamplesPerFrame(starting_frame_number, info.fps, info.sample_rate, info.channels);

			// Calculate # of samples to add to this frame
			int samples = samples_per_frame - start;
			if (samples > remaining_samples)
				samples = remaining_samples;

			// Create or get the existing frame object
			std::shared_ptr<Frame> f = CreateFrame(starting_frame_number);

			// Determine if this frame was "partially" filled in
			if (samples_per_frame == start + samples)
				partial_frame = false;
			else
				partial_frame = true;

			// Add samples for current channel to the frame.
			f->AddAudio(true, channel_filter, start, iterate_channel_buffer, samples, 1.0f);

			// Debug output
			ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (f->AddAudio)", "frame", starting_frame_number, "start", start, "samples", samples, "channel", channel_filter, "partial_frame", partial_frame, "samples_per_frame", samples_per_frame);

			// Add or update cache
			working_cache.Add(f);

			// Decrement remaining samples
			remaining_samples -= samples;

			// Increment buffer (to next set of samples)
			if (remaining_samples > 0)
				iterate_channel_buffer += samples;

			// Increment frame number
			starting_frame_number++;

			// Reset starting sample #
			start = 0;
		}

		// clear channel buffer
		delete[] channel_buffer;
		channel_buffer = NULL;
		iterate_channel_buffer = NULL;
	}

	// Clean up some arrays
	delete[] audio_buf;
	audio_buf = NULL;

	// Free audio frame
	AV_FREE_FRAME(&audio_frame);

	// Get audio PTS in seconds
	audio_pts_seconds = (double(audio_pts) * info.audio_timebase.ToDouble()) + pts_offset_seconds;

	// Debug output
	ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (After)", "requested_frame", requested_frame, "starting_frame", location.frame, "end_frame", starting_frame_number - 1, "audio_pts_seconds", audio_pts_seconds);
}
// Seek to a specific frame. This is not always frame accurate, it's more of an estimation on many codecs.
// Resets all per-seek decode state, then either (a) closes and re-opens the
// file when the target is near frame 1 (more reliable than av_seek_frame), or
// (b) seeks the video stream (preferred) or audio stream to the nearest
// key-frame before the target. On a failed seek, the file is re-opened and
// seeking is disabled for this reader.
// @param requested_frame The frame # to seek to (clamped to [1, video_length])
void FFmpegReader::Seek(int64_t requested_frame) {
	// Adjust for a requested frame that is too small or too large
	if (requested_frame < 1)
		requested_frame = 1;
	if (requested_frame > info.video_length)
		requested_frame = info.video_length;

	// Debug output
	ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::Seek", "requested_frame", requested_frame, "seek_count", seek_count, "last_frame", last_frame);

	// Clear working cache (since we are seeking to another location in the file)
	working_cache.Clear();

	// Reset the last frame variable (and all per-position decode state)
	video_pts = 0.0;
	video_pts_seconds = NO_PTS_OFFSET;
	audio_pts = 0.0;
	audio_pts_seconds = NO_PTS_OFFSET;
	last_frame = 0;
	current_video_frame = 0;
	largest_frame_processed = 0;
	packets_eof = false;
	video_eof = false;
	audio_eof = false;
	end_of_file = false;
	packets_read = 0;
	packets_decoded = 0;

	// Remember stream overrides, since Close()/Open() below can reset them
	bool has_audio_override = info.has_audio;
	bool has_video_override = info.has_video;

	// Increment seek count
	seek_count++;

	// If seeking near frame 1, we need to close and re-open the file (this is more reliable than seeking)
	int buffer_amount = std::max(max_concurrent_frames, 8);
	if (requested_frame - buffer_amount < 20) {
		// prevent Open() from seeking again
		is_seeking = true;

		// Close and re-open file (basically seeking to frame 1)
		Close();
		Open();

		// Update overrides (since closing and re-opening might update these)
		info.has_audio = has_audio_override;
		info.has_video = has_video_override;

		// Not actually seeking, so clear these flags
		is_seeking = false;
		if (seek_count == 1) {
			// Don't redefine this on multiple seek attempts for a specific frame
			seeking_frame = 1;
			seeking_pts = ConvertFrameToVideoPTS(1);
		}
		seek_audio_frame_found = 0; // used to detect which frames to throw away after a seek
		seek_video_frame_found = 0; // used to detect which frames to throw away after a seek

	} else {
		// Seek to nearest key-frame (aka, i-frame)
		bool seek_worked = false;
		int64_t seek_target = 0;

		// Seek video stream (if any), except album arts
		if (!seek_worked && info.has_video && !HasAlbumArt()) {
			seek_target = ConvertFrameToVideoPTS(requested_frame - buffer_amount);
			if (av_seek_frame(pFormatCtx, info.video_stream_index, seek_target, AVSEEK_FLAG_BACKWARD) < 0) {
				fprintf(stderr, "%s: error while seeking video stream\n", pFormatCtx->AV_FILENAME);
			} else {
				// VIDEO SEEK
				is_video_seek = true;
				seek_worked = true;
			}
		}

		// Seek audio stream (if not already seeked... and if an audio stream is found)
		if (!seek_worked && info.has_audio) {
			seek_target = ConvertFrameToAudioPTS(requested_frame - buffer_amount);
			if (av_seek_frame(pFormatCtx, info.audio_stream_index, seek_target, AVSEEK_FLAG_BACKWARD) < 0) {
				fprintf(stderr, "%s: error while seeking audio stream\n", pFormatCtx->AV_FILENAME);
			} else {
				// AUDIO SEEK
				is_video_seek = false;
				seek_worked = true;
			}
		}

		// Was the seek successful?
		if (seek_worked) {
			// Flush audio buffer
			if (info.has_audio)
				avcodec_flush_buffers(aCodecCtx);

			// Flush video buffer
			if (info.has_video)
				avcodec_flush_buffers(pCodecCtx);

			// Reset previous audio location to zero
			previous_packet_location.frame = -1;
			previous_packet_location.sample_start = 0;

			// init seek flags
			is_seeking = true;
			if (seek_count == 1) {
				// Don't redefine this on multiple seek attempts for a specific frame
				seeking_pts = seek_target;
				seeking_frame = requested_frame;
			}
			seek_audio_frame_found = 0; // used to detect which frames to throw away after a seek
			seek_video_frame_found = 0; // used to detect which frames to throw away after a seek

		} else {
			// seek failed
			seeking_pts = 0;
			seeking_frame = 0;

			// prevent Open() from seeking again
			is_seeking = true;

			// Close and re-open file (basically seeking to frame 1)
			Close();
			Open();

			// Not actually seeking, so clear these flags
			is_seeking = false;

			// disable seeking for this reader (since it failed)
			enable_seek = false;

			// Update overrides (since closing and re-opening might update these)
			info.has_audio = has_audio_override;
			info.has_video = has_video_override;
		}
	}
}
2011-10-24 08:22:21 -05:00
// Get the PTS for the current video packet
2022-07-21 13:56:29 -05:00
int64_t FFmpegReader : : GetPacketPTS ( ) {
int64_t current_pts = packet - > pts ;
if ( current_pts = = AV_NOPTS_VALUE & & packet - > dts ! = AV_NOPTS_VALUE )
2012-07-02 00:51:10 -05:00
current_pts = packet - > dts ;
2011-10-24 08:22:21 -05:00
// Return adjusted PTS
return current_pts ;
}
// Update PTS Offset (if any)
2022-07-21 13:56:29 -05:00
void FFmpegReader : : UpdatePTSOffset ( ) {
if ( pts_offset_seconds ! = NO_PTS_OFFSET ) {
2022-07-21 15:00:08 -05:00
// Skip this method if we have already set PTS offset
return ;
2011-10-24 08:22:21 -05:00
}
2022-07-21 15:00:08 -05:00
pts_offset_seconds = 0.0 ;
double video_pts_offset_seconds = 0.0 ;
double audio_pts_offset_seconds = 0.0 ;
2022-07-21 13:56:29 -05:00
2022-07-21 15:00:08 -05:00
bool has_video_pts = false ;
if ( ! info . has_video ) {
// Mark as checked
has_video_pts = true ;
}
bool has_audio_pts = false ;
if ( ! info . has_audio ) {
// Mark as checked
has_audio_pts = true ;
}
2022-07-21 13:56:29 -05:00
2022-07-21 15:00:08 -05:00
// Loop through the stream (until a packet from all streams is found)
while ( ! has_video_pts | | ! has_audio_pts ) {
// Get the next packet (if any)
if ( GetNextPacket ( ) < 0 )
// Break loop when no more packets found
break ;
2022-07-21 13:56:29 -05:00
2022-07-21 15:00:08 -05:00
// Get PTS of this packet
int64_t pts = GetPacketPTS ( ) ;
2022-07-21 13:56:29 -05:00
2022-07-21 15:00:08 -05:00
// Video packet
if ( ! has_video_pts & & packet - > stream_index = = videoStream ) {
// Get the video packet start time (in seconds)
video_pts_offset_seconds = 0.0 - ( video_pts * info . video_timebase . ToDouble ( ) ) ;
2022-07-21 13:56:29 -05:00
2022-07-21 15:00:08 -05:00
// Is timestamp close to zero (within X seconds)
// Ignore wildly invalid timestamps (i.e. -234923423423)
if ( std : : abs ( video_pts_offset_seconds ) < = 10.0 ) {
has_video_pts = true ;
}
}
else if ( ! has_audio_pts & & packet - > stream_index = = audioStream ) {
// Get the audio packet start time (in seconds)
audio_pts_offset_seconds = 0.0 - ( pts * info . audio_timebase . ToDouble ( ) ) ;
2022-07-21 13:56:29 -05:00
2022-07-21 15:00:08 -05:00
// Is timestamp close to zero (within X seconds)
// Ignore wildly invalid timestamps (i.e. -234923423423)
if ( std : : abs ( audio_pts_offset_seconds ) < = 10.0 ) {
has_audio_pts = true ;
}
}
}
2022-07-21 13:56:29 -05:00
2022-07-21 15:00:08 -05:00
// Do we have all valid timestamps to determine PTS offset?
if ( has_video_pts & & has_audio_pts ) {
// Set PTS Offset to the smallest offset
// [ video timestamp ]
// [ audio timestamp ]
//
// ** SHIFT TIMESTAMPS TO ZERO **
//
//[ video timestamp ]
// [ audio timestamp ]
//
// Since all offsets are negative at this point, we want the max value, which
// represents the closest to zero
pts_offset_seconds = std : : max ( video_pts_offset_seconds , audio_pts_offset_seconds ) ;
}
2011-10-11 08:44:27 -05:00
}
2011-12-11 20:42:50 -06:00
// Convert PTS into Frame Number
2019-04-18 01:07:57 -05:00
int64_t FFmpegReader : : ConvertVideoPTStoFrame ( int64_t pts ) {
2011-12-11 20:42:50 -06:00
// Apply PTS offset
2017-09-28 16:03:01 -05:00
int64_t previous_video_frame = current_video_frame ;
2011-12-11 20:42:50 -06:00
2011-11-07 17:12:25 -06:00
// Get the video packet start time (in seconds)
2022-07-21 13:56:29 -05:00
double video_seconds = ( double ( pts ) * info . video_timebase . ToDouble ( ) ) + pts_offset_seconds ;
2011-11-07 17:12:25 -06:00
2011-12-11 20:42:50 -06:00
// Divide by the video timebase, to get the video frame number (frame # is decimal at this point)
2017-09-28 16:03:01 -05:00
int64_t frame = round ( video_seconds * info . fps . ToDouble ( ) ) + 1 ;
2015-08-24 01:05:48 -05:00
// Keep track of the expected video frame #
if ( current_video_frame = = 0 )
current_video_frame = frame ;
else {
// Sometimes frames are duplicated due to identical (or similar) timestamps
if ( frame = = previous_video_frame ) {
// return -1 frame number
frame = - 1 ;
2019-04-18 01:07:57 -05:00
} else {
2015-08-24 01:05:48 -05:00
// Increment expected frame
current_video_frame + + ;
2019-04-18 01:07:57 -05:00
}
2015-08-24 01:05:48 -05:00
}
2011-11-07 17:12:25 -06:00
2011-12-11 20:42:50 -06:00
// Return frame #
2011-11-07 17:12:25 -06:00
return frame ;
2011-10-11 08:44:27 -05:00
}
2011-10-24 08:22:21 -05:00
// Convert Frame Number into Video PTS
2019-04-18 01:07:57 -05:00
int64_t FFmpegReader : : ConvertFrameToVideoPTS ( int64_t frame_number ) {
2011-11-07 17:12:25 -06:00
// Get timestamp of this frame (in seconds)
2022-07-21 13:56:29 -05:00
double seconds = ( double ( frame_number - 1 ) / info . fps . ToDouble ( ) ) + pts_offset_seconds ;
2011-11-07 17:12:25 -06:00
2011-12-11 20:42:50 -06:00
// Calculate the # of video packets in this timestamp
2017-09-28 16:03:01 -05:00
int64_t video_pts = round ( seconds / info . video_timebase . ToDouble ( ) ) ;
2011-11-07 17:12:25 -06:00
2011-12-11 20:42:50 -06:00
// Apply PTS offset (opposite)
2022-07-21 13:56:29 -05:00
return video_pts ;
2011-10-11 08:44:27 -05:00
}
2011-12-11 20:42:50 -06:00
// Convert Frame Number into Video PTS
2019-04-18 01:07:57 -05:00
int64_t FFmpegReader : : ConvertFrameToAudioPTS ( int64_t frame_number ) {
2011-11-07 17:12:25 -06:00
// Get timestamp of this frame (in seconds)
2022-07-21 15:00:08 -05:00
double seconds = ( double ( frame_number - 1 ) / info . fps . ToDouble ( ) ) + pts_offset_seconds ;
2011-10-24 08:22:21 -05:00
2011-12-11 20:42:50 -06:00
// Calculate the # of audio packets in this timestamp
2017-09-28 16:03:01 -05:00
int64_t audio_pts = round ( seconds / info . audio_timebase . ToDouble ( ) ) ;
2011-10-24 08:22:21 -05:00
2011-12-11 20:42:50 -06:00
// Apply PTS offset (opposite)
2022-07-21 13:56:29 -05:00
return audio_pts ;
2011-10-24 08:22:21 -05:00
}
// Calculate Starting video frame and sample # for an audio PTS
2019-04-18 01:07:57 -05:00
AudioLocation FFmpegReader : : GetAudioPTSLocation ( int64_t pts ) {
2011-12-11 20:42:50 -06:00
// Get the audio packet start time (in seconds)
2022-07-21 13:56:29 -05:00
double audio_seconds = ( double ( pts ) * info . audio_timebase . ToDouble ( ) ) + pts_offset_seconds ;
2011-12-11 20:42:50 -06:00
// Divide by the video timebase, to get the video frame number (frame # is decimal at this point)
double frame = ( audio_seconds * info . fps . ToDouble ( ) ) + 1 ;
2011-10-24 08:22:21 -05:00
// Frame # as a whole number (no more decimals)
2017-09-28 16:03:01 -05:00
int64_t whole_frame = int64_t ( frame ) ;
2011-10-24 08:22:21 -05:00
// Remove the whole number, and only get the decimal of the frame
double sample_start_percentage = frame - double ( whole_frame ) ;
// Get Samples per frame
2015-03-08 21:42:53 -05:00
int samples_per_frame = Frame : : GetSamplesPerFrame ( whole_frame , info . fps , info . sample_rate , info . channels ) ;
2011-11-07 17:12:25 -06:00
// Calculate the sample # to start on
2011-10-26 14:34:14 -05:00
int sample_start = round ( double ( samples_per_frame ) * sample_start_percentage ) ;
2011-10-24 08:22:21 -05:00
2012-12-03 04:51:17 -06:00
// Protect against broken (i.e. negative) timestamps
if ( whole_frame < 1 )
whole_frame = 1 ;
if ( sample_start < 0 )
sample_start = 0 ;
2011-10-24 08:22:21 -05:00
// Prepare final audio packet location
2013-09-10 12:59:06 -05:00
AudioLocation location = { whole_frame , sample_start } ;
2011-10-24 08:22:21 -05:00
2012-11-20 10:15:39 -06:00
// Compare to previous audio packet (and fix small gaps due to varying PTS timestamps)
2017-01-23 23:53:50 -06:00
if ( previous_packet_location . frame ! = - 1 ) {
2019-04-18 01:07:57 -05:00
if ( location . is_near ( previous_packet_location , samples_per_frame , samples_per_frame ) ) {
2017-09-28 16:03:01 -05:00
int64_t orig_frame = location . frame ;
2017-01-23 23:53:50 -06:00
int orig_start = location . sample_start ;
2012-11-20 10:15:39 -06:00
2017-01-23 23:53:50 -06:00
// Update sample start, to prevent gaps in audio
location . sample_start = previous_packet_location . sample_start ;
location . frame = previous_packet_location . frame ;
2012-11-20 16:22:50 -06:00
2017-01-23 23:53:50 -06:00
// Debug output
2019-07-03 14:14:02 -04:00
ZmqLogger : : Instance ( ) - > AppendDebugMethod ( " FFmpegReader::GetAudioPTSLocation (Audio Gap Detected) " , " Source Frame " , orig_frame , " Source Audio Sample " , orig_start , " Target Frame " , location . frame , " Target Audio Sample " , location . sample_start , " pts " , pts ) ;
2014-08-27 09:44:27 -05:00
2017-01-23 23:53:50 -06:00
} else {
// Debug output
2019-07-03 14:14:02 -04:00
ZmqLogger : : Instance ( ) - > AppendDebugMethod ( " FFmpegReader::GetAudioPTSLocation (Audio Gap Ignored - too big) " , " Previous location frame " , previous_packet_location . frame , " Target Frame " , location . frame , " Target Audio Sample " , location . sample_start , " pts " , pts ) ;
2016-06-29 02:42:00 -05:00
}
2012-11-20 10:15:39 -06:00
}
// Set previous location
previous_packet_location = location ;
2011-10-24 08:22:21 -05:00
// Return the associated video frame and starting sample #
return location ;
2011-10-14 09:47:05 -05:00
}
2011-10-24 08:22:21 -05:00
// Create a new Frame (or return an existing one) and add it to the working queue.
2019-04-18 01:07:57 -05:00
std : : shared_ptr < Frame > FFmpegReader : : CreateFrame ( int64_t requested_frame ) {
2011-10-24 08:22:21 -05:00
// Check working cache
2017-08-20 17:37:39 -05:00
std : : shared_ptr < Frame > output = working_cache . GetFrame ( requested_frame ) ;
2019-10-01 23:27:36 -04:00
2019-04-18 01:07:57 -05:00
if ( ! output ) {
2019-10-01 23:27:36 -04:00
// Lock
2021-10-27 14:34:05 -04:00
const std : : lock_guard < std : : recursive_mutex > lock ( processingMutex ) ;
2019-10-01 23:27:36 -04:00
// (re-)Check working cache
output = working_cache . GetFrame ( requested_frame ) ;
if ( output ) return output ;
2011-10-24 08:22:21 -05:00
// Create a new frame on the working cache
2017-08-20 17:37:39 -05:00
output = std : : make_shared < Frame > ( requested_frame , info . width , info . height , " #000000 " , Frame : : GetSamplesPerFrame ( requested_frame , info . fps , info . sample_rate , info . channels ) , info . channels ) ;
2015-06-01 00:20:14 -07:00
output - > SetPixelRatio ( info . pixel_ratio . num , info . pixel_ratio . den ) ; // update pixel ratio
output - > ChannelsLayout ( info . channel_layout ) ; // update audio channel layout from the parent reader
output - > SampleRate ( info . sample_rate ) ; // update the frame's sample rate of the parent reader
2012-07-02 00:51:10 -05:00
2016-08-31 02:02:54 -05:00
working_cache . Add ( output ) ;
2011-10-24 08:22:21 -05:00
2014-03-21 01:25:17 -05:00
// Set the largest processed frame (if this is larger)
if ( requested_frame > largest_frame_processed )
largest_frame_processed = requested_frame ;
2011-10-24 08:22:21 -05:00
}
2019-10-01 23:27:36 -04:00
// Return frame
2015-06-01 00:20:14 -07:00
return output ;
2011-10-24 08:22:21 -05:00
}
2014-09-13 16:35:11 -05:00
// Determine if frame is partial due to seek
2017-09-28 16:03:01 -05:00
bool FFmpegReader : : IsPartialFrame ( int64_t requested_frame ) {
2014-09-13 16:35:11 -05:00
// Sometimes a seek gets partial frames, and we need to remove them
bool seek_trash = false ;
2017-09-28 16:03:01 -05:00
int64_t max_seeked_frame = seek_audio_frame_found ; // determine max seeked frame
2019-04-18 01:07:57 -05:00
if ( seek_video_frame_found > max_seeked_frame ) {
2014-09-13 16:35:11 -05:00
max_seeked_frame = seek_video_frame_found ;
2019-04-18 01:07:57 -05:00
}
2014-09-26 09:35:38 -05:00
if ( ( info . has_audio & & seek_audio_frame_found & & max_seeked_frame > = requested_frame ) | |
2019-04-18 01:07:57 -05:00
( info . has_video & & seek_video_frame_found & & max_seeked_frame > = requested_frame ) ) {
seek_trash = true ;
}
2014-09-13 16:35:11 -05:00
return seek_trash ;
}
2011-10-24 08:22:21 -05:00
// Check the working queue, and move finished frames to the finished queue
2022-07-21 13:56:29 -05:00
void FFmpegReader : : CheckWorkingFrames ( int64_t requested_frame ) {
2017-05-17 01:17:42 -05:00
2022-07-21 13:56:29 -05:00
// Get a list of current working queue frames in the cache (in-progress frames)
2022-07-21 15:00:08 -05:00
std : : vector < std : : shared_ptr < openshot : : Frame > > working_frames = working_cache . GetFrames ( ) ;
std : : vector < std : : shared_ptr < openshot : : Frame > > : : iterator working_itr ;
2019-02-27 23:25:54 -06:00
2022-07-21 15:00:08 -05:00
// Loop through all working queue frames (sorted by frame #)
for ( working_itr = working_frames . begin ( ) ; working_itr ! = working_frames . end ( ) ; + + working_itr )
{
// Get working frame
std : : shared_ptr < Frame > f = * working_itr ;
2011-10-24 08:22:21 -05:00
2022-07-21 13:56:29 -05:00
// Was a frame found? Is frame requested yet?
2022-07-21 14:21:44 -05:00
if ( ! f | | f & & f - > number > requested_frame ) {
2022-07-21 15:00:08 -05:00
// If not, skip to next one
continue ;
}
2015-08-24 01:05:48 -05:00
2022-07-21 13:56:29 -05:00
// Calculate PTS in seconds (of working frame), and the most recent processed pts value
double frame_pts_seconds = ( double ( f - > number - 1 ) / info . fps . ToDouble ( ) ) + pts_offset_seconds ;
2022-07-21 15:00:08 -05:00
double recent_pts_seconds = std : : max ( video_pts_seconds , audio_pts_seconds ) ;
2016-01-01 01:39:56 -06:00
2022-07-21 15:00:08 -05:00
// Determine if video and audio are ready (based on timestamps)
2015-06-01 00:20:14 -07:00
bool is_video_ready = false ;
bool is_audio_ready = false ;
2022-07-21 15:00:08 -05:00
double recent_pts_diff = recent_pts_seconds - frame_pts_seconds ;
2022-07-21 16:50:23 -05:00
if ( ( frame_pts_seconds < = video_pts_seconds )
| | ( recent_pts_diff > 1.5 )
2022-07-21 15:00:08 -05:00
| | video_eof | | end_of_file ) {
// Video stream is past this frame (so it must be done)
// OR video stream is too far behind, missing, or end-of-file
is_video_ready = true ;
ZmqLogger : : Instance ( ) - > AppendDebugMethod ( " FFmpegReader::CheckWorkingFrames (video ready) " , " frame_number " , f - > number , " frame_pts_seconds " , frame_pts_seconds , " video_pts_seconds " , video_pts_seconds , " recent_pts_diff " , recent_pts_diff ) ;
2022-07-21 13:56:29 -05:00
if ( info . has_video & & ! f - > has_image_data ) {
2022-07-21 15:00:08 -05:00
// Frame has no image data (copy from previous frame)
// Loop backwards through final frames (looking for the nearest, previous frame image)
for ( int64_t previous_frame = requested_frame - 1 ; previous_frame > 0 ; previous_frame - - ) {
std : : shared_ptr < Frame > previous_frame_instance = final_cache . GetFrame ( previous_frame ) ;
if ( previous_frame_instance & & previous_frame_instance - > has_image_data ) {
// Copy image from last decoded frame
f - > AddImage ( std : : make_shared < QImage > ( * previous_frame_instance - > GetImage ( ) ) ) ;
break ;
}
}
if ( last_video_frame & & ! f - > has_image_data ) {
// Copy image from last decoded frame
f - > AddImage ( std : : make_shared < QImage > ( * last_video_frame - > GetImage ( ) ) ) ;
} else if ( ! f - > has_image_data ) {
f - > AddColor ( " #000000 " ) ;
}
2022-07-21 13:56:29 -05:00
}
2015-06-01 00:20:14 -07:00
}
2022-07-21 15:00:08 -05:00
double audio_pts_diff = audio_pts_seconds - frame_pts_seconds ;
2022-07-21 16:50:23 -05:00
if ( ( frame_pts_seconds < audio_pts_seconds & & audio_pts_diff > 1.0 )
| | ( recent_pts_diff > 1.5 )
2022-07-21 13:56:29 -05:00
| | audio_eof | | end_of_file ) {
2022-07-21 15:00:08 -05:00
// Audio stream is past this frame (so it must be done)
// OR audio stream is too far behind, missing, or end-of-file
// Adding a bit of margin here, to allow for partial audio packets
is_audio_ready = true ;
ZmqLogger : : Instance ( ) - > AppendDebugMethod ( " FFmpegReader::CheckWorkingFrames (audio ready) " , " frame_number " , f - > number , " frame_pts_seconds " , frame_pts_seconds , " audio_pts_seconds " , audio_pts_seconds , " audio_pts_diff " , audio_pts_diff , " recent_pts_diff " , recent_pts_diff ) ;
2022-07-21 13:56:29 -05:00
}
2014-09-13 16:35:11 -05:00
bool is_seek_trash = IsPartialFrame ( f - > number ) ;
2014-08-27 09:44:27 -05:00
2015-02-05 00:00:52 -06:00
// Adjust for available streams
if ( ! info . has_video ) is_video_ready = true ;
if ( ! info . has_audio ) is_audio_ready = true ;
2014-08-27 09:44:27 -05:00
// Debug output
2022-07-21 13:56:29 -05:00
ZmqLogger : : Instance ( ) - > AppendDebugMethod ( " FFmpegReader::CheckWorkingFrames " , " frame_number " , f - > number , " is_video_ready " , is_video_ready , " is_audio_ready " , is_audio_ready , " video_eof " , video_eof , " audio_eof " , audio_eof , " end_of_file " , end_of_file ) ;
2012-07-06 16:52:13 -05:00
2011-10-24 08:22:21 -05:00
// Check if working frame is final
2022-07-21 13:56:29 -05:00
if ( ( ! end_of_file & & is_video_ready & & is_audio_ready ) | | end_of_file | | is_seek_trash ) {
2014-08-27 09:44:27 -05:00
// Debug output
2022-07-21 13:56:29 -05:00
ZmqLogger : : Instance ( ) - > AppendDebugMethod ( " FFmpegReader::CheckWorkingFrames (mark frame as final) " , " requested_frame " , requested_frame , " f->number " , f - > number , " is_seek_trash " , is_seek_trash , " Working Cache Count " , working_cache . Count ( ) , " Final Cache Count " , final_cache . Count ( ) , " end_of_file " , end_of_file ) ;
2014-08-27 09:44:27 -05:00
2019-04-18 01:07:57 -05:00
if ( ! is_seek_trash ) {
2012-10-22 17:05:34 -05:00
// Move frame to final cache
2016-08-31 02:02:54 -05:00
final_cache . Add ( f ) ;
2012-07-06 15:17:57 -05:00
2012-10-22 17:05:34 -05:00
// Remove frame from working cache
working_cache . Remove ( f - > number ) ;
// Update last frame processed
last_frame = f - > number ;
} else {
// Seek trash, so delete the frame from the working cache, and never add it to the final cache.
working_cache . Remove ( f - > number ) ;
}
2019-04-18 01:07:57 -05:00
}
2011-10-24 08:22:21 -05:00
}
}
2011-12-15 16:11:48 -06:00
// Check for the correct frames per second (FPS) value by scanning the 1st few seconds of video packets.
2019-04-18 01:07:57 -05:00
void FFmpegReader : : CheckFPS ( ) {
2022-07-21 15:00:08 -05:00
if ( check_fps ) {
// Do not check FPS more than 1 time
return ;
} else {
check_fps = true ;
}
2018-09-11 00:40:31 -05:00
2022-07-21 13:56:29 -05:00
int frames_per_second [ 3 ] = { 0 , 0 , 0 } ;
2022-07-21 15:00:08 -05:00
int max_fps_index = sizeof ( frames_per_second ) / sizeof ( frames_per_second [ 0 ] ) ;
int fps_index = 0 ;
2018-08-11 18:22:18 -05:00
2022-07-21 15:00:08 -05:00
int all_frames_detected = 0 ;
2022-07-21 13:56:29 -05:00
int starting_frames_detected = 0 ;
2012-07-03 02:42:47 -05:00
2011-12-15 16:11:48 -06:00
// Loop through the stream
2019-04-18 01:07:57 -05:00
while ( true ) {
2011-12-15 16:11:48 -06:00
// Get the next packet (if any)
if ( GetNextPacket ( ) < 0 )
// Break loop when no more packets found
break ;
// Video packet
2019-04-18 01:07:57 -05:00
if ( packet - > stream_index = = videoStream ) {
2022-07-21 15:00:08 -05:00
// Get the video packet start time (in seconds)
double video_seconds = ( double ( GetPacketPTS ( ) ) * info . video_timebase . ToDouble ( ) ) + pts_offset_seconds ;
fps_index = int ( video_seconds ) ; // truncate float timestamp to int (second 1, second 2, second 3)
2011-12-15 16:11:48 -06:00
2022-07-21 15:00:08 -05:00
// Is this video packet from the first few seconds?
if ( fps_index > = 0 & & fps_index < max_fps_index ) {
// Yes, keep track of how many frames per second (over the first few seconds)
starting_frames_detected + + ;
frames_per_second [ fps_index ] + + ;
}
2011-12-15 16:11:48 -06:00
2022-07-21 15:00:08 -05:00
// Track all video packets detected
all_frames_detected + + ;
2011-12-15 16:11:48 -06:00
}
2012-07-03 02:42:47 -05:00
}
2011-12-15 16:11:48 -06:00
2022-07-21 13:56:29 -05:00
// Calculate FPS (based on the first few seconds of video packets)
float avg_fps = 30.0 ;
2022-07-21 14:21:44 -05:00
if ( starting_frames_detected > 0 & & fps_index > 0 ) {
2022-07-21 15:00:08 -05:00
avg_fps = float ( starting_frames_detected ) / std : : min ( fps_index , max_fps_index ) ;
}
2012-02-26 16:45:50 -06:00
2022-07-21 13:56:29 -05:00
// Verify average FPS is a reasonable value
if ( avg_fps < 8.0 ) {
// Invalid FPS assumed, so switching to a sane default FPS instead
avg_fps = 30.0 ;
2012-02-26 16:45:50 -06:00
}
2022-07-21 13:56:29 -05:00
2022-07-21 15:00:08 -05:00
// Update FPS (truncate average FPS to Integer)
info . fps = Fraction ( int ( avg_fps ) , 1 ) ;
2022-07-21 13:56:29 -05:00
2022-07-21 15:00:08 -05:00
// Update Duration and Length
if ( all_frames_detected > 0 ) {
// Use all video frames detected to calculate # of frames
info . video_length = all_frames_detected ;
info . duration = all_frames_detected / avg_fps ;
} else {
// Use previous duration to calculate # of frames
info . video_length = info . duration * avg_fps ;
}
2022-07-21 13:56:29 -05:00
2022-07-21 15:00:08 -05:00
// Update video bit rate
info . video_bit_rate = info . file_size / info . duration ;
2011-12-15 16:11:48 -06:00
}
2019-03-14 09:26:56 -07:00
// Remove AVFrame from cache (and deallocate its memory)
2019-04-18 01:07:57 -05:00
void FFmpegReader : : RemoveAVFrame ( AVFrame * remove_frame ) {
2019-01-31 09:42:26 -08:00
// Remove pFrame (if exists)
2019-04-18 01:07:57 -05:00
if ( remove_frame ) {
2019-01-31 09:42:26 -08:00
// Free memory
2021-02-17 19:44:44 -06:00
av_freep ( & remove_frame - > data [ 0 ] ) ;
2019-01-19 02:18:52 -06:00
# ifndef WIN32
2021-02-17 19:44:44 -06:00
AV_FREE_FRAME ( & remove_frame ) ;
2019-01-19 02:18:52 -06:00
# endif
2018-03-21 02:10:46 -05:00
}
2012-07-03 16:58:07 -05:00
}
2019-03-14 09:26:56 -07:00
// Remove AVPacket from cache (and deallocate its memory)
2019-04-18 01:07:57 -05:00
void FFmpegReader : : RemoveAVPacket ( AVPacket * remove_packet ) {
2016-11-14 22:37:44 -06:00
// deallocate memory for packet
2019-01-31 09:42:26 -08:00
AV_FREE_PACKET ( remove_packet ) ;
2012-07-03 16:58:07 -05:00
2016-11-14 22:37:44 -06:00
// Delete the object
delete remove_packet ;
2012-07-03 16:58:07 -05:00
}
2013-12-07 21:09:55 -06:00
// Generate JSON string of this object
2019-12-27 08:51:51 -05:00
std : : string FFmpegReader : : Json ( ) const {
2013-12-07 21:09:55 -06:00
// Return formatted string
return JsonValue ( ) . toStyledString ( ) ;
}
2019-12-27 08:51:51 -05:00
// Generate Json::Value for this object
Json::Value FFmpegReader::JsonValue() const {
	// Start from the parent reader's properties, then add this class's own keys
	Json::Value root = ReaderBase::JsonValue();
	root["type"] = "FFmpegReader";
	root["path"] = path;

	// return JsonValue
	return root;
}
2013-12-07 21:09:55 -06:00
// Load JSON string into this object
2019-12-27 08:51:51 -05:00
void FFmpegReader : : SetJson ( const std : : string value ) {
2013-12-07 21:09:55 -06:00
// Parse JSON string into JSON objects
2019-04-18 01:07:57 -05:00
try {
2019-12-27 08:51:51 -05:00
const Json : : Value root = openshot : : stringToJson ( value ) ;
2013-12-07 21:09:55 -06:00
// Set all values that match
SetJsonValue ( root ) ;
}
2019-07-03 12:58:02 -04:00
catch ( const std : : exception & e ) {
2013-12-07 21:09:55 -06:00
// Error parsing JSON (or missing keys)
2019-08-27 15:47:39 -04:00
throw InvalidJSON ( " JSON is invalid (missing keys or invalid data types) " ) ;
2013-12-07 21:09:55 -06:00
}
}
2019-12-27 08:51:51 -05:00
// Load Json::Value into this object
void FFmpegReader : : SetJsonValue ( const Json : : Value root ) {
2013-12-07 16:52:09 -06:00
// Set parent data
2013-12-07 21:09:55 -06:00
ReaderBase : : SetJsonValue ( root ) ;
2013-12-07 16:52:09 -06:00
// Set data from Json (if key is found)
2014-01-08 01:43:58 -06:00
if ( ! root [ " path " ] . isNull ( ) )
2013-12-07 16:52:09 -06:00
path = root [ " path " ] . asString ( ) ;
2013-12-07 21:09:55 -06:00
// Re-Open path, and re-init everything (if needed)
2019-04-18 01:07:57 -05:00
if ( is_open ) {
2013-12-07 21:09:55 -06:00
Close ( ) ;
Open ( ) ;
}
2013-12-07 16:52:09 -06:00
}