diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp
index 98a87ac4..fb08687f 100644
--- a/src/FFmpegReader.cpp
+++ b/src/FFmpegReader.cpp
@@ -75,7 +75,7 @@ FFmpegReader::FFmpegReader(const std::string &path, bool inspect_reader)
current_video_frame(0), packet(NULL), max_concurrent_frames(OPEN_MP_NUM_PROCESSORS), audio_pts(0),
video_pts(0), pFormatCtx(NULL), videoStream(-1), audioStream(-1), pCodecCtx(NULL), aCodecCtx(NULL),
pStream(NULL), aStream(NULL), pFrame(NULL), previous_packet_location{-1,0},
- resend_packet(false) {
+ resend_packet(false) {
// Initialize FFMpeg, and register all formats and codecs
AV_REGISTER_ALL
@@ -214,7 +214,7 @@ void FFmpegReader::Open() {
pFormatCtx = NULL;
{
hw_de_on = (openshot::Settings::Instance()->HARDWARE_DECODER == 0 ? 0 : 1);
- ZmqLogger::Instance()->AppendDebugMethod("Decode hardware acceleration settings", "hw_de_on", hw_de_on, "HARDWARE_DECODER", openshot::Settings::Instance()->HARDWARE_DECODER);
+ ZmqLogger::Instance()->AppendDebugMethod("Decode hardware acceleration settings", "hw_de_on", hw_de_on, "HARDWARE_DECODER", openshot::Settings::Instance()->HARDWARE_DECODER);
}
// Open video file
@@ -648,7 +648,7 @@ void FFmpegReader::Close() {
// Reset some variables
last_frame = 0;
- resend_packet = false;
+ resend_packet = false;
largest_frame_processed = 0;
seek_audio_frame_found = 0;
seek_video_frame_found = 0;
@@ -956,12 +956,12 @@ std::shared_ptr<Frame> FFmpegReader::ReadStream(int64_t requested_frame) {
}
if (!resend_packet || !packet) {
- // Get the next packet
- packet_error = GetNextPacket();
- if (packet_error < 0 && !packet) {
- // No more packets to be found
- packet_status.packets_eof = true;
- }
+ // Get the next packet
+ packet_error = GetNextPacket();
+ if (packet_error < 0 && !packet) {
+ // No more packets to be found
+ packet_status.packets_eof = true;
+ }
}
// Debug output
@@ -1109,19 +1109,19 @@ bool FFmpegReader::GetAVFrame() {
AVFrame *next_frame = AV_ALLOCATE_FRAME();
#if IS_FFMPEG_3_2
- int send_packet_err = 0;
- int64_t send_packet_pts = 0;
- if ((packet && packet->stream_index == videoStream && !resend_packet) || !packet) {
- send_packet_err = avcodec_send_packet(pCodecCtx, packet);
+ int send_packet_err = 0;
+ int64_t send_packet_pts = 0;
+ if ((packet && packet->stream_index == videoStream && !resend_packet) || !packet) {
+ send_packet_err = avcodec_send_packet(pCodecCtx, packet);
- if (packet && send_packet_err >= 0) {
- send_packet_pts = GetPacketPTS();
- resend_packet = false;
- ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (send packet succeeded)",
- "send_packet_err", send_packet_err, "send_packet_pts",
- send_packet_pts);
- }
- }
+ if (packet && send_packet_err >= 0) {
+ send_packet_pts = GetPacketPTS();
+ resend_packet = false;
+ ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (send packet succeeded)",
+ "send_packet_err", send_packet_err, "send_packet_pts",
+ send_packet_pts);
+ }
+ }
#if USE_HW_ACCEL
// Get the format from the variables set in get_hw_dec_format
@@ -1129,116 +1129,116 @@ bool FFmpegReader::GetAVFrame() {
hw_de_av_device_type = hw_de_av_device_type_global;
#endif // USE_HW_ACCEL
if (send_packet_err < 0 && send_packet_err != AVERROR_EOF) {
- ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (send packet: Not sent [" + av_err2string(send_packet_err) + "])", "send_packet_err", send_packet_err, "send_packet_pts", send_packet_pts);
- if (send_packet_err == AVERROR(EAGAIN)) {
- resend_packet = true;
- ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (send packet: AVERROR(EAGAIN): user must read output with avcodec_receive_frame()", "send_packet_pts", send_packet_pts);
- }
- if (send_packet_err == AVERROR(EINVAL)) {
- ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (send packet: AVERROR(EINVAL): codec not opened, it is an encoder, or requires flush", "send_packet_pts", send_packet_pts);
- }
- if (send_packet_err == AVERROR(ENOMEM)) {
- ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (send packet: AVERROR(ENOMEM): failed to add packet to internal queue, or legitimate decoding errors", "send_packet_pts", send_packet_pts);
- }
+ ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (send packet: Not sent [" + av_err2string(send_packet_err) + "])", "send_packet_err", send_packet_err, "send_packet_pts", send_packet_pts);
+ if (send_packet_err == AVERROR(EAGAIN)) {
+ resend_packet = true;
+ ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (send packet: AVERROR(EAGAIN): user must read output with avcodec_receive_frame()", "send_packet_pts", send_packet_pts);
+ }
+ if (send_packet_err == AVERROR(EINVAL)) {
+ ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (send packet: AVERROR(EINVAL): codec not opened, it is an encoder, or requires flush", "send_packet_pts", send_packet_pts);
+ }
+ if (send_packet_err == AVERROR(ENOMEM)) {
+ ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (send packet: AVERROR(ENOMEM): failed to add packet to internal queue, or legitimate decoding errors", "send_packet_pts", send_packet_pts);
+ }
}
- // Always try and receive a packet, if not EOF.
- // Even if the above avcodec_send_packet failed to send,
- // we might still need to receive a packet.
- int receive_frame_err = 0;
- AVFrame *next_frame2;
+ // Always try and receive a packet, if not EOF.
+ // Even if the above avcodec_send_packet failed to send,
+ // we might still need to receive a packet.
+ int receive_frame_err = 0;
+ AVFrame *next_frame2;
#if USE_HW_ACCEL
- if (hw_de_on && hw_de_supported) {
- next_frame2 = AV_ALLOCATE_FRAME();
- }
- else
+ if (hw_de_on && hw_de_supported) {
+ next_frame2 = AV_ALLOCATE_FRAME();
+ }
+ else
#endif // USE_HW_ACCEL
- {
- next_frame2 = next_frame;
- }
- pFrame = AV_ALLOCATE_FRAME();
- while (receive_frame_err >= 0) {
- receive_frame_err = avcodec_receive_frame(pCodecCtx, next_frame2);
+ {
+ next_frame2 = next_frame;
+ }
+ pFrame = AV_ALLOCATE_FRAME();
+ while (receive_frame_err >= 0) {
+ receive_frame_err = avcodec_receive_frame(pCodecCtx, next_frame2);
- if (receive_frame_err != 0) {
- ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (receive frame: frame not ready yet from decoder [\" + av_err2string(receive_frame_err) + \"])", "receive_frame_err", receive_frame_err, "send_packet_pts", send_packet_pts);
+ if (receive_frame_err != 0) {
+ ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (receive frame: frame not ready yet from decoder [" + av_err2string(receive_frame_err) + "])", "receive_frame_err", receive_frame_err, "send_packet_pts", send_packet_pts);
- if (receive_frame_err == AVERROR_EOF) {
- ZmqLogger::Instance()->AppendDebugMethod(
- "FFmpegReader::GetAVFrame (receive frame: AVERROR_EOF: EOF detected from decoder, flushing buffers)", "send_packet_pts", send_packet_pts);
- avcodec_flush_buffers(pCodecCtx);
- packet_status.video_eof = true;
- }
- if (receive_frame_err == AVERROR(EINVAL)) {
- ZmqLogger::Instance()->AppendDebugMethod(
- "FFmpegReader::GetAVFrame (receive frame: AVERROR(EINVAL): invalid frame received, flushing buffers)", "send_packet_pts", send_packet_pts);
- avcodec_flush_buffers(pCodecCtx);
- }
- if (receive_frame_err == AVERROR(EAGAIN)) {
- ZmqLogger::Instance()->AppendDebugMethod(
- "FFmpegReader::GetAVFrame (receive frame: AVERROR(EAGAIN): output is not available in this state - user must try to send new input)", "send_packet_pts", send_packet_pts);
- }
- if (receive_frame_err == AVERROR_INPUT_CHANGED) {
- ZmqLogger::Instance()->AppendDebugMethod(
- "FFmpegReader::GetAVFrame (receive frame: AVERROR_INPUT_CHANGED: current decoded frame has changed parameters with respect to first decoded frame)", "send_packet_pts", send_packet_pts);
- }
+ if (receive_frame_err == AVERROR_EOF) {
+ ZmqLogger::Instance()->AppendDebugMethod(
+ "FFmpegReader::GetAVFrame (receive frame: AVERROR_EOF: EOF detected from decoder, flushing buffers)", "send_packet_pts", send_packet_pts);
+ avcodec_flush_buffers(pCodecCtx);
+ packet_status.video_eof = true;
+ }
+ if (receive_frame_err == AVERROR(EINVAL)) {
+ ZmqLogger::Instance()->AppendDebugMethod(
+ "FFmpegReader::GetAVFrame (receive frame: AVERROR(EINVAL): invalid frame received, flushing buffers)", "send_packet_pts", send_packet_pts);
+ avcodec_flush_buffers(pCodecCtx);
+ }
+ if (receive_frame_err == AVERROR(EAGAIN)) {
+ ZmqLogger::Instance()->AppendDebugMethod(
+ "FFmpegReader::GetAVFrame (receive frame: AVERROR(EAGAIN): output is not available in this state - user must try to send new input)", "send_packet_pts", send_packet_pts);
+ }
+ if (receive_frame_err == AVERROR_INPUT_CHANGED) {
+ ZmqLogger::Instance()->AppendDebugMethod(
+ "FFmpegReader::GetAVFrame (receive frame: AVERROR_INPUT_CHANGED: current decoded frame has changed parameters with respect to first decoded frame)", "send_packet_pts", send_packet_pts);
+ }
- // Break out of decoding loop
- // Nothing ready for decoding yet
- break;
- }
+ // Break out of decoding loop
+ // Nothing ready for decoding yet
+ break;
+ }
#if USE_HW_ACCEL
- if (hw_de_on && hw_de_supported) {
- int err;
- if (next_frame2->format == hw_de_av_pix_fmt) {
- next_frame->format = AV_PIX_FMT_YUV420P;
- if ((err = av_hwframe_transfer_data(next_frame,next_frame2,0)) < 0) {
- ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Failed to transfer data to output frame)", "hw_de_on", hw_de_on);
- }
- if ((err = av_frame_copy_props(next_frame,next_frame2)) < 0) {
- ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Failed to copy props to output frame)", "hw_de_on", hw_de_on);
- }
- }
- }
- else
+ if (hw_de_on && hw_de_supported) {
+ int err;
+ if (next_frame2->format == hw_de_av_pix_fmt) {
+ next_frame->format = AV_PIX_FMT_YUV420P;
+ if ((err = av_hwframe_transfer_data(next_frame,next_frame2,0)) < 0) {
+ ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Failed to transfer data to output frame)", "hw_de_on", hw_de_on);
+ }
+ if ((err = av_frame_copy_props(next_frame,next_frame2)) < 0) {
+ ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAVFrame (Failed to copy props to output frame)", "hw_de_on", hw_de_on);
+ }
+ }
+ }
+ else
#endif // USE_HW_ACCEL
- { // No hardware acceleration used -> no copy from GPU memory needed
- next_frame = next_frame2;
- }
+ { // No hardware acceleration used -> no copy from GPU memory needed
+ next_frame = next_frame2;
+ }
- // TODO also handle possible further frames
- // Use only the first frame like avcodec_decode_video2
- frameFinished = 1;
- packet_status.video_decoded++;
+ // TODO also handle possible further frames
+ // Use only the first frame like avcodec_decode_video2
+ frameFinished = 1;
+ packet_status.video_decoded++;
- av_image_alloc(pFrame->data, pFrame->linesize, info.width, info.height, (AVPixelFormat)(pStream->codecpar->format), 1);
- av_image_copy(pFrame->data, pFrame->linesize, (const uint8_t**)next_frame->data, next_frame->linesize,
- (AVPixelFormat)(pStream->codecpar->format), info.width, info.height);
+ av_image_alloc(pFrame->data, pFrame->linesize, info.width, info.height, (AVPixelFormat)(pStream->codecpar->format), 1);
+ av_image_copy(pFrame->data, pFrame->linesize, (const uint8_t**)next_frame->data, next_frame->linesize,
+ (AVPixelFormat)(pStream->codecpar->format), info.width, info.height);
- // Get display PTS from video frame, often different than packet->pts.
- // Sending packets to the decoder (i.e. packet->pts) is async,
- // and retrieving packets from the decoder (frame->pts) is async. In most decoders
- // sending and retrieving are separated by multiple calls to this method.
- if (next_frame->pts != AV_NOPTS_VALUE) {
- // This is the current decoded frame (and should be the pts used) for
- // processing this data
- video_pts = next_frame->pts;
- } else if (next_frame->pkt_dts != AV_NOPTS_VALUE) {
- // Some videos only set this timestamp (fallback)
- video_pts = next_frame->pkt_dts;
- }
+ // Get display PTS from video frame, often different than packet->pts.
+ // Sending packets to the decoder (i.e. packet->pts) is async,
+ // and retrieving packets from the decoder (frame->pts) is async. In most decoders
+ // sending and retrieving are separated by multiple calls to this method.
+ if (next_frame->pts != AV_NOPTS_VALUE) {
+ // This is the current decoded frame (and should be the pts used) for
+ // processing this data
+ video_pts = next_frame->pts;
+ } else if (next_frame->pkt_dts != AV_NOPTS_VALUE) {
+ // Some videos only set this timestamp (fallback)
+ video_pts = next_frame->pkt_dts;
+ }
- ZmqLogger::Instance()->AppendDebugMethod(
- "FFmpegReader::GetAVFrame (Successful frame received)", "video_pts", video_pts, "send_packet_pts", send_packet_pts);
+ ZmqLogger::Instance()->AppendDebugMethod(
+ "FFmpegReader::GetAVFrame (Successful frame received)", "video_pts", video_pts, "send_packet_pts", send_packet_pts);
- // break out of loop after each successful image returned
- break;
- }
+ // break out of loop after each successful image returned
+ break;
+ }
#if USE_HW_ACCEL
- if (hw_de_on && hw_de_supported) {
- AV_FREE_FRAME(&next_frame2);
- }
+ if (hw_de_on && hw_de_supported) {
+ AV_FREE_FRAME(&next_frame2);
+ }
#endif // USE_HW_ACCEL
#else
avcodec_decode_video2(pCodecCtx, next_frame, &frameFinished, packet);
@@ -1790,7 +1790,7 @@ void FFmpegReader::Seek(int64_t requested_frame) {
video_pts_seconds = NO_PTS_OFFSET;
audio_pts = 0.0;
audio_pts_seconds = NO_PTS_OFFSET;
- resend_packet = false;
+ resend_packet = false;
last_frame = 0;
current_video_frame = 0;
largest_frame_processed = 0;
diff --git a/tests/FFmpegReader.cpp b/tests/FFmpegReader.cpp
index 664271f2..f46decb7 100644
--- a/tests/FFmpegReader.cpp
+++ b/tests/FFmpegReader.cpp
@@ -287,60 +287,60 @@ TEST_CASE( "DisplayInfo", "[libopenshot][ffmpegreader]" )
TEST_CASE( "Decoding AV1 Video", "[libopenshot][ffmpegreader]" )
{
- // Create a reader
- std::stringstream path;
- path << TEST_MEDIA_PATH << "test_video_sync.mp4";
- FFmpegReader r(path.str());
- r.Open();
+ // Create a reader
+ std::stringstream path;
+ path << TEST_MEDIA_PATH << "test_video_sync.mp4";
+ FFmpegReader r(path.str());
+ r.Open();
- std::shared_ptr<Frame> f = r.GetFrame(1);
+ std::shared_ptr<Frame> f = r.GetFrame(1);
- // Get the image data
- const unsigned char* pixels = f->GetPixels(10);
- int pixel_index = 112 * 4;
+ // Get the image data
+ const unsigned char* pixels = f->GetPixels(10);
+ int pixel_index = 112 * 4;
- // Check image properties on scanline 10, pixel 112
- CHECK((int)pixels[pixel_index] == Approx(0).margin(5));
- CHECK((int)pixels[pixel_index + 1] == Approx(0).margin(5));
- CHECK((int)pixels[pixel_index + 2] == Approx(0).margin(5));
- CHECK((int)pixels[pixel_index + 3] == Approx(255).margin(5));
+ // Check image properties on scanline 10, pixel 112
+ CHECK((int)pixels[pixel_index] == Approx(0).margin(5));
+ CHECK((int)pixels[pixel_index + 1] == Approx(0).margin(5));
+ CHECK((int)pixels[pixel_index + 2] == Approx(0).margin(5));
+ CHECK((int)pixels[pixel_index + 3] == Approx(255).margin(5));
- f = r.GetFrame(90);
+ f = r.GetFrame(90);
- // Get the image data
- pixels = f->GetPixels(820);
- pixel_index = 930 * 4;
+ // Get the image data
+ pixels = f->GetPixels(820);
+ pixel_index = 930 * 4;
- // Check image properties on scanline 820, pixel 930
- CHECK((int)pixels[pixel_index] == Approx(255).margin(5));
- CHECK((int)pixels[pixel_index + 1] == Approx(255).margin(5));
- CHECK((int)pixels[pixel_index + 2] == Approx(255).margin(5));
- CHECK((int)pixels[pixel_index + 3] == Approx(255).margin(5));
+ // Check image properties on scanline 820, pixel 930
+ CHECK((int)pixels[pixel_index] == Approx(255).margin(5));
+ CHECK((int)pixels[pixel_index + 1] == Approx(255).margin(5));
+ CHECK((int)pixels[pixel_index + 2] == Approx(255).margin(5));
+ CHECK((int)pixels[pixel_index + 3] == Approx(255).margin(5));
- f = r.GetFrame(160);
+ f = r.GetFrame(160);
- // Get the image data
- pixels = f->GetPixels(420);
- pixel_index = 930 * 4;
+ // Get the image data
+ pixels = f->GetPixels(420);
+ pixel_index = 930 * 4;
- // Check image properties on scanline 820, pixel 930
- CHECK((int)pixels[pixel_index] == Approx(255).margin(5));
- CHECK((int)pixels[pixel_index + 1] == Approx(255).margin(5));
- CHECK((int)pixels[pixel_index + 2] == Approx(255).margin(5));
- CHECK((int)pixels[pixel_index + 3] == Approx(255).margin(5));
+ // Check image properties on scanline 420, pixel 930
+ CHECK((int)pixels[pixel_index] == Approx(255).margin(5));
+ CHECK((int)pixels[pixel_index + 1] == Approx(255).margin(5));
+ CHECK((int)pixels[pixel_index + 2] == Approx(255).margin(5));
+ CHECK((int)pixels[pixel_index + 3] == Approx(255).margin(5));
- f = r.GetFrame(240);
+ f = r.GetFrame(240);
- // Get the image data
- pixels = f->GetPixels(624);
- pixel_index = 930 * 4;
+ // Get the image data
+ pixels = f->GetPixels(624);
+ pixel_index = 930 * 4;
- // Check image properties on scanline 820, pixel 930
- CHECK((int)pixels[pixel_index] == Approx(255).margin(5));
- CHECK((int)pixels[pixel_index + 1] == Approx(255).margin(5));
- CHECK((int)pixels[pixel_index + 2] == Approx(255).margin(5));
- CHECK((int)pixels[pixel_index + 3] == Approx(255).margin(5));
+ // Check image properties on scanline 624, pixel 930
+ CHECK((int)pixels[pixel_index] == Approx(255).margin(5));
+ CHECK((int)pixels[pixel_index + 1] == Approx(255).margin(5));
+ CHECK((int)pixels[pixel_index + 2] == Approx(255).margin(5));
+ CHECK((int)pixels[pixel_index + 3] == Approx(255).margin(5));
- // Close reader
- r.Close();
+ // Close reader
+ r.Close();
}
\ No newline at end of file