diff --git a/include/FFmpegWriter.h b/include/FFmpegWriter.h index 1e27c439..3c51a8ab 100644 --- a/include/FFmpegWriter.h +++ b/include/FFmpegWriter.h @@ -50,6 +50,7 @@ #include "Cache.h" #include "Exceptions.h" #include "OpenMPUtilities.h" +#include "ZmqLogger.h" using namespace std; diff --git a/include/ReaderBase.h b/include/ReaderBase.h index 35c9c601..4ac65854 100644 --- a/include/ReaderBase.h +++ b/include/ReaderBase.h @@ -99,23 +99,11 @@ namespace openshot CriticalSection getFrameCriticalSection; CriticalSection processingCriticalSection; - /// Append debug information as JSON - void AppendDebugMethod(string method_name, string arg1_name, float arg1_value, - string arg2_name, float arg2_value, - string arg3_name, float arg3_value, - string arg4_name, float arg4_value, - string arg5_name, float arg5_value, - string arg6_name, float arg6_value); - public: /// Constructor for the base reader, where many things are initialized. ReaderBase(); - /// Enable or disable debug output. Output will display on the standard output, and you can - /// optionally invoke the OutputDebugJSON() method, which will format the debug output as JSON. - bool debug; - /// Information about the current media file ReaderInfo info; diff --git a/include/WriterBase.h b/include/WriterBase.h index 0af221f2..8dc523b1 100644 --- a/include/WriterBase.h +++ b/include/WriterBase.h @@ -84,24 +84,10 @@ namespace openshot */ class WriterBase { - protected: - - /// Append debug information as JSON - void AppendDebugMethod(string method_name, string arg1_name, float arg1_value, - string arg2_name, float arg2_value, - string arg3_name, float arg3_value, - string arg4_name, float arg4_value, - string arg5_name, float arg5_value, - string arg6_name, float arg6_value); - public: /// Constructor for WriterBase class, many things are initialized here WriterBase(); - /// Enable or disable debug output. 
Output will display on the standard output, and you can - /// optionally invoke the OutputDebugJSON() method, which will format the debug output as JSON. - bool debug; - /// Information about the current media file WriterInfo info; diff --git a/include/ZmqLogger.h b/include/ZmqLogger.h index 3229fa42..93e167a0 100644 --- a/include/ZmqLogger.h +++ b/include/ZmqLogger.h @@ -31,6 +31,7 @@ #include "JuceLibraryCode/JuceHeader.h" #include +#include #include #include #include @@ -60,6 +61,7 @@ namespace openshot { // Logfile related vars string file_path; ofstream log_file; + bool enabled; /// ZMQ Context zmq::context_t *context; @@ -83,12 +85,23 @@ namespace openshot { /// Create or get an instance of this logger singleton (invoke the class with this method) static ZmqLogger * Instance(); + /// Append debug information + void AppendDebugMethod(string method_name, string arg1_name, float arg1_value, + string arg2_name, float arg2_value, + string arg3_name, float arg3_value, + string arg4_name, float arg4_value, + string arg5_name, float arg5_value, + string arg6_name, float arg6_value); + /// Close logger (sockets and/or files) void Close(); /// Set or change connection info for logger (i.e. 
tcp://*:5556) void Connection(string new_connection); + /// Enable/Disable logging + void Enable(bool is_enabled) { enabled = is_enabled;}; + /// Set or change the file path (optional) void Path(string new_path); diff --git a/src/AudioReaderSource.cpp b/src/AudioReaderSource.cpp index 48adeab1..708f476e 100644 --- a/src/AudioReaderSource.cpp +++ b/src/AudioReaderSource.cpp @@ -62,6 +62,9 @@ void AudioReaderSource::GetMoreSamplesFromReader() amount_remaining = 0; } + // Debug + ZmqLogger::Instance()->AppendDebugMethod("AudioReaderSource::GetMoreSamplesFromReader", "amount_needed", amount_needed, "amount_remaining", amount_remaining, "", -1, "", -1, "", -1, "", -1); + // Init estimated buffer equal to the current frame position (before getting more samples) estimated_frame = frame_number; @@ -142,6 +145,9 @@ juce::AudioSampleBuffer* AudioReaderSource::reverse_buffer(juce::AudioSampleBuff int number_of_samples = buffer->getNumSamples(); int channels = buffer->getNumChannels(); + // Debug + ZmqLogger::Instance()->AppendDebugMethod("AudioReaderSource::reverse_buffer", "number_of_samples", number_of_samples, "channels", channels, "", -1, "", -1, "", -1, "", -1); + // Reverse array (create new buffer to hold the reversed version) AudioSampleBuffer *reversed = new juce::AudioSampleBuffer(channels, number_of_samples); reversed->clear(); @@ -168,7 +174,7 @@ juce::AudioSampleBuffer* AudioReaderSource::reverse_buffer(juce::AudioSampleBuff } // Get the next block of audio samples -void AudioReaderSource::getNextAudioBlock (const AudioSourceChannelInfo& info) +void AudioReaderSource::getNextAudioBlock(const AudioSourceChannelInfo& info) { int buffer_samples = buffer->getNumSamples(); int buffer_channels = buffer->getNumChannels(); @@ -215,6 +221,9 @@ void AudioReaderSource::getNextAudioBlock (const AudioSourceChannelInfo& info) // Determine if any samples need to be copied if (number_to_copy > 0) { + // Debug + 
ZmqLogger::Instance()->AppendDebugMethod("AudioReaderSource::getNextAudioBlock", "number_to_copy", number_to_copy, "buffer_samples", buffer_samples, "buffer_channels", buffer_channels, "info.numSamples", info.numSamples, "speed", speed, "position", position); + // Loop through each channel and copy some samples for (int channel = 0; channel < buffer_channels; channel++) info.buffer->copyFrom(channel, info.startSample, *buffer, channel, position, number_to_copy); diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 9a3dcf75..dce20e41 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -425,13 +425,13 @@ tr1::shared_ptr FFmpegReader::GetFrame(long int requested_frame) throw(Ou throw InvalidFile("Could not detect the duration of the video or audio stream.", path); // Debug output - AppendDebugMethod("FFmpegReader::GetFrame", "requested_frame", requested_frame, "last_frame", last_frame, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetFrame", "requested_frame", requested_frame, "last_frame", last_frame, "", -1, "", -1, "", -1, "", -1); // Check the cache for this frame tr1::shared_ptr frame = final_cache.GetFrame(requested_frame); if (frame) { // Debug output - AppendDebugMethod("FFmpegReader::GetFrame", "returned cached frame", requested_frame, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetFrame", "returned cached frame", requested_frame, "", -1, "", -1, "", -1, "", -1, "", -1); // Return the cached frame return frame; @@ -447,7 +447,7 @@ tr1::shared_ptr FFmpegReader::GetFrame(long int requested_frame) throw(Ou frame = final_cache.GetFrame(requested_frame); if (frame) { // Debug output - AppendDebugMethod("FFmpegReader::GetFrame", "returned cached frame on 2nd look", requested_frame, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetFrame", "returned cached frame on 2nd look", requested_frame, 
"", -1, "", -1, "", -1, "", -1, "", -1); // Return the cached frame return frame; @@ -509,7 +509,7 @@ tr1::shared_ptr FFmpegReader::ReadStream(long int requested_frame) omp_set_nested(true); // Debug output - AppendDebugMethod("FFmpegReader::ReadStream", "requested_frame", requested_frame, "OPEN_MP_NUM_PROCESSORS", OPEN_MP_NUM_PROCESSORS, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream", "requested_frame", requested_frame, "OPEN_MP_NUM_PROCESSORS", OPEN_MP_NUM_PROCESSORS, "", -1, "", -1, "", -1, "", -1); #pragma omp parallel { @@ -534,7 +534,7 @@ tr1::shared_ptr FFmpegReader::ReadStream(long int requested_frame) } // Debug output - AppendDebugMethod("FFmpegReader::ReadStream (GetNextPacket)", "requested_frame", requested_frame, "processing_video_frames.size()", processing_video_frames.size(), "processing_audio_frames.size()", processing_audio_frames.size(), "minimum_packets", minimum_packets, "packets_processed", packets_processed, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (GetNextPacket)", "requested_frame", requested_frame, "processing_video_frames.size()", processing_video_frames.size(), "processing_audio_frames.size()", processing_audio_frames.size(), "minimum_packets", minimum_packets, "packets_processed", packets_processed, "", -1); // Video packet if (packet->stream_index == videoStream) @@ -628,7 +628,7 @@ tr1::shared_ptr FFmpegReader::ReadStream(long int requested_frame) } // end omp parallel // Debug output - AppendDebugMethod("FFmpegReader::ReadStream (Completed)", "packets_processed", packets_processed, "end_of_stream", end_of_stream, "largest_frame_processed", largest_frame_processed, "Working Cache Count", working_cache.Count(), "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (Completed)", "packets_processed", packets_processed, "end_of_stream", end_of_stream, "largest_frame_processed", largest_frame_processed, "Working Cache 
Count", working_cache.Count(), "", -1, "", -1); // End of stream? if (end_of_stream) @@ -760,7 +760,7 @@ bool FFmpegReader::CheckSeek(bool is_video) if (max_seeked_frame >= seeking_frame) { // SEEKED TOO FAR - AppendDebugMethod("FFmpegReader::CheckSeek (Too far, seek again)", "is_video_seek", is_video_seek, "max_seeked_frame", max_seeked_frame, "seeking_frame", seeking_frame, "seeking_pts", seeking_pts, "seek_video_frame_found", seek_video_frame_found, "seek_audio_frame_found", seek_audio_frame_found); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckSeek (Too far, seek again)", "is_video_seek", is_video_seek, "max_seeked_frame", max_seeked_frame, "seeking_frame", seeking_frame, "seeking_pts", seeking_pts, "seek_video_frame_found", seek_video_frame_found, "seek_audio_frame_found", seek_audio_frame_found); // Seek again... to the nearest Keyframe Seek(seeking_frame - (20 * seek_count * seek_count)); @@ -768,7 +768,7 @@ bool FFmpegReader::CheckSeek(bool is_video) else { // SEEK WORKED - AppendDebugMethod("FFmpegReader::CheckSeek (Successful)", "is_video_seek", is_video_seek, "current_pts", packet->pts, "seeking_pts", seeking_pts, "seeking_frame", seeking_frame, "seek_video_frame_found", seek_video_frame_found, "seek_audio_frame_found", seek_audio_frame_found); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckSeek (Successful)", "is_video_seek", is_video_seek, "current_pts", packet->pts, "seeking_pts", seeking_pts, "seeking_frame", seeking_frame, "seek_video_frame_found", seek_video_frame_found, "seek_audio_frame_found", seek_audio_frame_found); // Seek worked, and we are "before" the requested frame is_seeking = false; @@ -799,14 +799,14 @@ void FFmpegReader::ProcessVideoPacket(long int requested_frame) RemoveAVPacket(packet); // Debug output - AppendDebugMethod("FFmpegReader::ProcessVideoPacket (Skipped)", "requested_frame", requested_frame, "current_frame", current_frame, "", -1, "", -1, "", -1, "", -1); + 
ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessVideoPacket (Skipped)", "requested_frame", requested_frame, "current_frame", current_frame, "", -1, "", -1, "", -1, "", -1); // Skip to next frame without decoding or caching return; } // Debug output - AppendDebugMethod("FFmpegReader::ProcessVideoPacket (Before)", "requested_frame", requested_frame, "current_frame", current_frame, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessVideoPacket (Before)", "requested_frame", requested_frame, "current_frame", current_frame, "", -1, "", -1, "", -1, "", -1); // Init some things local (for OpenMP) PixelFormat pix_fmt = pCodecCtx->pix_fmt; @@ -879,7 +879,7 @@ void FFmpegReader::ProcessVideoPacket(long int requested_frame) } // Debug output - AppendDebugMethod("FFmpegReader::ProcessVideoPacket (After)", "requested_frame", requested_frame, "current_frame", current_frame, "f->number", f->number, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessVideoPacket (After)", "requested_frame", requested_frame, "current_frame", current_frame, "f->number", f->number, "", -1, "", -1, "", -1); } // end omp task @@ -899,7 +899,7 @@ void FFmpegReader::ProcessAudioPacket(long int requested_frame, long int target_ RemoveAVPacket(packet); // Debug output - AppendDebugMethod("FFmpegReader::ProcessAudioPacket (Skipped)", "requested_frame", requested_frame, "target_frame", target_frame, "starting_sample", starting_sample, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (Skipped)", "requested_frame", requested_frame, "target_frame", target_frame, "starting_sample", starting_sample, "", -1, "", -1, "", -1); // Skip to next frame without decoding or caching return; @@ -909,7 +909,7 @@ void FFmpegReader::ProcessAudioPacket(long int requested_frame, long int target_ AVPacket *my_packet = packets[packet]; // Debug output - 
AppendDebugMethod("FFmpegReader::ProcessAudioPacket (Before)", "requested_frame", requested_frame, "target_frame", target_frame, "starting_sample", starting_sample, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (Before)", "requested_frame", requested_frame, "target_frame", target_frame, "starting_sample", starting_sample, "", -1, "", -1, "", -1); // Init an AVFrame to hold the decoded audio samples int frame_finished = 0; @@ -946,8 +946,8 @@ void FFmpegReader::ProcessAudioPacket(long int requested_frame, long int target_ double sample_seconds = float(pts_total) / info.sample_rate; // Debug output - AppendDebugMethod("FFmpegReader::ProcessAudioPacket (Decode Info A)", "pts_counter", pts_counter, "PTS", adjusted_pts, "Offset", audio_pts_offset, "PTS Diff", adjusted_pts - prev_pts, "Samples", pts_remaining_samples, "Sample PTS ratio", float(adjusted_pts - prev_pts) / pts_remaining_samples); - AppendDebugMethod("FFmpegReader::ProcessAudioPacket (Decode Info B)", "Sample Diff", pts_remaining_samples - prev_samples - prev_pts, "Total", pts_total, "PTS Seconds", audio_seconds, "Sample Seconds", sample_seconds, "Seconds Diff", audio_seconds - sample_seconds, "raw samples", packet_samples); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (Decode Info A)", "pts_counter", pts_counter, "PTS", adjusted_pts, "Offset", audio_pts_offset, "PTS Diff", adjusted_pts - prev_pts, "Samples", pts_remaining_samples, "Sample PTS ratio", float(adjusted_pts - prev_pts) / pts_remaining_samples); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (Decode Info B)", "Sample Diff", pts_remaining_samples - prev_samples - prev_pts, "Total", pts_total, "PTS Seconds", audio_seconds, "Sample Seconds", sample_seconds, "Seconds Diff", audio_seconds - sample_seconds, "raw samples", packet_samples); // DEBUG (FOR AUDIO ISSUES) prev_pts = adjusted_pts; @@ -1000,7 +1000,7 @@ void 
FFmpegReader::ProcessAudioPacket(long int requested_frame, long int target_ // Allocate audio buffer int16_t *audio_buf = new int16_t[AVCODEC_MAX_AUDIO_FRAME_SIZE + FF_INPUT_BUFFER_PADDING_SIZE]; - AppendDebugMethod("FFmpegReader::ProcessAudioPacket (ReSample)", "packet_samples", packet_samples, "info.channels", info.channels, "info.sample_rate", info.sample_rate, "aCodecCtx->sample_fmt", aCodecCtx->sample_fmt, "AV_SAMPLE_FMT_S16", AV_SAMPLE_FMT_S16, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (ReSample)", "packet_samples", packet_samples, "info.channels", info.channels, "info.sample_rate", info.sample_rate, "aCodecCtx->sample_fmt", aCodecCtx->sample_fmt, "AV_SAMPLE_FMT_S16", AV_SAMPLE_FMT_S16, "", -1); // Create output frame AVFrame *audio_converted = AV_ALLOCATE_FRAME(); @@ -1113,7 +1113,7 @@ void FFmpegReader::ProcessAudioPacket(long int requested_frame, long int target_ f->AddAudio(true, channel_filter, start, iterate_channel_buffer, samples, 0.98f); // Debug output - AppendDebugMethod("FFmpegReader::ProcessAudioPacket (f->AddAudio)", "frame", starting_frame_number, "start", start, "samples", samples, "channel", channel_filter, "partial_frame", partial_frame, "samples_per_frame", samples_per_frame); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (f->AddAudio)", "frame", starting_frame_number, "start", start, "samples", samples, "channel", channel_filter, "partial_frame", partial_frame, "samples_per_frame", samples_per_frame); // Add or update cache working_cache.Add(f->number, f); @@ -1168,7 +1168,7 @@ void FFmpegReader::ProcessAudioPacket(long int requested_frame, long int target_ RemoveAVPacket(my_packet); // Debug output - AppendDebugMethod("FFmpegReader::ProcessAudioPacket (After)", "requested_frame", requested_frame, "starting_frame", target_frame, "end_frame", starting_frame_number - 1, "", -1, "", -1, "", -1); + 
ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (After)", "requested_frame", requested_frame, "starting_frame", target_frame, "end_frame", starting_frame_number - 1, "", -1, "", -1, "", -1); } // end task @@ -1190,7 +1190,7 @@ void FFmpegReader::Seek(long int requested_frame) throw(TooManySeeks) requested_frame = info.video_length; // Debug output - AppendDebugMethod("FFmpegReader::Seek", "requested_frame", requested_frame, "seek_count", seek_count, "last_frame", last_frame, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::Seek", "requested_frame", requested_frame, "seek_count", seek_count, "last_frame", last_frame, "", -1, "", -1, "", -1); // Clear working cache (since we are seeking to another location in the file) working_cache.Clear(); @@ -1375,7 +1375,7 @@ long int FFmpegReader::ConvertVideoPTStoFrame(long int pts) if (current_video_frame < frame) // has missing frames - AppendDebugMethod("FFmpegReader::ConvertVideoPTStoFrame (detected missing frame)", "calculated frame", frame, "previous_video_frame", previous_video_frame, "current_video_frame", current_video_frame, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ConvertVideoPTStoFrame (detected missing frame)", "calculated frame", frame, "previous_video_frame", previous_video_frame, "current_video_frame", current_video_frame, "", -1, "", -1, "", -1); // Sometimes frames are missing due to varying timestamps, or they were dropped. Determine // if we are missing a video frame. 
@@ -1474,7 +1474,7 @@ AudioLocation FFmpegReader::GetAudioPTSLocation(long int pts) } // Debug output - AppendDebugMethod("FFmpegReader::GetAudioPTSLocation (Audio Gap Detected)", "Source Frame", orig_frame, "Source Audio Sample", orig_start, "Target Frame", location.frame, "Target Audio Sample", location.sample_start, "pts", pts, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAudioPTSLocation (Audio Gap Detected)", "Source Frame", orig_frame, "Source Audio Sample", orig_start, "Target Frame", location.frame, "Target Audio Sample", location.sample_start, "pts", pts, "", -1); } @@ -1528,7 +1528,7 @@ bool FFmpegReader::IsPartialFrame(long int requested_frame) { bool FFmpegReader::CheckMissingFrame(long int requested_frame) { // Debug output - AppendDebugMethod("FFmpegReader::CheckMissingFrame", "requested_frame", requested_frame, "has_missing_frames", has_missing_frames, "missing_video_frames.size()", missing_video_frames.size(), "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckMissingFrame", "requested_frame", requested_frame, "has_missing_frames", has_missing_frames, "missing_video_frames.size()", missing_video_frames.size(), "", -1, "", -1, "", -1); // Determine if frames are missing (due to no more video packets) if (info.has_video && !is_seeking && num_packets_since_video_frame > 60) @@ -1632,13 +1632,13 @@ void FFmpegReader::CheckWorkingFrames(bool end_of_stream, long int requested_fra } // Debug output - AppendDebugMethod("FFmpegReader::CheckWorkingFrames", "frame_number", f->number, "is_video_ready", is_video_ready, "is_audio_ready", is_audio_ready, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckWorkingFrames", "frame_number", f->number, "is_video_ready", is_video_ready, "is_audio_ready", is_audio_ready, "", -1, "", -1, "", -1); // Check if working frame is final if ((!end_of_stream && is_video_ready && is_audio_ready) || end_of_stream || is_seek_trash) { // 
Debug output - AppendDebugMethod("FFmpegReader::CheckWorkingFrames (mark frame as final)", "f->number", f->number, "is_seek_trash", is_seek_trash, "Working Cache Count", working_cache.Count(), "Final Cache Count", final_cache.Count(), "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckWorkingFrames (mark frame as final)", "f->number", f->number, "is_seek_trash", is_seek_trash, "Working Cache Count", working_cache.Count(), "Final Cache Count", final_cache.Count(), "", -1, "", -1); if (!is_seek_trash) { diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index 4bd3c66c..86a67f82 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -96,7 +96,7 @@ void FFmpegWriter::auto_detect_format() // initialize streams void FFmpegWriter::initialize_streams() { - AppendDebugMethod("FFmpegWriter::initialize_streams", "fmt->video_codec", fmt->video_codec, "fmt->audio_codec", fmt->audio_codec, "AV_CODEC_ID_NONE", AV_CODEC_ID_NONE, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::initialize_streams", "fmt->video_codec", fmt->video_codec, "fmt->audio_codec", fmt->audio_codec, "AV_CODEC_ID_NONE", AV_CODEC_ID_NONE, "", -1, "", -1, "", -1); // Add the audio and video streams using the default format codecs and initialize the codecs video_st = NULL; @@ -163,7 +163,7 @@ void FFmpegWriter::SetVideoOptions(bool has_video, string codec, Fraction fps, i info.display_ratio.num = size.num; info.display_ratio.den = size.den; - AppendDebugMethod("FFmpegWriter::SetVideoOptions (" + codec + ")", "width", width, "height", height, "size.num", size.num, "size.den", size.den, "fps.num", fps.num, "fps.den", fps.den); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::SetVideoOptions (" + codec + ")", "width", width, "height", height, "size.num", size.num, "size.den", size.den, "fps.num", fps.num, "fps.den", fps.den); // Enable / Disable video info.has_video = has_video; @@ -202,7 +202,7 @@ void 
FFmpegWriter::SetAudioOptions(bool has_audio, string codec, int sample_rate if (original_channels == 0) original_channels = info.channels; - AppendDebugMethod("FFmpegWriter::SetAudioOptions (" + codec + ")", "sample_rate", sample_rate, "channels", channels, "bit_rate", bit_rate, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::SetAudioOptions (" + codec + ")", "sample_rate", sample_rate, "channels", channels, "bit_rate", bit_rate, "", -1, "", -1, "", -1); // Enable / Disable audio info.has_audio = has_audio; @@ -291,7 +291,7 @@ void FFmpegWriter::SetOption(StreamType stream, string name, string value) throw av_opt_set (c->priv_data, name.c_str(), value.c_str(), 0); #endif - AppendDebugMethod("FFmpegWriter::SetOption (" + (string)name + ")", "stream == VIDEO_STREAM", stream == VIDEO_STREAM, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::SetOption (" + (string)name + ")", "stream == VIDEO_STREAM", stream == VIDEO_STREAM, "", -1, "", -1, "", -1, "", -1, "", -1); } else @@ -305,7 +305,7 @@ void FFmpegWriter::PrepareStreams() if (!info.has_audio && !info.has_video) throw InvalidOptions("No video or audio options have been set. You must set has_video or has_audio (or both).", path); - AppendDebugMethod("FFmpegWriter::PrepareStreams [" + path + "]", "info.has_audio", info.has_audio, "info.has_video", info.has_video, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::PrepareStreams [" + path + "]", "info.has_audio", info.has_audio, "info.has_video", info.has_video, "", -1, "", -1, "", -1, "", -1); // Initialize the streams (i.e. 
add the streams) initialize_streams(); @@ -339,7 +339,7 @@ void FFmpegWriter::WriteHeader() // Mark as 'written' write_header = true; - AppendDebugMethod("FFmpegWriter::WriteHeader", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::WriteHeader", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); } // Add a frame to the queue waiting to be encoded. @@ -357,7 +357,7 @@ void FFmpegWriter::WriteFrame(tr1::shared_ptr frame) throw(ErrorEncodingV if (info.has_audio && audio_st) spooled_audio_frames.push_back(frame); - AppendDebugMethod("FFmpegWriter::WriteFrame", "frame->number", frame->number, "spooled_video_frames.size()", spooled_video_frames.size(), "spooled_audio_frames.size()", spooled_audio_frames.size(), "cache_size", cache_size, "is_writing", is_writing, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::WriteFrame", "frame->number", frame->number, "spooled_video_frames.size()", spooled_video_frames.size(), "spooled_audio_frames.size()", spooled_audio_frames.size(), "cache_size", cache_size, "is_writing", is_writing, "", -1); // Write the frames once it reaches the correct cache size if (spooled_video_frames.size() == cache_size || spooled_audio_frames.size() == cache_size) @@ -385,7 +385,7 @@ void FFmpegWriter::WriteFrame(tr1::shared_ptr frame) throw(ErrorEncodingV // Write all frames in the queue to the video file. 
void FFmpegWriter::write_queued_frames() throw (ErrorEncodingVideo) { - AppendDebugMethod("FFmpegWriter::write_queued_frames", "spooled_video_frames.size()", spooled_video_frames.size(), "spooled_audio_frames.size()", spooled_audio_frames.size(), "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_queued_frames", "spooled_video_frames.size()", spooled_video_frames.size(), "spooled_audio_frames.size()", spooled_audio_frames.size(), "", -1, "", -1, "", -1, "", -1); // Flip writing flag is_writing = true; @@ -499,7 +499,7 @@ void FFmpegWriter::write_queued_frames() throw (ErrorEncodingVideo) // Write a block of frames from a reader void FFmpegWriter::WriteFrame(ReaderBase* reader, long int start, long int length) throw(ErrorEncodingVideo, WriterClosed) { - AppendDebugMethod("FFmpegWriter::WriteFrame (from Reader)", "start", start, "length", length, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::WriteFrame (from Reader)", "start", start, "length", length, "", -1, "", -1, "", -1, "", -1); // Loop through each frame (and encoded it) for (long int number = start; number <= length; number++) @@ -534,7 +534,7 @@ void FFmpegWriter::WriteTrailer() // Mark as 'written' write_trailer = true; - AppendDebugMethod("FFmpegWriter::WriteTrailer", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::WriteTrailer", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); } // Flush encoders @@ -594,7 +594,7 @@ void FFmpegWriter::flush_encoders() #endif if (error_code < 0) { - AppendDebugMethod("FFmpegWriter::flush_encoders ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::flush_encoders ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); } if (!got_packet) { 
stop_encoding = 1; @@ -616,7 +616,7 @@ void FFmpegWriter::flush_encoders() // Write packet error_code = av_interleaved_write_frame(oc, &pkt); if (error_code < 0) { - AppendDebugMethod("FFmpegWriter::flush_encoders ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::flush_encoders ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); } // Deallocate memory (if needed) @@ -646,7 +646,7 @@ void FFmpegWriter::flush_encoders() int got_packet = 0; error_code = avcodec_encode_audio2(audio_codec, &pkt, NULL, &got_packet); if (error_code < 0) { - AppendDebugMethod("FFmpegWriter::flush_encoders ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::flush_encoders ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); } if (!got_packet) { stop_encoding = 1; @@ -672,7 +672,7 @@ void FFmpegWriter::flush_encoders() // Write packet error_code = av_interleaved_write_frame(oc, &pkt); if (error_code < 0) { - AppendDebugMethod("FFmpegWriter::flush_encoders ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::flush_encoders ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); } // deallocate memory for packet @@ -758,7 +758,7 @@ void FFmpegWriter::Close() write_header = false; write_trailer = false; - AppendDebugMethod("FFmpegWriter::Close", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::Close", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); } // Add an AVFrame to the cache @@ -860,7 
+860,7 @@ AVStream* FFmpegWriter::add_audio_stream() if (oc->oformat->flags & AVFMT_GLOBALHEADER) c->flags |= CODEC_FLAG_GLOBAL_HEADER; - AppendDebugMethod("FFmpegWriter::add_audio_stream", "c->codec_id", c->codec_id, "c->bit_rate", c->bit_rate, "c->channels", c->channels, "c->sample_fmt", c->sample_fmt, "c->channel_layout", c->channel_layout, "c->sample_rate", c->sample_rate); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::add_audio_stream", "c->codec_id", c->codec_id, "c->bit_rate", c->bit_rate, "c->channels", c->channels, "c->sample_fmt", c->sample_fmt, "c->channel_layout", c->channel_layout, "c->sample_rate", c->sample_rate); return st; } @@ -950,7 +950,7 @@ AVStream* FFmpegWriter::add_video_stream() } } - AppendDebugMethod("FFmpegWriter::add_video_stream (" + (string)fmt->name + " : " + (string)av_get_pix_fmt_name(c->pix_fmt) + ")", "c->codec_id", c->codec_id, "c->bit_rate", c->bit_rate, "c->pix_fmt", c->pix_fmt, "oc->oformat->flags", oc->oformat->flags, "AVFMT_RAWPICTURE", AVFMT_RAWPICTURE, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::add_video_stream (" + (string)fmt->name + " : " + (string)av_get_pix_fmt_name(c->pix_fmt) + ")", "c->codec_id", c->codec_id, "c->bit_rate", c->bit_rate, "c->pix_fmt", c->pix_fmt, "oc->oformat->flags", oc->oformat->flags, "AVFMT_RAWPICTURE", AVFMT_RAWPICTURE, "", -1); return st; } @@ -1008,7 +1008,7 @@ void FFmpegWriter::open_audio(AVFormatContext *oc, AVStream *st) audio_encoder_buffer_size = AUDIO_PACKET_ENCODING_SIZE; audio_encoder_buffer = new uint8_t[audio_encoder_buffer_size]; - AppendDebugMethod("FFmpegWriter::open_audio", "audio_codec->thread_count", audio_codec->thread_count, "audio_input_frame_size", audio_input_frame_size, "buffer_size", AVCODEC_MAX_AUDIO_FRAME_SIZE + FF_INPUT_BUFFER_PADDING_SIZE, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::open_audio", "audio_codec->thread_count", audio_codec->thread_count, "audio_input_frame_size", 
audio_input_frame_size, "buffer_size", AVCODEC_MAX_AUDIO_FRAME_SIZE + FF_INPUT_BUFFER_PADDING_SIZE, "", -1, "", -1, "", -1); } @@ -1030,7 +1030,7 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) if (avcodec_open2(video_codec, codec, NULL) < 0) throw InvalidCodec("Could not open codec", path); - AppendDebugMethod("FFmpegWriter::open_video", "video_codec->thread_count", video_codec->thread_count, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::open_video", "video_codec->thread_count", video_codec->thread_count, "", -1, "", -1, "", -1, "", -1, "", -1); } @@ -1096,7 +1096,7 @@ void FFmpegWriter::write_audio_packets(bool final) int samples_position = 0; - AppendDebugMethod("FFmpegWriter::write_audio_packets", "final", final, "total_frame_samples", total_frame_samples, "channel_layout_in_frame", channel_layout_in_frame, "channels_in_frame", channels_in_frame, "samples_in_frame", samples_in_frame, "LAYOUT_MONO", LAYOUT_MONO); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets", "final", final, "total_frame_samples", total_frame_samples, "channel_layout_in_frame", channel_layout_in_frame, "channels_in_frame", channels_in_frame, "samples_in_frame", samples_in_frame, "LAYOUT_MONO", LAYOUT_MONO); // Keep track of the original sample format AVSampleFormat output_sample_fmt = audio_codec->sample_fmt; @@ -1150,7 +1150,7 @@ void FFmpegWriter::write_audio_packets(bool final) audio_converted->nb_samples = total_frame_samples / channels_in_frame; av_samples_alloc(audio_converted->data, audio_converted->linesize, info.channels, audio_converted->nb_samples, output_sample_fmt, 0); - AppendDebugMethod("FFmpegWriter::write_audio_packets (1st resampling)", "in_sample_fmt", AV_SAMPLE_FMT_S16, "out_sample_fmt", output_sample_fmt, "in_sample_rate", sample_rate_in_frame, "out_sample_rate", info.sample_rate, "in_channels", channels_in_frame, "out_channels", info.channels); + 
ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets (1st resampling)", "in_sample_fmt", AV_SAMPLE_FMT_S16, "out_sample_fmt", output_sample_fmt, "in_sample_rate", sample_rate_in_frame, "out_sample_rate", info.sample_rate, "in_channels", channels_in_frame, "out_channels", info.channels); // setup resample context if (!avr) { @@ -1189,7 +1189,7 @@ void FFmpegWriter::write_audio_packets(bool final) AV_FREE_FRAME(&audio_converted); all_queued_samples = NULL; // this array cleared with above call - AppendDebugMethod("FFmpegWriter::write_audio_packets (Successfully completed 1st resampling)", "nb_samples", nb_samples, "remaining_frame_samples", remaining_frame_samples, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets (Successfully completed 1st resampling)", "nb_samples", nb_samples, "remaining_frame_samples", remaining_frame_samples, "", -1, "", -1, "", -1, "", -1); } // Loop until no more samples @@ -1225,7 +1225,7 @@ void FFmpegWriter::write_audio_packets(bool final) AV_RESET_FRAME(frame_final); if (av_sample_fmt_is_planar(audio_codec->sample_fmt)) { - AppendDebugMethod("FFmpegWriter::write_audio_packets (2nd resampling for Planar formats)", "in_sample_fmt", output_sample_fmt, "out_sample_fmt", audio_codec->sample_fmt, "in_sample_rate", info.sample_rate, "out_sample_rate", info.sample_rate, "in_channels", info.channels, "out_channels", info.channels); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets (2nd resampling for Planar formats)", "in_sample_fmt", output_sample_fmt, "out_sample_fmt", audio_codec->sample_fmt, "in_sample_rate", info.sample_rate, "out_sample_rate", info.sample_rate, "in_channels", info.channels, "out_channels", info.channels); // setup resample context if (!avr_planar) { @@ -1277,7 +1277,7 @@ void FFmpegWriter::write_audio_packets(bool final) free(audio_frame->data[0]); // TODO: Determine why av_free crashes on Windows 
AV_FREE_FRAME(&audio_frame); - AppendDebugMethod("FFmpegWriter::write_audio_packets (Successfully completed 2nd resampling for Planar formats)", "nb_samples", nb_samples, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets (Successfully completed 2nd resampling for Planar formats)", "nb_samples", nb_samples, "", -1, "", -1, "", -1, "", -1, "", -1); } else { // Create a new array @@ -1334,13 +1334,13 @@ void FFmpegWriter::write_audio_packets(bool final) int error_code = av_interleaved_write_frame(oc, &pkt); if (error_code < 0) { - AppendDebugMethod("FFmpegWriter::write_audio_packets ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); } } if (error_code < 0) { - AppendDebugMethod("FFmpegWriter::write_audio_packets ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); } // deallocate AVFrame @@ -1436,7 +1436,7 @@ void FFmpegWriter::process_video_packet(tr1::shared_ptr frame) // Fill with data avpicture_fill((AVPicture *) frame_source, (uint8_t*)pixels, PIX_FMT_RGBA, source_image_width, source_image_height); - AppendDebugMethod("FFmpegWriter::process_video_packet", "frame->number", frame->number, "bytes_source", bytes_source, "bytes_final", bytes_final, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::process_video_packet", "frame->number", frame->number, "bytes_source", bytes_source, "bytes_final", bytes_final, "", -1, "", -1, "", -1); // Resize & convert pixel 
format sws_scale(scaler, frame_source->data, frame_source->linesize, 0, @@ -1456,7 +1456,7 @@ void FFmpegWriter::process_video_packet(tr1::shared_ptr frame) // write video frame bool FFmpegWriter::write_video_packet(tr1::shared_ptr frame, AVFrame* frame_final) { - AppendDebugMethod("FFmpegWriter::write_video_packet", "frame->number", frame->number, "oc->oformat->flags & AVFMT_RAWPICTURE", oc->oformat->flags & AVFMT_RAWPICTURE, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet", "frame->number", frame->number, "oc->oformat->flags & AVFMT_RAWPICTURE", oc->oformat->flags & AVFMT_RAWPICTURE, "", -1, "", -1, "", -1, "", -1); if (oc->oformat->flags & AVFMT_RAWPICTURE) { // Raw video case. @@ -1476,7 +1476,7 @@ bool FFmpegWriter::write_video_packet(tr1::shared_ptr frame, AVFrame* fra int error_code = av_interleaved_write_frame(oc, &pkt); if (error_code < 0) { - AppendDebugMethod("FFmpegWriter::write_video_packet ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); return false; } @@ -1549,7 +1549,7 @@ bool FFmpegWriter::write_video_packet(tr1::shared_ptr frame, AVFrame* fra int error_code = av_interleaved_write_frame(oc, &pkt); if (error_code < 0) { - AppendDebugMethod("FFmpegWriter::write_video_packet ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); return false; } } diff --git a/src/FrameMapper.cpp b/src/FrameMapper.cpp index 04062860..52b081fa 100644 --- a/src/FrameMapper.cpp +++ b/src/FrameMapper.cpp @@ 
-84,7 +84,7 @@ void FrameMapper::AddField(Field field) // whether the frame rate is increasing or decreasing. void FrameMapper::Init() { - AppendDebugMethod("FrameMapper::Init (Calculate frame mappings)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::Init (Calculate frame mappings)", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); // Do not initialize anything if just a picture with no audio if (info.has_video and !info.has_audio and info.has_single_image) @@ -382,7 +382,7 @@ tr1::shared_ptr FrameMapper::GetFrame(long int requested_frame) throw(Rea omp_set_nested(true); // Debug output - AppendDebugMethod("FrameMapper::GetFrame (Loop through frames)", "requested_frame", requested_frame, "minimum_frames", minimum_frames, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::GetFrame (Loop through frames)", "requested_frame", requested_frame, "minimum_frames", minimum_frames, "", -1, "", -1, "", -1, "", -1); #pragma omp parallel { @@ -547,14 +547,10 @@ void FrameMapper::Open() throw(InvalidFile) { if (reader) { - AppendDebugMethod("FrameMapper::Open", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::Open", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); // Open the reader reader->Open(); - - // Set child reader in debug mode (if needed) - if (debug) - reader->debug = true; } } @@ -566,7 +562,7 @@ void FrameMapper::Close() // Create a scoped lock, allowing only a single thread to run the following code at one time const GenericScopedLock lock(getFrameCriticalSection); - AppendDebugMethod("FrameMapper::Open", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::Close", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); // Close internal reader reader->Close(); @@ -639,7 +635,7 @@ void FrameMapper::SetJsonValue(Json::Value root) throw(InvalidFile) { // Change frame
rate or audio mapping details void FrameMapper::ChangeMapping(Fraction target_fps, PulldownType target_pulldown, int target_sample_rate, int target_channels, ChannelLayout target_channel_layout) { - AppendDebugMethod("FrameMapper::ChangeMapping", "target_fps.num", target_fps.num, "target_fps.den", target_fps.num, "target_pulldown", target_pulldown, "target_sample_rate", target_sample_rate, "target_channels", target_channels, "target_channel_layout", target_channel_layout); + ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::ChangeMapping", "target_fps.num", target_fps.num, "target_fps.den", target_fps.den, "target_pulldown", target_pulldown, "target_sample_rate", target_sample_rate, "target_channels", target_channels, "target_channel_layout", target_channel_layout); // Mark as dirty is_dirty = true; @@ -672,7 +668,7 @@ void FrameMapper::ResampleMappedAudio(tr1::shared_ptr frame, long int ori int samples_in_frame = frame->GetAudioSamplesCount(); ChannelLayout channel_layout_in_frame = frame->ChannelsLayout(); - AppendDebugMethod("FrameMapper::ResampleMappedAudio", "frame->number", frame->number, "original_frame_number", original_frame_number, "channels_in_frame", channels_in_frame, "samples_in_frame", samples_in_frame, "sample_rate_in_frame", sample_rate_in_frame, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::ResampleMappedAudio", "frame->number", frame->number, "original_frame_number", original_frame_number, "channels_in_frame", channels_in_frame, "samples_in_frame", samples_in_frame, "sample_rate_in_frame", sample_rate_in_frame, "", -1); // Get audio sample array float* frame_samples_float = NULL; @@ -695,7 +691,7 @@ void FrameMapper::ResampleMappedAudio(tr1::shared_ptr frame, long int ori delete[] frame_samples_float; frame_samples_float = NULL; - AppendDebugMethod("FrameMapper::ResampleMappedAudio (got sample data from frame)", "frame->number", frame->number, "total_frame_samples", total_frame_samples, "target channels", info.channels,
"channels_in_frame", channels_in_frame, "target sample_rate", info.sample_rate, "samples_in_frame", samples_in_frame); + ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::ResampleMappedAudio (got sample data from frame)", "frame->number", frame->number, "total_frame_samples", total_frame_samples, "target channels", info.channels, "channels_in_frame", channels_in_frame, "target sample_rate", info.sample_rate, "samples_in_frame", samples_in_frame); // Create input frame (and allocate arrays) @@ -708,14 +704,14 @@ void FrameMapper::ResampleMappedAudio(tr1::shared_ptr frame, long int ori if (error_code < 0) { - AppendDebugMethod("FrameMapper::ResampleMappedAudio ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::ResampleMappedAudio ERROR [" + (string)av_err2str(error_code) + "]", "error_code", error_code, "", -1, "", -1, "", -1, "", -1, "", -1); throw ErrorEncodingVideo("Error while resampling audio in frame mapper", frame->number); } // Update total samples & input frame size (due to bigger or smaller data types) total_frame_samples = Frame::GetSamplesPerFrame(frame->number, target, info.sample_rate, info.channels); - AppendDebugMethod("FrameMapper::ResampleMappedAudio (adjust # of samples)", "total_frame_samples", total_frame_samples, "info.sample_rate", info.sample_rate, "sample_rate_in_frame", sample_rate_in_frame, "info.channels", info.channels, "channels_in_frame", channels_in_frame, "original_frame_number", original_frame_number); + ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::ResampleMappedAudio (adjust # of samples)", "total_frame_samples", total_frame_samples, "info.sample_rate", info.sample_rate, "sample_rate_in_frame", sample_rate_in_frame, "info.channels", info.channels, "channels_in_frame", channels_in_frame, "original_frame_number", original_frame_number); // Create output frame (and allocate arrays) AVFrame 
*audio_converted = AV_ALLOCATE_FRAME(); @@ -723,7 +719,7 @@ void FrameMapper::ResampleMappedAudio(tr1::shared_ptr frame, long int ori audio_converted->nb_samples = total_frame_samples; av_samples_alloc(audio_converted->data, audio_converted->linesize, info.channels, total_frame_samples, AV_SAMPLE_FMT_S16, 0); - AppendDebugMethod("FrameMapper::ResampleMappedAudio (preparing for resample)", "in_sample_fmt", AV_SAMPLE_FMT_S16, "out_sample_fmt", AV_SAMPLE_FMT_S16, "in_sample_rate", sample_rate_in_frame, "out_sample_rate", info.sample_rate, "in_channels", channels_in_frame, "out_channels", info.channels); + ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::ResampleMappedAudio (preparing for resample)", "in_sample_fmt", AV_SAMPLE_FMT_S16, "out_sample_fmt", AV_SAMPLE_FMT_S16, "in_sample_rate", sample_rate_in_frame, "out_sample_rate", info.sample_rate, "in_channels", channels_in_frame, "out_channels", info.channels); int nb_samples = 0; // Force the audio resampling to happen in order (1st thread to last thread), so the waveform @@ -771,7 +767,7 @@ void FrameMapper::ResampleMappedAudio(tr1::shared_ptr frame, long int ori int channel_buffer_size = nb_samples; frame->ResizeAudio(info.channels, channel_buffer_size, info.sample_rate, info.channel_layout); - AppendDebugMethod("FrameMapper::ResampleMappedAudio (Audio successfully resampled)", "nb_samples", nb_samples, "total_frame_samples", total_frame_samples, "info.sample_rate", info.sample_rate, "channels_in_frame", channels_in_frame, "info.channels", info.channels, "info.channel_layout", info.channel_layout); + ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::ResampleMappedAudio (Audio successfully resampled)", "nb_samples", nb_samples, "total_frame_samples", total_frame_samples, "info.sample_rate", info.sample_rate, "channels_in_frame", channels_in_frame, "info.channels", info.channels, "info.channel_layout", info.channel_layout); // Array of floats (to hold samples for each channel) float *channel_buffer = new 
float[channel_buffer_size]; @@ -811,7 +807,7 @@ void FrameMapper::ResampleMappedAudio(tr1::shared_ptr frame, long int ori // Add samples to frame for this channel frame->AddAudio(true, channel_filter, 0, channel_buffer, position, 1.0f); - AppendDebugMethod("FrameMapper::ResampleMappedAudio (Add audio to channel)", "number of samples", position, "channel_filter", channel_filter, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::ResampleMappedAudio (Add audio to channel)", "number of samples", position, "channel_filter", channel_filter, "", -1, "", -1, "", -1, "", -1); } // Update frame's audio meta data diff --git a/src/ImageWriter.cpp b/src/ImageWriter.cpp index 532e0df3..a2bccc6d 100644 --- a/src/ImageWriter.cpp +++ b/src/ImageWriter.cpp @@ -76,7 +76,7 @@ void ImageWriter::SetVideoOptions(string format, Fraction fps, int width, int he info.display_ratio.num = size.num; info.display_ratio.den = size.den; - AppendDebugMethod("ImageWriter::SetVideoOptions (" + format + ")", "width", width, "height", height, "size.num", size.num, "size.den", size.den, "fps.num", fps.num, "fps.den", fps.den); + ZmqLogger::Instance()->AppendDebugMethod("ImageWriter::SetVideoOptions (" + format + ")", "width", width, "height", height, "size.num", size.num, "size.den", size.den, "fps.num", fps.num, "fps.den", fps.den); } // Open the writer @@ -122,7 +122,7 @@ void ImageWriter::WriteFrame(tr1::shared_ptr frame) throw(WriterClosed) // Write a block of frames from a reader void ImageWriter::WriteFrame(ReaderBase* reader, long int start, long int length) throw(WriterClosed) { - AppendDebugMethod("ImageWriter::WriteFrame (from Reader)", "start", start, "length", length, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("ImageWriter::WriteFrame (from Reader)", "start", start, "length", length, "", -1, "", -1, "", -1, "", -1); // Loop through each frame (and encoded it) for (long int number = start; number <= length; number++) @@ 
-150,6 +150,6 @@ void ImageWriter::Close() // Close writer is_open = false; - AppendDebugMethod("ImageWriter::Close", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("ImageWriter::Close", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1); } diff --git a/src/Qt/VideoPlaybackThread.cpp b/src/Qt/VideoPlaybackThread.cpp index d3fe3aa9..4b6d0af2 100644 --- a/src/Qt/VideoPlaybackThread.cpp +++ b/src/Qt/VideoPlaybackThread.cpp @@ -60,11 +60,17 @@ namespace openshot if (need_render) { + // Debug + ZmqLogger::Instance()->AppendDebugMethod("VideoPlaybackThread::run (before render)", "frame->number", frame->number, "need_render", need_render, "", -1, "", -1, "", -1, "", -1); + // Render the frame to the screen renderer->paint(frame); // Signal to other threads that the rendered event has completed rendered.signal(); + + // Debug + ZmqLogger::Instance()->AppendDebugMethod("VideoPlaybackThread::run (after render)", "frame->number", frame->number, "need_render", need_render, "", -1, "", -1, "", -1, "", -1); } } diff --git a/src/Qt/VideoRenderer.cpp b/src/Qt/VideoRenderer.cpp index 4c30dac1..16160382 100644 --- a/src/Qt/VideoRenderer.cpp +++ b/src/Qt/VideoRenderer.cpp @@ -47,5 +47,6 @@ void VideoRenderer::OverrideWidget(long qwidget_address) void VideoRenderer::render(tr1::shared_ptr image) { - emit present(*image); + if (image) + emit present(*image); } diff --git a/src/QtPlayer.cpp b/src/QtPlayer.cpp index 55f5623e..3eee3fe4 100644 --- a/src/QtPlayer.cpp +++ b/src/QtPlayer.cpp @@ -56,7 +56,6 @@ QtPlayer::~QtPlayer() void QtPlayer::SetSource(const std::string &source) { FFmpegReader *ffreader = new FFmpegReader(source); - ffreader->debug = false; ffreader->DisplayInfo(); //reader = new FrameMapper(ffreader, ffreader->info.fps, PULLDOWN_NONE, ffreader->info.sample_rate, ffreader->info.channels, ffreader->info.channel_layout); @@ -66,7 +65,6 @@ void QtPlayer::SetSource(const std::string &source) Timeline* tm = (Timeline*)reader; 
tm->AddClip(c); tm->Open(); - tm->debug = false; // Set the reader Reader(reader); diff --git a/src/ReaderBase.cpp b/src/ReaderBase.cpp index dccf9797..194fecfc 100644 --- a/src/ReaderBase.cpp +++ b/src/ReaderBase.cpp @@ -58,55 +58,6 @@ ReaderBase::ReaderBase() info.channel_layout = LAYOUT_MONO; info.audio_stream_index = -1; info.audio_timebase = Fraction(); - - // Initialize debug mode - debug = false; -} - -// Append debug information as JSON -void ReaderBase::AppendDebugMethod(string method_name, string arg1_name, float arg1_value, - string arg2_name, float arg2_value, - string arg3_name, float arg3_value, - string arg4_name, float arg4_value, - string arg5_name, float arg5_value, - string arg6_name, float arg6_value) -{ - if (!debug) - // Don't do anything - return; - - // Output to standard output - #pragma omp critical (debug_output) - { - stringstream message; - message << fixed << setprecision(4); - message << method_name << " ("; - - // Add attributes to method JSON - if (arg1_name.length() > 0) - message << arg1_name << "=" << arg1_value; - - if (arg2_name.length() > 0) - message << ", " << arg2_name << "=" << arg2_value; - - if (arg3_name.length() > 0) - message << ", " << arg3_name << "=" << arg3_value; - - if (arg4_name.length() > 0) - message << ", " << arg4_name << "=" << arg4_value; - - if (arg5_name.length() > 0) - message << ", " << arg5_name << "=" << arg5_value; - - if (arg6_name.length() > 0) - message << ", " << arg6_name << "=" << arg6_value; - - // Output to standard output - message << ")" << endl; - - // Send message through ZMQ - ZmqLogger::Instance()->Log(message.str()); - } } // Display file information diff --git a/src/Timeline.cpp b/src/Timeline.cpp index 891d1489..ea9b178f 100644 --- a/src/Timeline.cpp +++ b/src/Timeline.cpp @@ -156,7 +156,7 @@ tr1::shared_ptr Timeline::apply_effects(tr1::shared_ptr frame, lon float requested_time = calculate_time(timeline_frame_number, info.fps); // Debug output - 
AppendDebugMethod("Timeline::apply_effects", "requested_time", requested_time, "frame->number", frame->number, "timeline_frame_number", timeline_frame_number, "layer", layer, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::apply_effects", "requested_time", requested_time, "frame->number", frame->number, "timeline_frame_number", timeline_frame_number, "layer", layer, "", -1, "", -1); // Find Effects at this position and layer list::iterator effect_itr; @@ -170,7 +170,7 @@ tr1::shared_ptr Timeline::apply_effects(tr1::shared_ptr frame, lon bool does_effect_intersect = (effect->Position() <= requested_time && effect->Position() + effect_duration >= requested_time && effect->Layer() == layer); // Debug output - AppendDebugMethod("Timeline::apply_effects (Does effect intersect)", "effect->Position()", effect->Position(), "requested_time", requested_time, "does_effect_intersect", does_effect_intersect, "timeline_frame_number", timeline_frame_number, "layer", layer, "effect_duration", effect_duration); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::apply_effects (Does effect intersect)", "effect->Position()", effect->Position(), "requested_time", requested_time, "does_effect_intersect", does_effect_intersect, "timeline_frame_number", timeline_frame_number, "layer", layer, "effect_duration", effect_duration); // Clip is visible if (does_effect_intersect) @@ -180,7 +180,7 @@ tr1::shared_ptr Timeline::apply_effects(tr1::shared_ptr frame, lon int effect_frame_number = round(time_diff * info.fps.ToFloat()) + 1; // Debug output - AppendDebugMethod("Timeline::apply_effects (Process Effect)", "time_diff", time_diff, "effect_frame_number", effect_frame_number, "effect_duration", effect_duration, "does_effect_intersect", does_effect_intersect, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::apply_effects (Process Effect)", "time_diff", time_diff, "effect_frame_number", effect_frame_number, "effect_duration", effect_duration, 
"does_effect_intersect", does_effect_intersect, "", -1, "", -1); // Apply the effect to this frame frame = effect->GetFrame(frame, effect_frame_number); @@ -234,13 +234,13 @@ void Timeline::add_layer(tr1::shared_ptr new_frame, Clip* source_clip, lo return; // Debug output - AppendDebugMethod("Timeline::add_layer", "new_frame->number", new_frame->number, "clip_frame_number", clip_frame_number, "timeline_frame_number", timeline_frame_number, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer", "new_frame->number", new_frame->number, "clip_frame_number", clip_frame_number, "timeline_frame_number", timeline_frame_number, "", -1, "", -1, "", -1); /* REPLACE IMAGE WITH WAVEFORM IMAGE (IF NEEDED) */ if (source_clip->Waveform()) { // Debug output - AppendDebugMethod("Timeline::add_layer (Generate Waveform Image)", "source_frame->number", source_frame->number, "source_clip->Waveform()", source_clip->Waveform(), "clip_frame_number", clip_frame_number, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Generate Waveform Image)", "source_frame->number", source_frame->number, "source_clip->Waveform()", source_clip->Waveform(), "clip_frame_number", clip_frame_number, "", -1, "", -1, "", -1); // Get the color of the waveform int red = source_clip->wave_color.red.GetInt(clip_frame_number); @@ -265,7 +265,7 @@ void Timeline::add_layer(tr1::shared_ptr new_frame, Clip* source_clip, lo if (source_clip->Reader()->info.has_audio) { // Debug output - AppendDebugMethod("Timeline::add_layer (Copy Audio)", "source_clip->Reader()->info.has_audio", source_clip->Reader()->info.has_audio, "source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(), "info.channels", info.channels, "clip_frame_number", clip_frame_number, "timeline_frame_number", timeline_frame_number, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Copy Audio)", "source_clip->Reader()->info.has_audio", 
source_clip->Reader()->info.has_audio, "source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(), "info.channels", info.channels, "clip_frame_number", clip_frame_number, "timeline_frame_number", timeline_frame_number, "", -1); if (source_frame->GetAudioChannelsCount() == info.channels) for (int channel = 0; channel < source_frame->GetAudioChannelsCount(); channel++) @@ -297,7 +297,7 @@ void Timeline::add_layer(tr1::shared_ptr new_frame, Clip* source_clip, lo } else // Debug output - AppendDebugMethod("Timeline::add_layer (No Audio Copied - Wrong # of Channels)", "source_clip->Reader()->info.has_audio", source_clip->Reader()->info.has_audio, "source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(), "info.channels", info.channels, "clip_frame_number", clip_frame_number, "timeline_frame_number", timeline_frame_number, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (No Audio Copied - Wrong # of Channels)", "source_clip->Reader()->info.has_audio", source_clip->Reader()->info.has_audio, "source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(), "info.channels", info.channels, "clip_frame_number", clip_frame_number, "timeline_frame_number", timeline_frame_number, "", -1); } @@ -307,7 +307,7 @@ void Timeline::add_layer(tr1::shared_ptr new_frame, Clip* source_clip, lo return; // Debug output - AppendDebugMethod("Timeline::add_layer (Get Source Image)", "source_frame->number", source_frame->number, "source_clip->Waveform()", source_clip->Waveform(), "clip_frame_number", clip_frame_number, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Get Source Image)", "source_frame->number", source_frame->number, "source_clip->Waveform()", source_clip->Waveform(), "clip_frame_number", clip_frame_number, "", -1, "", -1, "", -1); // Get actual frame image data source_image = source_frame->GetImage(); @@ -335,7 +335,7 @@ void Timeline::add_layer(tr1::shared_ptr 
new_frame, Clip* source_clip, lo } // Debug output - AppendDebugMethod("Timeline::add_layer (Set Alpha & Opacity)", "alpha", alpha, "source_frame->number", source_frame->number, "clip_frame_number", clip_frame_number, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Set Alpha & Opacity)", "alpha", alpha, "source_frame->number", source_frame->number, "clip_frame_number", clip_frame_number, "", -1, "", -1, "", -1); } /* RESIZE SOURCE IMAGE - based on scale type */ @@ -348,7 +348,7 @@ void Timeline::add_layer(tr1::shared_ptr new_frame, Clip* source_clip, lo source_height = source_image->height(); // Debug output - AppendDebugMethod("Timeline::add_layer (Scale: SCALE_FIT)", "source_frame->number", source_frame->number, "source_width", source_width, "source_height", source_height, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Scale: SCALE_FIT)", "source_frame->number", source_frame->number, "source_width", source_width, "source_height", source_height, "", -1, "", -1, "", -1); break; case (SCALE_STRETCH): @@ -358,7 +358,7 @@ void Timeline::add_layer(tr1::shared_ptr new_frame, Clip* source_clip, lo source_height = source_image->height(); // Debug output - AppendDebugMethod("Timeline::add_layer (Scale: SCALE_STRETCH)", "source_frame->number", source_frame->number, "source_width", source_width, "source_height", source_height, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Scale: SCALE_STRETCH)", "source_frame->number", source_frame->number, "source_width", source_width, "source_height", source_height, "", -1, "", -1, "", -1); break; case (SCALE_CROP): @@ -374,7 +374,7 @@ void Timeline::add_layer(tr1::shared_ptr new_frame, Clip* source_clip, lo source_height = source_image->height(); // Debug output - AppendDebugMethod("Timeline::add_layer (Scale: SCALE_CROP)", "source_frame->number", source_frame->number, "source_width", source_width, 
"source_height", source_height, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Scale: SCALE_CROP)", "source_frame->number", source_frame->number, "source_width", source_width, "source_height", source_height, "", -1, "", -1, "", -1); break; } @@ -421,7 +421,7 @@ void Timeline::add_layer(tr1::shared_ptr new_frame, Clip* source_clip, lo } // Debug output - AppendDebugMethod("Timeline::add_layer (Gravity)", "source_frame->number", source_frame->number, "source_clip->gravity", source_clip->gravity, "info.width", info.width, "source_width", source_width, "info.height", info.height, "source_height", source_height); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Gravity)", "source_frame->number", source_frame->number, "source_clip->gravity", source_clip->gravity, "info.width", info.width, "source_width", source_width, "info.height", info.height, "source_height", source_height); /* LOCATION, ROTATION, AND SCALE */ float r = source_clip->rotation.GetValue(clip_frame_number); // rotate in degrees @@ -437,7 +437,7 @@ void Timeline::add_layer(tr1::shared_ptr new_frame, Clip* source_clip, lo if ((!isEqual(x, 0) || !isEqual(y, 0)) && (isEqual(r, 0) && isEqual(sx, 1) && isEqual(sy, 1) && !is_x_animated && !is_y_animated)) { // SIMPLE OFFSET - AppendDebugMethod("Timeline::add_layer (Transform: SIMPLE)", "source_frame->number", source_frame->number, "x", x, "y", y, "r", r, "sx", sx, "sy", sy); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Transform: SIMPLE)", "source_frame->number", source_frame->number, "x", x, "y", y, "r", r, "sx", sx, "sy", sy); // If only X and Y are different, and no animation is being used (just set the offset for speed) transformed = true; @@ -448,7 +448,7 @@ void Timeline::add_layer(tr1::shared_ptr new_frame, Clip* source_clip, lo } else if (!isEqual(r, 0) || !isEqual(x, 0) || !isEqual(y, 0) || !isEqual(sx, 1) || !isEqual(sy, 1)) { // COMPLEX DISTORTION - 
AppendDebugMethod("Timeline::add_layer (Transform: COMPLEX)", "source_frame->number", source_frame->number, "x", x, "y", y, "r", r, "sx", sx, "sy", sy); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Transform: COMPLEX)", "source_frame->number", source_frame->number, "x", x, "y", y, "r", r, "sx", sx, "sy", sy); // Use the QTransform object, which can be very CPU intensive transformed = true; @@ -475,11 +475,11 @@ void Timeline::add_layer(tr1::shared_ptr new_frame, Clip* source_clip, lo } // Debug output - AppendDebugMethod("Timeline::add_layer (Transform: COMPLEX: Completed ScaleRotateTranslateDistortion)", "source_frame->number", source_frame->number, "x", x, "y", y, "r", r, "sx", sx, "sy", sy); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Transform: COMPLEX: Completed ScaleRotateTranslateDistortion)", "source_frame->number", source_frame->number, "x", x, "y", y, "r", r, "sx", sx, "sy", sy); } // Debug output - AppendDebugMethod("Timeline::add_layer (Transform: Composite Image Layer: Prepare)", "source_frame->number", source_frame->number, "offset_x", offset_x, "offset_y", offset_y, "new_frame->GetImage()->width()", new_frame->GetImage()->width(), "transformed", transformed, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Transform: Composite Image Layer: Prepare)", "source_frame->number", source_frame->number, "offset_x", offset_x, "offset_y", offset_y, "new_frame->GetImage()->width()", new_frame->GetImage()->width(), "transformed", transformed, "", -1); /* COMPOSITE SOURCE IMAGE (LAYER) ONTO FINAL IMAGE */ tr1::shared_ptr new_image = new_frame->GetImage(); @@ -498,13 +498,13 @@ void Timeline::add_layer(tr1::shared_ptr new_frame, Clip* source_clip, lo painter.end(); // Debug output - AppendDebugMethod("Timeline::add_layer (Transform: Composite Image Layer: Completed)", "source_frame->number", source_frame->number, "offset_x", offset_x, "offset_y", offset_y, "new_frame->GetImage()->width()", 
new_frame->GetImage()->width(), "transformed", transformed, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Transform: Composite Image Layer: Completed)", "source_frame->number", source_frame->number, "offset_x", offset_x, "offset_y", offset_y, "new_frame->GetImage()->width()", new_frame->GetImage()->width(), "transformed", transformed, "", -1); } // Update the list of 'opened' clips void Timeline::update_open_clips(Clip *clip, bool does_clip_intersect) { - AppendDebugMethod("Timeline::update_open_clips (before)", "does_clip_intersect", does_clip_intersect, "closing_clips.size()", closing_clips.size(), "open_clips.size()", open_clips.size(), "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::update_open_clips (before)", "does_clip_intersect", does_clip_intersect, "closing_clips.size()", closing_clips.size(), "open_clips.size()", open_clips.size(), "", -1, "", -1, "", -1); // is clip already in list? bool clip_found = open_clips.count(clip); @@ -522,24 +522,19 @@ void Timeline::update_open_clips(Clip *clip, bool does_clip_intersect) // Add clip to 'opened' list, because it's missing open_clips[clip] = clip; - // Set debug mode (if needed) - if (debug) - // Also set each Clip's reader to debug mode - clip->Reader()->debug = true; - // Open the clip clip->Open(); } // Debug output - AppendDebugMethod("Timeline::update_open_clips (after)", "does_clip_intersect", does_clip_intersect, "clip_found", clip_found, "closing_clips.size()", closing_clips.size(), "open_clips.size()", open_clips.size(), "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::update_open_clips (after)", "does_clip_intersect", does_clip_intersect, "clip_found", clip_found, "closing_clips.size()", closing_clips.size(), "open_clips.size()", open_clips.size(), "", -1, "", -1); } // Sort clips by position on the timeline void Timeline::sort_clips() { // Debug output - AppendDebugMethod("Timeline::SortClips", "clips.size()", 
clips.size(), "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::SortClips", "clips.size()", clips.size(), "", -1, "", -1, "", -1, "", -1, "", -1); // sort clips clips.sort(CompareClips()); @@ -600,7 +595,7 @@ tr1::shared_ptr Timeline::GetFrame(long int requested_frame) throw(Reader tr1::shared_ptr frame = final_cache.GetFrame(requested_frame); if (frame) { // Debug output - AppendDebugMethod("Timeline::GetFrame (Cached frame found)", "requested_frame", requested_frame, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Cached frame found)", "requested_frame", requested_frame, "", -1, "", -1, "", -1, "", -1, "", -1); // Return cached frame return frame; @@ -614,7 +609,7 @@ tr1::shared_ptr Timeline::GetFrame(long int requested_frame) throw(Reader frame = final_cache.GetFrame(requested_frame); if (frame) { // Debug output - AppendDebugMethod("Timeline::GetFrame (Cached frame found on 2nd look)", "requested_frame", requested_frame, "", -1, "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Cached frame found on 2nd look)", "requested_frame", requested_frame, "", -1, "", -1, "", -1, "", -1, "", -1); // Return cached frame return frame; @@ -632,7 +627,7 @@ tr1::shared_ptr Timeline::GetFrame(long int requested_frame) throw(Reader omp_set_nested(true); // Debug output - AppendDebugMethod("Timeline::GetFrame", "requested_frame", requested_frame, "minimum_frames", minimum_frames, "OPEN_MP_NUM_PROCESSORS", OPEN_MP_NUM_PROCESSORS, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame", "requested_frame", requested_frame, "minimum_frames", minimum_frames, "OPEN_MP_NUM_PROCESSORS", OPEN_MP_NUM_PROCESSORS, "", -1, "", -1, "", -1); // GENERATE CACHE FOR CLIPS (IN FRAME # SEQUENCE) // Determine all clip frames, and request them in order (to keep resampled audio in sequence) @@ -664,7 +659,7 @@ 
tr1::shared_ptr Timeline::GetFrame(long int requested_frame) throw(Reader for (long int frame_number = requested_frame; frame_number < requested_frame + minimum_frames; frame_number++) { // Debug output - AppendDebugMethod("Timeline::GetFrame (processing frame)", "frame_number", frame_number, "omp_get_thread_num()", omp_get_thread_num(), "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (processing frame)", "frame_number", frame_number, "omp_get_thread_num()", omp_get_thread_num(), "", -1, "", -1, "", -1, "", -1); // Init some basic properties about this frame int samples_in_frame = Frame::GetSamplesPerFrame(frame_number, info.fps, info.sample_rate, info.channels); @@ -675,7 +670,7 @@ tr1::shared_ptr Timeline::GetFrame(long int requested_frame) throw(Reader new_frame->ChannelsLayout(info.channel_layout); // Debug output - AppendDebugMethod("Timeline::GetFrame (Adding solid color)", "frame_number", frame_number, "info.width", info.width, "info.height", info.height, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Adding solid color)", "frame_number", frame_number, "info.width", info.width, "info.height", info.height, "", -1, "", -1, "", -1); // Add Background Color to 1st layer (if animated or not black) if ((color.red.Points.size() > 1 || color.green.Points.size() > 1 || color.blue.Points.size() > 1) || @@ -686,7 +681,7 @@ tr1::shared_ptr Timeline::GetFrame(long int requested_frame) throw(Reader float requested_time = calculate_time(frame_number, info.fps); // Debug output - AppendDebugMethod("Timeline::GetFrame (Loop through clips)", "frame_number", frame_number, "requested_time", requested_time, "clips.size()", clips.size(), "nearby_clips.size()", nearby_clips.size(), "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Loop through clips)", "frame_number", frame_number, "requested_time", requested_time, "clips.size()", clips.size(), 
"nearby_clips.size()", nearby_clips.size(), "", -1, "", -1); // Find Clips near this time for (int clip_index = 0; clip_index < nearby_clips.size(); clip_index++) @@ -698,7 +693,7 @@ tr1::shared_ptr Timeline::GetFrame(long int requested_frame) throw(Reader bool does_clip_intersect = (clip->Position() <= requested_time && clip->Position() + clip->Duration() >= requested_time); // Debug output - AppendDebugMethod("Timeline::GetFrame (Does clip intersect)", "frame_number", frame_number, "requested_time", requested_time, "clip->Position()", clip->Position(), "clip->Duration()", clip->Duration(), "does_clip_intersect", does_clip_intersect, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Does clip intersect)", "frame_number", frame_number, "requested_time", requested_time, "clip->Position()", clip->Position(), "clip->Duration()", clip->Duration(), "does_clip_intersect", does_clip_intersect, "", -1); // Clip is visible if (does_clip_intersect) @@ -721,19 +716,19 @@ tr1::shared_ptr Timeline::GetFrame(long int requested_frame) throw(Reader int clip_frame_number = round(time_diff * info.fps.ToFloat()) + 1; // Debug output - AppendDebugMethod("Timeline::GetFrame (Calculate clip's frame #)", "time_diff", time_diff, "requested_time", requested_time, "clip->Position()", clip->Position(), "clip->Start()", clip->Start(), "info.fps.ToFloat()", info.fps.ToFloat(), "clip_frame_number", clip_frame_number); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Calculate clip's frame #)", "time_diff", time_diff, "requested_time", requested_time, "clip->Position()", clip->Position(), "clip->Start()", clip->Start(), "info.fps.ToFloat()", info.fps.ToFloat(), "clip_frame_number", clip_frame_number); // Add clip's frame as layer add_layer(new_frame, clip, clip_frame_number, frame_number, is_top_clip); } else // Debug output - AppendDebugMethod("Timeline::GetFrame (clip does not intersect)", "frame_number", frame_number, "requested_time", requested_time, 
"does_clip_intersect", does_clip_intersect, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (clip does not intersect)", "frame_number", frame_number, "requested_time", requested_time, "does_clip_intersect", does_clip_intersect, "", -1, "", -1, "", -1); } // end clip loop // Debug output - AppendDebugMethod("Timeline::GetFrame (Add frame to cache)", "frame_number", frame_number, "info.width", info.width, "info.height", info.height, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Add frame to cache)", "frame_number", frame_number, "info.width", info.width, "info.height", info.height, "", -1, "", -1, "", -1); // Add final frame to cache final_cache.Add(frame_number, new_frame); @@ -742,7 +737,7 @@ tr1::shared_ptr Timeline::GetFrame(long int requested_frame) throw(Reader } // end parallel // Debug output - AppendDebugMethod("Timeline::GetFrame (end parallel region)", "requested_frame", requested_frame, "omp_get_thread_num()", omp_get_thread_num(), "", -1, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (end parallel region)", "requested_frame", requested_frame, "omp_get_thread_num()", omp_get_thread_num(), "", -1, "", -1, "", -1, "", -1); // Return frame (or blank frame) return final_cache.GetFrame(requested_frame); @@ -776,7 +771,7 @@ vector Timeline::find_intersecting_clips(long int requested_frame, int nu (clip->Position() > min_requested_time && clip->Position() <= max_requested_time); // Debug output - AppendDebugMethod("Timeline::find_intersecting_clips (Is clip near or intersecting)", "requested_frame", requested_frame, "min_requested_time", min_requested_time, "max_requested_time", max_requested_time, "clip->Position()", clip->Position(), "clip_duration", clip_duration, "does_clip_intersect", does_clip_intersect); + ZmqLogger::Instance()->AppendDebugMethod("Timeline::find_intersecting_clips (Is clip near or intersecting)", "requested_frame", 
requested_frame, "min_requested_time", min_requested_time, "max_requested_time", max_requested_time, "clip->Position()", clip->Position(), "clip_duration", clip_duration, "does_clip_intersect", does_clip_intersect); // Open (or schedule for closing) this clip, based on if it's intersecting or not #pragma omp critical (reader_lock) diff --git a/src/WriterBase.cpp b/src/WriterBase.cpp index 4314e113..65a10f7d 100644 --- a/src/WriterBase.cpp +++ b/src/WriterBase.cpp @@ -58,55 +58,6 @@ WriterBase::WriterBase() info.channel_layout = LAYOUT_MONO; info.audio_stream_index = -1; info.audio_timebase = Fraction(); - - // Initialize debug - debug = false; -} - -// Append debug information as JSON -void WriterBase::AppendDebugMethod(string method_name, string arg1_name, float arg1_value, - string arg2_name, float arg2_value, - string arg3_name, float arg3_value, - string arg4_name, float arg4_value, - string arg5_name, float arg5_value, - string arg6_name, float arg6_value) -{ - if (!debug) - // Don't do anything - return; - - // Output to standard output - #pragma omp critical (debug_output) - { - stringstream message; - message << fixed << setprecision(4); - message << method_name << " ("; - - // Add attributes to method JSON - if (arg1_name.length() > 0) - message << arg1_name << "=" << arg1_value; - - if (arg2_name.length() > 0) - message << ", " << arg2_name << "=" << arg2_value; - - if (arg3_name.length() > 0) - message << ", " << arg3_name << "=" << arg3_value; - - if (arg4_name.length() > 0) - message << ", " << arg4_name << "=" << arg4_value; - - if (arg5_name.length() > 0) - message << ", " << arg5_name << "=" << arg5_value; - - if (arg6_name.length() > 0) - message << ", " << arg6_name << "=" << arg6_value; - - // Output to standard output - message << ")" << endl; - - // Send message through ZMQ - ZmqLogger::Instance()->Log(message.str()); - } } // This method copy's the info struct of a reader, and sets the writer with the same info diff --git a/src/ZmqLogger.cpp 
b/src/ZmqLogger.cpp index 2e3a6c98..4370de5a 100644 --- a/src/ZmqLogger.cpp +++ b/src/ZmqLogger.cpp @@ -48,6 +48,9 @@ ZmqLogger *ZmqLogger::Instance() // Default connection m_pInstance->Connection("tcp://*:5556"); + + // Init enabled to False (force user to call Enable()) + m_pInstance->enabled = false; } return m_pInstance; @@ -96,6 +99,10 @@ void ZmqLogger::Connection(string new_connection) void ZmqLogger::Log(string message) { + if (!enabled) + // Don't do anything + return; + // Create a scoped lock, allowing only a single thread to run the following code at one time const GenericScopedLock lock(loggerCriticalSection); @@ -141,4 +148,49 @@ void ZmqLogger::Close() publisher->close(); publisher = NULL; } +} + +// Append debug information +void ZmqLogger::AppendDebugMethod(string method_name, string arg1_name, float arg1_value, + string arg2_name, float arg2_value, + string arg3_name, float arg3_value, + string arg4_name, float arg4_value, + string arg5_name, float arg5_value, + string arg6_name, float arg6_value) +{ + if (!enabled) + // Don't do anything + return; + + // Create a scoped lock, allowing only a single thread to run the following code at one time + const GenericScopedLock lock(loggerCriticalSection); + + stringstream message; + message << fixed << setprecision(4); + message << method_name << " ("; + + // Add attributes to method JSON + if (arg1_name.length() > 0) + message << arg1_name << "=" << arg1_value; + + if (arg2_name.length() > 0) + message << ", " << arg2_name << "=" << arg2_value; + + if (arg3_name.length() > 0) + message << ", " << arg3_name << "=" << arg3_value; + + if (arg4_name.length() > 0) + message << ", " << arg4_name << "=" << arg4_value; + + if (arg5_name.length() > 0) + message << ", " << arg5_name << "=" << arg5_value; + + if (arg6_name.length() > 0) + message << ", " << arg6_name << "=" << arg6_value; + + // Output to standard output + message << ")" << endl; + + // Send message through ZMQ + Log(message.str()); } \ No newline 
at end of file diff --git a/src/examples/Example.cpp b/src/examples/Example.cpp index 714bb7dc..d1a423e1 100644 --- a/src/examples/Example.cpp +++ b/src/examples/Example.cpp @@ -54,7 +54,6 @@ int main(int argc, char* argv[]) // Create a timeline Timeline r10(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO); r10.SetJson("{\"tick_pixels\": 100, \"effects\": [{\"reader\": {\"file_size\": \"1658880\", \"width\": 720, \"video_length\": \"2592000\", \"pixel_format\": -1, \"video_bit_rate\": 0, \"fps\": {\"den\": 1, \"num\": 30}, \"has_single_image\": true, \"channel_layout\": 4, \"audio_stream_index\": -1, \"vcodec\": \"\", \"has_video\": true, \"channels\": 0, \"video_timebase\": {\"den\": 30, \"num\": 1}, \"duration\": 86400.0, \"audio_timebase\": {\"den\": 1, \"num\": 1}, \"path\": \"/Users/jonathan/apps/openshot-qt-git/src/transitions/common/circle_out_to_in.svg\", \"interlaced_frame\": false, \"display_ratio\": {\"den\": 4, \"num\": 5}, \"type\": \"QtImageReader\", \"video_stream_index\": -1, \"acodec\": \"\", \"height\": 576, \"audio_bit_rate\": 0, \"pixel_ratio\": {\"den\": 1, \"num\": 1}, \"has_audio\": false, \"top_field_first\": true, \"sample_rate\": 0}, \"contrast\": {\"Points\": [{\"interpolation\": 2, \"co\": {\"X\": 1, \"Y\": 3}}]}, \"position\": 0, \"title\": \"Transition\", \"start\": 0, \"layer\": 4, \"type\": \"Mask\", \"replace_image\": false, \"id\": \"Y1SJ85D8HV\", \"brightness\": {\"Points\": [{\"handle_type\": 0, \"interpolation\": 0, \"handle_right\": {\"X\": 96.5999984741211, \"Y\": 1}, \"handle_left\": {\"X\": 1, \"Y\": 1}, \"co\": {\"X\": 1, \"Y\": 1}}, {\"handle_type\": 0, \"interpolation\": 0, \"handle_right\": {\"X\": 240, \"Y\": -1}, \"handle_left\": {\"X\": 144.3999938964844, \"Y\": -1}, \"co\": {\"X\": 1249.92, \"Y\": -1}}]}, \"end\": 52.08}], \"duration\": 300, \"version\": {\"openshot-qt\": \"2.0.5\", \"libopenshot\": \"0.0.9\"}, \"markers\": [], \"export_path\": \"\", \"clips\": [{\"reader\": {\"file_size\": \"7608204\", \"width\": 
1280, \"video_length\": \"1253\", \"pixel_format\": 0, \"video_bit_rate\": 1165807, \"fps\": {\"den\": 1, \"num\": 24}, \"has_single_image\": false, \"channel_layout\": 3, \"audio_stream_index\": 1, \"vcodec\": \"h264\", \"has_video\": true, \"channels\": 2, \"video_timebase\": {\"den\": 24, \"num\": 1}, \"duration\": 51.9466667175293, \"audio_timebase\": {\"den\": 48000, \"num\": 1}, \"path\": \"/Users/jonathan/Movies/sintel_trailer-720p.mp4\", \"interlaced_frame\": false, \"display_ratio\": {\"den\": 9, \"num\": 16}, \"type\": \"FFmpegReader\", \"video_stream_index\": 0, \"acodec\": \"aac\", \"height\": 720, \"audio_bit_rate\": 126694, \"pixel_ratio\": {\"den\": 1, \"num\": 1}, \"has_audio\": true, \"top_field_first\": true, \"sample_rate\": 48000}, \"rotation\": {\"Points\": [{\"interpolation\": 2, \"co\": {\"X\": 1.0, \"Y\": 0.0}}]}, \"perspective_c2_x\": {\"Points\": [{\"interpolation\": 2, \"co\": {\"X\": 1.0, \"Y\": -1.0}}]}, \"perspective_c2_y\": {\"Points\": [{\"interpolation\": 2, \"co\": {\"X\": 1.0, \"Y\": -1.0}}]}, \"shear_x\": {\"Points\": [{\"interpolation\": 2, \"co\": {\"X\": 1.0, \"Y\": 0.0}}]}, \"shear_y\": {\"Points\": [{\"interpolation\": 2, \"co\": {\"X\": 1.0, \"Y\": 0.0}}]}, \"effects\": [], \"anchor\": 0, \"perspective_c4_y\": {\"Points\": [{\"interpolation\": 2, \"co\": {\"X\": 1.0, \"Y\": -1.0}}]}, \"location_y\": {\"Points\": [{\"interpolation\": 2, \"co\": {\"X\": 1.0, \"Y\": 0.0}}]}, \"location_x\": {\"Points\": [{\"interpolation\": 2, \"co\": {\"X\": 1.0, \"Y\": 0.0}}]}, \"crop_width\": {\"Points\": [{\"interpolation\": 2, \"co\": {\"X\": 1.0, \"Y\": -1.0}}]}, \"scale_x\": {\"Points\": [{\"interpolation\": 2, \"co\": {\"X\": 1.0, \"Y\": 1.0}}]}, \"position\": 0, \"gravity\": 4, \"crop_x\": {\"Points\": [{\"interpolation\": 2, \"co\": {\"X\": 1.0, \"Y\": 0.0}}]}, \"crop_height\": {\"Points\": [{\"interpolation\": 2, \"co\": {\"X\": 1.0, \"Y\": -1.0}}]}, \"layer\": 4, \"duration\": 51.9466667175293, \"perspective_c3_y\": {\"Points\": 
[{\"interpolation\": 2, \"co\": {\"X\": 1.0, \"Y\": -1.0}}]}, \"perspective_c3_x\": {\"Points\": [{\"interpolation\": 2, \"co\": {\"X\": 1.0, \"Y\": -1.0}}]}, \"title\": \"sintel_trailer-720p.mp4\", \"file_id\": \"J6VK47X5IS\", \"perspective_c4_x\": {\"Points\": [{\"interpolation\": 2, \"co\": {\"X\": 1.0, \"Y\": -1.0}}]}, \"perspective_c1_y\": {\"Points\": [{\"interpolation\": 2, \"co\": {\"X\": 1.0, \"Y\": -1.0}}]}, \"perspective_c1_x\": {\"Points\": [{\"interpolation\": 2, \"co\": {\"X\": 1.0, \"Y\": -1.0}}]}, \"time\": {\"Points\": [{\"interpolation\": 2, \"co\": {\"X\": 1.0, \"Y\": 0.0}}]}, \"scale_y\": {\"Points\": [{\"interpolation\": 2, \"co\": {\"X\": 1.0, \"Y\": 1.0}}]}, \"id\": \"QLJJ0D354R\", \"crop_y\": {\"Points\": [{\"interpolation\": 2, \"co\": {\"X\": 1.0, \"Y\": 0.0}}]}, \"image\": \".openshot_qt/thumbnail/J6VK47X5IS.png\", \"alpha\": {\"Points\": [{\"interpolation\": 2, \"co\": {\"X\": 1.0, \"Y\": 1.0}}]}, \"start\": 0, \"scale\": 1, \"waveform\": false, \"wave_color\": {\"red\": {\"Points\": [{\"handle_type\": 0, \"interpolation\": 0, \"handle_left\": {\"X\": 1.0, \"Y\": 0.0}, \"handle_right\": {\"X\": 1.0, \"Y\": 0.0}, \"co\": {\"X\": 1.0, \"Y\": 0.0}}]}, \"alpha\": {\"Points\": [{\"handle_type\": 0, \"interpolation\": 0, \"handle_left\": {\"X\": 1.0, \"Y\": 255.0}, \"handle_right\": {\"X\": 1.0, \"Y\": 255.0}, \"co\": {\"X\": 1.0, \"Y\": 255.0}}]}, \"blue\": {\"Points\": [{\"handle_type\": 0, \"interpolation\": 0, \"handle_left\": {\"X\": 1.0, \"Y\": 255.0}, \"handle_right\": {\"X\": 1.0, \"Y\": 255.0}, \"co\": {\"X\": 1.0, \"Y\": 255.0}}]}, \"green\": {\"Points\": [{\"handle_type\": 0, \"interpolation\": 0, \"handle_left\": {\"X\": 1.0, \"Y\": 123.0}, \"handle_right\": {\"X\": 1.0, \"Y\": 123.0}, \"co\": {\"X\": 1.0, \"Y\": 123.0}}]}}, \"volume\": {\"Points\": [{\"interpolation\": 2, \"co\": {\"X\": 1.0, \"Y\": 1.0}}]}, \"end\": 51.9466667175293}], \"settings\": {}, \"fps\": {\"den\": 1, \"num\": 24}, \"id\": \"T0\", \"channel_layout\": 3, 
\"playhead_position\": 0, \"layers\": [{\"number\": 0, \"y\": 0, \"label\": \"\", \"id\": \"L0\"}, {\"number\": 1, \"y\": 0, \"label\": \"\", \"id\": \"L1\"}, {\"number\": 2, \"y\": 0, \"label\": \"\", \"id\": \"L2\"}, {\"number\": 3, \"y\": 0, \"label\": \"\", \"id\": \"L3\"}, {\"number\": 4, \"y\": 0, \"label\": \"\", \"id\": \"L4\"}], \"height\": 720, \"progress\": [], \"width\": 1280, \"channels\": 2, \"profile\": \"HDV 720 24p\", \"scale\": 16, \"files\": [{\"file_size\": \"7608204\", \"width\": 1280, \"video_length\": \"1253\", \"pixel_format\": 0, \"video_bit_rate\": 1165807, \"fps\": {\"den\": 1, \"num\": 24}, \"has_single_image\": false, \"channel_layout\": 3, \"audio_stream_index\": 1, \"vcodec\": \"h264\", \"has_video\": true, \"channels\": 2, \"video_timebase\": {\"den\": 24, \"num\": 1}, \"duration\": 51.9466667175293, \"audio_timebase\": {\"den\": 48000, \"num\": 1}, \"path\": \"/Users/jonathan/Movies/sintel_trailer-720p.mp4\", \"interlaced_frame\": false, \"media_type\": \"video\", \"display_ratio\": {\"den\": 9, \"num\": 16}, \"type\": \"FFmpegReader\", \"video_stream_index\": 0, \"id\": \"J6VK47X5IS\", \"acodec\": \"aac\", \"height\": 720, \"audio_bit_rate\": 126694, \"pixel_ratio\": {\"den\": 1, \"num\": 1}, \"has_audio\": true, \"top_field_first\": true, \"sample_rate\": 48000}], \"sample_rate\": 44100}"); - r10.debug = false; // Open Timeline r10.Open(); @@ -116,7 +115,6 @@ int main(int argc, char* argv[]) // // FFmpegReader r110("/home/jonathan/Videos/PlaysTV/Team Fortress 2/2015_07_06_22_43_16-ses.mp4"); // r110.Open(); -//// r110.debug = false; //// r110.DisplayInfo(); //// FrameMapper m110(&r110, Fraction(24,1), PULLDOWN_NONE, 48000, 2, LAYOUT_STEREO); // @@ -168,7 +166,6 @@ int main(int argc, char* argv[]) // c110.Open(); // // Timeline t10(1280, 720, Fraction(24,1), 22050, 2, LAYOUT_STEREO); -// t10.debug = false; // //Clip c20("/home/jonathan/Pictures/DSC00660.JPG"); // //c20.End(1000.0); // //c20.Layer(-1); @@ -222,12 +219,10 @@ int 
main(int argc, char* argv[]) // Mapper //FrameMapper map(&r9, Fraction(24,1), PULLDOWN_NONE, 48000, 2, LAYOUT_STEREO); //map.DisplayInfo(); - //map.debug = true; //map.Open(); /* WRITER ---------------- */ FFmpegWriter w9("C:\\Users\\Jonathan\\test-output.avi"); - w9.debug = false; //ImageWriter w9("/home/jonathan/output.gif"); // Set options