diff --git a/include/FFmpegReader.h b/include/FFmpegReader.h index 60a23059..2a05bd57 100644 --- a/include/FFmpegReader.h +++ b/include/FFmpegReader.h @@ -118,10 +118,11 @@ namespace openshot map frames; map processing_video_frames; map processing_audio_frames; + map processed_video_frames; + map processed_audio_frames; AudioLocation previous_packet_location; // DEBUG VARIABLES (FOR AUDIO ISSUES) - bool display_debug; int prev_samples; int prev_pts; int pts_total; diff --git a/include/ReaderBase.h b/include/ReaderBase.h index 7d29a56d..f56c773b 100644 --- a/include/ReaderBase.h +++ b/include/ReaderBase.h @@ -88,9 +88,33 @@ namespace openshot */ class ReaderBase { + protected: + /// Section lock for multiple threads CriticalSection getFrameCriticalSection; + /// Debug JSON root + Json::Value debug_root; + + /// Append debug information as JSON + void AppendDebugItem(Json::Value debug_item); + + /// Append debug information as JSON + void AppendDebugMethod(string method_name, string arg1_name, int arg1_value, + string arg2_name, int arg2_value, + string arg3_name, int arg3_value, + string arg4_name, int arg4_value, + string arg5_name, int arg5_value, + string arg6_name, int arg6_value); + public: + + /// Constructor for the base reader, where many things are initialized. + ReaderBase(); + + /// Enable or disable debug output. Output will display on the standard output, and you can + /// optionally invoke the OutputDebugJSON() method, which will format the debug output as JSON. 
+ bool debug; + /// Information about the current media file ReaderInfo info; @@ -100,6 +124,9 @@ namespace openshot /// Display file information in the standard output stream (stdout) void DisplayInfo(); + /// Test method to draw a bitmap on a Qt QGraphicsScene + void DrawFrameOnScene(string path, long _graphics_scene_address); + /// This method is required for all derived classes of ReaderBase, and returns the /// openshot::Frame object, which contains the image and audio information for that /// frame of video. @@ -111,13 +138,6 @@ namespace openshot /// A thread safe version of GetFrame. tr1::shared_ptr GetFrameSafe(int number); - /// Test method to draw a bitmap on a Qt QGraphicsScene - void DrawFrameOnScene(string path, long _graphics_scene_address); - - /// Initialize the values of the ReaderInfo struct. It is important for derived classes to call - /// this method, or the ReaderInfo struct values will not be initialized. - void InitFileInfo(); - /// Determine if reader is open or closed virtual bool IsOpen() = 0; @@ -129,6 +149,10 @@ namespace openshot /// Open the reader (and start consuming resources, such as images or video files) virtual void Open() = 0; + + /// Output debug information as JSON + string OutputDebugJSON(); + }; } diff --git a/include/WriterBase.h b/include/WriterBase.h index 0ea90264..35a487c5 100644 --- a/include/WriterBase.h +++ b/include/WriterBase.h @@ -80,7 +80,30 @@ namespace openshot */ class WriterBase { + protected: + + /// Debug JSON root + Json::Value debug_root; + + /// Append debug information as JSON + void AppendDebugItem(Json::Value debug_item); + + /// Append debug information as JSON + void AppendDebugMethod(string method_name, string arg1_name, int arg1_value, + string arg2_name, int arg2_value, + string arg3_name, int arg3_value, + string arg4_name, int arg4_value, + string arg5_name, int arg5_value, + string arg6_name, int arg6_value); + public: + /// Constructor for WriterBase class, many things are initilized here 
+ WriterBase(); + + /// Enable or disable debug output. Output will display on the standard output, and you can + /// optionally invoke the OutputDebugJSON() method, which will format the debug output as JSON. + bool debug; + /// Information about the current media file WriterInfo info; @@ -93,10 +116,6 @@ namespace openshot /// This method is required for all derived classes of WriterBase. Write a block of frames from a reader. virtual void WriteFrame(ReaderBase* reader, int start, int length) = 0; - /// Initialize the values of the WriterInfo struct. It is important for derived classes to call - /// this method, or the WriterInfo struct values will not be initialized. - void InitFileInfo(); - /// Get and Set JSON methods string Json(); ///< Generate JSON string of this object Json::Value JsonValue(); ///< Generate Json::JsonValue for this object @@ -105,6 +124,9 @@ namespace openshot /// Display file information in the standard output stream (stdout) void DisplayInfo(); + + /// Output debug information as JSON + string OutputDebugJSON(); }; } diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index a6cf2808..3764a8ec 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -279,9 +279,7 @@ set(LIB_INSTALL_DIR lib${LIB_SUFFIX}) # determine correct lib folder # Install primary library INSTALL(TARGETS openshot - LIBRARY DESTINATION ${LIB_INSTALL_DIR} - FRAMEWORK DESTINATION . 
- INCLUDES DESTINATION include ) + LIBRARY DESTINATION ${LIB_INSTALL_DIR} ) INSTALL(FILES ${headers} DESTINATION ${CMAKE_INSTALL_PREFIX}/include/libopenshot ) diff --git a/src/ChunkReader.cpp b/src/ChunkReader.cpp index a57a272e..d7863ae9 100644 --- a/src/ChunkReader.cpp +++ b/src/ChunkReader.cpp @@ -32,9 +32,6 @@ using namespace openshot; ChunkReader::ChunkReader(string path, ChunkVersion chunk_version) throw(InvalidFile, InvalidJSON) : path(path), chunk_size(24 * 3), is_open(false), version(chunk_version), local_reader(NULL) { - // Init FileInfo struct (clear all values) - InitFileInfo(); - // Check if folder exists? if (!does_folder_exist(path)) // Raise exception diff --git a/src/ChunkWriter.cpp b/src/ChunkWriter.cpp index a5200f1f..3a0d079d 100644 --- a/src/ChunkWriter.cpp +++ b/src/ChunkWriter.cpp @@ -33,9 +33,6 @@ ChunkWriter::ChunkWriter(string path, ReaderBase *reader) throw (InvalidFile, In local_reader(reader), path(path), chunk_size(24*3), chunk_count(1), frame_count(1), is_writing(false), default_extension(".webm"), default_vcodec("libvpx"), default_acodec("libvorbis"), last_frame_needed(false) { - // Init FileInfo struct (clear all values) - InitFileInfo(); - // Change codecs to default info.vcodec = default_vcodec; info.acodec = default_acodec; diff --git a/src/DecklinkReader.cpp b/src/DecklinkReader.cpp index a06e00d5..9c5bf94a 100644 --- a/src/DecklinkReader.cpp +++ b/src/DecklinkReader.cpp @@ -32,9 +32,6 @@ using namespace openshot; DecklinkReader::DecklinkReader(int device, int video_mode, int pixel_format, int channels, int sample_depth) throw(DecklinkError) : device(device), is_open(false), g_videoModeIndex(video_mode), g_audioChannels(channels), g_audioSampleDepth(sample_depth) { - // Init FileInfo struct (clear all values) - InitFileInfo(); - // Init decklink variables inputFlags = 0; selectedDisplayMode = bmdModeNTSC; diff --git a/src/DummyReader.cpp b/src/DummyReader.cpp index db0355d8..55172462 100644 --- a/src/DummyReader.cpp +++ 
b/src/DummyReader.cpp @@ -39,9 +39,6 @@ DummyReader::DummyReader() { // Constructor for DummyReader. Pass a framerate and samplerate. DummyReader::DummyReader(Fraction fps, int width, int height, int sample_rate, int channels, float duration) { - // Init FileInfo struct (clear all values) - InitFileInfo(); - // Set key info settings info.has_audio = false; info.has_video = true; diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index ab6c577e..c1d3db1a 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -37,10 +37,7 @@ FFmpegReader::FFmpegReader(string path) throw(InvalidFile, NoStreamsFound, Inval audio_pts_offset(99999), video_pts_offset(99999), path(path), is_video_seek(true), check_interlace(false), check_fps(false), enable_seek(true), rescaler_position(0), num_of_rescalers(32), is_open(false), seek_audio_frame_found(-1), seek_video_frame_found(-1), resampleCtx(NULL), prev_samples(0), prev_pts(0), - pts_total(0), pts_counter(0), display_debug(false), is_duration_known(false), largest_frame_processed(0) { - - // Init FileInfo struct (clear all values) - InitFileInfo(); + pts_total(0), pts_counter(0), is_duration_known(false), largest_frame_processed(0) { // Initialize FFMpeg, and register all formats and codecs av_register_all(); @@ -241,6 +238,13 @@ void FFmpegReader::Close() final_cache.Clear(); working_cache.Clear(); + // Clear processed lists + processed_video_frames.clear(); + processed_audio_frames.clear(); + + // Clear debug json + debug_root.clear(); + // Close the video file avformat_close_input(&pFormatCtx); av_freep(&pFormatCtx); @@ -381,9 +385,6 @@ void FFmpegReader::UpdateVideoInfo() tr1::shared_ptr FFmpegReader::GetFrame(int requested_frame) throw(OutOfBoundsFrame, ReaderClosed, TooManySeeks) { - if (display_debug) - cout << "GET FRAME " << requested_frame << ", last_frame: " << last_frame << endl; - // Check for open reader (or throw exception) if (!is_open) throw ReaderClosed("The FFmpegReader is closed. 
Call Open() before calling this method.", path); @@ -397,11 +398,17 @@ tr1::shared_ptr FFmpegReader::GetFrame(int requested_frame) throw(OutOfBo // Invalid duration of video file throw InvalidFile("Could not detect the duration of the video or audio stream.", path); + // Debug output + AppendDebugMethod("FFmpegReader::GetFrame", "requested_frame", requested_frame, "last_frame", last_frame, "", -1, "", -1, "", -1, "", -1); + // Check the cache for this frame - if (final_cache.Exists(requested_frame)) + if (final_cache.Exists(requested_frame)) { + // Debug output + AppendDebugMethod("FFmpegReader::GetFrame", "returned cached frame", requested_frame, "", -1, "", -1, "", -1, "", -1, "", -1); + // Return the cached frame return final_cache.GetFrame(requested_frame); - + } else { // Frame is not in cache @@ -423,8 +430,6 @@ tr1::shared_ptr FFmpegReader::GetFrame(int requested_frame) throw(OutOfBo else { // Greater than 30 frames away, or backwards, we need to seek to the nearest key frame - cout << " >> TOO FAR, SO SEEK FIRST AND THEN WALK THE STREAM (diff: " << diff << ", requested_frame: " << requested_frame << ", last_frame: " << last_frame << ")" << endl; - //final_cache.Display(); if (enable_seek) // Only seek if enabled Seek(requested_frame); @@ -461,6 +466,9 @@ tr1::shared_ptr FFmpegReader::ReadStream(int requested_frame) // Allow nested OpenMP sections omp_set_nested(true); + // Debug output + AppendDebugMethod("FFmpegReader::ReadStream", "requested_frame", requested_frame, "OPEN_MP_NUM_PROCESSORS", OPEN_MP_NUM_PROCESSORS, "", -1, "", -1, "", -1, "", -1); + #pragma omp parallel { #pragma omp single @@ -563,6 +571,9 @@ tr1::shared_ptr FFmpegReader::ReadStream(int requested_frame) } // end omp single } // end omp parallel + // Debug output + #pragma omp critical (debug_output) + AppendDebugMethod("FFmpegReader::ReadStream (Completed)", "packets_processed", packets_processed, "end_of_stream", end_of_stream, "largest_frame_processed", largest_frame_processed, "", -1, 
"", -1, "", -1); // End of stream? if (end_of_stream) { @@ -657,6 +668,11 @@ bool FFmpegReader::CheckSeek(bool is_video) // Are we seeking for a specific frame? if (is_seeking) { + // Determine if both an audio and video packet have been decoded since the seek happened. + // If not, allow the ReadStream method to keep looping + if ((is_video_seek && seek_video_frame_found == 0) || (!is_video_seek && seek_audio_frame_found == 0)) + return false; + // CHECK VIDEO SEEK? int current_pts = 0; if (is_video && is_video_seek) @@ -665,19 +681,22 @@ bool FFmpegReader::CheckSeek(bool is_video) else if (!is_video && !is_video_seek) current_pts = packet->pts; - cout << "current_pts: " << current_pts << ", seeking_pts: " << seeking_pts << endl; - // determine if we are "before" the requested frame if (current_pts > seeking_pts) { // SEEKED TOO FAR - cout << "Woops! Need to seek backwards further..." << endl; + #pragma omp critical (debug_output) + AppendDebugMethod("FFmpegReader::CheckSeek (Too far, seek again)", "current_pts", current_pts, "seeking_pts", seeking_pts, "seeking_frame", seeking_frame, "", -1, "", -1, "", -1); // Seek again... 
to the nearest Keyframe Seek(seeking_frame - 10); } else { + // SEEK WORKED + #pragma omp critical (debug_output) + AppendDebugMethod("FFmpegReader::CheckSeek (Successful)", "current_pts", current_pts, "seeking_pts", seeking_pts, "seeking_frame", seeking_frame, "", -1, "", -1, "", -1); + // Seek worked, and we are "before" the requested frame is_seeking = false; seeking_frame = 0; @@ -707,10 +726,18 @@ void FFmpegReader::ProcessVideoPacket(int requested_frame) RemoveAVPacket(packet); } + // Debug output + #pragma omp critical (debug_output) + AppendDebugMethod("FFmpegReader::ProcessVideoPacket (Skipped)", "requested_frame", requested_frame, "current_frame", current_frame, "", -1, "", -1, "", -1, "", -1); + // Skip to next frame without decoding or caching return; } + // Debug output + #pragma omp critical (debug_output) + AppendDebugMethod("FFmpegReader::ProcessVideoPacket (Before)", "requested_frame", requested_frame, "current_frame", current_frame, "", -1, "", -1, "", -1, "", -1); + // Init some things local (for OpenMP) PixelFormat pix_fmt = pCodecCtx->pix_fmt; int height = info.height; @@ -784,7 +811,14 @@ void FFmpegReader::ProcessVideoPacket(int requested_frame) // Remove video frame from list of processing video frames #pragma omp critical (processing_list) + { processing_video_frames.erase(current_frame); + processed_video_frames[current_frame] = current_frame; + } + + // Debug output + #pragma omp critical (debug_output) + AppendDebugMethod("FFmpegReader::ProcessVideoPacket (After)", "requested_frame", requested_frame, "current_frame", current_frame, "f->number", f->number, "", -1, "", -1, "", -1); } // end omp task @@ -801,6 +835,10 @@ void FFmpegReader::ProcessAudioPacket(int requested_frame, int target_frame, int // Remove packet RemoveAVPacket(packet); + // Debug output + #pragma omp critical (debug_output) + AppendDebugMethod("FFmpegReader::ProcessAudioPacket (Skipped)", "requested_frame", requested_frame, "target_frame", target_frame, 
"starting_sample", starting_sample, "", -1, "", -1, "", -1); + // Skip to next frame without decoding or caching return; } @@ -817,6 +855,10 @@ void FFmpegReader::ProcessAudioPacket(int requested_frame, int target_frame, int if (!seek_audio_frame_found) seek_audio_frame_found = target_frame; + // Debug output + #pragma omp critical (debug_output) + AppendDebugMethod("FFmpegReader::ProcessAudioPacket (Before)", "requested_frame", requested_frame, "target_frame", target_frame, "starting_sample", starting_sample, "", -1, "", -1, "", -1); + // Allocate audio buffer int16_t *audio_buf = new int16_t[AVCODEC_MAX_AUDIO_FRAME_SIZE + FF_INPUT_BUFFER_PADDING_SIZE]; @@ -847,8 +889,12 @@ void FFmpegReader::ProcessAudioPacket(int requested_frame, int target_frame, int double audio_seconds = double(adjusted_pts) * info.audio_timebase.ToDouble(); double sample_seconds = float(pts_total) / info.sample_rate; - if (display_debug) - cout << pts_counter << ") PTS: " << adjusted_pts << ", Offset: " << audio_pts_offset << ", PTS Diff: " << (adjusted_pts - prev_pts) << ", Samples: " << pts_remaining_samples << ", Sample PTS ratio: " << (float(adjusted_pts - prev_pts) / pts_remaining_samples) << ", Sample Diff: " << (pts_remaining_samples - prev_samples) << ", Total: " << pts_total << ", PTS Seconds: " << audio_seconds << ", Sample Seconds: " << sample_seconds << ", Seconds Diff: " << (audio_seconds - sample_seconds) << ", raw samples: " << packet_samples << endl; + // Debug output + #pragma omp critical (debug_output) + { + AppendDebugMethod("FFmpegReader::ProcessAudioPacket (Decode Info A)", "pts_counter", pts_counter, "PTS", adjusted_pts, "Offset", audio_pts_offset, "PTS Diff", adjusted_pts - prev_pts, "Samples", pts_remaining_samples, "Sample PTS ratio", float(adjusted_pts - prev_pts) / pts_remaining_samples); + AppendDebugMethod("FFmpegReader::ProcessAudioPacket (Decode Info B)", "Sample Diff", pts_remaining_samples - prev_samples, "Total", pts_total, "PTS Seconds", 
audio_seconds, "Sample Seconds", sample_seconds, "Seconds Diff", audio_seconds - sample_seconds, "raw samples", packet_samples); + } // DEBUG (FOR AUDIO ISSUES) prev_pts = adjusted_pts; @@ -1031,10 +1077,16 @@ void FFmpegReader::ProcessAudioPacket(int requested_frame, int target_frame, int #pragma omp critical (processing_list) { // Update all frames as completed - for (int f = target_frame; f < starting_frame_number; f++) + for (int f = target_frame; f < starting_frame_number; f++) { processing_audio_frames.erase(f); + processed_audio_frames[f] = f; + } } + // Debug output + #pragma omp critical (debug_output) + AppendDebugMethod("FFmpegReader::ProcessAudioPacket (After)", "requested_frame", requested_frame, "starting_frame", target_frame, "end_frame", starting_frame_number, "", -1, "", -1, "", -1); + } // end task } @@ -1044,17 +1096,23 @@ void FFmpegReader::ProcessAudioPacket(int requested_frame, int target_frame, int // Seek to a specific frame. This is not always frame accurate, it's more of an estimation on many codecs. 
void FFmpegReader::Seek(int requested_frame) throw(TooManySeeks) { - cout << "SEEK TO " << requested_frame << endl; - // Adjust for a requested frame that is too small or too large if (requested_frame < 1) requested_frame = 1; if (requested_frame > info.video_length) requested_frame = info.video_length; + // Debug output + #pragma omp critical (debug_output) + AppendDebugMethod("FFmpegReader::Seek", "requested_frame", requested_frame, "seek_count", seek_count, "last_frame", last_frame, "", -1, "", -1, "", -1); + // Clear working cache (since we are seeking to another location in the file) working_cache.Clear(); + // Clear processed lists + processed_video_frames.clear(); + processed_audio_frames.clear(); + // Reset the last frame variable last_frame = 0; @@ -1254,7 +1312,8 @@ AudioLocation FFmpegReader::GetAudioPTSLocation(int pts) AudioLocation location = {whole_frame, sample_start}; // Compare to previous audio packet (and fix small gaps due to varying PTS timestamps) - if (previous_packet_location.frame != -1 && location.is_near(previous_packet_location, samples_per_frame, samples_per_frame)) + if (previous_packet_location.frame != -1 && location.is_near(previous_packet_location, samples_per_frame, samples_per_frame) && + (location.frame != previous_packet_location.frame && location.sample_start != previous_packet_location.sample_start)) { int orig_frame = location.frame; int orig_start = location.sample_start; @@ -1272,11 +1331,11 @@ AudioLocation FFmpegReader::GetAudioPTSLocation(int pts) location.frame++; } - if (display_debug) - cout << "AUDIO GAP DETECTED!!! 
Changing frame " << orig_frame << ":" << orig_start << " to frame " << location.frame << ":" << location.sample_start << endl; + // Debug output + #pragma omp critical (debug_output) + AppendDebugMethod("FFmpegReader::GetAudioPTSLocation (Audio Gap Detected)", "Source Frame", orig_frame, "Source Audio Sample", orig_start, "Target Frame", location.frame, "Target Audio Sample", location.sample_start, "pts", pts, "", -1); + } - else if (display_debug) - cout << "NOT NEAR!!! frame " << location.frame << ":" << location.sample_start << " prev frame " << previous_packet_location.frame << ":" << previous_packet_location.sample_start << endl; // Set previous location previous_packet_location = location; @@ -1317,21 +1376,6 @@ tr1::shared_ptr FFmpegReader::CreateFrame(int requested_frame) void FFmpegReader::CheckWorkingFrames(bool end_of_stream) { - // Get the smallest processing video and audio frame numbers - int smallest_video_frame = 1; - int smallest_audio_frame = 1; - #pragma omp critical (processing_list) - { - smallest_video_frame = GetSmallestVideoFrame(); // Adjust to be sure the frame is completed - smallest_audio_frame = GetSmallestAudioFrame(); // Adjust to be sure the frame is completed - - // Adjust for video only, or audio only - if (!info.has_video) - smallest_video_frame = smallest_audio_frame; - if (!info.has_audio) - smallest_audio_frame = smallest_video_frame; - } - // Loop through all working queue frames while (true) { @@ -1342,8 +1386,12 @@ void FFmpegReader::CheckWorkingFrames(bool end_of_stream) // Get the front frame of working cache tr1::shared_ptr f(working_cache.GetSmallestFrame()); - bool is_video_ready = (f->number < smallest_video_frame); - bool is_audio_ready = (f->number < smallest_audio_frame); + bool is_video_ready = processed_video_frames.count(f->number); + bool is_audio_ready = processed_audio_frames.count(f->number); + + // Debug output + #pragma omp critical (debug_output) + AppendDebugMethod("FFmpegReader::CheckWorkingFrames", 
"frame_number", f->number, "is_video_ready", is_video_ready, "is_audio_ready", is_audio_ready, "processed_video_frames.count(f->number)", processed_video_frames.count(f->number), "processed_audio_frames.count(f->number)", processed_audio_frames.count(f->number), "", -1); // Check if working frame is final if ((!end_of_stream && is_video_ready && is_audio_ready) || end_of_stream || working_cache.Count() >= 200) @@ -1354,6 +1402,10 @@ void FFmpegReader::CheckWorkingFrames(bool end_of_stream) (info.has_video && seek_video_frame_found != 0 && seek_video_frame_found >= f->number)) seek_trash = true; + // Debug output + #pragma omp critical (debug_output) + AppendDebugMethod("FFmpegReader::CheckWorkingFrames (mark frame as final)", "f->number", f->number, "seek_trash", seek_trash, "Working Cache Count", working_cache.Count(), "Final Cache Count", final_cache.Count(), "seek_trash", seek_trash, "", -1); + if (!seek_trash) { // Move frame to final cache diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index 4c9d373b..c6ae4669 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -39,9 +39,6 @@ FFmpegWriter::FFmpegWriter(string path) throw (InvalidFile, InvalidFormat, Inval rescaler_position(0), video_codec(NULL), audio_codec(NULL), is_writing(false), write_video_count(0), write_audio_count(0), original_sample_rate(0), original_channels(0) { - // Init FileInfo struct (clear all values) - InitFileInfo(); - // Disable audio & video (so they can be independently enabled) info.has_audio = false; info.has_video = false; diff --git a/src/FrameMapper.cpp b/src/FrameMapper.cpp index 51730e5e..f4eacb33 100644 --- a/src/FrameMapper.cpp +++ b/src/FrameMapper.cpp @@ -33,10 +33,6 @@ using namespace openshot; FrameMapper::FrameMapper(ReaderBase *reader, Fraction target, PulldownType pulldown) : reader(reader), target(target), pulldown(pulldown), final_cache(820 * 1024) { - - // Init FileInfo struct (clear all values) - InitFileInfo(); - // Set the original frame rate 
from the reader original = Fraction(reader->info.fps.num, reader->info.fps.den); diff --git a/src/ImageReader.cpp b/src/ImageReader.cpp index 55999ee6..3a9cffcb 100644 --- a/src/ImageReader.cpp +++ b/src/ImageReader.cpp @@ -31,9 +31,6 @@ using namespace openshot; ImageReader::ImageReader(string path) throw(InvalidFile) : path(path), is_open(false) { - // Init FileInfo struct (clear all values) - InitFileInfo(); - // Open and Close the reader, to populate it's attributes (such as height, width, etc...) Open(); Close(); diff --git a/src/Main.cpp b/src/Main.cpp index 174a3640..0a2d697d 100644 --- a/src/Main.cpp +++ b/src/Main.cpp @@ -41,6 +41,30 @@ using namespace tr1; int main(int argc, char* argv[]) { + FFmpegReader sinelReader("/home/jonathan/Videos/sintel_trailer-720p.mp4"); + sinelReader.debug = true; + sinelReader.Open(); + + sinelReader.GetFrame(300)->Display(); + sinelReader.GetFrame(301)->Display(); + sinelReader.GetFrame(302)->Display(); + sinelReader.GetFrame(303)->Display(); + + sinelReader.GetFrame(100)->Display(); + sinelReader.GetFrame(101)->Display(); + sinelReader.GetFrame(102)->Display(); + sinelReader.GetFrame(103)->Display(); + + sinelReader.GetFrame(500)->Display(); + sinelReader.GetFrame(501)->Display(); + sinelReader.GetFrame(502)->Display(); + sinelReader.GetFrame(503)->Display(); + + cout << sinelReader.OutputDebugJSON() << endl;; + sinelReader.Close(); + return 0; + + // Timeline t1000(1280, 720, Fraction(24,1), 44100, 2); // t1000.SetJson("{\"width\": 1280, \"clips\": [{\"position\": 0, \"layer\": 4, \"gravity\": 4, \"reader\": {\"width\": 640, \"file_size\": \"10998\", \"video_stream_index\": -1, \"duration\": 86400, \"top_field_first\": true, \"pixel_format\": -1, \"type\": \"ImageReader\", \"pixel_ratio\": {\"num\": 1, \"den\": 1}, \"video_timebase\": {\"num\": 1, \"den\": 30}, \"audio_bit_rate\": 0, \"has_audio\": false, \"sample_rate\": 0, \"audio_stream_index\": -1, \"video_bit_rate\": 0, \"fps\": {\"num\": 30, \"den\": 1}, 
\"channels\": 0, \"vcodec\": \"Joint Photographic Experts Group JFIF format\", \"video_length\": \"2592000\", \"interlaced_frame\": false, \"path\": \"/home/jonathan/Pictures/100_0685 (copy).JPG\", \"height\": 360, \"audio_timebase\": {\"num\": 1, \"den\": 1}, \"display_ratio\": {\"num\": 16, \"den\": 9}, \"has_video\": true, \"acodec\": \"\"}, \"title\": \"40319877_640.jpg\", \"duration\": 86400, \"scale\": 1, \"rotation\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}, \"crop_y\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}, \"volume\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 1}, \"interpolation\": 2}]}, \"time\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}, \"waveform\": false, \"scale_x\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 1}, \"interpolation\": 2}]}, \"wave_color\": {\"blue\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 65280}, \"interpolation\": 2}]}, \"green\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 28672}, \"interpolation\": 2}]}, \"red\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}}, \"crop_width\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"crop_height\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"shear_y\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}, \"id\": \"F8GFFDCHSB\", \"location_y\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}, \"location_x\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}, \"shear_x\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}, \"end\": 23, \"perspective_c2_x\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"perspective_c2_y\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"alpha\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}, \"perspective_c1_x\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, 
\"interpolation\": 2}]}, \"image\": \"/home/jonathan/.openshot_qt/thumbnail/LEUJBK9QMI.png\", \"file_id\": \"LEUJBK9QMI\", \"crop_x\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}, \"perspective_c1_y\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"start\": 0, \"perspective_c4_x\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"perspective_c4_y\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"scale_y\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 1}, \"interpolation\": 2}]}, \"anchor\": 0, \"perspective_c3_x\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"perspective_c3_y\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"$$hashKey\": \"00Y\"}, {\"position\": 8.64, \"image\": \"/home/jonathan/.openshot_qt/thumbnail/LEUJBK9QMI.png\", \"gravity\": 4, \"reader\": {\"width\": 640, \"pixel_ratio\": {\"num\": 1, \"den\": 1}, \"video_stream_index\": -1, \"duration\": 86400, \"video_length\": \"2592000\", \"pixel_format\": -1, \"audio_timebase\": {\"num\": 1, \"den\": 1}, \"file_size\": \"10998\", \"video_timebase\": {\"num\": 1, \"den\": 30}, \"audio_bit_rate\": 0, \"has_audio\": false, \"sample_rate\": 0, \"audio_stream_index\": -1, \"video_bit_rate\": 0, \"fps\": {\"num\": 30, \"den\": 1}, \"channels\": 0, \"vcodec\": \"Joint Photographic Experts Group JFIF format\", \"top_field_first\": true, \"interlaced_frame\": false, \"path\": \"/home/jonathan/Pictures/100_0685 (copy).JPG\", \"height\": 360, \"display_ratio\": {\"num\": 16, \"den\": 9}, \"has_video\": true, \"acodec\": \"\", \"type\": \"ImageReader\"}, \"title\": \"40319877_640.jpg\", \"duration\": 86400, \"scale\": 1, \"rotation\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}, \"crop_y\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}, \"volume\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 1}, \"interpolation\": 2}]}, 
\"time\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}, \"waveform\": false, \"scale_x\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 1}, \"interpolation\": 2}]}, \"wave_color\": {\"blue\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 65280}, \"interpolation\": 2}]}, \"green\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 28672}, \"interpolation\": 2}]}, \"red\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}}, \"crop_width\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"crop_height\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"end\": 24, \"id\": \"CIKGBFTVVY\", \"location_y\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}, \"location_x\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}, \"shear_x\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}, \"shear_y\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}, \"perspective_c2_x\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"perspective_c2_y\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"alpha\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}, \"perspective_c1_x\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"layer\": 3, \"file_id\": \"LEUJBK9QMI\", \"crop_x\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}, \"perspective_c1_y\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"start\": 0, \"perspective_c4_x\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"perspective_c4_y\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"scale_y\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 1}, \"interpolation\": 2}]}, \"anchor\": 0, \"perspective_c3_x\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"perspective_c3_y\": 
{\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"$$hashKey\": \"011\"}, {\"position\": 40.16, \"image\": \"/home/jonathan/.openshot_qt/thumbnail/LEUJBK9QMI.png\", \"gravity\": 4, \"reader\": {\"width\": 640, \"pixel_ratio\": {\"num\": 1, \"den\": 1}, \"video_stream_index\": -1, \"duration\": 86400, \"video_length\": \"2592000\", \"pixel_format\": -1, \"audio_timebase\": {\"num\": 1, \"den\": 1}, \"file_size\": \"10998\", \"video_timebase\": {\"num\": 1, \"den\": 30}, \"audio_bit_rate\": 0, \"has_audio\": false, \"sample_rate\": 0, \"audio_stream_index\": -1, \"video_bit_rate\": 0, \"fps\": {\"num\": 30, \"den\": 1}, \"channels\": 0, \"vcodec\": \"Joint Photographic Experts Group JFIF format\", \"top_field_first\": true, \"interlaced_frame\": false, \"path\": \"/home/jonathan/Pictures/100_0685 (copy).JPG\", \"height\": 360, \"display_ratio\": {\"num\": 16, \"den\": 9}, \"has_video\": true, \"acodec\": \"\", \"type\": \"ImageReader\"}, \"title\": \"40319877_640.jpg\", \"duration\": 86400, \"scale\": 1, \"rotation\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}, \"crop_y\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}, \"volume\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 1}, \"interpolation\": 2}]}, \"time\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}, \"waveform\": false, \"scale_x\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 1}, \"interpolation\": 2}]}, \"wave_color\": {\"blue\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 65280}, \"interpolation\": 2}]}, \"green\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 28672}, \"interpolation\": 2}]}, \"red\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}}, \"crop_width\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"crop_height\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"end\": 47, \"id\": \"HFCX8JEV29\", \"location_y\": {\"Points\": [{\"co\": {\"X\": 0, 
\"Y\": 0}, \"interpolation\": 2}]}, \"location_x\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}, \"shear_x\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}, \"shear_y\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}, \"perspective_c2_x\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"perspective_c2_y\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"alpha\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}, \"perspective_c1_x\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"layer\": 4, \"file_id\": \"LEUJBK9QMI\", \"crop_x\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 0}, \"interpolation\": 2}]}, \"perspective_c1_y\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"start\": 0, \"perspective_c4_x\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"perspective_c4_y\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"scale_y\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": 1}, \"interpolation\": 2}]}, \"anchor\": 0, \"perspective_c3_x\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"perspective_c3_y\": {\"Points\": [{\"co\": {\"X\": 0, \"Y\": -1}, \"interpolation\": 2}]}, \"$$hashKey\": \"01B\"}], \"fps\": 30, \"progress\": [[0, 30, \"rendering\"], [40, 50, \"complete\"], [100, 150, \"complete\"]], \"duration\": 600, \"scale\": 16, \"tick_pixels\": 100, \"settings\": {}, \"files\": [{\"width\": 640, \"path\": \"/home/jonathan/Pictures/100_0685 (copy).JPG\", \"file_size\": \"10998\", \"video_stream_index\": -1, \"duration\": 86400.0, \"top_field_first\": true, \"pixel_format\": -1, \"type\": \"ImageReader\", \"pixel_ratio\": {\"num\": 1, \"den\": 1}, \"video_timebase\": {\"num\": 1, \"den\": 30}, \"audio_bit_rate\": 0, \"has_audio\": false, \"sample_rate\": 0, \"audio_stream_index\": -1, \"video_bit_rate\": 0, 
\"fps\": {\"num\": 30, \"den\": 1}, \"channels\": 0, \"vcodec\": \"Joint Photographic Experts Group JFIF format\", \"video_length\": \"2592000\", \"interlaced_frame\": false, \"media_type\": \"image\", \"id\": \"LEUJBK9QMI\", \"acodec\": \"\", \"audio_timebase\": {\"num\": 1, \"den\": 1}, \"display_ratio\": {\"num\": 16, \"den\": 9}, \"has_video\": true, \"height\": 360}], \"playhead_position\": 0, \"markers\": [{\"location\": 16, \"icon\": \"yellow.png\"}, {\"location\": 120, \"icon\": \"green.png\"}, {\"location\": 300, \"icon\": \"red.png\"}, {\"location\": 10, \"icon\": \"purple.png\"}], \"height\": 720, \"layers\": [{\"y\": 0, \"number\": 4}, {\"y\": 0, \"number\": 3}, {\"y\": 0, \"number\": 2}, {\"y\": 0, \"number\": 1}, {\"y\": 0, \"number\": 0}]}"); // t1000.GetFrame(0)->Display(); diff --git a/src/ReaderBase.cpp b/src/ReaderBase.cpp index 2aa43122..e9d11ea8 100644 --- a/src/ReaderBase.cpp +++ b/src/ReaderBase.cpp @@ -29,9 +29,10 @@ using namespace openshot; -// Initialize the values of the FileInfo struct -void ReaderBase::InitFileInfo() +/// Constructor for the base reader, where many things are initialized. 
+ReaderBase::ReaderBase() { + // Initialize info struct info.has_video = false; info.has_audio = false; info.duration = 0.0; @@ -55,6 +56,75 @@ void ReaderBase::InitFileInfo() info.channels = 0; info.audio_stream_index = -1; info.audio_timebase = Fraction(); + + // Initialize debug mode + debug = false; +} + +// Output debug information as JSON +string ReaderBase::OutputDebugJSON() +{ + // Return formatted string + return debug_root.toStyledString(); +} + + +// Append debug information as JSON +void ReaderBase::AppendDebugItem(Json::Value debug_item) +{ + // Append item to root array + debug_root.append(debug_item); +} + +// Append debug information as JSON +void ReaderBase::AppendDebugMethod(string method_name, string arg1_name, int arg1_value, + string arg2_name, int arg2_value, + string arg3_name, int arg3_value, + string arg4_name, int arg4_value, + string arg5_name, int arg5_value, + string arg6_name, int arg6_value) +{ + if (!debug) + // Don't do anything + return; + + Json::Value debug_item; + debug_item["method"] = method_name; + + // Output to standard output + cout << "Debug: Method: " << method_name << " ("; + + // Add attributes to method JSON + if (arg1_name.length() > 0) { + debug_item[arg1_name] = arg1_value; + cout << arg1_name << "=" << arg1_value; + } + if (arg2_name.length() > 0) { + debug_item[arg2_name] = arg2_value; + cout << ", " << arg2_name << "=" << arg2_value; + } + if (arg3_name.length() > 0) { + debug_item[arg3_name] = arg3_value; + cout << ", " << arg3_name << "=" << arg3_value; + } + if (arg4_name.length() > 0) { + debug_item[arg4_name] = arg4_value; + cout << ", " << arg4_name << "=" << arg4_value; + } + if (arg5_name.length() > 0) { + debug_item[arg5_name] = arg5_value; + cout << ", " << arg5_name << "=" << arg5_value; + } + if (arg6_name.length() > 0) { + debug_item[arg6_name] = arg6_value; + cout << ", " << arg6_name << "=" << arg6_value; + } + + // Output to standard output + cout << ")" << endl; + + // Append method to root 
array + debug_root.append(debug_item); } // Display file information @@ -225,6 +295,7 @@ void ReaderBase::DrawFrameOnScene(string path, long _graphics_scene_address) { } +// Lock reader and get a frame tr1::shared_ptr ReaderBase::GetFrameSafe(int number) { const GenericScopedLock lock(getFrameCriticalSection); diff --git a/src/TextReader.cpp b/src/TextReader.cpp index 59b6e172..ec52a0cc 100644 --- a/src/TextReader.cpp +++ b/src/TextReader.cpp @@ -32,9 +32,6 @@ using namespace openshot; /// Default constructor (blank text) TextReader::TextReader() : width(1024), height(768), x_offset(0), y_offset(0), text(""), font("Arial"), size(10.0), text_color("#ffffff"), background_color("#000000"), is_open(false), gravity(GRAVITY_CENTER) { - // Init FileInfo struct (clear all values) - InitFileInfo(); - // Open and Close the reader, to populate it's attributes (such as height, width, etc...) Open(); Close(); @@ -43,9 +40,6 @@ TextReader::TextReader() : width(1024), height(768), x_offset(0), y_offset(0), t TextReader::TextReader(int width, int height, int x_offset, int y_offset, GravityType gravity, string text, string font, double size, string text_color, string background_color) : width(width), height(height), x_offset(x_offset), y_offset(y_offset), text(text), font(font), size(size), text_color(text_color), background_color(background_color), is_open(false), gravity(gravity) { - // Init FileInfo struct (clear all values) - InitFileInfo(); - // Open and Close the reader, to populate it's attributes (such as height, width, etc...) 
Open(); Close(); diff --git a/src/Timeline.cpp b/src/Timeline.cpp index af9812fc..0ecf6359 100644 --- a/src/Timeline.cpp +++ b/src/Timeline.cpp @@ -47,7 +47,6 @@ Timeline::Timeline(int width, int height, Fraction fps, int sample_rate, int cha final_cache = Cache(2 * bytes); // 20 frames, 4 colors of chars, 2 audio channels of 4 byte floats // Init FileInfo struct (clear all values) - InitFileInfo(); info.width = width; info.height = height; info.fps = fps; diff --git a/src/WriterBase.cpp b/src/WriterBase.cpp index 66331ee7..0e463a79 100644 --- a/src/WriterBase.cpp +++ b/src/WriterBase.cpp @@ -29,9 +29,10 @@ using namespace openshot; -// Initialize the values of the FileInfo struct -void WriterBase::InitFileInfo() +// Constructor +WriterBase::WriterBase() { + // Initialized writer info info.has_video = false; info.has_audio = false; info.duration = 0.0; @@ -55,6 +56,74 @@ void WriterBase::InitFileInfo() info.channels = 0; info.audio_stream_index = -1; info.audio_timebase = Fraction(); + + // Initialize debug + debug = false; +} + +// Output debug information as JSON +string WriterBase::OutputDebugJSON() +{ + // Return formatted string + return debug_root.toStyledString(); +} + +// Append debug information as JSON +void WriterBase::AppendDebugItem(Json::Value debug_item) +{ + // Append item to root array + debug_root.append(debug_item); +} + +// Append debug information as JSON +void WriterBase::AppendDebugMethod(string method_name, string arg1_name, int arg1_value, + string arg2_name, int arg2_value, + string arg3_name, int arg3_value, + string arg4_name, int arg4_value, + string arg5_name, int arg5_value, + string arg6_name, int arg6_value) +{ + if (!debug) + // Don't do anything + return; + + Json::Value debug_item; + debug_item["method"] = method_name; + + // Output to standard output + cout << "Debug: Method: " << method_name << " ("; + + // Add attributes to method JSON + if (arg1_name.length() > 0) { + debug_item[arg1_name] = arg1_value; + cout << arg1_name << 
"=" << arg1_value; + } + if (arg2_name.length() > 0) { + debug_item[arg2_name] = arg2_value; + cout << ", " << arg2_name << "=" << arg2_value; + } + if (arg3_name.length() > 0) { + debug_item[arg3_name] = arg3_value; + cout << ", " << arg3_name << "=" << arg3_value; + } + if (arg4_name.length() > 0) { + debug_item[arg4_name] = arg4_value; + cout << ", " << arg4_name << "=" << arg4_value; + } + if (arg5_name.length() > 0) { + debug_item[arg5_name] = arg5_value; + cout << ", " << arg5_name << "=" << arg5_value; + } + if (arg6_name.length() > 0) { + debug_item[arg6_name] = arg6_value; + cout << ", " << arg6_name << "=" << arg6_value; + } + + // Output to standard output + cout << ")" << endl; + + // Append method to root array + debug_root.append(debug_item); } // This method copy's the info struct of a reader, and sets the writer with the same info