diff --git a/include/Clip.h b/include/Clip.h
index 6de0a954..b3d8d1e1 100644
--- a/include/Clip.h
+++ b/include/Clip.h
@@ -40,6 +40,8 @@
 #include "ClipBase.h"
 #include "Color.h"
 #include "Enums.h"
+#include "EffectBase.h"
+#include "Effects.h"
 #include "FFmpegReader.h"
 #include "Fraction.h"
 #include "FrameMapper.h"
@@ -55,6 +57,17 @@ using namespace openshot;
 
 namespace openshot {
 
+	/// Comparison method for sorting effect pointers (by Position, Layer, and Order). Effects are sorted
+	/// from lowest layer to top layer (since that is the sequence in which clips are combined), and then by
+	/// position, and then by effect order.
+	struct CompareClipEffects{
+		bool operator()( EffectBase* lhs, EffectBase* rhs){
+			if( lhs->Layer() < rhs->Layer() ) return true;
+			if( lhs->Layer() == rhs->Layer() && lhs->Position() < rhs->Position() ) return true;
+			if( lhs->Layer() == rhs->Layer() && lhs->Position() == rhs->Position() && lhs->Order() > rhs->Order() ) return true;
+			return false;
+	}};
+
 	/**
 	 * @brief This class represents a clip (used to arrange readers on the timeline)
 	 *
@@ -91,6 +104,7 @@ namespace openshot {
 	class Clip : public ClipBase {
 	private:
 		bool waveform; ///< Should a waveform be used instead of the clip's image
+		list<EffectBase*> effects; ///<List of effects on this clip
@@ -98,6 +112,9 @@ namespace openshot {
+		/// Apply effects to the source frame (if any)
+		tr1::shared_ptr<Frame> apply_effects(tr1::shared_ptr<Frame> frame);
+
 		/// Get file extension
 		string get_file_extension(string path);
@@ -111,6 +128,9 @@ namespace openshot {
 		/// Init default settings for a clip
 		void init_settings();
 
+		/// Sort effects by order
+		void sort_effects();
+
 		/// Reverse an audio buffer
 		void reverse_buffer(juce::AudioSampleBuffer* buffer);
 
@@ -130,9 +150,16 @@ namespace openshot {
 		/// @param reader The reader to be used by this clip
 		Clip(ReaderBase* reader);
 
+		/// @brief Add an effect to the clip
+		/// @param effect Add an effect to the clip. An effect can modify the audio or video of an openshot::Frame.
+		void AddEffect(EffectBase* effect);
+
 		/// Close the internal reader
 		void Close() throw(ReaderClosed);
 
+		/// Return the list of effects on the clip
+		list<EffectBase*> Effects() { return effects; };
+
 		/// @brief Get an openshot::Frame object for a specific frame number of this timeline.
 		///
 		/// @returns The requested frame (containing the image)
@@ -163,6 +190,10 @@ namespace openshot {
 		/// of all properties at any time)
 		string PropertiesJSON(int requested_frame);
 
+		/// @brief Remove an effect from the clip
+		/// @param effect Remove an effect from the clip.
+		void RemoveEffect(EffectBase* effect);
+
 		/// Waveform property
 		bool Waveform() { return waveform; } ///< Get the waveform property of this clip
 		void Waveform(bool value) { waveform = value; } ///< Set the waveform property of this clip
diff --git a/include/Timeline.h b/include/Timeline.h
index 20896319..03200fe5 100644
--- a/include/Timeline.h
+++ b/include/Timeline.h
@@ -170,6 +170,12 @@ namespace openshot {
 		/// Compare 2 floating point numbers for equality
 		bool isEqual(double a, double b);
 
+		/// Sort clips by position on the timeline
+		void sort_clips();
+
+		/// Sort effects by position on the timeline
+		void sort_effects();
+
 		/// Update the list of 'opened' clips
 		void update_open_clips(Clip *clip, bool is_open);
 
@@ -243,12 +249,6 @@
 		/// @param effect Remove an effect from the timeline.
 		void RemoveEffect(EffectBase* effect);
 
-		/// Sort clips by position on the timeline
-		void SortClips();
-
-		/// Sort effects by position on the timeline
-		void SortEffects();
-
 	};
diff --git a/src/Clip.cpp b/src/Clip.cpp
index b1c3cbaa..6188419f 100644
--- a/src/Clip.cpp
+++ b/src/Clip.cpp
@@ -234,12 +234,30 @@ tr1::shared_ptr<Frame> Clip::GetFrame(int requested_frame) throw(ReaderClosed)
 	if (time.Values.size() > 1)
 		new_frame_number = time.GetInt(requested_frame);
 
+
+
 	// Now that we have re-mapped what frame number is needed, go and get the frame pointer
-	tr1::shared_ptr<Frame> frame = reader->GetFrame(new_frame_number);
+	tr1::shared_ptr<Frame> original_frame = reader->GetFrame(new_frame_number);
+
+	// Create a new frame
+	tr1::shared_ptr<Frame> frame(new Frame(new_frame_number, 1, 1, "#000000", original_frame->GetAudioSamplesCount(), original_frame->GetAudioChannelsCount()));
+	frame->SampleRate(original_frame->SampleRate());
+
+	// Copy the image
+	frame->AddImage(original_frame->GetImage());
+
+	// Loop through each channel, add audio
+	for (int channel = 0; channel < original_frame->GetAudioChannelsCount(); channel++)
+		frame->AddAudio(true, channel, 0, original_frame->GetAudioSamples(channel), original_frame->GetAudioSamplesCount(), 1.0);
+
+	// Get time mapped frame number (used to increase speed, change direction, etc...)
 	tr1::shared_ptr<Frame> new_frame = get_time_mapped_frame(frame, requested_frame);
 
+	// Apply effects to the frame (if any)
+	apply_effects(new_frame);
+
 	// Return processed 'frame'
 	return new_frame;
 }
@@ -600,6 +618,18 @@ Json::Value Clip::JsonValue() {
 	root["perspective_c4_x"] = perspective_c4_x.JsonValue();
 	root["perspective_c4_y"] = perspective_c4_y.JsonValue();
 
+	// Add array of effects
+	root["effects"] = Json::Value(Json::arrayValue);
+
+	// loop through effects
+	list<EffectBase*>::iterator effect_itr;
+	for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr)
+	{
+		// Get effect object from the iterator
+		EffectBase *existing_effect = (*effect_itr);
+		root["effects"].append(existing_effect->JsonValue());
+	}
+
 	if (reader)
 		root["reader"] = reader->JsonValue();
 
@@ -691,6 +721,39 @@ void Clip::SetJsonValue(Json::Value root) {
 		perspective_c4_x.SetJsonValue(root["perspective_c4_x"]);
 	if (!root["perspective_c4_y"].isNull())
 		perspective_c4_y.SetJsonValue(root["perspective_c4_y"]);
+	if (!root["effects"].isNull()) {
+		// Clear existing effects
+		effects.clear();
+
+		// loop through effects
+		for (int x = 0; x < root["effects"].size(); x++) {
+			// Get each effect
+			Json::Value existing_effect = root["effects"][x];
+
+			// Create Effect
+			EffectBase *e = NULL;
+
+			if (!existing_effect["type"].isNull())
+				// Init the matching effect object
+				if (existing_effect["type"].asString() == "ChromaKey")
+					e = new ChromaKey();
+
+				else if (existing_effect["type"].asString() == "Deinterlace")
+					e = new Deinterlace();
+
+				else if (existing_effect["type"].asString() == "Mask")
+					e = new Mask();
+
+				else if (existing_effect["type"].asString() == "Negate")
+					e = new Negate();
+
+			// Load Json into Effect
+			e->SetJsonValue(existing_effect);
+
+			// Add Effect to Clip
+			AddEffect(e);
+		}
+	}
 	if (!root["reader"].isNull()) // does Json contain a reader?
 	{
 		if (!root["reader"]["type"].isNull()) // does the reader Json contain a 'type'?
@@ -749,3 +812,45 @@ void Clip::SetJsonValue(Json::Value root) {
 		}
 	}
 }
+
+// Sort effects by order
+void Clip::sort_effects()
+{
+	// sort effects
+	effects.sort(CompareClipEffects());
+}
+
+// Add an effect to the clip
+void Clip::AddEffect(EffectBase* effect)
+{
+	// Add effect to list
+	effects.push_back(effect);
+
+	// Sort effects
+	sort_effects();
+}
+
+// Remove an effect from the clip
+void Clip::RemoveEffect(EffectBase* effect)
+{
+	effects.remove(effect);
+}
+
+// Apply effects to the source frame (if any)
+tr1::shared_ptr<Frame> Clip::apply_effects(tr1::shared_ptr<Frame> frame)
+{
+	// Find Effects at this position and layer
+	list<EffectBase*>::iterator effect_itr;
+	for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr)
+	{
+		// Get effect object from the iterator
+		EffectBase *effect = (*effect_itr);
+
+		// Apply the effect to this frame
+		frame = effect->GetFrame(frame, frame->number);
+
+	} // end effect loop
+
+	// Return modified frame
+	return frame;
+}
diff --git a/src/Timeline.cpp b/src/Timeline.cpp
index 2dfe05ec..8b16f6c1 100644
--- a/src/Timeline.cpp
+++ b/src/Timeline.cpp
@@ -67,7 +67,7 @@ void Timeline::AddClip(Clip* clip) throw(ReaderClosed)
 	clips.push_back(clip);
 
 	// Sort clips
-	SortClips();
+	sort_clips();
 }
 
 // Add an effect to the timeline
@@ -77,7 +77,7 @@ void Timeline::AddEffect(EffectBase* effect)
 	effects.push_back(effect);
 
 	// Sort effects
-	SortEffects();
+	sort_effects();
 }
 
 // Remove an effect from the timeline
@@ -465,7 +465,7 @@ void Timeline::update_closed_clips()
 }
 
 // Sort clips by position on the timeline
-void Timeline::SortClips()
+void Timeline::sort_clips()
 {
 	// Debug output
 	#pragma omp critical (debug_output)
 }
@@ -476,7 +476,7 @@
 // Sort effects by position on the timeline
-void Timeline::SortEffects()
+void Timeline::sort_effects()
 {
 	// sort clips
 	effects.sort(CompareEffects());
 }
@@ -661,7 +661,7 @@ list<Clip*> Timeline::find_intersecting_clips(int requested_frame, int number_of
 	float max_requested_time =
calculate_time(requested_frame + (number_of_frames - 1), info.fps);
 
 	// Re-Sort Clips (since they likely changed)
-	SortClips();
+	sort_clips();
 
 	// Find Clips at this time
 	list<Clip*>::iterator clip_itr;
@@ -776,10 +776,10 @@ void Timeline::SetJsonValue(Json::Value root) throw(InvalidFile, ReaderClosed) {
 	// Set parent data
 	ReaderBase::SetJsonValue(root);
 
-	// Clear existing clips
-	clips.clear();
+	if (!root["clips"].isNull()) {
+		// Clear existing clips
+		clips.clear();
 
-	if (!root["clips"].isNull())
 		// loop through clips
 		for (int x = 0; x < root["clips"].size(); x++) {
 			// Get each clip
@@ -794,11 +794,12 @@ void Timeline::SetJsonValue(Json::Value root) throw(InvalidFile, ReaderClosed) {
 			// Add Clip to Timeline
 			AddClip(c);
 		}
+	}
 
-	// Clear existing effects
-	effects.clear();
+	if (!root["effects"].isNull()) {
+		// Clear existing effects
+		effects.clear();
 
-	if (!root["effects"].isNull())
 		// loop through effects
 		for (int x = 0; x < root["effects"].size(); x++) {
 			// Get each effect
@@ -827,6 +828,7 @@ void Timeline::SetJsonValue(Json::Value root) throw(InvalidFile, ReaderClosed) {
 			// Add Effect to Timeline
 			AddEffect(e);
 		}
+	}
 }
 
 // Apply a special formatted JSON object, which represents a change to the timeline (insert, update, delete)
diff --git a/src/examples/Example.cpp b/src/examples/Example.cpp
index ff6e317c..1e0393f2 100644
--- a/src/examples/Example.cpp
+++ b/src/examples/Example.cpp
@@ -42,6 +42,18 @@ using namespace tr1;
 int main(int argc, char* argv[])
 {
+
+	Clip c10("/home/jonathan/Videos/sintel_trailer-720p.mp4");
+	c10.Open();
+
+	Negate n;
+	c10.AddEffect(&n);
+
+	tr1::shared_ptr<Frame> f =c10.GetFrame(500);
+	f->Display();
+	return 0;
+
+
 	// Test getting lots of JSON
 	cout << "starting..."
<< endl;
diff --git a/tests/Clip_Tests.cpp b/tests/Clip_Tests.cpp
index 1c931581..540cdedf 100644
--- a/tests/Clip_Tests.cpp
+++ b/tests/Clip_Tests.cpp
@@ -199,3 +199,48 @@ TEST(Clip_Properties)
 
 }
 
+TEST(Clip_Effects)
+{
+	// Load clip with video
+	Clip c10("../../src/examples/sintel_trailer-720p.mp4");
+	c10.Open();
+
+	Negate n;
+	c10.AddEffect(&n);
+
+	// Get frame 500
+	tr1::shared_ptr<Frame> f = c10.GetFrame(500);
+
+	// Get the image data
+	const Magick::PixelPacket* pixels = f->GetPixels(10);
+
+	// Check image properties on scanline 10, pixel 112
+	CHECK_EQUAL(65535, pixels[112].red);
+	CHECK_EQUAL(65535, pixels[112].blue);
+	CHECK_EQUAL(65535, pixels[112].green);
+	CHECK_EQUAL(0, pixels[112].opacity);
+
+	// Check the # of Effects
+	CHECK_EQUAL(1, c10.Effects().size());
+
+
+	// Add a 2nd negate effect
+	Negate n1;
+	c10.AddEffect(&n1);
+
+	// Get frame 500
+	f = c10.GetFrame(500);
+
+	// Get the image data
+	pixels = f->GetPixels(10);
+
+	// Check image properties on scanline 10, pixel 112
+	CHECK_EQUAL(0, pixels[112].red);
+	CHECK_EQUAL(0, pixels[112].blue);
+	CHECK_EQUAL(0, pixels[112].green);
+	CHECK_EQUAL(0, pixels[112].opacity);
+
+	// Check the # of Effects
+	CHECK_EQUAL(2, c10.Effects().size());
+
+}