From fc1836684378b75ff537b42de343a0d328b8b38d Mon Sep 17 00:00:00 2001 From: "FeRD (Frank Dana)" Date: Sat, 1 Feb 2020 02:00:43 -0500 Subject: [PATCH 01/14] ZmqLogger: Add optional dumping to stderr Add an openshot::Settings boolean `DEBUG_TO_STDERR`, which when set true will direct all AppendDebugMethod() messages to std::clog, in addition to (possibly) being sent over the ZeroMQ wire. --- include/Settings.h | 3 ++ include/ZmqLogger.h | 23 +++++++------- src/Settings.cpp | 7 +++-- src/ZmqLogger.cpp | 73 +++++++++++++++++++++++++-------------------- 4 files changed, 60 insertions(+), 46 deletions(-) diff --git a/include/Settings.h b/include/Settings.h index 56a84fd7..0a2cfb99 100644 --- a/include/Settings.h +++ b/include/Settings.h @@ -124,6 +124,9 @@ namespace openshot { /// The audio device name to use during playback std::string PLAYBACK_AUDIO_DEVICE_NAME = ""; + /// Whether to dump ZeroMQ debug messages to stderr + bool DEBUG_TO_STDERR = false; + /// Create or get an instance of this logger singleton (invoke the class with this method) static Settings * Instance(); }; diff --git a/include/ZmqLogger.h b/include/ZmqLogger.h index c165299e..5502285a 100644 --- a/include/ZmqLogger.h +++ b/include/ZmqLogger.h @@ -43,6 +43,7 @@ #include #include #include "JuceHeader.h" +#include "Settings.h" namespace openshot { @@ -70,17 +71,17 @@ namespace openshot { zmq::socket_t *publisher; /// Default constructor - ZmqLogger(){}; // Don't allow user to create an instance of this singleton + ZmqLogger(){}; // Don't allow user to create an instance of this singleton #if __GNUC__ >=7 /// Default copy method - ZmqLogger(ZmqLogger const&) = delete; // Don't allow the user to assign this instance + ZmqLogger(ZmqLogger const&) = delete; // Don't allow the user to assign this instance /// Default assignment operator ZmqLogger & operator=(ZmqLogger const&) = delete; // Don't allow the user to assign this instance #else /// Default copy method - ZmqLogger(ZmqLogger const&) {}; // Don't 
allow the user to assign this instance + ZmqLogger(ZmqLogger const&) {}; // Don't allow the user to assign this instance /// Default assignment operator ZmqLogger & operator=(ZmqLogger const&); // Don't allow the user to assign this instance @@ -94,13 +95,15 @@ namespace openshot { static ZmqLogger * Instance(); /// Append debug information - void AppendDebugMethod(std::string method_name, - std::string arg1_name="", float arg1_value=-1.0, - std::string arg2_name="", float arg2_value=-1.0, - std::string arg3_name="", float arg3_value=-1.0, - std::string arg4_name="", float arg4_value=-1.0, - std::string arg5_name="", float arg5_value=-1.0, - std::string arg6_name="", float arg6_value=-1.0); + void AppendDebugMethod( + std::string method_name, + std::string arg1_name="", float arg1_value=-1.0, + std::string arg2_name="", float arg2_value=-1.0, + std::string arg3_name="", float arg3_value=-1.0, + std::string arg4_name="", float arg4_value=-1.0, + std::string arg5_name="", float arg5_value=-1.0, + std::string arg6_name="", float arg6_value=-1.0 + ); /// Close logger (sockets and/or files) void Close(); diff --git a/src/Settings.cpp b/src/Settings.cpp index e48fd981..2261b1ec 100644 --- a/src/Settings.cpp +++ b/src/Settings.cpp @@ -34,14 +34,14 @@ using namespace std; using namespace openshot; -// Global reference to logger +// Global reference to Settings Settings *Settings::m_pInstance = NULL; -// Create or Get an instance of the logger singleton +// Create or Get an instance of the settings singleton Settings *Settings::Instance() { if (!m_pInstance) { - // Create the actual instance of logger only once + // Create the actual instance of Settings only once m_pInstance = new Settings; m_pInstance->HARDWARE_DECODER = 0; m_pInstance->HIGH_QUALITY_SCALING = false; @@ -55,6 +55,7 @@ Settings *Settings::Instance() m_pInstance->HW_DE_DEVICE_SET = 0; m_pInstance->HW_EN_DEVICE_SET = 0; m_pInstance->PLAYBACK_AUDIO_DEVICE_NAME = ""; + m_pInstance->DEBUG_TO_STDERR = false; } 
return m_pInstance; diff --git a/src/ZmqLogger.cpp b/src/ZmqLogger.cpp index 89d2798a..61c0e164 100644 --- a/src/ZmqLogger.cpp +++ b/src/ZmqLogger.cpp @@ -29,12 +29,13 @@ */ #include "../include/ZmqLogger.h" +#include +#include #if USE_RESVG == 1 #include "ResvgQt.h" #endif -using namespace std; using namespace openshot; @@ -64,17 +65,16 @@ ZmqLogger *ZmqLogger::Instance() // This can only happen 1 time or it will crash ResvgRenderer::initLog(); #endif - } return m_pInstance; } // Set the connection for this logger -void ZmqLogger::Connection(string new_connection) +void ZmqLogger::Connection(std::string new_connection) { // Create a scoped lock, allowing only a single thread to run the following code at one time - const GenericScopedLock lock(loggerCriticalSection); + const juce::GenericScopedLock lock(loggerCriticalSection); // Does anything need to happen? if (new_connection == connection) @@ -102,27 +102,27 @@ void ZmqLogger::Connection(string new_connection) publisher->bind(connection.c_str()); } catch (zmq::error_t &e) { - cout << "ZmqLogger::Connection - Error binding to " << connection << ". Switching to an available port." << endl; + std::cout << "ZmqLogger::Connection - Error binding to " << connection << ". Switching to an available port." 
<< std::endl; connection = "tcp://*:*"; publisher->bind(connection.c_str()); } // Sleeping to allow connection to wake up (0.25 seconds) - usleep(250000); + std::this_thread::sleep_for(std::chrono::milliseconds(250)); } -void ZmqLogger::Log(string message) +void ZmqLogger::Log(std::string message) { if (!enabled) // Don't do anything return; // Create a scoped lock, allowing only a single thread to run the following code at one time - const GenericScopedLock lock(loggerCriticalSection); + const juce::GenericScopedLock lock(loggerCriticalSection); // Send message over socket (ZeroMQ) zmq::message_t reply (message.length()); - memcpy (reply.data(), message.c_str(), message.length()); + std::memcpy (reply.data(), message.c_str(), message.length()); publisher->send(reply); // Write to log file (if opened, and force it to write to disk in case of a crash) @@ -131,14 +131,14 @@ void ZmqLogger::Log(string message) } // Log message to a file (if path set) -void ZmqLogger::LogToFile(string message) +void ZmqLogger::LogToFile(std::string message) { // Write to log file (if opened, and force it to write to disk in case of a crash) if (log_file.is_open()) log_file << message << std::flush; } -void ZmqLogger::Path(string new_path) +void ZmqLogger::Path(std::string new_path) { // Update path file_path = new_path; @@ -148,14 +148,14 @@ void ZmqLogger::Path(string new_path) log_file.close(); // Open file (write + append) - log_file.open (file_path.c_str(), ios::out | ios::app); + log_file.open (file_path.c_str(), std::ios::out | std::ios::app); // Get current time and log first message - time_t now = time(0); - tm* localtm = localtime(&now); - log_file << "------------------------------------------" << endl; - log_file << "libopenshot logging: " << asctime(localtm); - log_file << "------------------------------------------" << endl; + std::time_t now = std::time(0); + std::tm* localtm = std::localtime(&now); + log_file << "------------------------------------------" << std::endl; 
+ log_file << "libopenshot logging: " << std::asctime(localtm); + log_file << "------------------------------------------" << std::endl; } void ZmqLogger::Close() @@ -176,27 +176,28 @@ void ZmqLogger::Close() } // Append debug information -void ZmqLogger::AppendDebugMethod(string method_name, - string arg1_name, float arg1_value, - string arg2_name, float arg2_value, - string arg3_name, float arg3_value, - string arg4_name, float arg4_value, - string arg5_name, float arg5_value, - string arg6_name, float arg6_value) +void ZmqLogger::AppendDebugMethod(std::string method_name, + std::string arg1_name, float arg1_value, + std::string arg2_name, float arg2_value, + std::string arg3_name, float arg3_value, + std::string arg4_name, float arg4_value, + std::string arg5_name, float arg5_value, + std::string arg6_name, float arg6_value) { - if (!enabled) + if (!enabled && !openshot::Settings::Instance()->DEBUG_TO_STDERR) // Don't do anything return; { // Create a scoped lock, allowing only a single thread to run the following code at one time - const GenericScopedLock lock(loggerCriticalSection); + const juce::GenericScopedLock lock(loggerCriticalSection); - stringstream message; - message << fixed << setprecision(4); + std::stringstream message; + message << std::fixed << std::setprecision(4); + + // Construct message message << method_name << " ("; - // Add attributes to method JSON if (arg1_name.length() > 0) message << arg1_name << "=" << arg1_value; @@ -215,10 +216,16 @@ void ZmqLogger::AppendDebugMethod(string method_name, if (arg6_name.length() > 0) message << ", " << arg6_name << "=" << arg6_value; - // Output to standard output - message << ")" << endl; + message << ")" << std::endl; - // Send message through ZMQ - Log(message.str()); + if (openshot::Settings::Instance()->DEBUG_TO_STDERR) { + // Print message to stderr + std::clog << message.str(); + } + + if (enabled) { + // Send message through ZMQ + Log(message.str()); + } } } From 
4de0001e530c1c17f384ed6396b3f2a90b49b4fd Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Tue, 6 Oct 2020 03:00:58 -0500 Subject: [PATCH 02/14] Adding initial caption effect, which supports VTT and SubRip formats (limited support, no formating, no regions) --- include/Effects.h | 1 + include/effects/Caption.h | 131 +++++++++++++++ src/CMakeLists.txt | 3 +- src/EffectInfo.cpp | 4 + src/effects/Caption.cpp | 332 ++++++++++++++++++++++++++++++++++++++ 5 files changed, 470 insertions(+), 1 deletion(-) create mode 100644 include/effects/Caption.h create mode 100644 src/effects/Caption.cpp diff --git a/include/Effects.h b/include/Effects.h index 746da4c0..2acddd69 100644 --- a/include/Effects.h +++ b/include/Effects.h @@ -35,6 +35,7 @@ #include "effects/Bars.h" #include "effects/Blur.h" #include "effects/Brightness.h" +#include "effects/Caption.h" #include "effects/ChromaKey.h" #include "effects/ColorShift.h" #include "effects/Crop.h" diff --git a/include/effects/Caption.h b/include/effects/Caption.h new file mode 100644 index 00000000..f2f55126 --- /dev/null +++ b/include/effects/Caption.h @@ -0,0 +1,131 @@ +/** + * @file + * @brief Header file for Caption effect class + * @author Jonathan Thomas + * + * @ref License + */ + +/* LICENSE + * + * Copyright (c) 2008-2019 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. 
+ * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#ifndef OPENSHOT_CAPTION_EFFECT_H +#define OPENSHOT_CAPTION_EFFECT_H + +#include "../EffectBase.h" + +#include +#include +#include +#include +#include "../Color.h" +#include "../Fraction.h" +#include "../Json.h" +#include "../KeyFrame.h" + + + +namespace openshot +{ + + /** + * @brief This class adds captions/text over a video, based on timestamps. You can also animate some limited + * aspects, such as words appearing/disappearing. + * + * Adding captions can be an easy way to generate text overlays through-out a long clip. + */ + class Caption : public EffectBase + { + private: + std::vector matchedCaptions; ///< RegEx to capture cues and text + std::string caption_text; ///< Text of caption + std::string caption_format; ///< Format of caption (application/x-subrip, text/vtt) + bool is_dirty; + + /// Init effect settings + void init_effect_details(); + + /// Process regex capture + void process_regex(); + + + public: + Color color; ///< Color of caption text + Color stroke; ///< Color of text border / stroke + Keyframe stroke_width; ///< Width of text border / stroke + Keyframe font_size; ///< Font size in points + Keyframe left; ///< Size of left bar + Keyframe top; ///< Size of top bar + Keyframe right; ///< Size of right bar + Keyframe bottom; ///< Size of bottom bar + + /// Blank constructor, useful when using Json to load the effect properties + Caption(); + + /// Default constructor, which takes 4 curves and a color. These curves animated the bars over time. 
+ /// + /// @param color The curve to adjust the color of bars + /// @param left The curve to adjust the left bar size (between 0 and 1) + /// @param top The curve to adjust the top bar size (between 0 and 1) + /// @param right The curve to adjust the right bar size (between 0 and 1) + /// @param bottom The curve to adjust the bottom bar size (between 0 and 1) + Caption(Color color, std::string captions, std::string format); + + /// @brief This method is required for all derived classes of ClipBase, and returns a + /// new openshot::Frame object. All Clip keyframes and effects are resolved into + /// pixels. + /// + /// @returns A new openshot::Frame object + /// @param frame_number The frame number (starting at 1) of the clip or effect on the timeline. + std::shared_ptr GetFrame(int64_t frame_number) override { return GetFrame(std::shared_ptr (new Frame()), frame_number); } + + /// @brief This method is required for all derived classes of ClipBase, and returns a + /// modified openshot::Frame object + /// + /// The frame object is passed into this method and used as a starting point (pixels and audio). + /// All Clip keyframes and effects are resolved into pixels. + /// + /// @returns The modified openshot::Frame object + /// @param frame The frame object that needs the clip or effect applied to it + /// @param frame_number The frame number (starting at 1) of the clip or effect on the timeline. 
+		std::shared_ptr<openshot::Frame> GetFrame(std::shared_ptr<openshot::Frame> frame, int64_t frame_number) override;
+
+		// Get and Set caption data
+		std::string CaptionText(); ///< Get the caption string (see VTT format)
+		void CaptionText(std::string new_caption_text); ///< Set the caption string to use (see VTT format)
+		std::string CaptionFormat(); ///< Get the caption format
+		void CaptionFormat(std::string new_caption_format); ///< Set the caption format to use (only VTT format is currently supported)
+
+		/// Get and Set JSON methods
+		std::string Json() const override; ///< Generate JSON string of this object
+		void SetJson(const std::string value) override; ///< Load JSON string into this object
+		Json::Value JsonValue() const override; ///< Generate Json::Value for this object
+		void SetJsonValue(const Json::Value root) override; ///< Load Json::Value into this object
+
+		/// Get all properties for a specific frame (perfect for a UI to display the current state
+		/// of all properties at any time)
+		std::string PropertiesJSON(int64_t requested_frame) const override;
+	};
+
+}
+
+#endif
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index 8cc2b6f2..4f44e347 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -117,7 +117,7 @@ if (ENABLE_BLACKMAGIC)
 endif()
 
 ############### PROFILING #################
-#set(PROFILER "/usr/lib/libprofiler.so.0.3.2")
+#set(PROFILER "/usr/lib/libprofiler.so.0.4.5")
 #set(PROFILER "/usr/lib/libtcmalloc.so.4")
 
 if(CMAKE_VERSION VERSION_LESS 3.3)
@@ -186,6 +186,7 @@ set(EFFECTS_SOURCES
 	effects/Bars.cpp
 	effects/Blur.cpp
 	effects/Brightness.cpp
+	effects/Caption.cpp
 	effects/ChromaKey.cpp
 	effects/ColorShift.cpp
 	effects/Crop.cpp
diff --git a/src/EffectInfo.cpp b/src/EffectInfo.cpp
index 6829f4eb..9593d2f7 100644
--- a/src/EffectInfo.cpp
+++ b/src/EffectInfo.cpp
@@ -53,6 +53,9 @@ EffectBase* EffectInfo::CreateEffect(std::string effect_type) {
 	else if (effect_type == "Brightness")
 		return new Brightness();
 
+	else if (effect_type == "Caption")
+		return new Caption();
+
else if (effect_type == "ChromaKey") return new ChromaKey(); @@ -98,6 +101,7 @@ Json::Value EffectInfo::JsonValue() { root.append(Bars().JsonInfo()); root.append(Blur().JsonInfo()); root.append(Brightness().JsonInfo()); + root.append(Caption().JsonInfo()); root.append(ChromaKey().JsonInfo()); root.append(ColorShift().JsonInfo()); root.append(Crop().JsonInfo()); diff --git a/src/effects/Caption.cpp b/src/effects/Caption.cpp new file mode 100644 index 00000000..dfbebb53 --- /dev/null +++ b/src/effects/Caption.cpp @@ -0,0 +1,332 @@ +/** + * @file + * @brief Source file for Caption effect class + * @author Jonathan Thomas + * + * @ref License + */ + +/* LICENSE + * + * Copyright (c) 2008-2019 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . 
+ */ + +#include "../../include/effects/Caption.h" +#include "../../include/Clip.h" +#include "../../include/Timeline.h" + +using namespace openshot; + +/// Blank constructor, useful when using Json to load the effect properties +Caption::Caption() : color("#ffffff"), stroke("#a9a9a9"), left(0.25), top(0.8), right(0.1), bottom(0.1), stroke_width(0.001), font_size(30.0), is_dirty(true) { + // Init effect properties + init_effect_details(); +} + +// Default constructor +Caption::Caption(Color color, std::string captions, std::string format) : + color(color), caption_text(captions), caption_format(format), stroke("#a9a9a9"), left(0.25), top(0.8), right(0.1), bottom(0.1), stroke_width(0.001), font_size(30.0), is_dirty(true) +{ + // Init effect properties + init_effect_details(); +} + +// Init effect settings +void Caption::init_effect_details() +{ + /// Initialize the values of the EffectInfo struct. + InitEffectInfo(); + + /// Set the effect info + info.class_name = "Caption"; + info.name = "Caption"; + info.description = "Add text captions on top of your video."; + info.has_audio = false; + info.has_video = true; +} + +// Set the caption string to use (see VTT format) +std::string Caption::CaptionText() { + return caption_text; +} + +// Get the caption string +void Caption::CaptionText(std::string new_caption_text) { + caption_text = new_caption_text; + is_dirty = true; +} + +// Set the caption format to use (only VTT format is currently supported) +std::string Caption::CaptionFormat() { + return caption_format; +} + +// Get the caption format +void Caption::CaptionFormat(std::string new_caption_format) { + caption_format = new_caption_format; + is_dirty = true; +} + +// Process regex string only when dirty +void Caption::process_regex() { + if (is_dirty) { + is_dirty = false; + + // Clear existing matches + matchedCaptions.clear(); + + // Parse regex and find all matches + QRegularExpression 
allPathsRegex(QStringLiteral("(\\d{2})?:*(\\d{2}):(\\d{2}).(\\d{2,3})\\s*-->\\s*(\\d{2})?:*(\\d{2}):(\\d{2}).(\\d{2,3})([\\s\\S]*?)\\n(.*?)(?=\\n\\d{2,3}|\\Z)"), QRegularExpression::MultilineOption);
+		QRegularExpressionMatchIterator i = allPathsRegex.globalMatch(QString(caption_text.c_str()));
+		while (i.hasNext()) {
+			QRegularExpressionMatch match = i.next();
+			if (match.hasMatch()) {
+				// Push all match objects into a vector (so we can reverse them later)
+				matchedCaptions.push_back(match);
+			}
+		}
+	}
+}
+
+// This method is required for all derived classes of EffectBase, and returns a
+// modified openshot::Frame object
+std::shared_ptr<openshot::Frame> Caption::GetFrame(std::shared_ptr<openshot::Frame> frame, int64_t frame_number)
+{
+	// Process regex (if needed)
+	process_regex();
+
+	// Get the Clip and Timeline pointers (if available)
+	Clip* clip = (Clip*) ParentClip();
+	Timeline* timeline = NULL;
+	Fraction fps;
+	double scale_factor = 1.0; // amount of scaling needed for text (based on preview window size)
+	// NOTE(review): guard clip before dereferencing — ParentClip() may return NULL
+	if (clip != NULL && clip->ParentTimeline() != NULL) {
+		timeline = (Timeline*) clip->ParentTimeline();
+	} else if (this->ParentTimeline() != NULL) {
+		timeline = (Timeline*) this->ParentTimeline();
+	}
+
+	// Get the FPS from the parent object (Timeline or Clip's Reader)
+	if (timeline != NULL) {
+		fps.num = timeline->info.fps.num;
+		fps.den = timeline->info.fps.den;
+		scale_factor = (double) timeline->preview_width / (double) timeline->info.width;
+	} else if (clip != NULL && clip->Reader() != NULL) {
+		fps.num = clip->Reader()->info.fps.num;
+		fps.den = clip->Reader()->info.fps.den;
+		scale_factor = 1.0;
+	}
+
+	// Get the frame's image
+	std::shared_ptr<QImage> frame_image = frame->GetImage();
+
+	// Load timeline's new frame image into a QPainter
+	QPainter painter(frame_image.get());
+	painter.setRenderHints(QPainter::Antialiasing | QPainter::SmoothPixmapTransform | QPainter::TextAntialiasing, true);
+
+	// Composite a new layer onto the image
+
painter.setCompositionMode(QPainter::CompositionMode_SourceOver); + + // Stroke / border pen + if (stroke_width.GetValue(frame_number) > 0.0) { + QPen pen; + pen.setColor(QColor(QString(stroke.GetColorHex(frame_number).c_str()))); + pen.setWidth(stroke_width.GetValue(frame_number) * scale_factor); + painter.setPen(pen); + } + + // Fill color brush + QBrush brush; + brush.setColor(QColor(QString(color.GetColorHex(frame_number).c_str()))); + brush.setStyle(Qt::SolidPattern); + painter.setBrush(brush); + + // Font options for caption + // TODO: Allow more font options (family, bold, style) + QFont font; + if (font_size.GetValue(frame_number) > 0.0) { + font.setPointSizeF(font_size.GetValue(frame_number) * scale_factor); + } else { + // Font can't be 0 sized + font.setPointSizeF(1.0); + } + + // Loop through matches and find text to display (if any) + for (auto match = matchedCaptions.begin(); match != matchedCaptions.end(); match++) { + + // Build timestamp (00:00:04.000 --> 00:00:06.500) + int64_t start_frame = ((match->captured(1).toFloat() * 60.0 * 60.0 ) + (match->captured(2).toFloat() * 60.0 ) + + match->captured(3).toFloat() + (match->captured(4).toFloat() / 1000.0)) * fps.ToFloat(); + int64_t end_frame = ((match->captured(5).toFloat() * 60.0 * 60.0 ) + (match->captured(6).toFloat() * 60.0 ) + + match->captured(7).toFloat() + (match->captured(8).toFloat() / 1000.0)) * fps.ToFloat(); + + // Get current keyframe values + double left_value = left.GetValue(frame_number); + double top_value = top.GetValue(frame_number); + + // TODO: Use all 4 margins and wrap text + double right_value = right.GetValue(frame_number); + double bottom_value = bottom.GetValue(frame_number); + + // Parse WEBVTT caption format + double starting_x = frame_image->width() * left_value; + double starting_y = frame_image->height() * top_value;; + + // Split multiple lines into separate paths + QStringList lines = match->captured(9).split("\n"); + for(int index = 0; index < lines.length(); 
index++) {
+			// Multi-line
+			QString line = lines[index];
+			// Ignore lines that start with NOTE, or are <= 1 char long
+			// (was `!line.length() <= 1`, which parses as `(!line.length()) <= 1` and is always true)
+			if (!line.startsWith(QStringLiteral("NOTE")) &&
+				!line.isEmpty() && frame_number >= start_frame && frame_number <= end_frame &&
+				line.length() > 1 ) {
+
+				// Location for text
+				QPoint p(starting_x, starting_y);
+
+				// Draw text onto path (for correct border and fill)
+				QPainterPath path1;
+				path1.addText(p, font, line);
+				painter.drawPath(path1);
+
+				// Increment QPoint to height of text (for next line) + padding
+				starting_y += path1.boundingRect().height() + (10.0 * scale_factor);
+			}
+		}
+	}
+
+	// End painter
+	painter.end();
+
+	// return the modified frame
+	return frame;
+}
+
+// Generate JSON string of this object
+std::string Caption::Json() const {
+
+	// Return formatted string
+	return JsonValue().toStyledString();
+}
+
+// Generate Json::Value for this object
+Json::Value Caption::JsonValue() const {
+
+	// Create root json object
+	Json::Value root = EffectBase::JsonValue(); // get parent properties
+	root["type"] = info.class_name;
+	root["color"] = color.JsonValue();
+	root["stroke"] = stroke.JsonValue();
+	root["stroke_width"] = stroke_width.JsonValue();
+	root["font_size"] = font_size.JsonValue();
+	root["left"] = left.JsonValue();
+	root["top"] = top.JsonValue();
+	root["right"] = right.JsonValue();
+	root["bottom"] = bottom.JsonValue();
+	root["caption_text"] = caption_text;
+	root["caption_format"] = caption_format;
+
+	// return JsonValue
+	return root;
+}
+
+// Load JSON string into this object
+void Caption::SetJson(const std::string value) {
+
+	// Parse JSON string into JSON objects
+	try
+	{
+		const Json::Value root = openshot::stringToJson(value);
+		// Set all values that match
+		SetJsonValue(root);
+	}
+	catch (const std::exception& e)
+	{
+		// Error parsing JSON (or missing keys)
+		throw InvalidJSON("JSON is invalid (missing keys or invalid data types)");
+	}
+}
+
+// Load Json::Value into this object
+void
Caption::SetJsonValue(const Json::Value root) { + + // Set parent data + EffectBase::SetJsonValue(root); + + // Set data from Json (if key is found) + if (!root["color"].isNull()) + color.SetJsonValue(root["color"]); + if (!root["stroke"].isNull()) + stroke.SetJsonValue(root["stroke"]); + if (!root["stroke_width"].isNull()) + stroke_width.SetJsonValue(root["stroke_width"]); + if (!root["font_size"].isNull()) + font_size.SetJsonValue(root["font_size"]); + if (!root["left"].isNull()) + left.SetJsonValue(root["left"]); + if (!root["top"].isNull()) + top.SetJsonValue(root["top"]); + if (!root["right"].isNull()) + right.SetJsonValue(root["right"]); + if (!root["bottom"].isNull()) + bottom.SetJsonValue(root["bottom"]); + if (!root["caption_text"].isNull()) + caption_text = root["caption_text"].asString(); + if (!root["caption_format"].isNull()) + caption_format = root["caption_format"].asString(); + + // Mark effect as dirty to reparse Regex + is_dirty = true; +} + +// Get all properties for a specific frame +std::string Caption::PropertiesJSON(int64_t requested_frame) const { + + // Generate JSON properties list + Json::Value root; + root["id"] = add_property_json("ID", 0.0, "string", Id(), NULL, -1, -1, true, requested_frame); + root["position"] = add_property_json("Position", Position(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); + root["layer"] = add_property_json("Track", Layer(), "int", "", NULL, 0, 20, false, requested_frame); + root["start"] = add_property_json("Start", Start(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); + root["end"] = add_property_json("End", End(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); + root["duration"] = add_property_json("Duration", Duration(), "float", "", NULL, 0, 1000 * 60 * 30, true, requested_frame); + + // Keyframes + root["color"] = add_property_json("Color", 0.0, "color", "", NULL, 0, 255, false, requested_frame); + root["color"]["red"] = add_property_json("Red", 
color.red.GetValue(requested_frame), "float", "", &color.red, 0, 255, false, requested_frame); + root["color"]["blue"] = add_property_json("Blue", color.blue.GetValue(requested_frame), "float", "", &color.blue, 0, 255, false, requested_frame); + root["color"]["green"] = add_property_json("Green", color.green.GetValue(requested_frame), "float", "", &color.green, 0, 255, false, requested_frame); + root["stroke"] = add_property_json("Border", 0.0, "color", "", NULL, 0, 255, false, requested_frame); + root["stroke"]["red"] = add_property_json("Red", stroke.red.GetValue(requested_frame), "float", "", &stroke.red, 0, 255, false, requested_frame); + root["stroke"]["blue"] = add_property_json("Blue", stroke.blue.GetValue(requested_frame), "float", "", &stroke.blue, 0, 255, false, requested_frame); + root["stroke"]["green"] = add_property_json("Green", stroke.green.GetValue(requested_frame), "float", "", &stroke.green, 0, 255, false, requested_frame); + root["stroke_width"] = add_property_json("Stroke Width", stroke_width.GetValue(requested_frame), "float", "", &stroke_width, 0, 10.0, false, requested_frame); + root["font_size"] = add_property_json("Font Size", font_size.GetValue(requested_frame), "float", "", &font_size, 0, 200.0, false, requested_frame); + root["left"] = add_property_json("Left Size", left.GetValue(requested_frame), "float", "", &left, 0.0, 0.5, false, requested_frame); + root["top"] = add_property_json("Top Size", top.GetValue(requested_frame), "float", "", &top, 0.0, 0.5, false, requested_frame); + root["right"] = add_property_json("Right Size", right.GetValue(requested_frame), "float", "", &right, 0.0, 0.5, false, requested_frame); + root["bottom"] = add_property_json("Bottom Size", bottom.GetValue(requested_frame), "float", "", &bottom, 0.0, 0.5, false, requested_frame); + root["caption_text"] = add_property_json("Captions", 0.0, "string", caption_text, NULL, -1, -1, false, requested_frame); + root["caption_format"] = add_property_json("Format", 0.0, 
"string", caption_format, NULL, -1, -1, false, requested_frame); + + // Return formatted string + return root.toStyledString(); +} From 9b2ca50f6d7708ac622a38c3a30fe23623285c9d Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Tue, 6 Oct 2020 03:11:59 -0500 Subject: [PATCH 03/14] Allow sub-pixel sized pen stroke --- src/effects/Caption.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/effects/Caption.cpp b/src/effects/Caption.cpp index dfbebb53..212c9e72 100644 --- a/src/effects/Caption.cpp +++ b/src/effects/Caption.cpp @@ -35,14 +35,14 @@ using namespace openshot; /// Blank constructor, useful when using Json to load the effect properties -Caption::Caption() : color("#ffffff"), stroke("#a9a9a9"), left(0.25), top(0.8), right(0.1), bottom(0.1), stroke_width(0.001), font_size(30.0), is_dirty(true) { +Caption::Caption() : color("#ffffff"), stroke("#a9a9a9"), left(0.25), top(0.8), right(0.1), bottom(0.1), stroke_width(0.5), font_size(30.0), is_dirty(true) { // Init effect properties init_effect_details(); } // Default constructor Caption::Caption(Color color, std::string captions, std::string format) : - color(color), caption_text(captions), caption_format(format), stroke("#a9a9a9"), left(0.25), top(0.8), right(0.1), bottom(0.1), stroke_width(0.001), font_size(30.0), is_dirty(true) + color(color), caption_text(captions), caption_format(format), stroke("#a9a9a9"), left(0.25), top(0.8), right(0.1), bottom(0.1), stroke_width(0.5), font_size(30.0), is_dirty(true) { // Init effect properties init_effect_details(); @@ -148,7 +148,7 @@ std::shared_ptr Caption::GetFrame(std::shared_ptr 0.0) { QPen pen; pen.setColor(QColor(QString(stroke.GetColorHex(frame_number).c_str()))); - pen.setWidth(stroke_width.GetValue(frame_number) * scale_factor); + pen.setWidthF(stroke_width.GetValue(frame_number) * scale_factor); painter.setPen(pen); } From 80a1fe8ca4731a30c681ecdf05879f34550dc759 Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Mon, 12 Oct 2020 
23:28:03 -0500 Subject: [PATCH 04/14] Initializing Clip info struct, and fixing clip cache settings --- include/Clip.h | 15 +++++++++------ src/Clip.cpp | 29 ++++++++++++++++++++--------- 2 files changed, 29 insertions(+), 15 deletions(-) diff --git a/include/Clip.h b/include/Clip.h index fd28c162..0bc99e84 100644 --- a/include/Clip.h +++ b/include/Clip.h @@ -98,6 +98,15 @@ namespace openshot { /// Section lock for multiple threads juce::CriticalSection getFrameCriticalSection; + /// Init default settings for a clip + void init_settings(); + + /// Init reader info details + void init_reader_settings(); + + /// Update default rotation from reader + void init_reader_rotation(); + private: bool waveform; ///< Should a waveform be used instead of the clip's image std::list effects; /// frame, int64_t frame_number); - /// Init default settings for a clip - void init_settings(); - - /// Update default rotation from reader - void init_reader_rotation(); - /// Compare 2 floating point numbers bool isEqual(double a, double b); diff --git a/src/Clip.cpp b/src/Clip.cpp index 1d2b392d..d81b5c61 100644 --- a/src/Clip.cpp +++ b/src/Clip.cpp @@ -69,9 +69,6 @@ void Clip::init_settings() // Init alpha alpha = Keyframe(1.0); - // Init rotation - init_reader_rotation(); - // Init time & volume time = Keyframe(1.0); volume = Keyframe(1.0); @@ -101,8 +98,22 @@ void Clip::init_settings() has_audio = Keyframe(-1.0); has_video = Keyframe(-1.0); - // Initialize Clip cache - cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels); + // Init reader info struct and cache size + init_reader_settings(); +} + +// Init reader info details +void Clip::init_reader_settings() { + if (reader) { + // Init rotation (if any) + init_reader_rotation(); + + // Initialize info struct + info = reader->info; + + // Initialize Clip cache + cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels); + } 
} // Init reader's rotation (if any) @@ -208,8 +219,8 @@ Clip::Clip(std::string path) : resampler(NULL), reader(NULL), allocated_reader(N End(reader->info.duration); reader->ParentClip(this); allocated_reader = reader; - init_reader_rotation(); - } + // Init reader info struct and cache size + init_reader_settings(); } } // Destructor @@ -237,8 +248,8 @@ void Clip::Reader(ReaderBase* new_reader) // set parent reader->ParentClip(this); - // Init rotation (if any) - init_reader_rotation(); + // Init reader info struct and cache size + init_reader_settings(); } /// Get the current reader From 07a10e3bf6ae7abed5b1e42458db6dbd07b7c9ab Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Wed, 21 Oct 2020 01:27:49 -0500 Subject: [PATCH 05/14] - Added word-wrap (using adjustable left, top, and right side keyframes) - Added background color and alpha - Added font color alpha - Added fade in / out logic - Added background padding & rounded corners - Added adjustable font name --- src/effects/Caption.cpp | 214 ++++++++++++++++++++++++++++------------ src/effects/Caption.h | 26 ++--- 2 files changed, 163 insertions(+), 77 deletions(-) diff --git a/src/effects/Caption.cpp b/src/effects/Caption.cpp index 212c9e72..e48d0d7b 100644 --- a/src/effects/Caption.cpp +++ b/src/effects/Caption.cpp @@ -28,21 +28,26 @@ * along with OpenShot Library. If not, see . 
*/ -#include "../../include/effects/Caption.h" -#include "../../include/Clip.h" -#include "../../include/Timeline.h" +#include "Caption.h" +#include "../Clip.h" +#include "../Timeline.h" using namespace openshot; /// Blank constructor, useful when using Json to load the effect properties -Caption::Caption() : color("#ffffff"), stroke("#a9a9a9"), left(0.25), top(0.8), right(0.1), bottom(0.1), stroke_width(0.5), font_size(30.0), is_dirty(true) { +Caption::Caption() : color("#ffffff"), stroke("#a9a9a9"), background("#ff000000"), background_alpha(0.0), left(0.25), top(0.7), right(0.1), + stroke_width(0.5), font_size(30.0), font_alpha(1.0), is_dirty(true), font_name("sans"), font(NULL), metrics(NULL), + fade_in(0.35), fade_out(0.35), background_corner(10.0), background_padding(20.0) +{ // Init effect properties init_effect_details(); } // Default constructor -Caption::Caption(Color color, std::string captions, std::string format) : - color(color), caption_text(captions), caption_format(format), stroke("#a9a9a9"), left(0.25), top(0.8), right(0.1), bottom(0.1), stroke_width(0.5), font_size(30.0), is_dirty(true) +Caption::Caption(Color color, std::string captions) : + color(color), caption_text(captions), stroke("#a9a9a9"), background("#ff000000"), background_alpha(0.0), + left(0.25), top(0.7), right(0.1), stroke_width(0.5), font_size(30.0), font_alpha(1.0), is_dirty(true), font_name("sans"), + font(NULL), metrics(NULL), fade_in(0.35), fade_out(0.35), background_corner(10.0), background_padding(20.0) { // Init effect properties init_effect_details(); @@ -73,17 +78,6 @@ void Caption::CaptionText(std::string new_caption_text) { is_dirty = true; } -// Set the caption format to use (only VTT format is currently supported) -std::string Caption::CaptionFormat() { - return caption_format; -} - -// Get the caption format -void Caption::CaptionFormat(std::string new_caption_format) { - caption_format = new_caption_format; - is_dirty = true; -} - // Process regex string only when 
dirty void Caption::process_regex() { if (is_dirty) { @@ -127,6 +121,7 @@ std::shared_ptr Caption::GetFrame(std::shared_ptrinfo.fps.num; fps.den = timeline->info.fps.den; + // preview window is sometimes smaller/larger than the timeline size scale_factor = (double) timeline->preview_width / (double) timeline->info.width; } else if (clip != NULL && clip->Reader() != NULL) { fps.num = clip->Reader()->info.fps.num; @@ -144,30 +139,59 @@ std::shared_ptr Caption::GetFrame(std::shared_ptr 0.0) { - QPen pen; - pen.setColor(QColor(QString(stroke.GetColorHex(frame_number).c_str()))); + // Font options and metrics for caption text + double font_size_value = font_size.GetValue(frame_number) * scale_factor; + QFont font(QString(font_name.c_str()), int(font_size_value)); + font.setPointSizeF(std::max(font_size_value, 1.0)); + QFontMetricsF metrics = QFontMetricsF(font); + + // Get current keyframe values + double left_value = left.GetValue(frame_number); + double top_value = top.GetValue(frame_number); + double fade_in_value = fade_in.GetValue(frame_number) * fps.ToDouble(); + double fade_out_value = fade_out.GetValue(frame_number) * fps.ToDouble(); + double right_value = right.GetValue(frame_number); + double background_corner_value = background_corner.GetValue(frame_number); + double padding_value = background_padding.GetValue(frame_number); + + // Calculate caption area (based on left, top, and right margin) + double left_margin_x = frame_image->width() * left_value; + double starting_y = (frame_image->height() * top_value) + (metrics.lineSpacing() * scale_factor); + double right_margin_x = frame_image->width() - (frame_image->width() * right_value); + double caption_area_width = right_margin_x - left_margin_x; + QRectF caption_area = QRectF(left_margin_x, starting_y, caption_area_width, frame_image->height()); + QRectF caption_area_with_padding = QRectF(left_margin_x - (padding_value / 2.0), starting_y - (padding_value / 2.0), caption_area_width + padding_value, 
frame_image->height() + padding_value); + + // Set background color of caption + QBrush brush; + QColor background_qcolor = QColor(QString(background.GetColorHex(frame_number).c_str())); + background_qcolor.setAlphaF(background_alpha.GetValue(frame_number)); + brush.setColor(background_qcolor); + brush.setStyle(Qt::SolidPattern); + painter.setBrush(brush); + painter.setPen(Qt::NoPen); + painter.drawRoundedRect(caption_area_with_padding, background_corner_value, background_corner_value); + + // Set text color of caption + QPen pen; + QColor stroke_qcolor; + if (stroke_width.GetValue(frame_number) <= 0.0) { + // No stroke + painter.setPen(Qt::NoPen); + } else { + // Stroke color + stroke_qcolor = QColor(QString(stroke.GetColorHex(frame_number).c_str())); + stroke_qcolor.setAlphaF(font_alpha.GetValue(frame_number)); + pen.setColor(stroke_qcolor); pen.setWidthF(stroke_width.GetValue(frame_number) * scale_factor); painter.setPen(pen); } - - // Fill color brush - QBrush brush; - brush.setColor(QColor(QString(color.GetColorHex(frame_number).c_str()))); - brush.setStyle(Qt::SolidPattern); + // Fill color of text + QColor font_qcolor = QColor(QString(color.GetColorHex(frame_number).c_str())); + font_qcolor.setAlphaF(font_alpha.GetValue(frame_number)); + brush.setColor(font_qcolor); painter.setBrush(brush); - // Font options for caption - // TODO: Allow more font options (family, bold, style) - QFont font; - if (font_size.GetValue(frame_number) > 0.0) { - font.setPointSizeF(font_size.GetValue(frame_number) * scale_factor); - } else { - // Font can't be 0 sized - font.setPointSizeF(1.0); - } - // Loop through matches and find text to display (if any) for (auto match = matchedCaptions.begin(); match != matchedCaptions.end(); match++) { @@ -177,18 +201,6 @@ std::shared_ptr Caption::GetFrame(std::shared_ptrcaptured(5).toFloat() * 60.0 * 60.0 ) + (match->captured(6).toFloat() * 60.0 ) + match->captured(7).toFloat() + (match->captured(8).toFloat() / 1000.0)) * fps.ToFloat(); - // 
Get current keyframe values - double left_value = left.GetValue(frame_number); - double top_value = top.GetValue(frame_number); - - // TODO: Use all 4 margins and wrap text - double right_value = right.GetValue(frame_number); - double bottom_value = bottom.GetValue(frame_number); - - // Parse WEBVTT caption format - double starting_x = frame_image->width() * left_value; - double starting_y = frame_image->height() * top_value;; - // Split multiple lines into separate paths QStringList lines = match->captured(9).split("\n"); for(int index = 0; index < lines.length(); index++) { @@ -199,16 +211,61 @@ std::shared_ptr Caption::GetFrame(std::shared_ptr= start_frame && frame_number <= end_frame && !line.length() <= 1 ) { - // Location for text - QPoint p(starting_x, starting_y); + // Calculate fade in/out ranges + double fade_in_percentage = ((float) frame_number - (float) start_frame) / fade_in_value; + double fade_out_percentage = 1.0 - (((float) frame_number - ((float) end_frame - fade_out_value)) / fade_out_value); + if (fade_in_percentage < 1.0) { + // Fade in + font_qcolor.setAlphaF(fade_in_percentage * font_alpha.GetValue(frame_number)); + stroke_qcolor.setAlphaF(fade_in_percentage * font_alpha.GetValue(frame_number)); + } else if (fade_out_percentage >= 0.0 && fade_out_percentage <= 1.0) { + // Fade out + font_qcolor.setAlphaF(fade_out_percentage * font_alpha.GetValue(frame_number)); + stroke_qcolor.setAlphaF(fade_out_percentage * font_alpha.GetValue(frame_number)); + } + pen.setColor(stroke_qcolor); + brush.setColor(font_qcolor); + painter.setPen(pen); + painter.setBrush(brush); - // Draw text onto path (for correct border and fill) - QPainterPath path1; - path1.addText(p, font, line); - painter.drawPath(path1); + // Loop through words, and find word-wrap boundaries + QStringList words = line.split(" "); + int words_remaining = words.length(); + while (words_remaining > 0) { + bool words_displayed = false; + for(int word_index = words.length(); word_index > 0; 
word_index--) { + // Current matched caption string (from the beginning to the current word index) + QString fitting_line = words.mid(0, word_index).join(" "); + + // Calculate size of text + QRectF textRect = metrics.boundingRect(caption_area, Qt::TextSingleLine, fitting_line); + if (textRect.width() <= caption_area.width()) { + // Location for text + QPoint p(left_margin_x, starting_y); + + // Draw text onto path (for correct border and fill) + QPainterPath path1; + QString fitting_line = words.mid(0, word_index).join(" "); + path1.addText(p, font, fitting_line); + painter.drawPath(path1); + + // Increment QPoint to height of text (for next line) + padding + starting_y += path1.boundingRect().height() + (metrics.lineSpacing() * scale_factor); + + // Update line (to remove words already drawn) + words = words.mid(word_index, words.length()); + words_remaining = words.length(); + words_displayed = true; + break; + } + } + + if (words_displayed == false) { + // Exit loop if no words displayed + words_remaining = 0; + } + } - // Increment QPoint to height of text (for next line) + padding - starting_y += path1.boundingRect().height() + (10.0 * scale_factor); } } } @@ -235,14 +292,20 @@ Json::Value Caption::JsonValue() const { root["type"] = info.class_name; root["color"] = color.JsonValue(); root["stroke"] = stroke.JsonValue(); + root["background"] = background.JsonValue(); + root["background_alpha"] = background_alpha.JsonValue(); + root["background_corner"] = background_corner.JsonValue(); + root["background_padding"] = background_padding.JsonValue(); root["stroke_width"] = stroke_width.JsonValue(); root["font_size"] = font_size.JsonValue(); + root["font_alpha"] = font_alpha.JsonValue(); + root["fade_in"] = fade_in.JsonValue(); + root["fade_out"] = fade_out.JsonValue(); root["left"] = left.JsonValue(); root["top"] = top.JsonValue(); root["right"] = right.JsonValue(); - root["bottom"] = bottom.JsonValue(); root["caption_text"] = caption_text; - root["caption_format"] 
= caption_format; + root["caption_font"] = font_name; // return JsonValue return root; @@ -276,22 +339,34 @@ void Caption::SetJsonValue(const Json::Value root) { color.SetJsonValue(root["color"]); if (!root["stroke"].isNull()) stroke.SetJsonValue(root["stroke"]); + if (!root["background"].isNull()) + background.SetJsonValue(root["background"]); + if (!root["background_alpha"].isNull()) + background_alpha.SetJsonValue(root["background_alpha"]); + if (!root["background_corner"].isNull()) + background_corner.SetJsonValue(root["background_corner"]); + if (!root["background_padding"].isNull()) + background_padding.SetJsonValue(root["background_padding"]); if (!root["stroke_width"].isNull()) stroke_width.SetJsonValue(root["stroke_width"]); if (!root["font_size"].isNull()) font_size.SetJsonValue(root["font_size"]); + if (!root["font_alpha"].isNull()) + font_alpha.SetJsonValue(root["font_alpha"]); + if (!root["fade_in"].isNull()) + fade_in.SetJsonValue(root["fade_in"]); + if (!root["fade_out"].isNull()) + fade_out.SetJsonValue(root["fade_out"]); if (!root["left"].isNull()) left.SetJsonValue(root["left"]); if (!root["top"].isNull()) top.SetJsonValue(root["top"]); if (!root["right"].isNull()) right.SetJsonValue(root["right"]); - if (!root["bottom"].isNull()) - bottom.SetJsonValue(root["bottom"]); if (!root["caption_text"].isNull()) caption_text = root["caption_text"].asString(); - if (!root["caption_format"].isNull()) - caption_format = root["caption_format"].asString(); + if (!root["caption_font"].isNull()) + font_name = root["caption_font"].asString(); // Mark effect as dirty to reparse Regex is_dirty = true; @@ -318,14 +393,23 @@ std::string Caption::PropertiesJSON(int64_t requested_frame) const { root["stroke"]["red"] = add_property_json("Red", stroke.red.GetValue(requested_frame), "float", "", &stroke.red, 0, 255, false, requested_frame); root["stroke"]["blue"] = add_property_json("Blue", stroke.blue.GetValue(requested_frame), "float", "", &stroke.blue, 0, 255, false, 
requested_frame); root["stroke"]["green"] = add_property_json("Green", stroke.green.GetValue(requested_frame), "float", "", &stroke.green, 0, 255, false, requested_frame); + root["background_alpha"] = add_property_json("Background Alpha", background_alpha.GetValue(requested_frame), "float", "", &background_alpha, 0.0, 1.0, false, requested_frame); + root["background_corner"] = add_property_json("Background Corner Radius", background_corner.GetValue(requested_frame), "float", "", &background_corner, 0.0, 60.0, false, requested_frame); + root["background_padding"] = add_property_json("Background Padding", background_padding.GetValue(requested_frame), "float", "", &background_padding, 0.0, 60.0, false, requested_frame); + root["background"] = add_property_json("Background", 0.0, "color", "", NULL, 0, 255, false, requested_frame); + root["background"]["red"] = add_property_json("Red", background.red.GetValue(requested_frame), "float", "", &background.red, 0, 255, false, requested_frame); + root["background"]["blue"] = add_property_json("Blue", background.blue.GetValue(requested_frame), "float", "", &background.blue, 0, 255, false, requested_frame); + root["background"]["green"] = add_property_json("Green", background.green.GetValue(requested_frame), "float", "", &background.green, 0, 255, false, requested_frame); root["stroke_width"] = add_property_json("Stroke Width", stroke_width.GetValue(requested_frame), "float", "", &stroke_width, 0, 10.0, false, requested_frame); root["font_size"] = add_property_json("Font Size", font_size.GetValue(requested_frame), "float", "", &font_size, 0, 200.0, false, requested_frame); + root["font_alpha"] = add_property_json("Font Alpha", font_alpha.GetValue(requested_frame), "float", "", &font_alpha, 0.0, 1.0, false, requested_frame); + root["fade_in"] = add_property_json("Fade In (Seconds)", fade_in.GetValue(requested_frame), "float", "", &fade_in, 0.0, 3.0, false, requested_frame); + root["fade_out"] = add_property_json("Fade Out 
(Seconds)", fade_out.GetValue(requested_frame), "float", "", &fade_out, 0.0, 3.0, false, requested_frame); root["left"] = add_property_json("Left Size", left.GetValue(requested_frame), "float", "", &left, 0.0, 0.5, false, requested_frame); - root["top"] = add_property_json("Top Size", top.GetValue(requested_frame), "float", "", &top, 0.0, 0.5, false, requested_frame); + root["top"] = add_property_json("Top Size", top.GetValue(requested_frame), "float", "", &top, 0.0, 1.0, false, requested_frame); root["right"] = add_property_json("Right Size", right.GetValue(requested_frame), "float", "", &right, 0.0, 0.5, false, requested_frame); - root["bottom"] = add_property_json("Bottom Size", bottom.GetValue(requested_frame), "float", "", &bottom, 0.0, 0.5, false, requested_frame); root["caption_text"] = add_property_json("Captions", 0.0, "string", caption_text, NULL, -1, -1, false, requested_frame); - root["caption_format"] = add_property_json("Format", 0.0, "string", caption_format, NULL, -1, -1, false, requested_frame); + root["caption_font"] = add_property_json("Font", 0.0, "string", font_name, NULL, -1, -1, false, requested_frame); // Return formatted string return root.toStyledString(); diff --git a/src/effects/Caption.h b/src/effects/Caption.h index f2f55126..f52f9a99 100644 --- a/src/effects/Caption.h +++ b/src/effects/Caption.h @@ -31,13 +31,12 @@ #ifndef OPENSHOT_CAPTION_EFFECT_H #define OPENSHOT_CAPTION_EFFECT_H -#include "../EffectBase.h" - #include #include #include #include #include "../Color.h" +#include "../EffectBase.h" #include "../Fraction.h" #include "../Json.h" #include "../KeyFrame.h" @@ -57,8 +56,9 @@ namespace openshot { private: std::vector matchedCaptions; ///< RegEx to capture cues and text - std::string caption_text; ///< Text of caption - std::string caption_format; ///< Format of caption (application/x-subrip, text/vtt) + std::string caption_text; ///< Text of caption + QFontMetrics* metrics; ///< Font metrics object + QFont* font; ///< QFont 
object bool is_dirty; /// Init effect settings public: Color color; ///< Color of caption text Color stroke; ///< Color of text border / stroke + Color background; ///< Color of caption area background + Keyframe background_alpha; ///< Background color alpha + Keyframe background_corner; ///< Background corner radius + Keyframe background_padding; ///< Background padding Keyframe stroke_width; ///< Width of text border / stroke Keyframe font_size; ///< Font size in points + Keyframe font_alpha; ///< Font color alpha Keyframe left; ///< Size of left bar Keyframe top; ///< Size of top bar Keyframe right; ///< Size of right bar - Keyframe bottom; ///< Size of bottom bar + Keyframe fade_in; ///< Fade in per caption (# of seconds) + Keyframe fade_out; ///< Fade out per caption (# of seconds) + std::string font_name; ///< Font string /// Blank constructor, useful when using Json to load the effect properties Caption(); @@ -84,11 +91,8 @@ namespace openshot /// Default constructor, which takes 4 curves and a color. These curves animated the bars over time. /// /// @param color The curve to adjust the color of bars - /// @param left The curve to adjust the left bar size (between 0 and 1) - /// @param top The curve to adjust the top bar size (between 0 and 1) - /// @param right The curve to adjust the right bar size (between 0 and 1) - /// @param bottom The curve to adjust the bottom bar size (between 0 and 1) - Caption(Color color, std::string captions, std::string format); + /// @param captions A string with VTT/Subrip format text captions + Caption(Color color, std::string captions); /// @brief This method is required for all derived classes of ClipBase, and returns a /// new openshot::Frame object. 
All Clip keyframes and effects are resolved into @@ -112,8 +116,6 @@ namespace openshot // Get and Set caption data std::string CaptionText(); ///< Set the caption string to use (see VTT format) void CaptionText(std::string new_caption_text); ///< Get the caption string - std::string CaptionFormat(); ///< Set the caption format to use (only VTT format is currently supported) - void CaptionFormat(std::string new_caption_format); ///< Get the caption format /// Get and Set JSON methods std::string Json() const override; ///< Generate JSON string of this object From 54a070438a4c8490cc34049ac2c23acf85fe1afe Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Thu, 22 Oct 2020 01:32:33 -0500 Subject: [PATCH 06/14] - Added default caption value, for demonstration purposes (to help users see a valid example) - Append some newlines onto the end of any caption text... needed by the regex for some reason - Updated font name and caption text to be a new type (font and caption), and we have corresponding UI changes for those on openshot-qt --- src/effects/Caption.cpp | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/src/effects/Caption.cpp b/src/effects/Caption.cpp index e48d0d7b..43b24dd4 100644 --- a/src/effects/Caption.cpp +++ b/src/effects/Caption.cpp @@ -65,6 +65,11 @@ void Caption::init_effect_details() info.description = "Add text captions on top of your video."; info.has_audio = false; info.has_video = true; + + // Init placeholder caption (for demo) + if (caption_text.length() == 0) { + caption_text = "00:00:00:000 --> 00:10:00:000\nEdit this caption with our caption editor"; + } } // Set the caption string to use (see VTT format) @@ -86,9 +91,15 @@ void Caption::process_regex() { // Clear existing matches matchedCaptions.clear(); + QString caption_prepared = QString(caption_text.c_str()); + if (caption_prepared.endsWith("\n\n") == false) { + // We need a couple line ends at the end of the caption string (for our regex to work 
correctly) + caption_prepared.append("\n\n"); + } + // Parse regex and find all matches QRegularExpression allPathsRegex(QStringLiteral("(\\d{2})?:*(\\d{2}):(\\d{2}).(\\d{2,3})\\s*-->\\s*(\\d{2})?:*(\\d{2}):(\\d{2}).(\\d{2,3})([\\s\\S]*?)\\n(.*?)(?=\\n\\d{2,3}|\\Z)"), QRegularExpression::MultilineOption); - QRegularExpressionMatchIterator i = allPathsRegex.globalMatch(QString(caption_text.c_str())); + QRegularExpressionMatchIterator i = allPathsRegex.globalMatch(caption_prepared); while (i.hasNext()) { QRegularExpressionMatch match = i.next(); if (match.hasMatch()) { @@ -408,8 +419,8 @@ std::string Caption::PropertiesJSON(int64_t requested_frame) const { root["left"] = add_property_json("Left Size", left.GetValue(requested_frame), "float", "", &left, 0.0, 0.5, false, requested_frame); root["top"] = add_property_json("Top Size", top.GetValue(requested_frame), "float", "", &top, 0.0, 1.0, false, requested_frame); root["right"] = add_property_json("Right Size", right.GetValue(requested_frame), "float", "", &right, 0.0, 0.5, false, requested_frame); - root["caption_text"] = add_property_json("Captions", 0.0, "string", caption_text, NULL, -1, -1, false, requested_frame); - root["caption_font"] = add_property_json("Font", 0.0, "string", font_name, NULL, -1, -1, false, requested_frame); + root["caption_text"] = add_property_json("Captions", 0.0, "caption", caption_text, NULL, -1, -1, false, requested_frame); + root["caption_font"] = add_property_json("Font", 0.0, "font", font_name, NULL, -1, -1, false, requested_frame); // Return formatted string return root.toStyledString(); From def8d9dd7bc2efb6671126c4521dd0f9c08b4139 Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Tue, 27 Oct 2020 02:13:57 -0500 Subject: [PATCH 07/14] Updating method docs --- src/effects/Caption.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/effects/Caption.h b/src/effects/Caption.h index f52f9a99..89347415 100644 --- a/src/effects/Caption.h +++ 
b/src/effects/Caption.h @@ -88,9 +88,9 @@ namespace openshot /// Blank constructor, useful when using Json to load the effect properties Caption(); - /// Default constructor, which takes 4 curves and a color. These curves animated the bars over time. + /// Default constructor, which takes a string of VTT/Subrip formatted caption data, and displays them over time. /// - /// @param color The curve to adjust the color of bars + /// @param color The curve to adjust the color of caption text /// @param captions A string with VTT/Subrip format text captions Caption(Color color, std::string captions); From a3c20c8f447ab01619f8b452b36b5e88955da092 Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Tue, 27 Oct 2020 02:50:02 -0500 Subject: [PATCH 08/14] Fixed a few codacy issues --- src/effects/Caption.cpp | 6 +++--- src/effects/Caption.h | 3 +-- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/src/effects/Caption.cpp b/src/effects/Caption.cpp index 43b24dd4..b9ce2249 100644 --- a/src/effects/Caption.cpp +++ b/src/effects/Caption.cpp @@ -44,8 +44,8 @@ Caption::Caption() : color("#ffffff"), stroke("#a9a9a9"), background("#ff000000" } // Default constructor -Caption::Caption(Color color, std::string captions) : - color(color), caption_text(captions), stroke("#a9a9a9"), background("#ff000000"), background_alpha(0.0), +Caption::Caption(std::string captions) : + color("#ffffff"), caption_text(captions), stroke("#a9a9a9"), background("#ff000000"), background_alpha(0.0), left(0.25), top(0.7), right(0.1), stroke_width(0.5), font_size(30.0), font_alpha(1.0), is_dirty(true), font_name("sans"), font(NULL), metrics(NULL), fade_in(0.35), fade_out(0.35), background_corner(10.0), background_padding(20.0) { @@ -220,7 +220,7 @@ std::shared_ptr Caption::GetFrame(std::shared_ptr= start_frame && frame_number <= end_frame && - !line.length() <= 1 ) { + line.length() > 1) { // Calculate fade in/out ranges double fade_in_percentage = ((float) frame_number - (float) start_frame) / 
fade_in_value; diff --git a/src/effects/Caption.h b/src/effects/Caption.h index 89347415..749b2c17 100644 --- a/src/effects/Caption.h +++ b/src/effects/Caption.h @@ -90,9 +90,8 @@ namespace openshot /// Default constructor, which takes a string of VTT/Subrip formatted caption data, and displays them over time. /// - /// @param color The curve to adjust the color of caption text /// @param captions A string with VTT/Subrip format text captions - Caption(Color color, std::string captions); + Caption(std::string captions); /// @brief This method is required for all derived classes of ClipBase, and returns a /// new openshot::Frame object. All Clip keyframes and effects are resolved into From df154c3844e7a95552c6ad533939a5fe4e524ff1 Mon Sep 17 00:00:00 2001 From: Jonathan Thomas Date: Fri, 30 Oct 2020 18:23:45 -0500 Subject: [PATCH 09/14] Fixing color format for opencv conversion --- src/Frame.cpp | 4 +-- src/Qt/PlayerPrivate.cpp | 2 +- src/Qt/VideoCacheThread.cpp | 58 ++++++++++++++++++------------------- 3 files changed, 32 insertions(+), 32 deletions(-) diff --git a/src/Frame.cpp b/src/Frame.cpp index b32a7d20..e25aa5ad 100644 --- a/src/Frame.cpp +++ b/src/Frame.cpp @@ -968,14 +968,14 @@ cv::Mat Frame::GetImageCV() std::shared_ptr Frame::Mat2Qimage(cv::Mat img){ cv::cvtColor(img, img, cv::COLOR_BGR2RGB); - QImage qimg((uchar*) img.data, img.cols, img.rows, img.step, QImage::Format_RGBA8888_Premultiplied); + QImage qimg((uchar*) img.data, img.cols, img.rows, img.step, QImage::Format_RGB888); std::shared_ptr imgIn = std::make_shared(qimg.copy()); // Always convert to RGBA8888 (if different) if (imgIn->format() != QImage::Format_RGBA8888_Premultiplied) *imgIn = imgIn->convertToFormat(QImage::Format_RGBA8888_Premultiplied); - + return imgIn; } diff --git a/src/Qt/PlayerPrivate.cpp b/src/Qt/PlayerPrivate.cpp index 75052fc3..7a3943c7 100644 --- a/src/Qt/PlayerPrivate.cpp +++ b/src/Qt/PlayerPrivate.cpp @@ -195,10 +195,10 @@ namespace openshot // Stop video/audio playback 
void PlayerPrivate::stopPlayback(int timeOutMilliseconds) { - if (isThreadRunning()) stopThread(timeOutMilliseconds); if (audioPlayback->isThreadRunning() && reader->info.has_audio) audioPlayback->stopThread(timeOutMilliseconds); if (videoCache->isThreadRunning() && reader->info.has_video) videoCache->stopThread(timeOutMilliseconds); if (videoPlayback->isThreadRunning() && reader->info.has_video) videoPlayback->stopThread(timeOutMilliseconds); + if (isThreadRunning()) stopThread(timeOutMilliseconds); } } diff --git a/src/Qt/VideoCacheThread.cpp b/src/Qt/VideoCacheThread.cpp index e1e53f5d..0cf76ef0 100644 --- a/src/Qt/VideoCacheThread.cpp +++ b/src/Qt/VideoCacheThread.cpp @@ -93,43 +93,43 @@ namespace openshot while (!threadShouldExit() && is_playing) { - // Cache frames before the other threads need them - // Cache frames up to the max frames. Reset to current position - // if cache gets too far away from display frame. Cache frames - // even when player is paused (i.e. speed 0). - while ((position - current_display_frame) < max_frames) - { - // Only cache up till the max_frames amount... then sleep - try + // Cache frames before the other threads need them + // Cache frames up to the max frames. Reset to current position + // if cache gets too far away from display frame. Cache frames + // even when player is paused (i.e. speed 0). + while (((position - current_display_frame) < max_frames) && is_playing) { - if (reader) { - ZmqLogger::Instance()->AppendDebugMethod("VideoCacheThread::run (cache frame)", "position", position, "current_display_frame", current_display_frame, "max_frames", max_frames, "needed_frames", (position - current_display_frame)); + // Only cache up till the max_frames amount... 
then sleep + try + { + if (reader) { + ZmqLogger::Instance()->AppendDebugMethod("VideoCacheThread::run (cache frame)", "position", position, "current_display_frame", current_display_frame, "max_frames", max_frames, "needed_frames", (position - current_display_frame)); - // Force the frame to be generated - if (reader->GetCache()->GetSmallestFrame()) { - int64_t smallest_cached_frame = reader->GetCache()->GetSmallestFrame()->number; - if (smallest_cached_frame > current_display_frame) { - // Cache position has gotten too far away from current display frame. - // Reset the position to the current display frame. - position = current_display_frame; + // Force the frame to be generated + if (reader->GetCache()->GetSmallestFrame()) { + int64_t smallest_cached_frame = reader->GetCache()->GetSmallestFrame()->number; + if (smallest_cached_frame > current_display_frame) { + // Cache position has gotten too far away from current display frame. + // Reset the position to the current display frame. + position = current_display_frame; + } } + reader->GetFrame(position); } - reader->GetFrame(position); + + } + catch (const OutOfBoundsFrame & e) + { + // Ignore out of bounds frame exceptions } - } - catch (const OutOfBoundsFrame & e) - { - // Ignore out of bounds frame exceptions + // Increment frame number + position++; } - // Increment frame number - position++; - } - - // Sleep for 1 frame length - std::this_thread::sleep_for(frame_duration); - } + // Sleep for 1 frame length + std::this_thread::sleep_for(frame_duration); + } return; } From 3f63b2c638c684a70963319925a0f09dd00b265c Mon Sep 17 00:00:00 2001 From: Brenno Date: Sun, 1 Nov 2020 20:02:46 -0300 Subject: [PATCH 10/14] Added error message handling for ClipProcessingJob --- src/CVObjectDetection.cpp | 26 ++++++++++++++++++++++++-- src/CVObjectDetection.h | 2 ++ src/CVTracker.cpp | 26 +++++++++++++++++++++----- src/CVTracker.h | 4 +++- src/ClipProcessingJobs.cpp | 16 +++++++++++++++- src/ClipProcessingJobs.h | 5 ++++- 
src/ProcessingController.h | 21 +++++++++++++++++++++ 7 files changed, 90 insertions(+), 10 deletions(-) diff --git a/src/CVObjectDetection.cpp b/src/CVObjectDetection.cpp index 1ef1d821..36c62863 100644 --- a/src/CVObjectDetection.cpp +++ b/src/CVObjectDetection.cpp @@ -32,7 +32,6 @@ using namespace openshot; - CVObjectDetection::CVObjectDetection(std::string processInfoJson, ProcessingController &processingController) : processingController(&processingController), processingDevice("CPU"){ SetJson(processInfoJson); @@ -56,6 +55,10 @@ void CVObjectDetection::detectObjectsClip(openshot::Clip &video, size_t _start, video.Open(); + if(error){ + return; + } + // Load names of classes std::ifstream ifs(classesFile.c_str()); std::string line; @@ -381,13 +384,32 @@ void CVObjectDetection::SetJsonValue(const Json::Value root) { processingDevice = (root["processing_device"].asString()); } if (!root["model_configuration"].isNull()){ - modelConfiguration = (root["model_configuration"].asString()); + modelConfiguration = (root["model_configuration"].asString()); + std::ifstream infile(modelConfiguration); + if(!infile.good()){ + processingController->SetError(true, "Incorrect path to model config file"); + error = true; + } + } if (!root["model_weights"].isNull()){ modelWeights= (root["model_weights"].asString()); + std::ifstream infile(modelWeights); + if(!infile.good()){ + processingController->SetError(true, "Incorrect path to model weight file"); + error = true; + } + } if (!root["classes_file"].isNull()){ classesFile = (root["classes_file"].asString()); + + std::ifstream infile(classesFile); + if(!infile.good()){ + processingController->SetError(true, "Incorrect path to class name file"); + error = true; + } + } } diff --git a/src/CVObjectDetection.h b/src/CVObjectDetection.h index 8513b405..4ca55fc6 100644 --- a/src/CVObjectDetection.h +++ b/src/CVObjectDetection.h @@ -91,6 +91,8 @@ namespace openshot size_t start; size_t end; + bool error = false; + /// Will handle a 
Thread safely comutication between ClipProcessingJobs and the processing effect classes ProcessingController *processingController; diff --git a/src/CVTracker.cpp b/src/CVTracker.cpp index 48e84622..073e0771 100644 --- a/src/CVTracker.cpp +++ b/src/CVTracker.cpp @@ -65,7 +65,6 @@ cv::Ptr CVTracker::selectTracker(std::string trackerType){ void CVTracker::trackClip(openshot::Clip& video, size_t _start, size_t _end, bool process_interval){ video.Open(); - if(!json_interval){ start = _start; end = _end; @@ -79,7 +78,12 @@ void CVTracker::trackClip(openshot::Clip& video, size_t _start, size_t _end, boo start = start + video.Start() * video.Reader()->info.fps.ToInt(); end = video.End() * video.Reader()->info.fps.ToInt(); } - + + if(error){ + return; + } + + processingController->SetError(false, ""); bool trackerInit = false; size_t frame; @@ -274,6 +278,7 @@ void CVTracker::SetJsonValue(const Json::Value root) { if (!root["tracker_type"].isNull()){ trackerType = (root["tracker_type"].asString()); } + if (!root["bbox"].isNull()){ double x = root["bbox"]["x"].asDouble(); double y = root["bbox"]["y"].asDouble(); @@ -282,9 +287,20 @@ void CVTracker::SetJsonValue(const Json::Value root) { cv::Rect2d prev_bbox(x,y,w,h); bbox = prev_bbox; } - if (!root["first_frame"].isNull()){ - start = root["first_frame"].asInt64(); - json_interval = true; + else{ + processingController->SetError(true, "No initial bounding box selected"); + error = true; + } + + if(root.isMember("first_frame")){ + if (!root["first_frame"].isNull()){ + start = root["first_frame"].asInt64(); + json_interval = true; + } + } + else{ + processingController->SetError(true, "No first_frame"); + error = true; } } diff --git a/src/CVTracker.h b/src/CVTracker.h index 7006263d..8ea72371 100644 --- a/src/CVTracker.h +++ b/src/CVTracker.h @@ -103,10 +103,12 @@ namespace openshot /// Will handle a Thread safely comutication between ClipProcessingJobs and the processing effect classes ProcessingController 
*processingController; - + bool json_interval; size_t start; size_t end; + + bool error = false; // Initialize the tracker bool initTracker(cv::Mat &frame, size_t frameId); diff --git a/src/ClipProcessingJobs.cpp b/src/ClipProcessingJobs.cpp index 6a4b6689..fb64fd78 100644 --- a/src/ClipProcessingJobs.cpp +++ b/src/ClipProcessingJobs.cpp @@ -5,7 +5,8 @@ ClipProcessingJobs::ClipProcessingJobs(std::string processingType, std::string p processingType(processingType), processInfoJson(processInfoJson){ } -void ClipProcessingJobs::processClip(Clip& clip){ +void ClipProcessingJobs::processClip(Clip& clip, std::string json){ + processInfoJson = json; // Process clip and save processed data if(processingType == "Stabilizer"){ @@ -83,11 +84,13 @@ void ClipProcessingJobs::stabilizeClip(Clip& clip, ProcessingController& control } } +// Get processing progress while iterating on the clip int ClipProcessingJobs::GetProgress(){ return (int)processingController.GetProgress(); } +// Check if processing finished bool ClipProcessingJobs::IsDone(){ if(processingController.GetFinished()){ @@ -96,6 +99,17 @@ bool ClipProcessingJobs::IsDone(){ return processingController.GetFinished(); } +// stop preprocessing before finishing it void ClipProcessingJobs::CancelProcessing(){ processingController.CancelProcessing(); +} + +// check if there is an error with the config +bool ClipProcessingJobs::GetError(){ + return processingController.GetError(); +} + +// get the error message +std::string ClipProcessingJobs::GetErrorMessage(){ + return processingController.GetErrorMessage(); } \ No newline at end of file diff --git a/src/ClipProcessingJobs.h b/src/ClipProcessingJobs.h index 835b2911..2a34d46e 100644 --- a/src/ClipProcessingJobs.h +++ b/src/ClipProcessingJobs.h @@ -75,11 +75,14 @@ class ClipProcessingJobs{ // Constructor ClipProcessingJobs(std::string processingType, std::string processInfoJson); // Process clip accordingly to processingType - void processClip(Clip& clip); + void 
processClip(Clip& clip, std::string json); // Thread related variables and methods int GetProgress(); bool IsDone(); void CancelProcessing(); + bool GetError(); + std::string GetErrorMessage(); + }; \ No newline at end of file diff --git a/src/ProcessingController.h b/src/ProcessingController.h index 6071ee88..98c77888 100644 --- a/src/ProcessingController.h +++ b/src/ProcessingController.h @@ -41,10 +41,13 @@ class ProcessingController{ uint processingProgress; bool processingFinished; bool stopProcessing; + bool error = true; + std::string error_message; std::mutex mtxProgress; std::mutex mtxFinished; std::mutex mtxStop; + std::mutex mtxerror; public: @@ -87,6 +90,24 @@ class ProcessingController{ return s; } + void SetError(bool err, std::string message){ + std::lock_guard lck (mtxerror); + error = err; + error_message = message; + } + + bool GetError(){ + std::lock_guard lck (mtxerror); + bool e = error; + return e; + } + + std::string GetErrorMessage(){ + std::lock_guard lck (mtxerror); + std::string message = error_message; + return message; + } + }; #endif \ No newline at end of file From 0bdc6483fc3cfdb0aa87df5b1326c86aacb46fa4 Mon Sep 17 00:00:00 2001 From: Brenno Date: Tue, 3 Nov 2020 19:45:17 -0300 Subject: [PATCH 11/14] Fixed CVTracker test --- tests/CVTracker_Tests.cpp | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/tests/CVTracker_Tests.cpp b/tests/CVTracker_Tests.cpp index 6fe6abf0..7a1dcb95 100644 --- a/tests/CVTracker_Tests.cpp +++ b/tests/CVTracker_Tests.cpp @@ -55,14 +55,11 @@ SUITE(CVTracker_Tests) c1.Open(); // Create tracker - CVTracker kcfTracker("{\"protobuf_data_path\": \"\", \"tracker_type\": \"KCF\", \"bbox\": {\"x\": 294, \"y\": 102, \"w\": 180, \"h\": 166}}", processingController); - + CVTracker kcfTracker("{\"protobuf_data_path\": \"\", \"tracker_type\": \"KCF\", \"bbox\": {\"x\": 294, \"y\": 102, \"w\": 180, \"h\": 166}, \"first_frame\": 0}", processingController); // Track clip for frames 0-20 
kcfTracker.trackClip(c1, 0, 20, true); - // Get tracked data FrameData fd = kcfTracker.GetTrackedData(20); - float x = fd.x1; float y = fd.y1; float width = fd.x2 - x; @@ -88,7 +85,7 @@ SUITE(CVTracker_Tests) c1.Open(); // Create first tracker - CVTracker kcfTracker_1("{\"protobuf_data_path\": \"kcf_tracker.data\", \"tracker_type\": \"KCF\", \"bbox\": {\"x\": 294, \"y\": 102, \"w\": 180, \"h\": 166}}", processingController); + CVTracker kcfTracker_1("{\"protobuf_data_path\": \"kcf_tracker.data\", \"tracker_type\": \"KCF\", \"bbox\": {\"x\": 294, \"y\": 102, \"w\": 180, \"h\": 166}, \"first_frame\": 0}", processingController); // Track clip for frames 0-20 kcfTracker_1.trackClip(c1, 0, 20, true); @@ -105,7 +102,7 @@ SUITE(CVTracker_Tests) kcfTracker_1.SaveTrackedData(); // Create second tracker - CVTracker kcfTracker_2("{\"protobuf_data_path\": \"kcf_tracker.data\", \"tracker_type\": \"\", \"bbox\": {\"x\": -1, \"y\": -1, \"w\": -1, \"h\": -1}}", processingController); + CVTracker kcfTracker_2("{\"protobuf_data_path\": \"kcf_tracker.data\", \"tracker_type\": \"\", \"bbox\": {\"x\": -1, \"y\": -1, \"w\": -1, \"h\": -1}, \"first_frame\": 0}", processingController); // Load tracked data from first tracker protobuf data kcfTracker_2._LoadTrackedData(); From 2bd6bd4456c199f8032e99942afc8017c956b05a Mon Sep 17 00:00:00 2001 From: Brenno Date: Thu, 5 Nov 2020 11:17:03 -0300 Subject: [PATCH 12/14] Improved error handling for effect pre-processing --- src/CVObjectDetection.cpp | 20 +++++++++++--------- src/CVStabilization.cpp | 9 +++++++-- src/CVStabilization.h | 1 + src/CVTracker.cpp | 29 +++++++++++------------------ tests/CVStabilizer_Tests.cpp | 6 +++--- tests/CVTracker_Tests.cpp | 10 +++++----- 6 files changed, 38 insertions(+), 37 deletions(-) diff --git a/src/CVObjectDetection.cpp b/src/CVObjectDetection.cpp index 36c62863..a4e92e0d 100644 --- a/src/CVObjectDetection.cpp +++ b/src/CVObjectDetection.cpp @@ -58,7 +58,9 @@ void 
CVObjectDetection::detectObjectsClip(openshot::Clip &video, size_t _start, if(error){ return; } - + + processingController->SetError(false, ""); + // Load names of classes std::ifstream ifs(classesFile.c_str()); std::string line; @@ -380,11 +382,11 @@ void CVObjectDetection::SetJsonValue(const Json::Value root) { if (!root["protobuf_data_path"].isNull()){ protobuf_data_path = (root["protobuf_data_path"].asString()); } - if (!root["processing_device"].isNull()){ - processingDevice = (root["processing_device"].asString()); + if (!root["processing-device"].isNull()){ + processingDevice = (root["processing-device"].asString()); } - if (!root["model_configuration"].isNull()){ - modelConfiguration = (root["model_configuration"].asString()); + if (!root["model-config"].isNull()){ + modelConfiguration = (root["model-config"].asString()); std::ifstream infile(modelConfiguration); if(!infile.good()){ processingController->SetError(true, "Incorrect path to model config file"); @@ -392,8 +394,8 @@ void CVObjectDetection::SetJsonValue(const Json::Value root) { } } - if (!root["model_weights"].isNull()){ - modelWeights= (root["model_weights"].asString()); + if (!root["model-weights"].isNull()){ + modelWeights= (root["model-weights"].asString()); std::ifstream infile(modelWeights); if(!infile.good()){ processingController->SetError(true, "Incorrect path to model weight file"); @@ -401,8 +403,8 @@ void CVObjectDetection::SetJsonValue(const Json::Value root) { } } - if (!root["classes_file"].isNull()){ - classesFile = (root["classes_file"].asString()); + if (!root["class-names"].isNull()){ + classesFile = (root["class-names"].asString()); std::ifstream infile(classesFile); if(!infile.good()){ diff --git a/src/CVStabilization.cpp b/src/CVStabilization.cpp index 121bf189..6e647f19 100644 --- a/src/CVStabilization.cpp +++ b/src/CVStabilization.cpp @@ -42,6 +42,11 @@ CVStabilization::CVStabilization(std::string processInfoJson, ProcessingControll // Process clip and store necessary 
stabilization data void CVStabilization::stabilizeClip(openshot::Clip& video, size_t _start, size_t _end, bool process_interval){ + if(error){ + return; + } + processingController->SetError(false, ""); + start = _start; end = _end; // Compute max and average transformation parameters avr_dx=0; avr_dy=0; avr_da=0; max_dx=0; max_dy=0; max_da=0; @@ -364,8 +369,8 @@ void CVStabilization::SetJsonValue(const Json::Value root) { if (!root["protobuf_data_path"].isNull()){ protobuf_data_path = (root["protobuf_data_path"].asString()); } - if (!root["smoothing_window"].isNull()){ - smoothingWindow = (root["smoothing_window"].asInt()); + if (!root["smoothing-window"].isNull()){ + smoothingWindow = (root["smoothing-window"].asInt()); } } diff --git a/src/CVStabilization.h b/src/CVStabilization.h index 4c7a9449..f6502be3 100644 --- a/src/CVStabilization.h +++ b/src/CVStabilization.h @@ -101,6 +101,7 @@ class CVStabilization { std::string protobuf_data_path; uint progress; + bool error = false; /// Will handle a Thread safely comutication between ClipProcessingJobs and the processing effect classes ProcessingController *processingController; diff --git a/src/CVTracker.cpp b/src/CVTracker.cpp index 073e0771..1833c581 100644 --- a/src/CVTracker.cpp +++ b/src/CVTracker.cpp @@ -96,7 +96,6 @@ void CVTracker::trackClip(openshot::Clip& video, size_t _start, size_t _end, boo return; } - std::cout<<"Frame: "< f = video.GetFrame(frame_number); @@ -275,15 +274,15 @@ void CVTracker::SetJsonValue(const Json::Value root) { if (!root["protobuf_data_path"].isNull()){ protobuf_data_path = (root["protobuf_data_path"].asString()); } - if (!root["tracker_type"].isNull()){ - trackerType = (root["tracker_type"].asString()); + if (!root["tracker-type"].isNull()){ + trackerType = (root["tracker-type"].asString()); } - if (!root["bbox"].isNull()){ - double x = root["bbox"]["x"].asDouble(); - double y = root["bbox"]["y"].asDouble(); - double w = root["bbox"]["w"].asDouble(); - double h = 
root["bbox"]["h"].asDouble(); + if (!root["region"].isNull()){ + double x = root["region"]["x"].asDouble(); + double y = root["region"]["y"].asDouble(); + double w = root["region"]["width"].asDouble(); + double h = root["region"]["height"].asDouble(); cv::Rect2d prev_bbox(x,y,w,h); bbox = prev_bbox; } @@ -292,28 +291,22 @@ void CVTracker::SetJsonValue(const Json::Value root) { error = true; } - if(root.isMember("first_frame")){ - if (!root["first_frame"].isNull()){ - start = root["first_frame"].asInt64(); - json_interval = true; - } + if (!root["region"]["first-frame"].isNull()){ + start = root["region"]["first-frame"].asInt64(); + json_interval = true; } else{ - processingController->SetError(true, "No first_frame"); + processingController->SetError(true, "No first-frame"); error = true; } } - - /* |||||||||||||||||||||||||||||||||||||||||||||||||| ONLY FOR MAKE TEST |||||||||||||||||||||||||||||||||||||||||||||||||| */ - - // Load protobuf data file bool CVTracker::_LoadTrackedData(){ // Create tracker message diff --git a/tests/CVStabilizer_Tests.cpp b/tests/CVStabilizer_Tests.cpp index 400f74ea..2ea0ff2e 100644 --- a/tests/CVStabilizer_Tests.cpp +++ b/tests/CVStabilizer_Tests.cpp @@ -55,7 +55,7 @@ SUITE(CVStabilizer_Tests) c1.Open(); // Create stabilizer - CVStabilization stabilizer("{\"protobuf_data_path\": \"stabilizer.data\", \"smoothing_window\": 30}", processingController); + CVStabilization stabilizer("{\"protobuf_data_path\": \"stabilizer.data\", \"smoothing-window\": 30}", processingController); // Stabilize clip for frames 0-21 stabilizer.stabilizeClip(c1, 0, 21, true); @@ -93,7 +93,7 @@ SUITE(CVStabilizer_Tests) c1.Open(); // Create first stabilizer - CVStabilization stabilizer_1("{\"protobuf_data_path\": \"stabilizer.data\", \"smoothing_window\": 30}", processingController); + CVStabilization stabilizer_1("{\"protobuf_data_path\": \"stabilizer.data\", \"smoothing-window\": 30}", processingController); // Stabilize clip for frames 0-20 
stabilizer_1.stabilizeClip(c1, 0, 20+1, true); @@ -106,7 +106,7 @@ SUITE(CVStabilizer_Tests) stabilizer_1.SaveStabilizedData(); // Create second stabilizer - CVStabilization stabilizer_2("{\"protobuf_data_path\": \"stabilizer.data\", \"smoothing_window\": 30}", processingController); + CVStabilization stabilizer_2("{\"protobuf_data_path\": \"stabilizer.data\", \"smoothing-window\": 30}", processingController); // Load stabilized data from first stabilizer protobuf data stabilizer_2._LoadStabilizedData(); diff --git a/tests/CVTracker_Tests.cpp b/tests/CVTracker_Tests.cpp index 7a1dcb95..c6620208 100644 --- a/tests/CVTracker_Tests.cpp +++ b/tests/CVTracker_Tests.cpp @@ -55,7 +55,7 @@ SUITE(CVTracker_Tests) c1.Open(); // Create tracker - CVTracker kcfTracker("{\"protobuf_data_path\": \"\", \"tracker_type\": \"KCF\", \"bbox\": {\"x\": 294, \"y\": 102, \"w\": 180, \"h\": 166}, \"first_frame\": 0}", processingController); + CVTracker kcfTracker("{\"protobuf_data_path\": \"\", \"tracker-type\": \"KCF\", \"region\": {\"x\": 294, \"y\": 102, \"width\": 180, \"height\": 166, \"first-frame\": 0}}", processingController); // Track clip for frames 0-20 kcfTracker.trackClip(c1, 0, 20, true); // Get tracked data @@ -64,7 +64,7 @@ SUITE(CVTracker_Tests) float y = fd.y1; float width = fd.x2 - x; float height = fd.y2 - y; - + std::cout<<"\n\n Error: "<< processingController.GetErrorMessage() <<"\n"; // Compare if tracked data is equal to pre-tested ones CHECK_EQUAL(259, (int)(x * 640)); CHECK_EQUAL(131, (int)(y * 360)); @@ -85,7 +85,7 @@ SUITE(CVTracker_Tests) c1.Open(); // Create first tracker - CVTracker kcfTracker_1("{\"protobuf_data_path\": \"kcf_tracker.data\", \"tracker_type\": \"KCF\", \"bbox\": {\"x\": 294, \"y\": 102, \"w\": 180, \"h\": 166}, \"first_frame\": 0}", processingController); + CVTracker kcfTracker_1("{\"protobuf_data_path\": \"kcf_tracker.data\", \"tracker-type\": \"KCF\", \"region\": {\"x\": 294, \"y\": 102, \"width\": 180, \"height\": 166, \"first-frame\": 
0}}", processingController); // Track clip for frames 0-20 kcfTracker_1.trackClip(c1, 0, 20, true); @@ -102,7 +102,7 @@ SUITE(CVTracker_Tests) kcfTracker_1.SaveTrackedData(); // Create second tracker - CVTracker kcfTracker_2("{\"protobuf_data_path\": \"kcf_tracker.data\", \"tracker_type\": \"\", \"bbox\": {\"x\": -1, \"y\": -1, \"w\": -1, \"h\": -1}, \"first_frame\": 0}", processingController); + CVTracker kcfTracker_2("{\"protobuf_data_path\": \"kcf_tracker.data\", \"tracker-type\": \"\", \"region\": {\"x\": -1, \"y\": -1, \"width\": -1, \"height\": -1, \"first-frame\": 0}}", processingController); // Load tracked data from first tracker protobuf data kcfTracker_2._LoadTrackedData(); @@ -114,7 +114,7 @@ SUITE(CVTracker_Tests) float y_2 = fd_2.y1; float width_2 = fd_2.x2 - x_2; float height_2 = fd_2.y2 - y_2; - + std::cout<<"\n\n Error: "<< processingController.GetErrorMessage() <<"\n"; // Compare first tracker data with second tracker data CHECK_EQUAL((int)(x_1 * 640), (int)(x_2 * 640)); CHECK_EQUAL((int)(y_1 * 360), (int)(y_2 * 360)); From 111883e23e8456ac1c6396bf87a0585b7b87a33e Mon Sep 17 00:00:00 2001 From: Brenno Date: Thu, 5 Nov 2020 12:23:08 -0300 Subject: [PATCH 13/14] Applied code review suggestions --- .gitignore | 1 - src/CMakeLists.txt | 4 ++-- tests/CMakeLists.txt | 40 ++++++++++++++++++++------------------- tests/CVTracker_Tests.cpp | 36 ++++++++++++++++++++++++++++++++--- tests/Frame_Tests.cpp | 2 +- 5 files changed, 57 insertions(+), 26 deletions(-) diff --git a/.gitignore b/.gitignore index 09a42bc3..5d00d580 100644 --- a/.gitignore +++ b/.gitignore @@ -9,4 +9,3 @@ tags *~ -.vscode/ diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 9b338ef6..6c1d03f4 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -400,8 +400,8 @@ endif() find_package( OpenCV 4 ) if (OpenCV_FOUND) message("\nCOMPILING WITH OPENCV\n") - set(CMAKE_SWIG_FLAGS "-DUSE_OPENCV=1") - add_definitions( -DUSE_OPENCV=1 ) + list(APPEND CMAKE_SWIG_FLAGS -DUSE_OPENCV=1) + 
target_compile_definitions(openshot PUBLIC USE_OPENCV=1) else() message("\nOPENCV NOT FOUND, SOME FUNCTIONALITIES WILL BE DISABLED\n") endif() diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 419b9750..ad1d3b9b 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -95,32 +95,34 @@ set(OPENSHOT_TEST_FILES Timeline_Tests.cpp) ########## SET OPENCV RELATED TEST FILES ############### -set(OPENSHOT_CV_TEST_FILES - CVTracker_Tests.cpp - CVStabilizer_Tests.cpp - # CVObjectDetection_Tests.cpp +if(OpenCV_FOUND) + set(OPENSHOT_CV_TEST_FILES + CVTracker_Tests.cpp + CVStabilizer_Tests.cpp + # CVObjectDetection_Tests.cpp + ) + set(OPENSHOT_CV_LIBRARIES + ${OpenCV_LIBS} + ${PROTOBUF_LIBRARY} ) +endif() ################ TESTER EXECUTABLE ################# # Create unit test executable (openshot-test) message (STATUS "Tests enabled, test executable will be built as tests/openshot-test") -if (OpenCV_FOUND) - add_executable(openshot-test - tests.cpp - ${OPENSHOT_TEST_FILES} - ${OPENSHOT_CV_TEST_FILES}) - - # Link libraries to the new executable - target_link_libraries(openshot-test openshot ${UnitTest++_LIBRARIES} ${OpenCV_LIBS} ${PROTOBUF_LIBRARY}) -else() - add_executable(openshot-test - tests.cpp - ${OPENSHOT_TEST_FILES} ) +add_executable(openshot-test + tests.cpp + ${OPENSHOT_TEST_FILES} + ${OPENSHOT_CV_TEST_FILES} + ) - # Link libraries to the new executable - target_link_libraries(openshot-test openshot ${UnitTest++_LIBRARIES}) -endif() +# Link libraries to the new executable +target_link_libraries(openshot-test + openshot + ${UnitTest++_LIBRARIES} + ${OPENSHOT_CV_LIBRARIES} + ) ##### RUNNING TESTS (make os_test / make test) ##### # Hook up the 'make os_test' target to the 'openshot-test' executable diff --git a/tests/CVTracker_Tests.cpp b/tests/CVTracker_Tests.cpp index c6620208..3ec36b01 100644 --- a/tests/CVTracker_Tests.cpp +++ b/tests/CVTracker_Tests.cpp @@ -54,8 +54,18 @@ SUITE(CVTracker_Tests) openshot::Clip c1(path.str()); c1.Open(); + 
std::string proto_data = R"proto( + { + "protobuf_data_path": "", + "tracker-type": "KCF", + "region": {"x": 294, "y": 102, "width": 180, "height": 166, "first-frame": 0} + } )proto"; + // Create tracker - CVTracker kcfTracker("{\"protobuf_data_path\": \"\", \"tracker-type\": \"KCF\", \"region\": {\"x\": 294, \"y\": 102, \"width\": 180, \"height\": 166, \"first-frame\": 0}}", processingController); + //CVTracker kcfTracker("{\"protobuf_data_path\": \"\", \"tracker_type\": \"KCF\", \"bbox\": {\"x\": 294, \"y\": 102, \"w\": 180, \"h\": 166}, \"first_frame\": 0}", processingController); + CVTracker kcfTracker(proto_data, processingController); + + // Track clip for frames 0-20 kcfTracker.trackClip(c1, 0, 20, true); // Get tracked data @@ -84,8 +94,18 @@ SUITE(CVTracker_Tests) openshot::Clip c1(path.str()); c1.Open(); + std::string proto_data = R"proto( + { + "protobuf_data_path": "kcf_tracker.data", + "tracker_type": "KCF", + "bbox": {"x": 294, "y": 102, "w": 180, "h": 166}, + "first_frame": 0 + } )proto"; + // Create first tracker - CVTracker kcfTracker_1("{\"protobuf_data_path\": \"kcf_tracker.data\", \"tracker-type\": \"KCF\", \"region\": {\"x\": 294, \"y\": 102, \"width\": 180, \"height\": 166, \"first-frame\": 0}}", processingController); + //CVTracker kcfTracker_1("{\"protobuf_data_path\": \"kcf_tracker.data\", \"tracker_type\": \"KCF\", \"bbox\": {\"x\": 294, \"y\": 102, \"w\": 180, \"h\": 166}, \"first_frame\": 0}", processingController); + CVTracker kcfTracker_1(proto_data, processingController); + // Track clip for frames 0-20 kcfTracker_1.trackClip(c1, 0, 20, true); @@ -101,8 +121,18 @@ SUITE(CVTracker_Tests) // Save tracked data kcfTracker_1.SaveTrackedData(); + std::string proto_data_1 = R"proto( + { + "protobuf_data_path": "kcf_tracker.data", + "tracker_type": "", + "bbox": {"x": -1, "y": -1, "w": -1, "h": -1}, + "first_frame": 0 + } )proto"; + // Create second tracker - CVTracker kcfTracker_2("{\"protobuf_data_path\": \"kcf_tracker.data\", 
\"tracker-type\": \"\", \"region\": {\"x\": -1, \"y\": -1, \"width\": -1, \"height\": -1, \"first-frame\": 0}}", processingController); + //CVTracker kcfTracker_2("{\"protobuf_data_path\": \"kcf_tracker.data\", \"tracker_type\": \"\", \"bbox\": {\"x\": -1, \"y\": -1, \"w\": -1, \"h\": -1}, \"first_frame\": 0}", processingController); + CVTracker kcfTracker_2(proto_data_1, processingController); + // Load tracked data from first tracker protobuf data kcfTracker_2._LoadTrackedData(); diff --git a/tests/Frame_Tests.cpp b/tests/Frame_Tests.cpp index c61d7f84..e5562523 100644 --- a/tests/Frame_Tests.cpp +++ b/tests/Frame_Tests.cpp @@ -157,7 +157,7 @@ TEST(Convert_Image) c1.Open(); // Get first frame - std::shared_ptr f1 = c1.GetFrame(1); + auto f1 = c1.GetFrame(1); // Get first Mat image cv::Mat cvimage = f1->GetImageCV(); From 2d181430e7bf3cf707c8dbbe1467e6bfd853a546 Mon Sep 17 00:00:00 2001 From: Brenno Date: Thu, 5 Nov 2020 22:05:34 -0300 Subject: [PATCH 14/14] Applied code review suggestions --- tests/CVStabilizer_Tests.cpp | 20 ++++++++++++++++---- tests/CVTracker_Tests.cpp | 25 +++++++++---------------- 2 files changed, 25 insertions(+), 20 deletions(-) diff --git a/tests/CVStabilizer_Tests.cpp b/tests/CVStabilizer_Tests.cpp index 2ea0ff2e..401884d7 100644 --- a/tests/CVStabilizer_Tests.cpp +++ b/tests/CVStabilizer_Tests.cpp @@ -54,8 +54,14 @@ SUITE(CVStabilizer_Tests) openshot::Clip c1(path.str()); c1.Open(); + std::string json_data = R"proto( + { + "protobuf_data_path": "stabilizer.data", + "smoothing-window": 30 + } )proto"; + // Create stabilizer - CVStabilization stabilizer("{\"protobuf_data_path\": \"stabilizer.data\", \"smoothing-window\": 30}", processingController); + CVStabilization stabilizer(json_data, processingController); // Stabilize clip for frames 0-21 stabilizer.stabilizeClip(c1, 0, 21, true); @@ -92,8 +98,14 @@ SUITE(CVStabilizer_Tests) openshot::Clip c1(path.str()); c1.Open(); + std::string json_data = R"proto( + { + "protobuf_data_path": 
"stabilizer.data", + "smoothing-window": 30 + } )proto"; + // Create first stabilizer - CVStabilization stabilizer_1("{\"protobuf_data_path\": \"stabilizer.data\", \"smoothing-window\": 30}", processingController); + CVStabilization stabilizer_1(json_data, processingController); // Stabilize clip for frames 0-20 stabilizer_1.stabilizeClip(c1, 0, 20+1, true); @@ -106,7 +118,7 @@ SUITE(CVStabilizer_Tests) stabilizer_1.SaveStabilizedData(); // Create second stabilizer - CVStabilization stabilizer_2("{\"protobuf_data_path\": \"stabilizer.data\", \"smoothing-window\": 30}", processingController); + CVStabilization stabilizer_2(json_data, processingController); // Load stabilized data from first stabilizer protobuf data stabilizer_2._LoadStabilizedData(); @@ -114,7 +126,7 @@ SUITE(CVStabilizer_Tests) // Get stabilized data TransformParam tp_2 = stabilizer_2.GetTransformParamData(20); CamTrajectory ct_2 = stabilizer_2.GetCamTrajectoryTrackedData(20); - + // Compare first stabilizer data with second stabilizer data CHECK_EQUAL((int) (tp_1.dx * 10000), (int) (tp_2.dx *10000)); CHECK_EQUAL((int) (tp_1.dy * 10000), (int) (tp_2.dy * 10000)); diff --git a/tests/CVTracker_Tests.cpp b/tests/CVTracker_Tests.cpp index 3ec36b01..5e1bf709 100644 --- a/tests/CVTracker_Tests.cpp +++ b/tests/CVTracker_Tests.cpp @@ -54,17 +54,15 @@ SUITE(CVTracker_Tests) openshot::Clip c1(path.str()); c1.Open(); - std::string proto_data = R"proto( + std::string json_data = R"proto( { - "protobuf_data_path": "", + "protobuf_data_path": "kcf_tracker.data", "tracker-type": "KCF", "region": {"x": 294, "y": 102, "width": 180, "height": 166, "first-frame": 0} } )proto"; // Create tracker - //CVTracker kcfTracker("{\"protobuf_data_path\": \"\", \"tracker_type\": \"KCF\", \"bbox\": {\"x\": 294, \"y\": 102, \"w\": 180, \"h\": 166}, \"first_frame\": 0}", processingController); - CVTracker kcfTracker(proto_data, processingController); - + CVTracker kcfTracker(json_data, processingController); // Track clip for 
frames 0-20 kcfTracker.trackClip(c1, 0, 20, true); @@ -94,18 +92,16 @@ SUITE(CVTracker_Tests) openshot::Clip c1(path.str()); c1.Open(); - std::string proto_data = R"proto( + std::string json_data = R"proto( { "protobuf_data_path": "kcf_tracker.data", - "tracker_type": "KCF", - "bbox": {"x": 294, "y": 102, "w": 180, "h": 166}, - "first_frame": 0 + "tracker-type": "KCF", + "region": {"x": 294, "y": 102, "width": 180, "height": 166, "first-frame": 0} } )proto"; - // Create first tracker - //CVTracker kcfTracker_1("{\"protobuf_data_path\": \"kcf_tracker.data\", \"tracker_type\": \"KCF\", \"bbox\": {\"x\": 294, \"y\": 102, \"w\": 180, \"h\": 166}, \"first_frame\": 0}", processingController); - CVTracker kcfTracker_1(proto_data, processingController); + // Create first tracker + CVTracker kcfTracker_1(json_data, processingController); // Track clip for frames 0-20 kcfTracker_1.trackClip(c1, 0, 20, true); @@ -125,15 +121,12 @@ SUITE(CVTracker_Tests) { "protobuf_data_path": "kcf_tracker.data", "tracker_type": "", - "bbox": {"x": -1, "y": -1, "w": -1, "h": -1}, - "first_frame": 0 + "region": {"x": -1, "y": -1, "width": -1, "height": -1, "first-frame": 0} } )proto"; // Create second tracker - //CVTracker kcfTracker_2("{\"protobuf_data_path\": \"kcf_tracker.data\", \"tracker_type\": \"\", \"bbox\": {\"x\": -1, \"y\": -1, \"w\": -1, \"h\": -1}, \"first_frame\": 0}", processingController); CVTracker kcfTracker_2(proto_data_1, processingController); - // Load tracked data from first tracker protobuf data kcfTracker_2._LoadTrackedData();