diff --git a/.gitignore b/.gitignore index 09a42bc3..5d00d580 100644 --- a/.gitignore +++ b/.gitignore @@ -9,4 +9,3 @@ tags *~ -.vscode/ diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 8144127a..6958580b 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -35,7 +35,7 @@ if (POLICY CMP0057) endif() ############### PROFILING ################# -#set(PROFILER "/usr/lib/libprofiler.so.0.3.2") +#set(PROFILER "/usr/lib//usr/lib/libprofiler.so.0.4.5") #set(PROFILER "/usr/lib/libtcmalloc.so.4") if(CMAKE_VERSION VERSION_LESS 3.3) @@ -125,6 +125,7 @@ set(EFFECTS_SOURCES effects/Bars.cpp effects/Blur.cpp effects/Brightness.cpp + effects/Caption.cpp effects/ChromaKey.cpp effects/ColorShift.cpp effects/Crop.cpp @@ -400,8 +401,8 @@ endif() find_package( OpenCV 4 ) if (OpenCV_FOUND) message("\nCOMPILING WITH OPENCV\n") - set(CMAKE_SWIG_FLAGS "-DUSE_OPENCV=1") - add_definitions( -DUSE_OPENCV=1 ) + list(APPEND CMAKE_SWIG_FLAGS -DUSE_OPENCV=1) + target_compile_definitions(openshot PUBLIC USE_OPENCV=1) else() message("\nOPENCV NOT FOUND, SOME FUNCTIONALITIES WILL BE DISABLED\n") endif() diff --git a/src/CVObjectDetection.cpp b/src/CVObjectDetection.cpp index 1ef1d821..a4e92e0d 100644 --- a/src/CVObjectDetection.cpp +++ b/src/CVObjectDetection.cpp @@ -32,7 +32,6 @@ using namespace openshot; - CVObjectDetection::CVObjectDetection(std::string processInfoJson, ProcessingController &processingController) : processingController(&processingController), processingDevice("CPU"){ SetJson(processInfoJson); @@ -56,6 +55,12 @@ void CVObjectDetection::detectObjectsClip(openshot::Clip &video, size_t _start, video.Open(); + if(error){ + return; + } + + processingController->SetError(false, ""); + // Load names of classes std::ifstream ifs(classesFile.c_str()); std::string line; @@ -377,17 +382,36 @@ void CVObjectDetection::SetJsonValue(const Json::Value root) { if (!root["protobuf_data_path"].isNull()){ protobuf_data_path = (root["protobuf_data_path"].asString()); } - if 
(!root["processing_device"].isNull()){ - processingDevice = (root["processing_device"].asString()); + if (!root["processing-device"].isNull()){ + processingDevice = (root["processing-device"].asString()); } - if (!root["model_configuration"].isNull()){ - modelConfiguration = (root["model_configuration"].asString()); + if (!root["model-config"].isNull()){ + modelConfiguration = (root["model-config"].asString()); + std::ifstream infile(modelConfiguration); + if(!infile.good()){ + processingController->SetError(true, "Incorrect path to model config file"); + error = true; + } + } - if (!root["model_weights"].isNull()){ - modelWeights= (root["model_weights"].asString()); + if (!root["model-weights"].isNull()){ + modelWeights= (root["model-weights"].asString()); + std::ifstream infile(modelWeights); + if(!infile.good()){ + processingController->SetError(true, "Incorrect path to model weight file"); + error = true; + } + } - if (!root["classes_file"].isNull()){ - classesFile = (root["classes_file"].asString()); + if (!root["class-names"].isNull()){ + classesFile = (root["class-names"].asString()); + + std::ifstream infile(classesFile); + if(!infile.good()){ + processingController->SetError(true, "Incorrect path to class name file"); + error = true; + } + } } diff --git a/src/CVObjectDetection.h b/src/CVObjectDetection.h index 8513b405..4ca55fc6 100644 --- a/src/CVObjectDetection.h +++ b/src/CVObjectDetection.h @@ -91,6 +91,8 @@ namespace openshot size_t start; size_t end; + bool error = false; + /// Will handle a Thread safely comutication between ClipProcessingJobs and the processing effect classes ProcessingController *processingController; diff --git a/src/CVStabilization.cpp b/src/CVStabilization.cpp index 121bf189..6e647f19 100644 --- a/src/CVStabilization.cpp +++ b/src/CVStabilization.cpp @@ -42,6 +42,11 @@ CVStabilization::CVStabilization(std::string processInfoJson, ProcessingControll // Process clip and store necessary stabilization data void 
CVStabilization::stabilizeClip(openshot::Clip& video, size_t _start, size_t _end, bool process_interval){ + if(error){ + return; + } + processingController->SetError(false, ""); + start = _start; end = _end; // Compute max and average transformation parameters avr_dx=0; avr_dy=0; avr_da=0; max_dx=0; max_dy=0; max_da=0; @@ -364,8 +369,8 @@ void CVStabilization::SetJsonValue(const Json::Value root) { if (!root["protobuf_data_path"].isNull()){ protobuf_data_path = (root["protobuf_data_path"].asString()); } - if (!root["smoothing_window"].isNull()){ - smoothingWindow = (root["smoothing_window"].asInt()); + if (!root["smoothing-window"].isNull()){ + smoothingWindow = (root["smoothing-window"].asInt()); } } diff --git a/src/CVStabilization.h b/src/CVStabilization.h index 4c7a9449..f6502be3 100644 --- a/src/CVStabilization.h +++ b/src/CVStabilization.h @@ -101,6 +101,7 @@ class CVStabilization { std::string protobuf_data_path; uint progress; + bool error = false; /// Will handle a Thread safely comutication between ClipProcessingJobs and the processing effect classes ProcessingController *processingController; diff --git a/src/CVTracker.cpp b/src/CVTracker.cpp index 48e84622..1833c581 100644 --- a/src/CVTracker.cpp +++ b/src/CVTracker.cpp @@ -65,7 +65,6 @@ cv::Ptr CVTracker::selectTracker(std::string trackerType){ void CVTracker::trackClip(openshot::Clip& video, size_t _start, size_t _end, bool process_interval){ video.Open(); - if(!json_interval){ start = _start; end = _end; @@ -79,7 +78,12 @@ void CVTracker::trackClip(openshot::Clip& video, size_t _start, size_t _end, boo start = start + video.Start() * video.Reader()->info.fps.ToInt(); end = video.End() * video.Reader()->info.fps.ToInt(); } - + + if(error){ + return; + } + + processingController->SetError(false, ""); bool trackerInit = false; size_t frame; @@ -92,7 +96,6 @@ void CVTracker::trackClip(openshot::Clip& video, size_t _start, size_t _end, boo return; } - std::cout<<"Frame: "< f = 
video.GetFrame(frame_number); @@ -271,33 +274,39 @@ void CVTracker::SetJsonValue(const Json::Value root) { if (!root["protobuf_data_path"].isNull()){ protobuf_data_path = (root["protobuf_data_path"].asString()); } - if (!root["tracker_type"].isNull()){ - trackerType = (root["tracker_type"].asString()); + if (!root["tracker-type"].isNull()){ + trackerType = (root["tracker-type"].asString()); } - if (!root["bbox"].isNull()){ - double x = root["bbox"]["x"].asDouble(); - double y = root["bbox"]["y"].asDouble(); - double w = root["bbox"]["w"].asDouble(); - double h = root["bbox"]["h"].asDouble(); + + if (!root["region"].isNull()){ + double x = root["region"]["x"].asDouble(); + double y = root["region"]["y"].asDouble(); + double w = root["region"]["width"].asDouble(); + double h = root["region"]["height"].asDouble(); cv::Rect2d prev_bbox(x,y,w,h); bbox = prev_bbox; } - if (!root["first_frame"].isNull()){ - start = root["first_frame"].asInt64(); + else{ + processingController->SetError(true, "No initial bounding box selected"); + error = true; + } + + if (!root["region"]["first-frame"].isNull()){ + start = root["region"]["first-frame"].asInt64(); json_interval = true; } + else{ + processingController->SetError(true, "No first-frame"); + error = true; + } } - - /* |||||||||||||||||||||||||||||||||||||||||||||||||| ONLY FOR MAKE TEST |||||||||||||||||||||||||||||||||||||||||||||||||| */ - - // Load protobuf data file bool CVTracker::_LoadTrackedData(){ // Create tracker message diff --git a/src/CVTracker.h b/src/CVTracker.h index 7006263d..8ea72371 100644 --- a/src/CVTracker.h +++ b/src/CVTracker.h @@ -103,10 +103,12 @@ namespace openshot /// Will handle a Thread safely comutication between ClipProcessingJobs and the processing effect classes ProcessingController *processingController; - + bool json_interval; size_t start; size_t end; + + bool error = false; // Initialize the tracker bool initTracker(cv::Mat &frame, size_t frameId); diff --git a/src/ClipProcessingJobs.cpp 
b/src/ClipProcessingJobs.cpp index 6a4b6689..fb64fd78 100644 --- a/src/ClipProcessingJobs.cpp +++ b/src/ClipProcessingJobs.cpp @@ -5,7 +5,8 @@ ClipProcessingJobs::ClipProcessingJobs(std::string processingType, std::string p processingType(processingType), processInfoJson(processInfoJson){ } -void ClipProcessingJobs::processClip(Clip& clip){ +void ClipProcessingJobs::processClip(Clip& clip, std::string json){ + processInfoJson = json; // Process clip and save processed data if(processingType == "Stabilizer"){ @@ -83,11 +84,13 @@ void ClipProcessingJobs::stabilizeClip(Clip& clip, ProcessingController& control } } +// Get processing progress while iterating on the clip int ClipProcessingJobs::GetProgress(){ return (int)processingController.GetProgress(); } +// Check if processing finished bool ClipProcessingJobs::IsDone(){ if(processingController.GetFinished()){ @@ -96,6 +99,17 @@ bool ClipProcessingJobs::IsDone(){ return processingController.GetFinished(); } +// stop preprocessing before finishing it void ClipProcessingJobs::CancelProcessing(){ processingController.CancelProcessing(); +} + +// check if there is an error with the config +bool ClipProcessingJobs::GetError(){ + return processingController.GetError(); +} + +// get the error message +std::string ClipProcessingJobs::GetErrorMessage(){ + return processingController.GetErrorMessage(); } \ No newline at end of file diff --git a/src/ClipProcessingJobs.h b/src/ClipProcessingJobs.h index 835b2911..2a34d46e 100644 --- a/src/ClipProcessingJobs.h +++ b/src/ClipProcessingJobs.h @@ -75,11 +75,14 @@ class ClipProcessingJobs{ // Constructor ClipProcessingJobs(std::string processingType, std::string processInfoJson); // Process clip accordingly to processingType - void processClip(Clip& clip); + void processClip(Clip& clip, std::string json); // Thread related variables and methods int GetProgress(); bool IsDone(); void CancelProcessing(); + bool GetError(); + std::string GetErrorMessage(); + }; \ No newline at end of 
file diff --git a/src/EffectInfo.cpp b/src/EffectInfo.cpp index 2bbb3aca..fb292dde 100644 --- a/src/EffectInfo.cpp +++ b/src/EffectInfo.cpp @@ -52,6 +52,9 @@ EffectBase* EffectInfo::CreateEffect(std::string effect_type) { else if (effect_type == "Brightness") return new Brightness(); + else if (effect_type == "Caption") + return new Caption(); + else if (effect_type == "ChromaKey") return new ChromaKey(); @@ -109,6 +112,7 @@ Json::Value EffectInfo::JsonValue() { root.append(Bars().JsonInfo()); root.append(Blur().JsonInfo()); root.append(Brightness().JsonInfo()); + root.append(Caption().JsonInfo()); root.append(ChromaKey().JsonInfo()); root.append(ColorShift().JsonInfo()); root.append(Crop().JsonInfo()); diff --git a/src/Effects.h b/src/Effects.h index 73bf0272..e4abc958 100644 --- a/src/Effects.h +++ b/src/Effects.h @@ -35,6 +35,7 @@ #include "effects/Bars.h" #include "effects/Blur.h" #include "effects/Brightness.h" +#include "effects/Caption.h" #include "effects/ChromaKey.h" #include "effects/ColorShift.h" #include "effects/Crop.h" diff --git a/src/Frame.cpp b/src/Frame.cpp index b32a7d20..e25aa5ad 100644 --- a/src/Frame.cpp +++ b/src/Frame.cpp @@ -968,14 +968,14 @@ cv::Mat Frame::GetImageCV() std::shared_ptr Frame::Mat2Qimage(cv::Mat img){ cv::cvtColor(img, img, cv::COLOR_BGR2RGB); - QImage qimg((uchar*) img.data, img.cols, img.rows, img.step, QImage::Format_RGBA8888_Premultiplied); + QImage qimg((uchar*) img.data, img.cols, img.rows, img.step, QImage::Format_RGB888); std::shared_ptr imgIn = std::make_shared(qimg.copy()); // Always convert to RGBA8888 (if different) if (imgIn->format() != QImage::Format_RGBA8888_Premultiplied) *imgIn = imgIn->convertToFormat(QImage::Format_RGBA8888_Premultiplied); - + return imgIn; } diff --git a/src/ProcessingController.h b/src/ProcessingController.h index 6071ee88..98c77888 100644 --- a/src/ProcessingController.h +++ b/src/ProcessingController.h @@ -41,10 +41,13 @@ class ProcessingController{ uint processingProgress; bool 
processingFinished; bool stopProcessing; + bool error = true; + std::string error_message; std::mutex mtxProgress; std::mutex mtxFinished; std::mutex mtxStop; + std::mutex mtxerror; public: @@ -87,6 +90,24 @@ class ProcessingController{ return s; } + void SetError(bool err, std::string message){ + std::lock_guard lck (mtxerror); + error = err; + error_message = message; + } + + bool GetError(){ + std::lock_guard lck (mtxerror); + bool e = error; + return e; + } + + std::string GetErrorMessage(){ + std::lock_guard lck (mtxerror); + std::string message = error_message; + return message; + } + }; #endif \ No newline at end of file diff --git a/src/Qt/PlayerPrivate.cpp b/src/Qt/PlayerPrivate.cpp index 75052fc3..7a3943c7 100644 --- a/src/Qt/PlayerPrivate.cpp +++ b/src/Qt/PlayerPrivate.cpp @@ -195,10 +195,10 @@ namespace openshot // Stop video/audio playback void PlayerPrivate::stopPlayback(int timeOutMilliseconds) { - if (isThreadRunning()) stopThread(timeOutMilliseconds); if (audioPlayback->isThreadRunning() && reader->info.has_audio) audioPlayback->stopThread(timeOutMilliseconds); if (videoCache->isThreadRunning() && reader->info.has_video) videoCache->stopThread(timeOutMilliseconds); if (videoPlayback->isThreadRunning() && reader->info.has_video) videoPlayback->stopThread(timeOutMilliseconds); + if (isThreadRunning()) stopThread(timeOutMilliseconds); } } diff --git a/src/Qt/VideoCacheThread.cpp b/src/Qt/VideoCacheThread.cpp index e1e53f5d..0cf76ef0 100644 --- a/src/Qt/VideoCacheThread.cpp +++ b/src/Qt/VideoCacheThread.cpp @@ -93,43 +93,43 @@ namespace openshot while (!threadShouldExit() && is_playing) { - // Cache frames before the other threads need them - // Cache frames up to the max frames. Reset to current position - // if cache gets too far away from display frame. Cache frames - // even when player is paused (i.e. speed 0). - while ((position - current_display_frame) < max_frames) - { - // Only cache up till the max_frames amount... 
then sleep - try + // Cache frames before the other threads need them + // Cache frames up to the max frames. Reset to current position + // if cache gets too far away from display frame. Cache frames + // even when player is paused (i.e. speed 0). + while (((position - current_display_frame) < max_frames) && is_playing) { - if (reader) { - ZmqLogger::Instance()->AppendDebugMethod("VideoCacheThread::run (cache frame)", "position", position, "current_display_frame", current_display_frame, "max_frames", max_frames, "needed_frames", (position - current_display_frame)); + // Only cache up till the max_frames amount... then sleep + try + { + if (reader) { + ZmqLogger::Instance()->AppendDebugMethod("VideoCacheThread::run (cache frame)", "position", position, "current_display_frame", current_display_frame, "max_frames", max_frames, "needed_frames", (position - current_display_frame)); - // Force the frame to be generated - if (reader->GetCache()->GetSmallestFrame()) { - int64_t smallest_cached_frame = reader->GetCache()->GetSmallestFrame()->number; - if (smallest_cached_frame > current_display_frame) { - // Cache position has gotten too far away from current display frame. - // Reset the position to the current display frame. - position = current_display_frame; + // Force the frame to be generated + if (reader->GetCache()->GetSmallestFrame()) { + int64_t smallest_cached_frame = reader->GetCache()->GetSmallestFrame()->number; + if (smallest_cached_frame > current_display_frame) { + // Cache position has gotten too far away from current display frame. + // Reset the position to the current display frame. 
+ position = current_display_frame; + } } + reader->GetFrame(position); } - reader->GetFrame(position); + + } + catch (const OutOfBoundsFrame & e) + { + // Ignore out of bounds frame exceptions } - } - catch (const OutOfBoundsFrame & e) - { - // Ignore out of bounds frame exceptions + // Increment frame number + position++; } - // Increment frame number - position++; - } - - // Sleep for 1 frame length - std::this_thread::sleep_for(frame_duration); - } + // Sleep for 1 frame length + std::this_thread::sleep_for(frame_duration); + } return; } diff --git a/src/Settings.cpp b/src/Settings.cpp index 879be449..9cf16fbb 100644 --- a/src/Settings.cpp +++ b/src/Settings.cpp @@ -34,14 +34,14 @@ using namespace std; using namespace openshot; -// Global reference to logger +// Global reference to Settings Settings *Settings::m_pInstance = NULL; -// Create or Get an instance of the logger singleton +// Create or Get an instance of the settings singleton Settings *Settings::Instance() { if (!m_pInstance) { - // Create the actual instance of logger only once + // Create the actual instance of Settings only once m_pInstance = new Settings; m_pInstance->HARDWARE_DECODER = 0; m_pInstance->HIGH_QUALITY_SCALING = false; @@ -53,6 +53,7 @@ Settings *Settings::Instance() m_pInstance->HW_DE_DEVICE_SET = 0; m_pInstance->HW_EN_DEVICE_SET = 0; m_pInstance->PLAYBACK_AUDIO_DEVICE_NAME = ""; + m_pInstance->DEBUG_TO_STDERR = false; } return m_pInstance; diff --git a/src/Settings.h b/src/Settings.h index 4b12c612..da85b89e 100644 --- a/src/Settings.h +++ b/src/Settings.h @@ -127,6 +127,9 @@ namespace openshot { /// The current install path of OpenShot (needs to be set when using Timeline(path), since certain /// paths depend on the location of OpenShot transitions and files) std::string PATH_OPENSHOT_INSTALL = ""; + + /// Whether to dump ZeroMQ debug messages to stderr + bool DEBUG_TO_STDERR = false; /// Create or get an instance of this logger singleton (invoke the class with this method) 
static Settings * Instance(); diff --git a/src/ZmqLogger.cpp b/src/ZmqLogger.cpp index 8d72f352..e19b20e3 100644 --- a/src/ZmqLogger.cpp +++ b/src/ZmqLogger.cpp @@ -34,8 +34,8 @@ #include "ResvgQt.h" #endif -using namespace std; using namespace openshot; + #include #include #include @@ -70,7 +70,6 @@ ZmqLogger *ZmqLogger::Instance() // This can only happen 1 time or it will crash ResvgRenderer::initLog(); #endif - } return m_pInstance; @@ -80,7 +79,7 @@ ZmqLogger *ZmqLogger::Instance() void ZmqLogger::Connection(std::string new_connection) { // Create a scoped lock, allowing only a single thread to run the following code at one time - const GenericScopedLock lock(loggerCriticalSection); + const juce::GenericScopedLock lock(loggerCriticalSection); // Does anything need to happen? if (new_connection == connection) @@ -124,7 +123,7 @@ void ZmqLogger::Log(std::string message) return; // Create a scoped lock, allowing only a single thread to run the following code at one time - const GenericScopedLock lock(loggerCriticalSection); + const juce::GenericScopedLock lock(loggerCriticalSection); // Send message over socket (ZeroMQ) zmq::message_t reply (message.length()); @@ -195,19 +194,20 @@ void ZmqLogger::AppendDebugMethod(std::string method_name, std::string arg5_name, float arg5_value, std::string arg6_name, float arg6_value) { - if (!enabled) + if (!enabled && !openshot::Settings::Instance()->DEBUG_TO_STDERR) // Don't do anything return; { // Create a scoped lock, allowing only a single thread to run the following code at one time - const GenericScopedLock lock(loggerCriticalSection); + const juce::GenericScopedLock lock(loggerCriticalSection); std::stringstream message; message << std::fixed << std::setprecision(4); + + // Construct message message << method_name << " ("; - // Add attributes to method JSON if (arg1_name.length() > 0) message << arg1_name << "=" << arg1_value; @@ -226,10 +226,16 @@ void ZmqLogger::AppendDebugMethod(std::string method_name, if 
(arg6_name.length() > 0) message << ", " << arg6_name << "=" << arg6_value; - // Output to standard output - message << ")" << endl; + message << ")" << std::endl; - // Send message through ZMQ - Log(message.str()); + if (openshot::Settings::Instance()->DEBUG_TO_STDERR) { + // Print message to stderr + std::clog << message.str(); + } + + if (enabled) { + // Send message through ZMQ + Log(message.str()); + } } } diff --git a/src/ZmqLogger.h b/src/ZmqLogger.h index af1eea6f..deb87e35 100644 --- a/src/ZmqLogger.h +++ b/src/ZmqLogger.h @@ -43,6 +43,7 @@ #include #include #include "JuceHeader.h" +#include "Settings.h" namespace openshot { @@ -70,17 +71,17 @@ namespace openshot { zmq::socket_t *publisher; /// Default constructor - ZmqLogger(){}; // Don't allow user to create an instance of this singleton + ZmqLogger(){}; // Don't allow user to create an instance of this singleton #if __GNUC__ >=7 /// Default copy method - ZmqLogger(ZmqLogger const&) = delete; // Don't allow the user to assign this instance + ZmqLogger(ZmqLogger const&) = delete; // Don't allow the user to assign this instance /// Default assignment operator ZmqLogger & operator=(ZmqLogger const&) = delete; // Don't allow the user to assign this instance #else /// Default copy method - ZmqLogger(ZmqLogger const&) {}; // Don't allow the user to assign this instance + ZmqLogger(ZmqLogger const&) {}; // Don't allow the user to assign this instance /// Default assignment operator ZmqLogger & operator=(ZmqLogger const&); // Don't allow the user to assign this instance @@ -94,13 +95,15 @@ namespace openshot { static ZmqLogger * Instance(); /// Append debug information - void AppendDebugMethod(std::string method_name, - std::string arg1_name="", float arg1_value=-1.0, - std::string arg2_name="", float arg2_value=-1.0, - std::string arg3_name="", float arg3_value=-1.0, - std::string arg4_name="", float arg4_value=-1.0, - std::string arg5_name="", float arg5_value=-1.0, - std::string arg6_name="", float 
arg6_value=-1.0); + void AppendDebugMethod( + std::string method_name, + std::string arg1_name="", float arg1_value=-1.0, + std::string arg2_name="", float arg2_value=-1.0, + std::string arg3_name="", float arg3_value=-1.0, + std::string arg4_name="", float arg4_value=-1.0, + std::string arg5_name="", float arg5_value=-1.0, + std::string arg6_name="", float arg6_value=-1.0 + ); /// Close logger (sockets and/or files) void Close(); diff --git a/src/effects/Caption.cpp b/src/effects/Caption.cpp new file mode 100644 index 00000000..b9ce2249 --- /dev/null +++ b/src/effects/Caption.cpp @@ -0,0 +1,427 @@ +/** + * @file + * @brief Source file for Caption effect class + * @author Jonathan Thomas + * + * @ref License + */ + +/* LICENSE + * + * Copyright (c) 2008-2019 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . 
+ */ + +#include "Caption.h" +#include "../Clip.h" +#include "../Timeline.h" + +using namespace openshot; + +/// Blank constructor, useful when using Json to load the effect properties +Caption::Caption() : color("#ffffff"), stroke("#a9a9a9"), background("#ff000000"), background_alpha(0.0), left(0.25), top(0.7), right(0.1), + stroke_width(0.5), font_size(30.0), font_alpha(1.0), is_dirty(true), font_name("sans"), font(NULL), metrics(NULL), + fade_in(0.35), fade_out(0.35), background_corner(10.0), background_padding(20.0) +{ + // Init effect properties + init_effect_details(); +} + +// Default constructor +Caption::Caption(std::string captions) : + color("#ffffff"), caption_text(captions), stroke("#a9a9a9"), background("#ff000000"), background_alpha(0.0), + left(0.25), top(0.7), right(0.1), stroke_width(0.5), font_size(30.0), font_alpha(1.0), is_dirty(true), font_name("sans"), + font(NULL), metrics(NULL), fade_in(0.35), fade_out(0.35), background_corner(10.0), background_padding(20.0) +{ + // Init effect properties + init_effect_details(); +} + +// Init effect settings +void Caption::init_effect_details() +{ + /// Initialize the values of the EffectInfo struct. 
+ InitEffectInfo(); + + /// Set the effect info + info.class_name = "Caption"; + info.name = "Caption"; + info.description = "Add text captions on top of your video."; + info.has_audio = false; + info.has_video = true; + + // Init placeholder caption (for demo) + if (caption_text.length() == 0) { + caption_text = "00:00:00:000 --> 00:10:00:000\nEdit this caption with our caption editor"; + } +} + +// Set the caption string to use (see VTT format) +std::string Caption::CaptionText() { + return caption_text; +} + +// Get the caption string +void Caption::CaptionText(std::string new_caption_text) { + caption_text = new_caption_text; + is_dirty = true; +} + +// Process regex string only when dirty +void Caption::process_regex() { + if (is_dirty) { + is_dirty = false; + + // Clear existing matches + matchedCaptions.clear(); + + QString caption_prepared = QString(caption_text.c_str()); + if (caption_prepared.endsWith("\n\n") == false) { + // We need a couple line ends at the end of the caption string (for our regex to work correctly) + caption_prepared.append("\n\n"); + } + + // Parse regex and find all matches + QRegularExpression allPathsRegex(QStringLiteral("(\\d{2})?:*(\\d{2}):(\\d{2}).(\\d{2,3})\\s*-->\\s*(\\d{2})?:*(\\d{2}):(\\d{2}).(\\d{2,3})([\\s\\S]*?)\\n(.*?)(?=\\n\\d{2,3}|\\Z)"), QRegularExpression::MultilineOption); + QRegularExpressionMatchIterator i = allPathsRegex.globalMatch(caption_prepared); + while (i.hasNext()) { + QRegularExpressionMatch match = i.next(); + if (match.hasMatch()) { + // Push all match objects into a vector (so we can reverse them later) + matchedCaptions.push_back(match); + } + } + } +} + +// This method is required for all derived classes of EffectBase, and returns a +// modified openshot::Frame object +std::shared_ptr Caption::GetFrame(std::shared_ptr frame, int64_t frame_number) +{ + // Process regex (if needed) + process_regex(); + + // Get the Clip and Timeline pointers (if available) + Clip* clip = (Clip*) ParentClip(); + 
Timeline* timeline = NULL; + Fraction fps; + double scale_factor = 1.0; // amount of scaling needed for text (based on preview window size) + if (clip->ParentTimeline() != NULL) { + timeline = (Timeline*) clip->ParentTimeline(); + } else if (this->ParentTimeline() != NULL) { + timeline = (Timeline*) this->ParentTimeline(); + } + + // Get the FPS from the parent object (Timeline or Clip's Reader) + if (timeline != NULL) { + fps.num = timeline->info.fps.num; + fps.den = timeline->info.fps.den; + // preview window is sometimes smaller/larger than the timeline size + scale_factor = (double) timeline->preview_width / (double) timeline->info.width; + } else if (clip != NULL && clip->Reader() != NULL) { + fps.num = clip->Reader()->info.fps.num; + fps.den = clip->Reader()->info.fps.den; + scale_factor = 1.0; + } + + // Get the frame's image + std::shared_ptr frame_image = frame->GetImage(); + + // Load timeline's new frame image into a QPainter + QPainter painter(frame_image.get()); + painter.setRenderHints(QPainter::Antialiasing | QPainter::SmoothPixmapTransform | QPainter::TextAntialiasing, true); + + // Composite a new layer onto the image + painter.setCompositionMode(QPainter::CompositionMode_SourceOver); + + // Font options and metrics for caption text + double font_size_value = font_size.GetValue(frame_number) * scale_factor; + QFont font(QString(font_name.c_str()), int(font_size_value)); + font.setPointSizeF(std::max(font_size_value, 1.0)); + QFontMetricsF metrics = QFontMetricsF(font); + + // Get current keyframe values + double left_value = left.GetValue(frame_number); + double top_value = top.GetValue(frame_number); + double fade_in_value = fade_in.GetValue(frame_number) * fps.ToDouble(); + double fade_out_value = fade_out.GetValue(frame_number) * fps.ToDouble(); + double right_value = right.GetValue(frame_number); + double background_corner_value = background_corner.GetValue(frame_number); + double padding_value = background_padding.GetValue(frame_number); + + 
// Calculate caption area (based on left, top, and right margin) + double left_margin_x = frame_image->width() * left_value; + double starting_y = (frame_image->height() * top_value) + (metrics.lineSpacing() * scale_factor); + double right_margin_x = frame_image->width() - (frame_image->width() * right_value); + double caption_area_width = right_margin_x - left_margin_x; + QRectF caption_area = QRectF(left_margin_x, starting_y, caption_area_width, frame_image->height()); + QRectF caption_area_with_padding = QRectF(left_margin_x - (padding_value / 2.0), starting_y - (padding_value / 2.0), caption_area_width + padding_value, frame_image->height() + padding_value); + + // Set background color of caption + QBrush brush; + QColor background_qcolor = QColor(QString(background.GetColorHex(frame_number).c_str())); + background_qcolor.setAlphaF(background_alpha.GetValue(frame_number)); + brush.setColor(background_qcolor); + brush.setStyle(Qt::SolidPattern); + painter.setBrush(brush); + painter.setPen(Qt::NoPen); + painter.drawRoundedRect(caption_area_with_padding, background_corner_value, background_corner_value); + + // Set text color of caption + QPen pen; + QColor stroke_qcolor; + if (stroke_width.GetValue(frame_number) <= 0.0) { + // No stroke + painter.setPen(Qt::NoPen); + } else { + // Stroke color + stroke_qcolor = QColor(QString(stroke.GetColorHex(frame_number).c_str())); + stroke_qcolor.setAlphaF(font_alpha.GetValue(frame_number)); + pen.setColor(stroke_qcolor); + pen.setWidthF(stroke_width.GetValue(frame_number) * scale_factor); + painter.setPen(pen); + } + // Fill color of text + QColor font_qcolor = QColor(QString(color.GetColorHex(frame_number).c_str())); + font_qcolor.setAlphaF(font_alpha.GetValue(frame_number)); + brush.setColor(font_qcolor); + painter.setBrush(brush); + + // Loop through matches and find text to display (if any) + for (auto match = matchedCaptions.begin(); match != matchedCaptions.end(); match++) { + + // Build timestamp (00:00:04.000 --> 
00:00:06.500) + int64_t start_frame = ((match->captured(1).toFloat() * 60.0 * 60.0 ) + (match->captured(2).toFloat() * 60.0 ) + + match->captured(3).toFloat() + (match->captured(4).toFloat() / 1000.0)) * fps.ToFloat(); + int64_t end_frame = ((match->captured(5).toFloat() * 60.0 * 60.0 ) + (match->captured(6).toFloat() * 60.0 ) + + match->captured(7).toFloat() + (match->captured(8).toFloat() / 1000.0)) * fps.ToFloat(); + + // Split multiple lines into separate paths + QStringList lines = match->captured(9).split("\n"); + for(int index = 0; index < lines.length(); index++) { + // Multi-line + QString line = lines[index]; + // Ignore lines that start with NOTE, or are <= 1 char long + if (!line.startsWith(QStringLiteral("NOTE")) && + !line.isEmpty() && frame_number >= start_frame && frame_number <= end_frame && + line.length() > 1) { + + // Calculate fade in/out ranges + double fade_in_percentage = ((float) frame_number - (float) start_frame) / fade_in_value; + double fade_out_percentage = 1.0 - (((float) frame_number - ((float) end_frame - fade_out_value)) / fade_out_value); + if (fade_in_percentage < 1.0) { + // Fade in + font_qcolor.setAlphaF(fade_in_percentage * font_alpha.GetValue(frame_number)); + stroke_qcolor.setAlphaF(fade_in_percentage * font_alpha.GetValue(frame_number)); + } else if (fade_out_percentage >= 0.0 && fade_out_percentage <= 1.0) { + // Fade out + font_qcolor.setAlphaF(fade_out_percentage * font_alpha.GetValue(frame_number)); + stroke_qcolor.setAlphaF(fade_out_percentage * font_alpha.GetValue(frame_number)); + } + pen.setColor(stroke_qcolor); + brush.setColor(font_qcolor); + painter.setPen(pen); + painter.setBrush(brush); + + // Loop through words, and find word-wrap boundaries + QStringList words = line.split(" "); + int words_remaining = words.length(); + while (words_remaining > 0) { + bool words_displayed = false; + for(int word_index = words.length(); word_index > 0; word_index--) { + // Current matched caption string (from the beginning to 
the current word index) + QString fitting_line = words.mid(0, word_index).join(" "); + + // Calculate size of text + QRectF textRect = metrics.boundingRect(caption_area, Qt::TextSingleLine, fitting_line); + if (textRect.width() <= caption_area.width()) { + // Location for text + QPoint p(left_margin_x, starting_y); + + // Draw text onto path (for correct border and fill) + QPainterPath path1; + QString fitting_line = words.mid(0, word_index).join(" "); + path1.addText(p, font, fitting_line); + painter.drawPath(path1); + + // Increment QPoint to height of text (for next line) + padding + starting_y += path1.boundingRect().height() + (metrics.lineSpacing() * scale_factor); + + // Update line (to remove words already drawn + words = words.mid(word_index, words.length()); + words_remaining = words.length(); + words_displayed = true; + break; + } + } + + if (words_displayed == false) { + // Exit loop if no words displayed + words_remaining = 0; + } + } + + } + } + } + + // End painter + painter.end(); + + // return the modified frame + return frame; +} + +// Generate JSON string of this object +std::string Caption::Json() const { + + // Return formatted string + return JsonValue().toStyledString(); +} + +// Generate Json::Value for this object +Json::Value Caption::JsonValue() const { + + // Create root json object + Json::Value root = EffectBase::JsonValue(); // get parent properties + root["type"] = info.class_name; + root["color"] = color.JsonValue(); + root["stroke"] = stroke.JsonValue(); + root["background"] = background.JsonValue(); + root["background_alpha"] = background_alpha.JsonValue(); + root["background_corner"] = background_corner.JsonValue(); + root["background_padding"] = background_padding.JsonValue(); + root["stroke_width"] = stroke_width.JsonValue(); + root["font_size"] = font_size.JsonValue(); + root["font_alpha"] = font_alpha.JsonValue(); + root["fade_in"] = fade_in.JsonValue(); + root["fade_out"] = fade_out.JsonValue(); + root["left"] = 
left.JsonValue(); + root["top"] = top.JsonValue(); + root["right"] = right.JsonValue(); + root["caption_text"] = caption_text; + root["caption_font"] = font_name; + + // return JsonValue + return root; +} + +// Load JSON string into this object +void Caption::SetJson(const std::string value) { + + // Parse JSON string into JSON objects + try + { + const Json::Value root = openshot::stringToJson(value); + // Set all values that match + SetJsonValue(root); + } + catch (const std::exception& e) + { + // Error parsing JSON (or missing keys) + throw InvalidJSON("JSON is invalid (missing keys or invalid data types)"); + } +} + +// Load Json::Value into this object +void Caption::SetJsonValue(const Json::Value root) { + + // Set parent data + EffectBase::SetJsonValue(root); + + // Set data from Json (if key is found) + if (!root["color"].isNull()) + color.SetJsonValue(root["color"]); + if (!root["stroke"].isNull()) + stroke.SetJsonValue(root["stroke"]); + if (!root["background"].isNull()) + background.SetJsonValue(root["background"]); + if (!root["background_alpha"].isNull()) + background_alpha.SetJsonValue(root["background_alpha"]); + if (!root["background_corner"].isNull()) + background_corner.SetJsonValue(root["background_corner"]); + if (!root["background_padding"].isNull()) + background_padding.SetJsonValue(root["background_padding"]); + if (!root["stroke_width"].isNull()) + stroke_width.SetJsonValue(root["stroke_width"]); + if (!root["font_size"].isNull()) + font_size.SetJsonValue(root["font_size"]); + if (!root["font_alpha"].isNull()) + font_alpha.SetJsonValue(root["font_alpha"]); + if (!root["fade_in"].isNull()) + fade_in.SetJsonValue(root["fade_in"]); + if (!root["fade_out"].isNull()) + fade_out.SetJsonValue(root["fade_out"]); + if (!root["left"].isNull()) + left.SetJsonValue(root["left"]); + if (!root["top"].isNull()) + top.SetJsonValue(root["top"]); + if (!root["right"].isNull()) + right.SetJsonValue(root["right"]); + if (!root["caption_text"].isNull()) + 
caption_text = root["caption_text"].asString(); + if (!root["caption_font"].isNull()) + font_name = root["caption_font"].asString(); + + // Mark effect as dirty to reparse Regex + is_dirty = true; +} + +// Get all properties for a specific frame +std::string Caption::PropertiesJSON(int64_t requested_frame) const { + + // Generate JSON properties list + Json::Value root; + root["id"] = add_property_json("ID", 0.0, "string", Id(), NULL, -1, -1, true, requested_frame); + root["position"] = add_property_json("Position", Position(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); + root["layer"] = add_property_json("Track", Layer(), "int", "", NULL, 0, 20, false, requested_frame); + root["start"] = add_property_json("Start", Start(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); + root["end"] = add_property_json("End", End(), "float", "", NULL, 0, 1000 * 60 * 30, false, requested_frame); + root["duration"] = add_property_json("Duration", Duration(), "float", "", NULL, 0, 1000 * 60 * 30, true, requested_frame); + + // Keyframes + root["color"] = add_property_json("Color", 0.0, "color", "", NULL, 0, 255, false, requested_frame); + root["color"]["red"] = add_property_json("Red", color.red.GetValue(requested_frame), "float", "", &color.red, 0, 255, false, requested_frame); + root["color"]["blue"] = add_property_json("Blue", color.blue.GetValue(requested_frame), "float", "", &color.blue, 0, 255, false, requested_frame); + root["color"]["green"] = add_property_json("Green", color.green.GetValue(requested_frame), "float", "", &color.green, 0, 255, false, requested_frame); + root["stroke"] = add_property_json("Border", 0.0, "color", "", NULL, 0, 255, false, requested_frame); + root["stroke"]["red"] = add_property_json("Red", stroke.red.GetValue(requested_frame), "float", "", &stroke.red, 0, 255, false, requested_frame); + root["stroke"]["blue"] = add_property_json("Blue", stroke.blue.GetValue(requested_frame), "float", "", &stroke.blue, 0, 255, 
false, requested_frame); + root["stroke"]["green"] = add_property_json("Green", stroke.green.GetValue(requested_frame), "float", "", &stroke.green, 0, 255, false, requested_frame); + root["background_alpha"] = add_property_json("Background Alpha", background_alpha.GetValue(requested_frame), "float", "", &background_alpha, 0.0, 1.0, false, requested_frame); + root["background_corner"] = add_property_json("Background Corner Radius", background_corner.GetValue(requested_frame), "float", "", &background_corner, 0.0, 60.0, false, requested_frame); + root["background_padding"] = add_property_json("Background Padding", background_padding.GetValue(requested_frame), "float", "", &background_padding, 0.0, 60.0, false, requested_frame); + root["background"] = add_property_json("Background", 0.0, "color", "", NULL, 0, 255, false, requested_frame); + root["background"]["red"] = add_property_json("Red", background.red.GetValue(requested_frame), "float", "", &background.red, 0, 255, false, requested_frame); + root["background"]["blue"] = add_property_json("Blue", background.blue.GetValue(requested_frame), "float", "", &background.blue, 0, 255, false, requested_frame); + root["background"]["green"] = add_property_json("Green", background.green.GetValue(requested_frame), "float", "", &background.green, 0, 255, false, requested_frame); + root["stroke_width"] = add_property_json("Stroke Width", stroke_width.GetValue(requested_frame), "float", "", &stroke_width, 0, 10.0, false, requested_frame); + root["font_size"] = add_property_json("Font Size", font_size.GetValue(requested_frame), "float", "", &font_size, 0, 200.0, false, requested_frame); + root["font_alpha"] = add_property_json("Font Alpha", font_alpha.GetValue(requested_frame), "float", "", &font_alpha, 0.0, 1.0, false, requested_frame); + root["fade_in"] = add_property_json("Fade In (Seconds)", fade_in.GetValue(requested_frame), "float", "", &fade_in, 0.0, 3.0, false, requested_frame); + root["fade_out"] = 
add_property_json("Fade Out (Seconds)", fade_out.GetValue(requested_frame), "float", "", &fade_out, 0.0, 3.0, false, requested_frame); + root["left"] = add_property_json("Left Size", left.GetValue(requested_frame), "float", "", &left, 0.0, 0.5, false, requested_frame); + root["top"] = add_property_json("Top Size", top.GetValue(requested_frame), "float", "", &top, 0.0, 1.0, false, requested_frame); + root["right"] = add_property_json("Right Size", right.GetValue(requested_frame), "float", "", &right, 0.0, 0.5, false, requested_frame); + root["caption_text"] = add_property_json("Captions", 0.0, "caption", caption_text, NULL, -1, -1, false, requested_frame); + root["caption_font"] = add_property_json("Font", 0.0, "font", font_name, NULL, -1, -1, false, requested_frame); + + // Return formatted string + return root.toStyledString(); +} diff --git a/src/effects/Caption.h b/src/effects/Caption.h new file mode 100644 index 00000000..749b2c17 --- /dev/null +++ b/src/effects/Caption.h @@ -0,0 +1,132 @@ +/** + * @file + * @brief Header file for Caption effect class + * @author Jonathan Thomas + * + * @ref License + */ + +/* LICENSE + * + * Copyright (c) 2008-2019 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public License
+ * along with OpenShot Library. If not, see .
+ */
+
+#ifndef OPENSHOT_CAPTION_EFFECT_H
+#define OPENSHOT_CAPTION_EFFECT_H
+
+#include
+#include
+#include
+#include
+#include "../Color.h"
+#include "../EffectBase.h"
+#include "../Fraction.h"
+#include "../Json.h"
+#include "../KeyFrame.h"
+
+
+
+namespace openshot
+{
+
+ /**
+ * @brief This class adds captions/text over a video, based on timestamps. You can also animate some limited
+ * aspects, such as words appearing/disappearing.
+ *
+ * Adding captions can be an easy way to generate text overlays through-out a long clip.
+ */
+ class Caption : public EffectBase
+ {
+ private:
+ std::vector matchedCaptions; ///< RegEx to capture cues and text
+ std::string caption_text; ///< Text of caption
+ QFontMetrics* metrics; ///< Font metrics object
+ QFont* font; ///< QFont object
+ bool is_dirty;
+
+ /// Init effect settings
+ void init_effect_details();
+
+ /// Process regex capture
+ void process_regex();
+
+
+ public:
+ Color color; ///< Color of caption text
+ Color stroke; ///< Color of text border / stroke
+ Color background; ///< Color of caption area background
+ Keyframe background_alpha; ///< Background color alpha
+ Keyframe background_corner; ///< Background corner radius
+ Keyframe background_padding; ///< Background padding
+ Keyframe stroke_width; ///< Width of text border / stroke
+ Keyframe font_size; ///< Font size in points
+ Keyframe font_alpha; ///< Font color alpha
+ Keyframe left; ///< Size of left bar
+ Keyframe top; ///< Size of top bar
+ Keyframe right; ///< Size of right bar
+ Keyframe fade_in; ///< Fade in per caption (# of seconds)
+ Keyframe fade_out; ///< Fade out per caption (# of seconds)
+ std::string font_name; ///< Font string
+
+ /// Blank constructor, useful when using Json to load the effect properties
+ Caption();
+
+ /// Default constructor, which takes a string of VTT/Subrip formatted caption data, 
and displays them over time. + /// + /// @param captions A string with VTT/Subrip format text captions + Caption(std::string captions); + + /// @brief This method is required for all derived classes of ClipBase, and returns a + /// new openshot::Frame object. All Clip keyframes and effects are resolved into + /// pixels. + /// + /// @returns A new openshot::Frame object + /// @param frame_number The frame number (starting at 1) of the clip or effect on the timeline. + std::shared_ptr GetFrame(int64_t frame_number) override { return GetFrame(std::shared_ptr (new Frame()), frame_number); } + + /// @brief This method is required for all derived classes of ClipBase, and returns a + /// modified openshot::Frame object + /// + /// The frame object is passed into this method and used as a starting point (pixels and audio). + /// All Clip keyframes and effects are resolved into pixels. + /// + /// @returns The modified openshot::Frame object + /// @param frame The frame object that needs the clip or effect applied to it + /// @param frame_number The frame number (starting at 1) of the clip or effect on the timeline. 
+ std::shared_ptr GetFrame(std::shared_ptr frame, int64_t frame_number) override;
+
+ // Get and Set caption data
+ std::string CaptionText(); ///< Get the caption string (see VTT format)
+ void CaptionText(std::string new_caption_text); ///< Set the caption string to use
+
+ /// Get and Set JSON methods
+ std::string Json() const override; ///< Generate JSON string of this object
+ void SetJson(const std::string value) override; ///< Load JSON string into this object
+ Json::Value JsonValue() const override; ///< Generate Json::Value for this object
+ void SetJsonValue(const Json::Value root) override; ///< Load Json::Value into this object
+
+ /// Get all properties for a specific frame (perfect for a UI to display the current state
+ /// of all properties at any time)
+ std::string PropertiesJSON(int64_t requested_frame) const override;
+ };
+
+}
+
+#endif diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 419b9750..ad1d3b9b 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -95,32 +95,34 @@ set(OPENSHOT_TEST_FILES Timeline_Tests.cpp) ########## SET OPENCV RELATED TEST FILES ############### -set(OPENSHOT_CV_TEST_FILES - CVTracker_Tests.cpp - CVStabilizer_Tests.cpp - # CVObjectDetection_Tests.cpp +if(OpenCV_FOUND) + set(OPENSHOT_CV_TEST_FILES + CVTracker_Tests.cpp + CVStabilizer_Tests.cpp + # CVObjectDetection_Tests.cpp + ) + set(OPENSHOT_CV_LIBRARIES + ${OpenCV_LIBS} + ${PROTOBUF_LIBRARY} ) +endif() ################ TESTER EXECUTABLE ################# # Create unit test executable (openshot-test) message (STATUS "Tests enabled, test executable will be built as tests/openshot-test") -if (OpenCV_FOUND) - add_executable(openshot-test - tests.cpp - ${OPENSHOT_TEST_FILES} - ${OPENSHOT_CV_TEST_FILES}) - - # Link libraries to the new executable - target_link_libraries(openshot-test openshot ${UnitTest++_LIBRARIES} ${OpenCV_LIBS} ${PROTOBUF_LIBRARY}) -else() - add_executable(openshot-test - tests.cpp - ${OPENSHOT_TEST_FILES} ) 
+add_executable(openshot-test + tests.cpp + ${OPENSHOT_TEST_FILES} + ${OPENSHOT_CV_TEST_FILES} + ) - # Link libraries to the new executable - target_link_libraries(openshot-test openshot ${UnitTest++_LIBRARIES}) -endif() +# Link libraries to the new executable +target_link_libraries(openshot-test + openshot + ${UnitTest++_LIBRARIES} + ${OPENSHOT_CV_LIBRARIES} + ) ##### RUNNING TESTS (make os_test / make test) ##### # Hook up the 'make os_test' target to the 'openshot-test' executable diff --git a/tests/CVStabilizer_Tests.cpp b/tests/CVStabilizer_Tests.cpp index 400f74ea..401884d7 100644 --- a/tests/CVStabilizer_Tests.cpp +++ b/tests/CVStabilizer_Tests.cpp @@ -54,8 +54,14 @@ SUITE(CVStabilizer_Tests) openshot::Clip c1(path.str()); c1.Open(); + std::string json_data = R"proto( + { + "protobuf_data_path": "stabilizer.data", + "smoothing-window": 30 + } )proto"; + // Create stabilizer - CVStabilization stabilizer("{\"protobuf_data_path\": \"stabilizer.data\", \"smoothing_window\": 30}", processingController); + CVStabilization stabilizer(json_data, processingController); // Stabilize clip for frames 0-21 stabilizer.stabilizeClip(c1, 0, 21, true); @@ -92,8 +98,14 @@ SUITE(CVStabilizer_Tests) openshot::Clip c1(path.str()); c1.Open(); + std::string json_data = R"proto( + { + "protobuf_data_path": "stabilizer.data", + "smoothing-window": 30 + } )proto"; + // Create first stabilizer - CVStabilization stabilizer_1("{\"protobuf_data_path\": \"stabilizer.data\", \"smoothing_window\": 30}", processingController); + CVStabilization stabilizer_1(json_data, processingController); // Stabilize clip for frames 0-20 stabilizer_1.stabilizeClip(c1, 0, 20+1, true); @@ -106,7 +118,7 @@ SUITE(CVStabilizer_Tests) stabilizer_1.SaveStabilizedData(); // Create second stabilizer - CVStabilization stabilizer_2("{\"protobuf_data_path\": \"stabilizer.data\", \"smoothing_window\": 30}", processingController); + CVStabilization stabilizer_2(json_data, processingController); // Load stabilized data 
from first stabilizer protobuf data stabilizer_2._LoadStabilizedData(); @@ -114,7 +126,7 @@ SUITE(CVStabilizer_Tests) // Get stabilized data TransformParam tp_2 = stabilizer_2.GetTransformParamData(20); CamTrajectory ct_2 = stabilizer_2.GetCamTrajectoryTrackedData(20); - + // Compare first stabilizer data with second stabilizer data CHECK_EQUAL((int) (tp_1.dx * 10000), (int) (tp_2.dx *10000)); CHECK_EQUAL((int) (tp_1.dy * 10000), (int) (tp_2.dy * 10000)); diff --git a/tests/CVTracker_Tests.cpp b/tests/CVTracker_Tests.cpp index 6fe6abf0..5e1bf709 100644 --- a/tests/CVTracker_Tests.cpp +++ b/tests/CVTracker_Tests.cpp @@ -54,20 +54,25 @@ SUITE(CVTracker_Tests) openshot::Clip c1(path.str()); c1.Open(); - // Create tracker - CVTracker kcfTracker("{\"protobuf_data_path\": \"\", \"tracker_type\": \"KCF\", \"bbox\": {\"x\": 294, \"y\": 102, \"w\": 180, \"h\": 166}}", processingController); + std::string json_data = R"proto( + { + "protobuf_data_path": "kcf_tracker.data", + "tracker-type": "KCF", + "region": {"x": 294, "y": 102, "width": 180, "height": 166, "first-frame": 0} + } )proto"; + // Create tracker + CVTracker kcfTracker(json_data, processingController); + // Track clip for frames 0-20 kcfTracker.trackClip(c1, 0, 20, true); - // Get tracked data FrameData fd = kcfTracker.GetTrackedData(20); - float x = fd.x1; float y = fd.y1; float width = fd.x2 - x; float height = fd.y2 - y; - + std::cout<<"\n\n Error: "<< processingController.GetErrorMessage() <<"\n"; // Compare if tracked data is equal to pre-tested ones CHECK_EQUAL(259, (int)(x * 640)); CHECK_EQUAL(131, (int)(y * 360)); @@ -87,8 +92,16 @@ SUITE(CVTracker_Tests) openshot::Clip c1(path.str()); c1.Open(); + std::string json_data = R"proto( + { + "protobuf_data_path": "kcf_tracker.data", + "tracker-type": "KCF", + "region": {"x": 294, "y": 102, "width": 180, "height": 166, "first-frame": 0} + } )proto"; + + // Create first tracker - CVTracker kcfTracker_1("{\"protobuf_data_path\": \"kcf_tracker.data\", 
\"tracker_type\": \"KCF\", \"bbox\": {\"x\": 294, \"y\": 102, \"w\": 180, \"h\": 166}}", processingController); + CVTracker kcfTracker_1(json_data, processingController); // Track clip for frames 0-20 kcfTracker_1.trackClip(c1, 0, 20, true); @@ -104,8 +117,15 @@ SUITE(CVTracker_Tests) // Save tracked data kcfTracker_1.SaveTrackedData(); + std::string proto_data_1 = R"proto( + { + "protobuf_data_path": "kcf_tracker.data", + "tracker_type": "", + "region": {"x": -1, "y": -1, "width": -1, "height": -1, "first-frame": 0} + } )proto"; + // Create second tracker - CVTracker kcfTracker_2("{\"protobuf_data_path\": \"kcf_tracker.data\", \"tracker_type\": \"\", \"bbox\": {\"x\": -1, \"y\": -1, \"w\": -1, \"h\": -1}}", processingController); + CVTracker kcfTracker_2(proto_data_1, processingController); // Load tracked data from first tracker protobuf data kcfTracker_2._LoadTrackedData(); @@ -117,7 +137,7 @@ SUITE(CVTracker_Tests) float y_2 = fd_2.y1; float width_2 = fd_2.x2 - x_2; float height_2 = fd_2.y2 - y_2; - + std::cout<<"\n\n Error: "<< processingController.GetErrorMessage() <<"\n"; // Compare first tracker data with second tracker data CHECK_EQUAL((int)(x_1 * 640), (int)(x_2 * 640)); CHECK_EQUAL((int)(y_1 * 360), (int)(y_2 * 360)); diff --git a/tests/Frame_Tests.cpp b/tests/Frame_Tests.cpp index c61d7f84..e5562523 100644 --- a/tests/Frame_Tests.cpp +++ b/tests/Frame_Tests.cpp @@ -157,7 +157,7 @@ TEST(Convert_Image) c1.Open(); // Get first frame - std::shared_ptr f1 = c1.GetFrame(1); + auto f1 = c1.GetFrame(1); // Get first Mat image cv::Mat cvimage = f1->GetImageCV();