improved the stabilize effect integration

This commit is contained in:
Brenno
2020-07-15 10:29:08 -03:00
parent 6d547650dc
commit d6e0a69c1f
13 changed files with 225 additions and 164 deletions

View File

@@ -38,8 +38,7 @@
#include <opencv2/core.hpp>
#undef uint64
#undef int64
// #include "CVStabilization.h"
#endif
#include <memory>
@@ -161,14 +160,6 @@ namespace openshot {
openshot::FrameDisplayType display; ///< The format to display the frame number (if any)
openshot::VolumeMixType mixing; ///< What strategy should be followed when mixing audio with other clips
// #ifdef USE_OPENCV
// /// Smoothed transformation for all the clip frames
// std::vector <TransformParam> new_prev_to_cur_transform;
// /// apply the smoothed transformation warp when retrieving a frame
// bool hasStabilization = false;
// void apply_stabilization(std::shared_ptr<openshot::Frame> f, int64_t frame_number);
// #endif
/// Default Constructor
Clip();
@@ -232,7 +223,7 @@ namespace openshot {
void Waveform(bool value) { waveform = value; } ///< Set the waveform property of this clip
/// Stabilize the clip using opencv and opticalflow
void stabilize_video();
//void stabilize_video();
// Scale, Location, and Alpha curves
openshot::Keyframe scale_x; ///< Curve representing the horizontal scaling in percent (0 to 1)

View File

@@ -42,8 +42,6 @@
#endif
#include "Clip.h"
#include "effects/Tracker.h"
#include "effects/Stabilizer.h"
using namespace openshot;
@@ -51,14 +49,21 @@ using namespace openshot;
class ClipProcessingJobs{
private:
// Apply object tracking to clip
void trackVideo(Clip& videoClip);
// Apply stabilization to clip
void stabilizeVideo(Clip& video);
int processingProgress;
bool processingDone = false;
bool stopProcessing = false;
public:
ClipProcessingJobs(std::string processingType, Clip& videoClip);
ClipProcessingJobs(std::string processingType, Clip& videoClip);
int GetProgress();
void CancelProcessing();
// Apply object tracking to clip
std::string trackVideo(Clip& videoClip);
// Apply stabilization to clip
std::string stabilizeVideo(Clip& videoClip);

View File

@@ -49,10 +49,7 @@ namespace openshot
public:
// Create an instance of an effect (factory style)
EffectBase* CreateEffect(std::string effect_type);
// Create an instance of an video effect (require processing the whole clip)
EffectBase* CreateEffect(std::string effect_type, Clip* video_clip);
EffectBase* CreateEffect(std::string effect_type, std::string pb_data_path);
/// JSON methods
static std::string Json(); ///< Generate JSON string of this object

View File

@@ -43,8 +43,10 @@
#include "effects/Mask.h"
#include "effects/Negate.h"
#include "effects/Pixelate.h"
#include "effects/Stabilizer.h"
#include "effects/Saturation.h"
#include "effects/Shift.h"
#include "effects/Tracker.h"
#include "effects/Wave.h"

View File

@@ -33,16 +33,47 @@
#include "../EffectBase.h"
#include <google/protobuf/util/time_util.h>
#include <cmath>
#include <stdio.h>
#include <memory>
#include "../Color.h"
#include "../Json.h"
#include "../KeyFrame.h"
#include "../CVStabilization.h"
#include "../Clip.h"
#include "../stabilizedata.pb.h"
using namespace std;
using google::protobuf::util::TimeUtil;
/// Frame-to-frame transformation (translation + rotation) applied by the
/// Stabilizer effect when warping a frame.
struct EffectTransformParam
{
    EffectTransformParam() {}
    EffectTransformParam(double _dx, double _dy, double _da) {
        dx = _dx;
        dy = _dy;
        da = _da;
    }

    // Default to the identity transform. Without these initializers the
    // user-provided default constructor leaves the doubles indeterminate,
    // so a map lookup on a missing frame (transformationData[frame_number])
    // would produce a garbage warp instead of a no-op.
    double dx = 0.0; ///< Horizontal translation
    double dy = 0.0; ///< Vertical translation
    double da = 0.0; ///< Rotation angle
};
/// Accumulated camera trajectory (position + rotation) for one frame,
/// loaded from the stabilization protobuf data.
struct EffectCamTrajectory
{
    EffectCamTrajectory() {}
    EffectCamTrajectory(double _x, double _y, double _a) {
        x = _x;
        y = _y;
        a = _a;
    }

    // Default to the origin. Without these initializers the user-provided
    // default constructor leaves the doubles indeterminate, so any
    // default-constructed entry would hold garbage values.
    double x = 0.0; ///< Horizontal position
    double y = 0.0; ///< Vertical position
    double a = 0.0; ///< Rotation angle
};
namespace openshot
{
@@ -58,11 +89,12 @@ namespace openshot
private:
/// Init effect settings
void init_effect_details();
std::string protobuf_data_path;
public:
std::map <size_t,CamTrajectory> trajectoryData; // Save camera trajectory data
std::map <size_t,TransformParam> transformationData; // Save transormation data
std::string teste;
std::map <size_t,EffectCamTrajectory> trajectoryData; // Save camera trajectory data
std::map <size_t,EffectTransformParam> transformationData; // Save transormation data
/// Blank constructor, useful when using Json to load the effect properties
Stabilizer(std::string clipTrackerDataPath);

View File

@@ -33,16 +33,48 @@
#include "../EffectBase.h"
#include <google/protobuf/util/time_util.h>
#include <cmath>
#include <fstream>
#include <stdio.h>
#include <memory>
#include "../Color.h"
#include "../Json.h"
#include "../KeyFrame.h"
#include "../CVTracker.h"
#include "../Clip.h"
#include "../trackerdata.pb.h"
using namespace std;
using google::protobuf::util::TimeUtil;
// Tracking info struct
// Tracking info struct: the bounding box and rotation of the tracked
// object on a single frame. A frame_id of -1 marks "no data".
struct EffectFrameData{
    int frame_id = -1;  // Frame this data belongs to (-1 = unset)
    float rotation = 0; // Rotation of the tracked box
    int x1 = -1;        // Top-left corner X
    int y1 = -1;        // Top-left corner Y
    int x2 = -1;        // Bottom-right corner X
    int y2 = -1;        // Bottom-right corner Y

    // Constructors
    EffectFrameData() = default;

    EffectFrameData(int _frame_id)
        : frame_id(_frame_id) {}

    EffectFrameData(int _frame_id, float _rotation, int _x1, int _y1, int _x2, int _y2)
        : frame_id(_frame_id),
          rotation(_rotation),
          x1(_x1),
          y1(_y1),
          x2(_x2),
          y2(_y2) {}
};
namespace openshot
{
@@ -61,7 +93,7 @@ namespace openshot
public:
std::map<int, FrameData> trackedDataById; // Save object tracking box data
std::map<int, EffectFrameData> trackedDataById; // Save object tracking box data
/// Blank constructor, useful when using Json to load the effect properties
Tracker(std::string clipTrackerDataPath);
@@ -84,7 +116,7 @@ namespace openshot
bool LoadTrackedData(std::string inputFilePath);
// Get tracker info for the desired frame
FrameData GetTrackedData(int frameId);
EffectFrameData GetTrackedData(int frameId);
/// Get and Set JSON methods
std::string Json() const override; ///< Generate JSON string of this object

View File

@@ -360,12 +360,6 @@ std::shared_ptr<Frame> Clip::GetFrame(int64_t requested_frame)
// Apply effects to the frame (if any)
apply_effects(frame);
// #ifdef USE_OPENCV
// if(hasStabilization){
// apply_stabilization(frame, requested_frame);
// }
// #endif
// Return processed 'frame'
return frame;
}
@@ -374,33 +368,6 @@ std::shared_ptr<Frame> Clip::GetFrame(int64_t requested_frame)
throw ReaderClosed("No Reader has been initialized for this Clip. Call Reader(*reader) before calling this method.");
}
// #ifdef USE_OPENCV
// void Clip::apply_stabilization(std::shared_ptr<openshot::Frame> f, int64_t frame_number){
// cv::Mat T(2,3,CV_64F);
// // Grab Mat image
// cv::Mat cur = f->GetImageCV();
// T.at<double>(0,0) = cos(new_prev_to_cur_transform[frame_number].da);
// T.at<double>(0,1) = -sin(new_prev_to_cur_transform[frame_number].da);
// T.at<double>(1,0) = sin(new_prev_to_cur_transform[frame_number].da);
// T.at<double>(1,1) = cos(new_prev_to_cur_transform[frame_number].da);
// T.at<double>(0,2) = new_prev_to_cur_transform[frame_number].dx;
// T.at<double>(1,2) = new_prev_to_cur_transform[frame_number].dy;
// cv::Mat frame_stabilized;
// cv::warpAffine(cur, frame_stabilized, T, cur.size());
// // Scale up the image to remove black borders
// cv::Mat T_scale = cv::getRotationMatrix2D(cv::Point2f(frame_stabilized.cols/2, frame_stabilized.rows/2), 0, 1.04);
// cv::warpAffine(frame_stabilized, frame_stabilized, T_scale, frame_stabilized.size());
// f->SetImageCV(frame_stabilized);
// }
// #endif
// Get file extension
std::string Clip::get_file_extension(std::string path)
{
@@ -945,16 +912,30 @@ void Clip::SetJsonValue(const Json::Value root) {
for (const auto existing_effect : root["effects"]) {
// Create Effect
EffectBase *e = NULL;
if (!existing_effect["type"].isNull()) {
// Create instance of effect
if ( (e = EffectInfo().CreateEffect(existing_effect["type"].asString())) ) {
// Load Json into Effect
e->SetJsonValue(existing_effect);
std::vector<std::string> pEffects{"Stabilizer", "Tracker"};
std::string effectName = existing_effect["type"].asString();
// Add Effect to Timeline
AddEffect(e);
if(std::find(pEffects.begin(), pEffects.end(), effectName) == pEffects.end()){
// Create instance of effect
if ( (e = EffectInfo().CreateEffect(effectName))) {
// Load Json into Effect
e->SetJsonValue(existing_effect);
// Add Effect to Timeline
AddEffect(e);
}
}
else{
if ( (e = EffectInfo().CreateEffect(effectName, existing_effect["protobuf_data_path"].asString()))) {
// Load Json into Effect
e->SetJsonValue(existing_effect);
// Add Effect to Timeline
AddEffect(e);
}
}
}
}
@@ -1075,45 +1056,3 @@ std::shared_ptr<Frame> Clip::apply_effects(std::shared_ptr<Frame> frame)
// Return modified frame
return frame;
}
// #ifdef USE_OPENCV
// void Clip::stabilize_video(){
// // create CVStabilization object
// CVStabilization stabilizer;
// // Make sure Clip is opened
// Open();
// // Get total number of frames
// int videoLenght = Reader()->info.video_length;
// // Get first Opencv image
// // std::shared_ptr<openshot::Frame> f = GetFrame(0);
// // cv::Mat prev = f->GetImageCV();
// // // OpticalFlow works with grayscale images
// // cv::cvtColor(prev, prev_grey, cv::COLOR_BGR2GRAY);
// // Extract and track opticalflow features for each frame
// for (long int frame_number = 0; frame_number <= videoLenght; frame_number++)
// {
// std::shared_ptr<openshot::Frame> f = GetFrame(frame_number);
// // Grab Mat image
// cv::Mat cvimage = f->GetImageCV();
// cv::cvtColor(cvimage, cvimage, cv::COLOR_RGB2GRAY);
// stabilizer.TrackFrameFeatures(cvimage, frame_number);
// }
// vector <CamTrajectory> trajectory = stabilizer.ComputeFramesTrajectory();
// vector <CamTrajectory> smoothed_trajectory = stabilizer.SmoothTrajectory(trajectory);
// // Get the smoothed trajectory
// new_prev_to_cur_transform = stabilizer.GenNewCamPosition(smoothed_trajectory);
// // Will apply the smoothed transformation warp when retrieving a frame
// hasStabilization = true;
// }
// #else
// void Clip::stabilize_video(){
// throw "Please compile libopenshot with OpenCV to use this feature";
// }
// #endif

View File

@@ -3,16 +3,19 @@
// Constructor responsible to choose processing type and apply to clip
ClipProcessingJobs::ClipProcessingJobs(std::string processingType, Clip& videoClip){
if(processingType == "Stabilize"){
stabilizeVideo(videoClip);
}
if(processingType == "Track")
trackVideo(videoClip);
// if(processingType == "Stabilize"){
// std::cout<<"Stabilize";
// stabilizeVideo(videoClip);
// }
// if(processingType == "Track"){
// std::cout<<"Track";
// trackVideo(videoClip);
// }
}
// Apply object tracking to clip
void ClipProcessingJobs::trackVideo(Clip& videoClip){
std::string ClipProcessingJobs::trackVideo(Clip& videoClip){
// Opencv display window
cv::namedWindow("Display Image", cv::WINDOW_NORMAL );
@@ -23,28 +26,32 @@ void ClipProcessingJobs::trackVideo(Clip& videoClip){
tracker.trackClip(videoClip);
// Save tracking data
tracker.SaveTrackedData("kcf_tracker.data");
tracker.SaveTrackedData("/media/brenno/Data/projects/openshot/kcf_tracker.data");
// Create new Tracker Effect
EffectBase* trackerEffect = new Tracker("kcf_tracker.data");
// Apply Tracker Effect to clip
videoClip.AddEffect(trackerEffect);
// Return path to protobuf saved data
return "/media/brenno/Data/projects/openshot/kcf_tracker.data";
}
// Apply stabilization to clip
void ClipProcessingJobs::stabilizeVideo(Clip& videoClip){
std::string ClipProcessingJobs::stabilizeVideo(Clip& videoClip){
// create CVStabilization object
CVStabilization stabilizer;
// Start stabilization process
stabilizer.ProcessClip(videoClip);
// Save stabilization data
stabilizer.SaveStabilizedData("stabilization.data");
stabilizer.SaveStabilizedData("/media/brenno/Data/projects/openshot/stabilization.data");
// Create new Stabilizer Effect
EffectBase* stabilizeEffect = new Stabilizer("stabilization.data");
// Apply Stabilizer Effect to clip
videoClip.AddEffect(stabilizeEffect);
// Return path to protobuf saved data
return "/media/brenno/Data/projects/openshot/stabilization.data";
}
}
// Return the last reported progress of the running processing job.
// NOTE(review): presumably a percentage (0-100) — confirm against the
// code that writes processingProgress, which is not visible in this chunk.
int ClipProcessingJobs::GetProgress(){
return processingProgress;
}
// Request cancellation of the running processing job by raising the
// stopProcessing flag; the worker is expected to poll this flag
// (the polling loop is not visible in this chunk).
void ClipProcessingJobs::CancelProcessing(){
stopProcessing = true;
}

View File

@@ -29,7 +29,6 @@
*/
#include "../include/EffectInfo.h"
// #include "../include/Clip.h"
using namespace openshot;
@@ -86,9 +85,24 @@ EffectBase* EffectInfo::CreateEffect(std::string effect_type) {
else if (effect_type == "Wave")
return new Wave();
else if(effect_type == "Stabilizer")
return new Stabilizer();
else if(effect_type == "Tracker")
return new Tracker();
return NULL;
}
// Create an instance of a processing effect (one that requires previously
// computed data) and point it at its protobuf data file.
// Returns NULL when effect_type is not a known processing effect.
EffectBase* EffectInfo::CreateEffect(std::string effect_type, std::string pb_data_path){
	if(effect_type == "Stabilizer")
		return new Stabilizer(pb_data_path);
	else if(effect_type == "Tracker")
		// Forward the protobuf path (the original default-constructed
		// Tracker here, silently discarding pb_data_path; Tracker has a
		// std::string constructor for exactly this purpose).
		return new Tracker(pb_data_path);

	// Unknown type: return NULL explicitly — flowing off the end of a
	// value-returning function is undefined behaviour.
	return NULL;
}
// Generate Json::Value for this object
Json::Value EffectInfo::JsonValue() {
@@ -110,6 +124,8 @@ Json::Value EffectInfo::JsonValue() {
root.append(Saturation().JsonInfo());
root.append(Shift().JsonInfo());
root.append(Wave().JsonInfo());
root.append(Stabilizer().JsonInfo());
root.append(Tracker().JsonInfo());
// return JsonValue
return root;

View File

@@ -953,7 +953,7 @@ cv::Mat Frame::GetImageCV()
}
std::shared_ptr<QImage> Frame::Mat2Qimage(cv::Mat img){
// cv::cvtColor(img, img, cv::COLOR_BGR2RGB);
cv::cvtColor(img, img, cv::COLOR_BGR2RGB);
std::shared_ptr<QImage> imgIn = std::shared_ptr<QImage>(new QImage((uchar*) img.data, img.cols, img.rows, img.step, QImage::Format_RGB888));
// Always convert to RGBA8888 (if different)
if (imgIn->format() != QImage::Format_RGBA8888)

View File

@@ -206,6 +206,7 @@
%include "Timeline.h"
%include "ZmqLogger.h"
%include "AudioDeviceInfo.h"
%include "ClipProcessingJobs.h"
#ifdef USE_IMAGEMAGICK
%include "ImageReader.h"
@@ -228,6 +229,8 @@
%include "effects/Saturation.h"
%include "effects/Shift.h"
%include "effects/Wave.h"
%include "effects/Stabilizer.h"
%include "effects/Tracker.h"
/* Wrap std templates (list, vector, etc...) */

View File

@@ -33,11 +33,10 @@
using namespace openshot;
/// Blank constructor, useful when using Json to load the effect properties
Stabilizer::Stabilizer(std::string clipStabilizedDataPath)
Stabilizer::Stabilizer(std::string clipStabilizedDataPath):protobuf_data_path(clipStabilizedDataPath)
{
// Init effect properties
init_effect_details();
// Tries to load the stabilization data from protobuf
LoadStabilizedData(clipStabilizedDataPath);
}
@@ -47,11 +46,13 @@ Stabilizer::Stabilizer()
{
// Init effect properties
init_effect_details();
// LoadStabilizedData("/home/gustavostahl/LabVisao/VideoEditor/openshot-qt/stabilization.data");
}
// Init effect settings
void Stabilizer::init_effect_details()
{
/// Initialize the values of the EffectInfo struct.
InitEffectInfo();
@@ -61,42 +62,54 @@ void Stabilizer::init_effect_details()
info.description = "Stabilize video clip to remove undesired shaking and jitter.";
info.has_audio = false;
info.has_video = true;
}
// This method is required for all derived classes of EffectBase, and returns a
// modified openshot::Frame object
std::shared_ptr<Frame> Stabilizer::GetFrame(std::shared_ptr<Frame> frame, int64_t frame_number)
{
// Create empty rotation matrix
cv::Mat T(2,3,CV_64F);
// Grab OpenCV Mat image
cv::Mat cur = frame->GetImageCV();
cv::Mat frame_image = frame->GetImageCV();
// Set rotation matrix values
T.at<double>(0,0) = cos(transformationData[frame_number].da);
T.at<double>(0,1) = -sin(transformationData[frame_number].da);
T.at<double>(1,0) = sin(transformationData[frame_number].da);
T.at<double>(1,1) = cos(transformationData[frame_number].da);
// If frame is NULL, return itself
if(!frame_image.empty()){
T.at<double>(0,2) = transformationData[frame_number].dx;
T.at<double>(1,2) = transformationData[frame_number].dy;
// Check if track data exists for the requested frame
if(transformationData.find(frame_number) != transformationData.end()){
// Create empty rotation matrix
cv::Mat T(2,3,CV_64F);
// Apply rotation matrix to image
cv::Mat frame_stabilized;
cv::warpAffine(cur, frame_stabilized, T, cur.size());
// Set rotation matrix values
T.at<double>(0,0) = cos(transformationData[frame_number].da);
T.at<double>(0,1) = -sin(transformationData[frame_number].da);
T.at<double>(1,0) = sin(transformationData[frame_number].da);
T.at<double>(1,1) = cos(transformationData[frame_number].da);
// Scale up the image to remove black borders
cv::Mat T_scale = cv::getRotationMatrix2D(cv::Point2f(frame_stabilized.cols/2, frame_stabilized.rows/2), 0, 1.04);
cv::warpAffine(frame_stabilized, frame_stabilized, T_scale, frame_stabilized.size());
T.at<double>(0,2) = transformationData[frame_number].dx;
T.at<double>(1,2) = transformationData[frame_number].dy;
// Apply rotation matrix to image
cv::Mat frame_stabilized;
cv::warpAffine(frame_image, frame_stabilized, T, frame_image.size());
// Scale up the image to remove black borders
cv::Mat T_scale = cv::getRotationMatrix2D(cv::Point2f(frame_stabilized.cols/2, frame_stabilized.rows/2), 0, 1.04);
cv::warpAffine(frame_stabilized, frame_stabilized, T_scale, frame_stabilized.size());
frame_image = frame_stabilized;
}
}
// Set stabilized image to frame
frame->SetImageCV(frame_stabilized);
// If the input image is NULL or doesn't have tracking data, it's returned as it came
frame->SetImageCV(frame_image);
return frame;
}
// Load protobuf data file
bool Stabilizer::LoadStabilizedData(std::string inputFilePath){
// Create stabilization message
libopenshotstabilize::Stabilization stabilizationMessage;
@@ -113,6 +126,7 @@ bool Stabilizer::LoadStabilizedData(std::string inputFilePath){
// Iterate over all frames of the saved message and assign to the data maps
for (size_t i = 0; i < stabilizationMessage.frame_size(); i++) {
// Create stabilization message
const libopenshotstabilize::Frame& pbFrameData = stabilizationMessage.frame(i);
@@ -125,7 +139,7 @@ bool Stabilizer::LoadStabilizedData(std::string inputFilePath){
float a = pbFrameData.a();
// Assign data to trajectory map
trajectoryData[i] = CamTrajectory(x,y,a);
trajectoryData[i] = EffectCamTrajectory(x,y,a);
// Load transformation data
float dx = pbFrameData.dx();
@@ -133,7 +147,8 @@ bool Stabilizer::LoadStabilizedData(std::string inputFilePath){
float da = pbFrameData.da();
// Assign data to transformation map
transformationData[i] = TransformParam(dx,dy,da);
transformationData[i] = EffectTransformParam(dx,dy,da);
std::cout<<x<<y<<a<<dx<<dy<<da<<std::endl;
}
// Show the time stamp from the last update in stabilization data file
@@ -162,6 +177,7 @@ Json::Value Stabilizer::JsonValue() const {
// Create root json object
Json::Value root = EffectBase::JsonValue(); // get parent properties
root["type"] = info.class_name;
root["protobuf_data_path"] = protobuf_data_path;
// return JsonValue
return root;
@@ -169,12 +185,13 @@ Json::Value Stabilizer::JsonValue() const {
// Load JSON string into this object
void Stabilizer::SetJson(const std::string value) {
std::cout<<value<<std::endl;
// Parse JSON string into JSON objects
try
{
const Json::Value root = openshot::stringToJson(value);
// Set all values that match
SetJsonValue(root);
}
catch (const std::exception& e)
@@ -189,6 +206,17 @@ void Stabilizer::SetJsonValue(const Json::Value root) {
// Set parent data
EffectBase::SetJsonValue(root);
// Set data from Json (if key is found)
if (!root["Stabilizer"]["protobuf_data_path"].isNull() && protobuf_data_path == ""){
protobuf_data_path = (root["Stabilizer"]["protobuf_data_path"].asString());
std::cout<<"AAAAAAAAAAAAAAAAAAAAAAAAA Abaixo"<<std::endl;
std::cout<<protobuf_data_path<<std::endl;
if(!LoadStabilizedData(protobuf_data_path)){
std::cout<<"Invalid protobuf data path";
protobuf_data_path = "";
}
}
}
// Get all properties for a specific frame

View File

@@ -71,12 +71,21 @@ std::shared_ptr<Frame> Tracker::GetFrame(std::shared_ptr<Frame> frame, int64_t f
// Get the frame's image
cv::Mat frame_image = frame->GetImageCV();
// Draw box on image
FrameData fd = trackedDataById[frame_number];
cv::Rect2d box(fd.x1, fd.y1, fd.x2-fd.x1, fd.y2-fd.y1);
cv::rectangle(frame_image, box, cv::Scalar( 255, 0, 0 ), 2, 1 );
// Check if frame isn't NULL
if(!frame_image.empty()){
// Check if track data exists for the requested frame
if (trackedDataById.find(frame_number) != trackedDataById.end()) {
// Draw box on image
EffectFrameData fd = trackedDataById[frame_number];
cv::Rect2d box(fd.x1, fd.y1, fd.x2-fd.x1, fd.y2-fd.y1);
cv::rectangle(frame_image, box, cv::Scalar( 255, 0, 0 ), 2, 1 );
}
}
// Set image with drawn box to frame
// If the input image is NULL or doesn't have tracking data, it's returned as it came
frame->SetImageCV(frame_image);
return frame;
@@ -115,7 +124,7 @@ bool Tracker::LoadTrackedData(std::string inputFilePath){
int y2 = box.y2();
// Assign data to tracker map
trackedDataById[id] = FrameData(id, rotation, x1, y1, x2, y2);
trackedDataById[id] = EffectFrameData(id, rotation, x1, y1, x2, y2);
}
// Show the time stamp from the last update in tracker data file
@@ -130,11 +139,11 @@ bool Tracker::LoadTrackedData(std::string inputFilePath){
}
// Get tracker info for the desired frame
FrameData Tracker::GetTrackedData(int frameId){
EffectFrameData Tracker::GetTrackedData(int frameId){
// Check if the tracker info for the requested frame exists
if ( trackedDataById.find(frameId) == trackedDataById.end() ) {
return FrameData();
return EffectFrameData();
} else {
return trackedDataById[frameId];
}