/**
 * @file
 * @brief Source file for CVStabilization class
 * @author Jonathan Thomas <jonathan@openshot.org>
 *
 * @ref License
 */

/* LICENSE
 *
 * Copyright (c) 2008-2019 OpenShot Studios, LLC
 * <http://www.openshotstudios.com/>. This file is part of
 * OpenShot Library (libopenshot), an open-source project dedicated to
 * delivering high quality video editing and animation solutions to the
 * world. For more information visit <http://www.openshot.org/>.
 *
 * OpenShot Library (libopenshot) is free software: you can redistribute it
 * and/or modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * OpenShot Library (libopenshot) is distributed in the hope that it will be
 * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with OpenShot Library. If not, see <http://www.gnu.org/licenses/>.
 */
2020-07-02 19:09:04 -03:00
|
|
|
#include "../include/CVStabilization.h"
|
|
|
|
|
|
2020-07-09 20:26:01 -03:00
|
|
|
// Set default smoothing window value to compute stabilization
|
2020-07-16 21:10:02 -03:00
|
|
|
CVStabilization::CVStabilization(std::string processInfoJson, ProcessingController &processingController)
|
2020-07-21 11:12:06 -03:00
|
|
|
: processingController(&processingController){
|
2020-07-16 21:10:02 -03:00
|
|
|
SetJson(processInfoJson);
|
|
|
|
|
}
|
2020-07-29 18:50:11 -03:00
|
|
|
// NOTE(review): debug-only file-scope accumulators used by TrackFrameFeatures()
// to collect running sums ("media*" = mean, Portuguese) and maxima ("maior*" =
// largest) of the per-frame transform components and optical-flow status counts.
// They have external linkage and persist for the whole process — presumably
// temporary instrumentation; confirm before shipping (consider removing or
// making them static).
double mediax=0, mediay=0, mediaa=0, mediastatus=0, maiora = 0, maiorx = 0, maiory = 0;
// Largest optical-flow status vector seen in a single frame.
// NOTE(review): compared against status.size() (size_t) in TrackFrameFeatures,
// which is a signed/unsigned comparison — consider size_t.
int maiorstatus=0;
|
2020-07-02 19:09:04 -03:00
|
|
|
|
2020-07-09 20:26:01 -03:00
|
|
|
// Process clip and store necessary stabilization data
|
2020-07-21 21:42:09 -03:00
|
|
|
void CVStabilization::stabilizeClip(openshot::Clip& video, size_t _start, size_t _end, bool process_interval){
|
|
|
|
|
|
|
|
|
|
start = _start; end = _end;
|
|
|
|
|
|
|
|
|
|
video.Open();
|
2020-07-21 11:12:06 -03:00
|
|
|
|
2020-07-16 21:10:02 -03:00
|
|
|
size_t frame_number;
|
|
|
|
|
if(!process_interval || end == 0 || end-start <= 0){
|
|
|
|
|
// Get total number of frames in video
|
2020-07-21 21:42:09 -03:00
|
|
|
start = video.Start() * video.Reader()->info.fps.ToInt();
|
|
|
|
|
end = video.End() * video.Reader()->info.fps.ToInt();
|
2020-07-16 21:10:02 -03:00
|
|
|
}
|
2020-07-02 19:09:04 -03:00
|
|
|
|
2020-07-08 22:31:26 -03:00
|
|
|
// Extract and track opticalflow features for each frame
|
2020-07-22 14:05:19 -03:00
|
|
|
for (frame_number = start; frame_number <= end; frame_number++)
|
2020-07-08 22:31:26 -03:00
|
|
|
{
|
2020-07-16 21:10:02 -03:00
|
|
|
// Stop the feature tracker process
|
|
|
|
|
if(processingController->ShouldStop()){
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
2020-07-08 22:31:26 -03:00
|
|
|
std::shared_ptr<openshot::Frame> f = video.GetFrame(frame_number);
|
2020-07-02 19:09:04 -03:00
|
|
|
|
2020-07-09 20:26:01 -03:00
|
|
|
// Grab OpenCV Mat image
|
2020-07-08 22:31:26 -03:00
|
|
|
cv::Mat cvimage = f->GetImageCV();
|
|
|
|
|
cv::cvtColor(cvimage, cvimage, cv::COLOR_RGB2GRAY);
|
2020-07-21 21:42:09 -03:00
|
|
|
|
2020-07-29 18:50:11 -03:00
|
|
|
if(! TrackFrameFeatures(cvimage, frame_number))
|
|
|
|
|
prev_to_cur_transform.push_back(TransformParam(0, 0, 0));
|
2020-07-16 21:10:02 -03:00
|
|
|
|
|
|
|
|
// Update progress
|
2020-07-22 14:05:19 -03:00
|
|
|
processingController->SetProgress(uint(100*(frame_number-start)/(end-start)));
|
2020-07-08 22:31:26 -03:00
|
|
|
}
|
2020-07-02 19:09:04 -03:00
|
|
|
|
2020-07-09 20:26:01 -03:00
|
|
|
// Calculate trajectory data
|
|
|
|
|
std::vector <CamTrajectory> trajectory = ComputeFramesTrajectory();
|
2020-07-02 19:09:04 -03:00
|
|
|
|
2020-07-09 20:26:01 -03:00
|
|
|
// Calculate and save smoothed trajectory data
|
2020-07-08 22:31:26 -03:00
|
|
|
trajectoryData = SmoothTrajectory(trajectory);
|
2020-07-02 19:09:04 -03:00
|
|
|
|
2020-07-09 20:26:01 -03:00
|
|
|
// Calculate and save transformation data
|
2020-07-08 22:31:26 -03:00
|
|
|
transformationData = GenNewCamPosition(trajectoryData);
|
|
|
|
|
}
|
2020-07-02 19:09:04 -03:00
|
|
|
|
|
|
|
|
// Track current frame features and find the relative transformation
|
2020-07-29 18:50:11 -03:00
|
|
|
bool CVStabilization::TrackFrameFeatures(cv::Mat frame, size_t frameNum){
|
2020-07-28 21:01:57 -03:00
|
|
|
std::cout<<"frame "<<frameNum<<"\n";
|
|
|
|
|
if(cv::countNonZero(frame) < 1){
|
2020-07-29 18:50:11 -03:00
|
|
|
// last_T = cv::Mat();
|
|
|
|
|
// prev_grey = cv::Mat();
|
|
|
|
|
return false;
|
2020-07-28 21:01:57 -03:00
|
|
|
}
|
2020-07-21 21:42:09 -03:00
|
|
|
|
2020-07-05 16:31:37 -03:00
|
|
|
if(prev_grey.empty()){
|
|
|
|
|
prev_grey = frame;
|
2020-07-29 18:50:11 -03:00
|
|
|
return false;
|
2020-07-05 16:31:37 -03:00
|
|
|
}
|
2020-07-02 19:09:04 -03:00
|
|
|
|
|
|
|
|
// OpticalFlow features vector
|
2020-07-09 20:26:01 -03:00
|
|
|
std::vector <cv::Point2f> prev_corner, cur_corner;
|
|
|
|
|
std::vector <cv::Point2f> prev_corner2, cur_corner2;
|
|
|
|
|
std::vector <uchar> status;
|
|
|
|
|
std::vector <float> err;
|
|
|
|
|
// Extract new image features
|
2020-07-29 18:50:11 -03:00
|
|
|
cv::goodFeaturesToTrack(prev_grey, prev_corner, 200, 0.01, 15);
|
2020-07-02 19:09:04 -03:00
|
|
|
// Track features
|
|
|
|
|
cv::calcOpticalFlowPyrLK(prev_grey, frame, prev_corner, cur_corner, status, err);
|
|
|
|
|
// Remove untracked features
|
2020-07-29 18:50:11 -03:00
|
|
|
mediastatus+=status.size();
|
|
|
|
|
if(status.size() > maiorstatus)
|
|
|
|
|
maiorstatus = status.size();
|
|
|
|
|
|
2020-07-02 19:09:04 -03:00
|
|
|
for(size_t i=0; i < status.size(); i++) {
|
|
|
|
|
if(status[i]) {
|
|
|
|
|
prev_corner2.push_back(prev_corner[i]);
|
|
|
|
|
cur_corner2.push_back(cur_corner[i]);
|
|
|
|
|
}
|
|
|
|
|
}
|
2020-07-28 21:01:57 -03:00
|
|
|
// In case no feature was detected
|
|
|
|
|
if(prev_corner2.empty() || cur_corner2.empty()){
|
|
|
|
|
last_T = cv::Mat();
|
|
|
|
|
prev_grey = cv::Mat();
|
2020-07-29 18:50:11 -03:00
|
|
|
return false;
|
2020-07-28 21:01:57 -03:00
|
|
|
}
|
2020-07-02 19:09:04 -03:00
|
|
|
|
2020-07-28 21:01:57 -03:00
|
|
|
// Translation + rotation only
|
2020-07-29 18:50:11 -03:00
|
|
|
cv::Mat T = cv::estimateAffinePartial2D(prev_corner2, cur_corner2); // false = rigid transform, no scaling/shearing
|
2020-07-28 21:01:57 -03:00
|
|
|
|
|
|
|
|
double da, dx, dy;
|
|
|
|
|
if(T.size().width == 0 || T.size().height == 0){
|
2020-07-29 18:50:11 -03:00
|
|
|
return false;
|
2020-07-28 21:01:57 -03:00
|
|
|
}
|
|
|
|
|
else{
|
|
|
|
|
// If no transformation is found, just use the last known good transform.
|
|
|
|
|
if(T.data == NULL && !last_T.empty())
|
|
|
|
|
last_T.copyTo(T);
|
|
|
|
|
// Decompose T
|
|
|
|
|
dx = T.at<double>(0,2);
|
|
|
|
|
dy = T.at<double>(1,2);
|
|
|
|
|
da = atan2(T.at<double>(1,0), T.at<double>(0,0));
|
2020-07-02 19:09:04 -03:00
|
|
|
}
|
|
|
|
|
|
2020-07-29 18:50:11 -03:00
|
|
|
if(dx > 100 || dy > 100 || da > 0.1){
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
mediax+=fabs(dx);
|
|
|
|
|
mediay+=fabs(dy);
|
|
|
|
|
mediaa+=fabs(da);
|
|
|
|
|
|
|
|
|
|
if(fabs(dx) > maiorx)
|
|
|
|
|
maiorx = dx;
|
|
|
|
|
if(fabs(dy) > maiory)
|
|
|
|
|
maiory = dy;
|
|
|
|
|
if(fabs(da) > maiora)
|
|
|
|
|
maiora = da;
|
|
|
|
|
|
|
|
|
|
std::cout<<dx<<" "<<dy<<" "<<da<<"\n";
|
|
|
|
|
|
2020-07-02 19:09:04 -03:00
|
|
|
T.copyTo(last_T);
|
|
|
|
|
|
|
|
|
|
prev_to_cur_transform.push_back(TransformParam(dx, dy, da));
|
|
|
|
|
frame.copyTo(prev_grey);
|
|
|
|
|
|
2020-07-09 20:26:01 -03:00
|
|
|
// Show processing info
|
2020-07-02 19:09:04 -03:00
|
|
|
cout << "Frame: " << frameNum << " - good optical flow: " << prev_corner2.size() << endl;
|
2020-07-21 21:42:09 -03:00
|
|
|
|
2020-07-02 19:09:04 -03:00
|
|
|
}
|
|
|
|
|
|
2020-07-09 20:26:01 -03:00
|
|
|
std::vector<CamTrajectory> CVStabilization::ComputeFramesTrajectory(){
|
2020-07-02 19:09:04 -03:00
|
|
|
|
|
|
|
|
// Accumulated frame to frame transform
|
|
|
|
|
double a = 0;
|
|
|
|
|
double x = 0;
|
|
|
|
|
double y = 0;
|
|
|
|
|
|
|
|
|
|
vector <CamTrajectory> trajectory; // trajectory at all frames
|
|
|
|
|
|
|
|
|
|
// Compute global camera trajectory. First frame is the origin
|
|
|
|
|
for(size_t i=0; i < prev_to_cur_transform.size(); i++) {
|
|
|
|
|
x += prev_to_cur_transform[i].dx;
|
|
|
|
|
y += prev_to_cur_transform[i].dy;
|
|
|
|
|
a += prev_to_cur_transform[i].da;
|
|
|
|
|
|
2020-07-09 20:26:01 -03:00
|
|
|
// Save trajectory data to vector
|
2020-07-02 19:09:04 -03:00
|
|
|
trajectory.push_back(CamTrajectory(x,y,a));
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return trajectory;
|
|
|
|
|
}
|
|
|
|
|
|
2020-07-09 20:26:01 -03:00
|
|
|
std::map<size_t,CamTrajectory> CVStabilization::SmoothTrajectory(std::vector <CamTrajectory> &trajectory){
|
2020-07-02 19:09:04 -03:00
|
|
|
|
2020-07-09 20:26:01 -03:00
|
|
|
std::map <size_t,CamTrajectory> smoothed_trajectory; // trajectory at all frames
|
2020-07-02 19:09:04 -03:00
|
|
|
|
|
|
|
|
for(size_t i=0; i < trajectory.size(); i++) {
|
|
|
|
|
double sum_x = 0;
|
|
|
|
|
double sum_y = 0;
|
|
|
|
|
double sum_a = 0;
|
|
|
|
|
int count = 0;
|
|
|
|
|
|
2020-07-05 16:31:37 -03:00
|
|
|
for(int j=-smoothingWindow; j <= smoothingWindow; j++) {
|
2020-07-02 19:09:04 -03:00
|
|
|
if(i+j >= 0 && i+j < trajectory.size()) {
|
|
|
|
|
sum_x += trajectory[i+j].x;
|
|
|
|
|
sum_y += trajectory[i+j].y;
|
|
|
|
|
sum_a += trajectory[i+j].a;
|
|
|
|
|
|
|
|
|
|
count++;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
double avg_a = sum_a / count;
|
|
|
|
|
double avg_x = sum_x / count;
|
|
|
|
|
double avg_y = sum_y / count;
|
|
|
|
|
|
2020-07-09 20:26:01 -03:00
|
|
|
// Add smoothed trajectory data to map
|
2020-07-21 21:42:09 -03:00
|
|
|
smoothed_trajectory[i + start] = CamTrajectory(avg_x, avg_y, avg_a);
|
2020-07-02 19:09:04 -03:00
|
|
|
}
|
|
|
|
|
return smoothed_trajectory;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Generate new transformations parameters for each frame to follow the smoothed trajectory
|
2020-07-09 20:26:01 -03:00
|
|
|
std::map<size_t,TransformParam> CVStabilization::GenNewCamPosition(std::map <size_t,CamTrajectory> &smoothed_trajectory){
|
|
|
|
|
std::map <size_t,TransformParam> new_prev_to_cur_transform;
|
2020-07-02 19:09:04 -03:00
|
|
|
|
|
|
|
|
// Accumulated frame to frame transform
|
|
|
|
|
double a = 0;
|
|
|
|
|
double x = 0;
|
|
|
|
|
double y = 0;
|
|
|
|
|
|
|
|
|
|
for(size_t i=0; i < prev_to_cur_transform.size(); i++) {
|
|
|
|
|
x += prev_to_cur_transform[i].dx;
|
|
|
|
|
y += prev_to_cur_transform[i].dy;
|
|
|
|
|
a += prev_to_cur_transform[i].da;
|
|
|
|
|
|
|
|
|
|
// target - current
|
2020-07-21 21:42:09 -03:00
|
|
|
double diff_x = smoothed_trajectory[i + start].x - x;
|
|
|
|
|
double diff_y = smoothed_trajectory[i + start].y - y;
|
|
|
|
|
double diff_a = smoothed_trajectory[i + start].a - a;
|
2020-07-02 19:09:04 -03:00
|
|
|
|
|
|
|
|
double dx = prev_to_cur_transform[i].dx + diff_x;
|
|
|
|
|
double dy = prev_to_cur_transform[i].dy + diff_y;
|
|
|
|
|
double da = prev_to_cur_transform[i].da + diff_a;
|
|
|
|
|
|
2020-07-09 20:26:01 -03:00
|
|
|
// Add transformation data to map
|
2020-07-21 21:42:09 -03:00
|
|
|
new_prev_to_cur_transform[i + start] = TransformParam(dx, dy, da);
|
2020-07-02 19:09:04 -03:00
|
|
|
}
|
|
|
|
|
return new_prev_to_cur_transform;
|
|
|
|
|
}
|
|
|
|
|
|
2020-07-09 20:26:01 -03:00
|
|
|
|
|
|
|
|
// Save stabilization data to protobuf file
|
2020-07-16 21:10:02 -03:00
|
|
|
bool CVStabilization::SaveStabilizedData(){
|
2020-07-09 20:26:01 -03:00
|
|
|
// Create stabilization message
|
2020-07-08 22:31:26 -03:00
|
|
|
libopenshotstabilize::Stabilization stabilizationMessage;
|
|
|
|
|
|
2020-07-09 20:26:01 -03:00
|
|
|
std::map<size_t,CamTrajectory>::iterator trajData = trajectoryData.begin();
|
|
|
|
|
std::map<size_t,TransformParam>::iterator transData = transformationData.begin();
|
2020-07-08 22:31:26 -03:00
|
|
|
|
2020-07-09 20:26:01 -03:00
|
|
|
// Iterate over all frames data and save in protobuf message
|
|
|
|
|
for(; trajData != trajectoryData.end(); ++trajData, ++transData){
|
|
|
|
|
AddFrameDataToProto(stabilizationMessage.add_frame(), trajData->second, transData->second, trajData->first);
|
2020-07-08 22:31:26 -03:00
|
|
|
}
|
|
|
|
|
// Add timestamp
|
|
|
|
|
*stabilizationMessage.mutable_last_updated() = TimeUtil::SecondsToTimestamp(time(NULL));
|
|
|
|
|
|
|
|
|
|
// Write the new message to disk.
|
2020-07-16 21:10:02 -03:00
|
|
|
std::fstream output(protobuf_data_path, ios::out | ios::trunc | ios::binary);
|
2020-07-08 22:31:26 -03:00
|
|
|
if (!stabilizationMessage.SerializeToOstream(&output)) {
|
|
|
|
|
cerr << "Failed to write protobuf message." << endl;
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Delete all global objects allocated by libprotobuf.
|
|
|
|
|
google::protobuf::ShutdownProtobufLibrary();
|
|
|
|
|
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
2020-07-09 20:26:01 -03:00
|
|
|
// Add frame stabilization data into protobuf message
|
|
|
|
|
void CVStabilization::AddFrameDataToProto(libopenshotstabilize::Frame* pbFrameData, CamTrajectory& trajData, TransformParam& transData, size_t frame_number){
|
2020-07-08 22:31:26 -03:00
|
|
|
|
|
|
|
|
// Save frame number
|
|
|
|
|
pbFrameData->set_id(frame_number);
|
|
|
|
|
|
|
|
|
|
// Save camera trajectory data
|
|
|
|
|
pbFrameData->set_a(trajData.a);
|
|
|
|
|
pbFrameData->set_x(trajData.x);
|
|
|
|
|
pbFrameData->set_y(trajData.y);
|
|
|
|
|
|
|
|
|
|
// Save transformation data
|
|
|
|
|
pbFrameData->set_da(transData.da);
|
|
|
|
|
pbFrameData->set_dx(transData.dx);
|
|
|
|
|
pbFrameData->set_dy(transData.dy);
|
|
|
|
|
}
|
|
|
|
|
|
2020-07-09 20:26:01 -03:00
|
|
|
// Load protobuf data file
|
2020-07-16 21:10:02 -03:00
|
|
|
bool CVStabilization::LoadStabilizedData(){
|
2020-07-09 20:26:01 -03:00
|
|
|
// Create stabilization message
|
2020-07-08 22:31:26 -03:00
|
|
|
libopenshotstabilize::Stabilization stabilizationMessage;
|
|
|
|
|
// Read the existing tracker message.
|
2020-07-16 21:10:02 -03:00
|
|
|
fstream input(protobuf_data_path, ios::in | ios::binary);
|
2020-07-08 22:31:26 -03:00
|
|
|
if (!stabilizationMessage.ParseFromIstream(&input)) {
|
|
|
|
|
cerr << "Failed to parse protobuf message." << endl;
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
2020-07-09 20:26:01 -03:00
|
|
|
// Make sure the data maps are empty
|
2020-07-08 22:31:26 -03:00
|
|
|
transformationData.clear();
|
|
|
|
|
trajectoryData.clear();
|
|
|
|
|
|
2020-07-09 20:26:01 -03:00
|
|
|
// Iterate over all frames of the saved message and assign to the data maps
|
|
|
|
|
for (size_t i = 0; i < stabilizationMessage.frame_size(); i++) {
|
2020-07-08 22:31:26 -03:00
|
|
|
const libopenshotstabilize::Frame& pbFrameData = stabilizationMessage.frame(i);
|
2020-07-09 20:26:01 -03:00
|
|
|
|
|
|
|
|
// Load frame number
|
2020-07-16 21:10:02 -03:00
|
|
|
size_t id = pbFrameData.id();
|
2020-07-08 22:31:26 -03:00
|
|
|
|
2020-07-09 20:26:01 -03:00
|
|
|
// Load camera trajectory data
|
2020-07-08 22:31:26 -03:00
|
|
|
float x = pbFrameData.x();
|
|
|
|
|
float y = pbFrameData.y();
|
|
|
|
|
float a = pbFrameData.a();
|
|
|
|
|
|
2020-07-09 20:26:01 -03:00
|
|
|
// Assign data to trajectory map
|
2020-07-21 21:42:09 -03:00
|
|
|
trajectoryData[id] = CamTrajectory(x,y,a);
|
2020-07-08 22:31:26 -03:00
|
|
|
|
2020-07-09 20:26:01 -03:00
|
|
|
// Load transformation data
|
2020-07-08 22:31:26 -03:00
|
|
|
float dx = pbFrameData.dx();
|
|
|
|
|
float dy = pbFrameData.dy();
|
|
|
|
|
float da = pbFrameData.da();
|
|
|
|
|
|
2020-07-09 20:26:01 -03:00
|
|
|
// Assing data to transformation map
|
2020-07-21 21:42:09 -03:00
|
|
|
transformationData[id] = TransformParam(dx,dy,da);
|
2020-07-08 22:31:26 -03:00
|
|
|
}
|
|
|
|
|
|
2020-07-09 20:26:01 -03:00
|
|
|
// Show the time stamp from the last update in stabilization data file
|
2020-07-08 22:31:26 -03:00
|
|
|
if (stabilizationMessage.has_last_updated()) {
|
|
|
|
|
cout << " Loaded Data. Saved Time Stamp: " << TimeUtil::ToString(stabilizationMessage.last_updated()) << endl;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Delete all global objects allocated by libprotobuf.
|
|
|
|
|
google::protobuf::ShutdownProtobufLibrary();
|
|
|
|
|
|
|
|
|
|
return true;
|
2020-07-16 21:10:02 -03:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Return the transform parameters stored for a given frame, or a
// default-constructed TransformParam when no data exists for that frame.
TransformParam CVStabilization::GetTransformParamData(size_t frameId){
    // Check if the stabilizer info for the requested frame exists
    auto it = transformationData.find(frameId);
    if (it == transformationData.end()) {
        // No data for this frame
        return TransformParam();
    }
    return it->second;
}
|
|
|
|
|
|
|
|
|
|
// Return the smoothed trajectory point stored for a given frame, or a
// default-constructed CamTrajectory when no data exists for that frame.
CamTrajectory CVStabilization::GetCamTrajectoryTrackedData(size_t frameId){
    // Check if the stabilizer info for the requested frame exists
    auto it = trajectoryData.find(frameId);
    if (it == trajectoryData.end()) {
        // No data for this frame
        return CamTrajectory();
    }
    return it->second;
}
|
|
|
|
|
|
|
|
|
|
// Load JSON string into this object
|
|
|
|
|
void CVStabilization::SetJson(const std::string value) {
|
|
|
|
|
// Parse JSON string into JSON objects
|
|
|
|
|
try
|
|
|
|
|
{
|
|
|
|
|
const Json::Value root = openshot::stringToJson(value);
|
|
|
|
|
// Set all values that match
|
|
|
|
|
|
|
|
|
|
SetJsonValue(root);
|
|
|
|
|
}
|
|
|
|
|
catch (const std::exception& e)
|
|
|
|
|
{
|
|
|
|
|
// Error parsing JSON (or missing keys)
|
2020-07-21 21:42:09 -03:00
|
|
|
throw openshot::InvalidJSON("JSON is invalid (missing keys or invalid data types)");
|
2020-07-16 21:10:02 -03:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Load Json::Value into this object
|
|
|
|
|
void CVStabilization::SetJsonValue(const Json::Value root) {
|
|
|
|
|
|
|
|
|
|
// Set data from Json (if key is found)
|
|
|
|
|
if (!root["protobuf_data_path"].isNull()){
|
|
|
|
|
protobuf_data_path = (root["protobuf_data_path"].asString());
|
|
|
|
|
}
|
2020-07-21 11:12:06 -03:00
|
|
|
if (!root["smoothing_window"].isNull()){
|
|
|
|
|
smoothingWindow = (root["smoothing_window"].asInt());
|
|
|
|
|
}
|
2020-07-08 22:31:26 -03:00
|
|
|
}
|