Fixed Stabilizer and created Object Detector tests

This commit is contained in:
Brenno
2020-07-30 21:39:20 -03:00
parent 58d2e8fe07
commit 6ca35bbf9b
13 changed files with 351 additions and 219 deletions

View File

@@ -35,13 +35,13 @@ CVStabilization::CVStabilization(std::string processInfoJson, ProcessingControll
: processingController(&processingController){
SetJson(processInfoJson);
}
double mediax=0, mediay=0, mediaa=0, mediastatus=0, maiora = 0, maiorx = 0, maiory = 0;
int maiorstatus=0;
// Process clip and store necessary stabilization data
void CVStabilization::stabilizeClip(openshot::Clip& video, size_t _start, size_t _end, bool process_interval){
start = _start; end = _end;
// Compute max and average transformation parameters
avr_dx=0; avr_dy=0; avr_da=0; max_dx=0; max_dy=0; max_da=0;
video.Open();
@@ -66,12 +66,16 @@ void CVStabilization::stabilizeClip(openshot::Clip& video, size_t _start, size_t
cv::Mat cvimage = f->GetImageCV();
cv::cvtColor(cvimage, cvimage, cv::COLOR_RGB2GRAY);
if(! TrackFrameFeatures(cvimage, frame_number))
if(!TrackFrameFeatures(cvimage, frame_number)){
prev_to_cur_transform.push_back(TransformParam(0, 0, 0));
}
// Update progress
processingController->SetProgress(uint(100*(frame_number-start)/(end-start)));
}
// Show average and max transformation parameters
std::cout<<"\nAVERAGE DX: "<<avr_dx/(frame_number-1)<<" AVERAGE DY: "<<avr_dy/(frame_number-1)<<" AVERAGE A: "<<avr_da/(frame_number-1)<<"\n";
std::cout<<"MAX X: "<<max_dx<<" MAX Y: "<<max_dy<<" MAX A: "<<max_da<<"\n\n";
// Calculate trajectory data
std::vector <CamTrajectory> trajectory = ComputeFramesTrajectory();
@@ -85,16 +89,15 @@ void CVStabilization::stabilizeClip(openshot::Clip& video, size_t _start, size_t
// Track current frame features and find the relative transformation
bool CVStabilization::TrackFrameFeatures(cv::Mat frame, size_t frameNum){
std::cout<<"frame "<<frameNum<<"\n";
// Check if there are black frames
if(cv::countNonZero(frame) < 1){
// last_T = cv::Mat();
// prev_grey = cv::Mat();
return false;
}
// Initialize prev_grey if not
if(prev_grey.empty()){
prev_grey = frame;
return false;
return true;
}
// OpticalFlow features vector
@@ -103,14 +106,10 @@ bool CVStabilization::TrackFrameFeatures(cv::Mat frame, size_t frameNum){
std::vector <uchar> status;
std::vector <float> err;
// Extract new image features
cv::goodFeaturesToTrack(prev_grey, prev_corner, 200, 0.01, 15);
cv::goodFeaturesToTrack(prev_grey, prev_corner, 200, 0.01, 30);
// Track features
cv::calcOpticalFlowPyrLK(prev_grey, frame, prev_corner, cur_corner, status, err);
// Remove untracked features
mediastatus+=status.size();
if(status.size() > maiorstatus)
maiorstatus = status.size();
for(size_t i=0; i < status.size(); i++) {
if(status[i]) {
prev_corner2.push_back(prev_corner[i]);
@@ -120,7 +119,7 @@ bool CVStabilization::TrackFrameFeatures(cv::Mat frame, size_t frameNum){
// In case no feature was detected
if(prev_corner2.empty() || cur_corner2.empty()){
last_T = cv::Mat();
prev_grey = cv::Mat();
// prev_grey = cv::Mat();
return false;
}
@@ -128,36 +127,40 @@ bool CVStabilization::TrackFrameFeatures(cv::Mat frame, size_t frameNum){
cv::Mat T = cv::estimateAffinePartial2D(prev_corner2, cur_corner2); // false = rigid transform, no scaling/shearing
double da, dx, dy;
// If T has nothing inside return (probably a segment where there is nothing to stabilize)
if(T.size().width == 0 || T.size().height == 0){
return false;
}
else{
// If no transformation is found, just use the last known good transform.
if(T.data == NULL && !last_T.empty())
last_T.copyTo(T);
// If no transformation is found, just use the last known good transform
if(T.data == NULL){
if(!last_T.empty())
last_T.copyTo(T);
else
return false;
}
// Decompose T
dx = T.at<double>(0,2);
dy = T.at<double>(1,2);
da = atan2(T.at<double>(1,0), T.at<double>(0,0));
}
if(dx > 100 || dy > 100 || da > 0.1){
// Filter transformations parameters, if they are higher than these: return
if(dx > 200 || dy > 200 || da > 0.1){
return false;
}
mediax+=fabs(dx);
mediay+=fabs(dy);
mediaa+=fabs(da);
if(fabs(dx) > maiorx)
maiorx = dx;
if(fabs(dy) > maiory)
maiory = dy;
if(fabs(da) > maiora)
maiora = da;
std::cout<<dx<<" "<<dy<<" "<<da<<"\n";
// Keep computing average and max transformation parameters
avr_dx+=fabs(dx);
avr_dy+=fabs(dy);
avr_da+=fabs(da);
if(fabs(dx) > max_dx)
max_dx = dx;
if(fabs(dy) > max_dy)
max_dy = dy;
if(fabs(da) > max_da)
max_da = da;
T.copyTo(last_T);
prev_to_cur_transform.push_back(TransformParam(dx, dy, da));
@@ -166,6 +169,7 @@ bool CVStabilization::TrackFrameFeatures(cv::Mat frame, size_t frameNum){
// Show processing info
cout << "Frame: " << frameNum << " - good optical flow: " << prev_corner2.size() << endl;
return true;
}
std::vector<CamTrajectory> CVStabilization::ComputeFramesTrajectory(){
@@ -295,56 +299,6 @@ void CVStabilization::AddFrameDataToProto(libopenshotstabilize::Frame* pbFrameDa
pbFrameData->set_dy(transData.dy);
}
// Load protobuf data file
// Load stabilization data previously saved to the protobuf data file.
// Fills the trajectoryData and transformationData maps, keyed by frame id.
// Returns true on success, false if the file could not be opened or parsed.
bool CVStabilization::LoadStabilizedData(){
    // Create stabilization message
    libopenshotstabilize::Stabilization stabilizationMessage;

    // Open the existing stabilization data file
    fstream input(protobuf_data_path, ios::in | ios::binary);
    // A missing/unreadable file would otherwise parse as an empty message and
    // be reported as success — check the stream state explicitly first.
    if (!input) {
        cerr << "Could not open protobuf data file." << endl;
        return false;
    }
    if (!stabilizationMessage.ParseFromIstream(&input)) {
        cerr << "Failed to parse protobuf message." << endl;
        return false;
    }

    // Make sure the data maps are empty
    transformationData.clear();
    trajectoryData.clear();

    // Iterate over all frames of the saved message and assign to the data maps.
    // frame_size() returns int, so use int for the index (avoids sign-compare).
    for (int i = 0; i < stabilizationMessage.frame_size(); i++) {
        const libopenshotstabilize::Frame& pbFrameData = stabilizationMessage.frame(i);

        // Load frame number
        size_t id = pbFrameData.id();

        // Load camera trajectory data
        float x = pbFrameData.x();
        float y = pbFrameData.y();
        float a = pbFrameData.a();

        // Assign data to trajectory map
        trajectoryData[id] = CamTrajectory(x, y, a);

        // Load transformation data
        float dx = pbFrameData.dx();
        float dy = pbFrameData.dy();
        float da = pbFrameData.da();

        // Assign data to transformation map
        transformationData[id] = TransformParam(dx, dy, da);
    }

    // Show the time stamp from the last update in stabilization data file
    if (stabilizationMessage.has_last_updated()) {
        cout << " Loaded Data. Saved Time Stamp: " << TimeUtil::ToString(stabilizationMessage.last_updated()) << endl;
    }

    // NOTE(review): google::protobuf::ShutdownProtobufLibrary() was removed here.
    // It frees ALL protobuf global state; after it runs no protobuf API may be
    // used again in the process, so calling it on every load would break any
    // subsequent protobuf usage. Shut the library down once at program exit.
    return true;
}
TransformParam CVStabilization::GetTransformParamData(size_t frameId){
// Check if the stabilizer info for the requested frame exists
@@ -396,4 +350,64 @@ void CVStabilization::SetJsonValue(const Json::Value root) {
if (!root["smoothing_window"].isNull()){
smoothingWindow = (root["smoothing_window"].asInt());
}
}
/*
||||||||||||||||||||||||||||||||||||||||||||||||||
   FOR UNIT TESTS ONLY (used by `make test`)
||||||||||||||||||||||||||||||||||||||||||||||||||
*/
// Load protobuf data file
// Test-only variant: load stabilization data previously saved to the protobuf
// data file, filling the trajectoryData and transformationData maps keyed by
// frame id. Returns true on success, false if the file could not be opened or
// parsed.
bool CVStabilization::_LoadStabilizedData(){
    // Create stabilization message
    libopenshotstabilize::Stabilization stabilizationMessage;

    // Open the existing stabilization data file
    fstream input(protobuf_data_path, ios::in | ios::binary);
    // A missing/unreadable file would otherwise parse as an empty message and
    // be reported as success — check the stream state explicitly first.
    if (!input) {
        cerr << "Could not open protobuf data file." << endl;
        return false;
    }
    if (!stabilizationMessage.ParseFromIstream(&input)) {
        cerr << "Failed to parse protobuf message." << endl;
        return false;
    }

    // Make sure the data maps are empty
    transformationData.clear();
    trajectoryData.clear();

    // Iterate over all frames of the saved message and assign to the data maps.
    // frame_size() returns int, so use int for the index (avoids sign-compare).
    for (int i = 0; i < stabilizationMessage.frame_size(); i++) {
        const libopenshotstabilize::Frame& pbFrameData = stabilizationMessage.frame(i);

        // Load frame number
        size_t id = pbFrameData.id();

        // Load camera trajectory data
        float x = pbFrameData.x();
        float y = pbFrameData.y();
        float a = pbFrameData.a();

        // Assign data to trajectory map
        trajectoryData[id] = CamTrajectory(x, y, a);

        // Load transformation data
        float dx = pbFrameData.dx();
        float dy = pbFrameData.dy();
        float da = pbFrameData.da();

        // Assign data to transformation map
        transformationData[id] = TransformParam(dx, dy, da);
    }

    // Show the time stamp from the last update in stabilization data file
    if (stabilizationMessage.has_last_updated()) {
        cout << " Loaded Data. Saved Time Stamp: " << TimeUtil::ToString(stabilizationMessage.last_updated()) << endl;
    }

    // NOTE(review): google::protobuf::ShutdownProtobufLibrary() was removed here.
    // It frees ALL protobuf global state; after it runs no protobuf API may be
    // used again in the process, so calling it on every load would break any
    // subsequent protobuf usage. Shut the library down once at program exit.
    return true;
}