/**
 * @file
 * @brief Unit tests for openshot::Clip
 * @author Jonathan Thomas <jonathan@openshot.org>
 *
 * @ref License
 */

// Copyright (c) 2008-2019 OpenShot Studios, LLC
//
// SPDX-License-Identifier: LGPL-3.0-or-later

|
#include <sstream>
|
|
|
|
|
#include <memory>
|
|
|
|
|
|
2022-06-17 15:07:16 -04:00
|
|
|
#include "openshot_catch.h"
|
2020-03-19 05:55:06 -04:00
|
|
|
|
2021-10-27 00:26:56 -04:00
|
|
|
#include <QColor>
|
|
|
|
|
#include <QImage>
|
|
|
|
|
#include <QSize>
|
|
|
|
|
|
2020-12-26 21:51:24 -05:00
|
|
|
#include "Clip.h"
|
2022-03-02 16:24:09 -06:00
|
|
|
#include "DummyReader.h"
|
2021-10-27 00:26:56 -04:00
|
|
|
#include "Enums.h"
|
2021-10-27 14:34:05 -04:00
|
|
|
#include "Exceptions.h"
|
2023-02-28 14:13:12 -06:00
|
|
|
#include "FFmpegReader.h"
|
2020-12-26 21:51:24 -05:00
|
|
|
#include "Frame.h"
|
|
|
|
|
#include "Fraction.h"
|
2023-02-27 22:11:13 -06:00
|
|
|
#include "FrameMapper.h"
|
2020-12-26 21:51:24 -05:00
|
|
|
#include "Timeline.h"
|
|
|
|
|
#include "Json.h"
|
2021-10-27 14:34:05 -04:00
|
|
|
#include "effects/Negate.h"
|
2012-10-05 01:58:27 -05:00
|
|
|
|
|
|
|
|
using namespace openshot;
|
|
|
|
|
|
2021-04-09 04:09:36 -04:00
|
|
|
TEST_CASE( "default constructor", "[libopenshot][clip]" )
{
    // A default-constructed clip should come up with sane defaults
    Clip clip;

    // Placement defaults
    CHECK(clip.anchor == ANCHOR_CANVAS);
    CHECK(clip.gravity == GRAVITY_CENTER);
    CHECK(clip.scale == SCALE_FIT);

    // Timing defaults: layer 0, everything at time zero
    CHECK(clip.Layer() == 0);
    CHECK(clip.Position() == Approx(0.0f).margin(0.00001));
    CHECK(clip.Start() == Approx(0.0f).margin(0.00001));
    CHECK(clip.End() == Approx(0.0f).margin(0.00001));
}
|
|
|
|
|
|
2021-04-09 04:09:36 -04:00
|
|
|
TEST_CASE( "path string constructor", "[libopenshot][clip]" )
{
    // Build the full path to a test media file and construct a clip from it
    std::stringstream media_path;
    media_path << TEST_MEDIA_PATH << "piano.wav";

    Clip clip(media_path.str());
    clip.Open();

    // Defaults should match an empty clip...
    CHECK(clip.anchor == ANCHOR_CANVAS);
    CHECK(clip.gravity == GRAVITY_CENTER);
    CHECK(clip.scale == SCALE_FIT);
    CHECK(clip.Layer() == 0);
    CHECK(clip.Position() == Approx(0.0f).margin(0.00001));
    CHECK(clip.Start() == Approx(0.0f).margin(0.00001));

    // ...except End(), which is derived from the reader's duration
    CHECK(clip.End() == Approx(4.39937f).margin(0.00001));
}
|
|
|
|
|
|
2021-04-09 04:09:36 -04:00
|
|
|
TEST_CASE( "basic getters and setters", "[libopenshot][clip]" )
{
    // An empty clip with no reader attached
    Clip clip;

    // Opening a clip with no reader must throw
    CHECK_THROWS_AS(clip.Open(), ReaderClosed);

    // Verify the default state first
    CHECK(clip.anchor == ANCHOR_CANVAS);
    CHECK(clip.gravity == GRAVITY_CENTER);
    CHECK(clip.scale == SCALE_FIT);
    CHECK(clip.Layer() == 0);
    CHECK(clip.Position() == Approx(0.0f).margin(0.00001));
    CHECK(clip.Start() == Approx(0.0f).margin(0.00001));
    CHECK(clip.End() == Approx(0.0f).margin(0.00001));

    // Mutate every basic property through the setters
    clip.Layer(1);
    clip.Position(5.0);
    clip.Start(3.5);
    clip.End(10.5);

    // The getters should now report the new values
    CHECK(clip.Layer() == 1);
    CHECK(clip.Position() == Approx(5.0f).margin(0.00001));
    CHECK(clip.Start() == Approx(3.5f).margin(0.00001));
    CHECK(clip.End() == Approx(10.5f).margin(0.00001));
}
|
2015-02-17 00:21:57 -06:00
|
|
|
|
2021-04-09 04:09:36 -04:00
|
|
|
TEST_CASE( "properties", "[libopenshot][clip]" )
{
    // Create an empty clip and animate alpha from 1.0 (frame 1) to 0.0 (frame 500)
    Clip c1;
    c1.Layer(1);
    c1.Position(5.0);
    c1.Start(3.5);
    c1.End(10.5);
    c1.alpha.AddPoint(1, 1.0);
    c1.alpha.AddPoint(500, 0.0);

    // JSON reader shared by every parse below. Held in a unique_ptr so it is
    // released on every exit path (the original used a raw owning pointer with
    // a manual `delete` at the end of the test).
    Json::CharReaderBuilder rbuilder;
    std::unique_ptr<Json::CharReader> reader(rbuilder.newCharReader());
    std::string errors;

    // Helper: parse a PropertiesJSON() string into `root`; returns parse success.
    // Clears `root` first so stale keys from a previous parse can't leak through.
    auto parse = [&](const std::string& properties, Json::Value& root) {
        root.clear();
        return reader->parse(
            properties.c_str(),
            properties.c_str() + properties.size(),
            &root, &errors);
    };

    Json::Value root;

    // Frame 1: alpha sits exactly on its first keyframe point
    std::string properties = c1.PropertiesJSON(1);
    CHECK(parse(properties, root) == true);
    CHECK(root["alpha"]["value"].asDouble() == Approx(1.0f).margin(0.01));
    CHECK(root["alpha"]["keyframe"].asBool() == true);

    // Frame 250: halfway between the two points — interpolated, not a keyframe
    properties = c1.PropertiesJSON(250);
    CHECK(parse(properties, root) == true);
    CHECK(root["alpha"]["value"].asDouble() == Approx(0.5f).margin(0.01));
    CHECK_FALSE(root["alpha"]["keyframe"].asBool());

    // Frame 250 again: repeated property generation must be stable
    properties = c1.PropertiesJSON(250);
    CHECK(parse(properties, root) == true);
    CHECK_FALSE(root["alpha"]["keyframe"].asBool());

    // Frame 500: alpha lands on the final keyframe point
    properties = c1.PropertiesJSON(500);
    CHECK(parse(properties, root) == true);
    CHECK(root["alpha"]["value"].asDouble() == Approx(0.0f).margin(0.00001));
    CHECK(root["alpha"]["keyframe"].asBool() == true);
}
|
|
|
|
|
|
2021-04-09 04:09:36 -04:00
|
|
|
TEST_CASE( "effects", "[libopenshot][clip]" )
{
    // Open a clip backed by a video file
    std::stringstream media_path;
    media_path << TEST_MEDIA_PATH << "sintel_trailer-720p.mp4";
    Clip c10(media_path.str());
    c10.Open();

    // Attach a single Negate effect
    Negate negate1;
    c10.AddEffect(&negate1);

    // Fetch frame 500 and inspect pixel 112 on scanline 10
    std::shared_ptr<Frame> frame = c10.GetFrame(500);
    const unsigned char* scanline = frame->GetPixels(10);
    int idx = 112 * 4; // 4 bytes (RGBA) per pixel

    // With one negate applied, the sampled pixel is pure white (opaque)
    CHECK((int)scanline[idx] == 255);
    CHECK((int)scanline[idx + 1] == 255);
    CHECK((int)scanline[idx + 2] == 255);
    CHECK((int)scanline[idx + 3] == 255);

    // Exactly one effect should be registered
    CHECK((int)c10.Effects().size() == 1);

    // Attach a second Negate effect, which cancels out the first
    Negate negate2;
    c10.AddEffect(&negate2);

    // Fetch frame 500 again and re-sample the same pixel
    frame = c10.GetFrame(500);
    scanline = frame->GetPixels(10);
    idx = 112 * 4; // 4 bytes (RGBA) per pixel

    // Double negation restores black; alpha stays fully opaque
    CHECK((int)scanline[idx] == 0);
    CHECK((int)scanline[idx + 1] == 0);
    CHECK((int)scanline[idx + 2] == 0);
    CHECK((int)scanline[idx + 3] == 255);

    // Both effects should now be registered
    CHECK((int)c10.Effects().size() == 2);
}
|
2020-03-19 05:55:06 -04:00
|
|
|
|
2021-04-09 04:09:36 -04:00
|
|
|
TEST_CASE( "verify parent Timeline", "[libopenshot][clip]" )
{
    // Timeline at 640x480 — smaller than the clip's native resolution
    Timeline t1(640, 480, Fraction(30,1), 44100, 2, LAYOUT_STEREO);

    // Open a clip backed by a 1280x720 video file
    std::stringstream media_path;
    media_path << TEST_MEDIA_PATH << "sintel_trailer-720p.mp4";
    Clip c1(media_path.str());
    c1.Open();

    // Without a parent timeline, frames come back at the reader's native size
    CHECK(c1.GetFrame(1)->GetImage()->width() == 1280);
    CHECK(c1.GetFrame(1)->GetImage()->height() == 720);

    // Attach the clip to the timeline
    t1.AddClip(&c1);

    // Once parented, frames are scaled to fit the timeline's dimensions
    CHECK(c1.GetFrame(1)->GetImage()->width() == 640);
    CHECK(c1.GetFrame(1)->GetImage()->height() == 360);
}
|
2021-10-27 00:26:56 -04:00
|
|
|
|
|
|
|
|
TEST_CASE( "has_video", "[libopenshot][clip]" )
{
    std::stringstream media_path;
    media_path << TEST_MEDIA_PATH << "sintel_trailer-720p.mp4";
    openshot::Clip c1(media_path.str());

    // has_video keyframe: 0.0 hides video, -1.0 defers to the reader,
    // 1.0 forces video on
    c1.has_video.AddPoint(1.0, 0.0);
    c1.has_video.AddPoint(5.0, -1.0, openshot::CONSTANT);
    c1.has_video.AddPoint(10.0, 1.0, openshot::CONSTANT);

    c1.Open();

    auto trans_color = QColor(Qt::transparent);

    // Frame 1 is hidden, frame 5 is visible; request 5 twice to also
    // exercise the cached-frame path. Every frame still carries image data.
    auto f1 = c1.GetFrame(1);
    CHECK(f1->has_image_data);

    auto f2 = c1.GetFrame(5);
    CHECK(f2->has_image_data);

    auto f3 = c1.GetFrame(5);
    CHECK(f3->has_image_data);

    // Verify the image matches the frame dimensions, and whether the sampled
    // pixel is transparent (hidden video) or not (visible video)
    auto verify_image = [&trans_color](const std::shared_ptr<openshot::Frame>& f,
                                       bool expect_transparent) {
        auto image = f->GetImage();
        QSize expected_size(f->GetWidth(), f->GetHeight());
        CHECK(image->size() == expected_size);
        if (expect_transparent) {
            CHECK(image->pixelColor(20, 20) == trans_color);
        } else {
            CHECK(image->pixelColor(20, 20) != trans_color);
        }
    };

    verify_image(f1, true);   // hidden: transparent pixels
    verify_image(f2, false);  // visible: real pixel data
    verify_image(f3, false);  // cached fetch behaves the same
}
|
2022-03-02 16:24:09 -06:00
|
|
|
|
|
|
|
|
TEST_CASE( "access frames past reader length", "[libopenshot][clip]" )
{
    // Cache of synthetic frames that will back a DummyReader
    openshot::CacheMemory cache;

    // Build 30 test frames (44100 Hz / 30 fps = 1470 samples per frame)
    for (int64_t frame_number = 1; frame_number <= 30; frame_number++) {
        int sample_count = 1470;
        auto f = std::make_shared<openshot::Frame>(frame_number, sample_count, 2);

        // Fill a buffer with a recognizable ramp: frame# + sample fraction.
        // unique_ptr releases the buffer on every exit path (was a raw
        // new[]/delete[] pair, which leaked if AddAudio threw).
        auto audio_buffer = std::make_unique<float[]>(sample_count);
        for (int64_t sample_number = 0; sample_number < sample_count; sample_number++) {
            audio_buffer[sample_number] = float(frame_number) + (float(sample_number) / float(sample_count));
        }

        // Copy the ramp into both stereo channels
        // (bool replaceSamples, int destChannel, int destStartSample, const float* source, ...)
        f->AddAudio(true, 0, 0, audio_buffer.get(), sample_count, 1.0); // add channel 1
        f->AddAudio(true, 1, 0, audio_buffer.get(), sample_count, 1.0); // add channel 2

        // Add test frame to dummy reader's cache
        cache.Add(f);
    }

    // Create a dummy reader backed by the pre-built cache
    openshot::DummyReader r(openshot::Fraction(30, 1), 1920, 1080, 44100, 2, 1.0, &cache);
    r.Open(); // Open the reader

    openshot::Clip c1;
    c1.Reader(&r);
    c1.Open();

    // The last valid frame should return the ramp data intact
    std::shared_ptr<openshot::Frame> frame = c1.GetFrame(30);
    CHECK(frame->GetAudioSamples(0)[0] == Approx(30.0).margin(0.00001));
    CHECK(frame->GetAudioSamples(0)[600] == Approx(30.4081631).margin(0.00001));
    CHECK(frame->GetAudioSamples(0)[1200] == Approx(30.8163261).margin(0.00001));

    // +1 past the end of the reader: audio silence expected
    frame = c1.GetFrame(31);
    CHECK(frame->GetAudioSamples(0)[0] == Approx(0.0).margin(0.00001));
    CHECK(frame->GetAudioSamples(0)[600] == Approx(0.0).margin(0.00001));
    CHECK(frame->GetAudioSamples(0)[1200] == Approx(0.0).margin(0.00001));

    // +2 past the end of the reader: still audio silence
    frame = c1.GetFrame(32);
    CHECK(frame->GetAudioSamples(0)[0] == Approx(0.0).margin(0.00001));
    CHECK(frame->GetAudioSamples(0)[600] == Approx(0.0).margin(0.00001));
    CHECK(frame->GetAudioSamples(0)[1200] == Approx(0.0).margin(0.00001));
}
|
|
|
|
|
|
|
|
|
|
TEST_CASE( "setting and clobbering readers", "[libopenshot][clip]" )
{
    // Two externally-owned dummy readers at different frame rates
    openshot::DummyReader r1(openshot::Fraction(24, 1), 1920, 1080, 44100, 2, 1.0);
    r1.Open();

    openshot::DummyReader r2(openshot::Fraction(30, 1), 1920, 1080, 44100, 2, 1.0);
    r2.Open();

    // The path constructor allocates an internal reader (A) for the clip
    std::stringstream media_path;
    media_path << TEST_MEDIA_PATH << "piano.wav";
    Clip c1(media_path.str());
    c1.Open();

    // Repeatedly swap the clip's reader; none of these transitions should
    // leak or crash, whether the outgoing reader was allocated internally
    // or supplied externally.
    c1.Reader(&r1);   // allocated A -> external r1
    c1.Reader(&r2);   // external r1 -> external r2

    // SetJson replaces r2 with a freshly allocated internal reader (B)
    c1.SetJson("{\"reader\":{\"acodec\":\"raw\",\"audio_bit_rate\":0,\"audio_stream_index\":-1,\"audio_timebase\":{\"den\":1,\"num\":1},\"channel_layout\":4,\"channels\":2,\"display_ratio\":{\"den\":9,\"num\":16},\"duration\":1.0,\"file_size\":\"8294400\",\"fps\":{\"den\":1,\"num\":30},\"has_audio\":false,\"has_single_image\":false,\"has_video\":true,\"height\":1080,\"interlaced_frame\":false,\"metadata\":{},\"pixel_format\":-1,\"pixel_ratio\":{\"den\":1,\"num\":1},\"sample_rate\":44100,\"top_field_first\":true,\"type\":\"DummyReader\",\"vcodec\":\"raw\",\"video_bit_rate\":0,\"video_length\":\"30\",\"video_stream_index\":-1,\"video_timebase\":{\"den\":30,\"num\":1},\"width\":1920}}");

    c1.Reader(&r2);   // allocated B -> external r2
    c1.Reader(&r1);   // external r2 -> external r1
}
|
2023-02-27 22:11:13 -06:00
|
|
|
|
|
|
|
|
TEST_CASE( "time remapping", "[libopenshot][clip]" )
{
    Fraction fps(23,1);
    Timeline t1(640, 480, fps, 44100, 2, LAYOUT_STEREO);

    // Load clip with audio
    std::stringstream media_path;
    media_path << TEST_MEDIA_PATH << "piano.wav";

    Clip clip(media_path.str());
    int original_video_length = clip.Reader()->info.video_length;
    clip.Position(0.0);
    clip.Start(0.0);

    // Set time keyframe (4X speed REVERSE)
    //clip.time.AddPoint(1, original_video_length, openshot::LINEAR);
    //clip.time.AddPoint(original_video_length, 1.0, openshot::LINEAR);

    // Set time keyframe (4X speed FORWARD)
    //clip.time.AddPoint(1, 1.0, openshot::LINEAR);
    //clip.time.AddPoint(original_video_length / 2, original_video_length, openshot::LINEAR);

    // Set time keyframe (1/4X speed FORWARD)
    //clip.time.AddPoint(1, 1.0, openshot::LINEAR);
    //clip.time.AddPoint(original_video_length * 4, original_video_length, openshot::LINEAR);

    // TODO: clip.Duration() != clip.Reader->info.duration
    // Derive the clip length from the time keyframe when one is set,
    // otherwise fall back to the reader's duration
    if (clip.time.GetLength() > 1) {
        clip.End(clip.time.GetLength() / fps.ToDouble());
    } else {
        clip.End(clip.Reader()->info.duration);
    }

    // Add clip to the timeline
    t1.AddClip(&clip);
    t1.Open();

    // Compute the timeline frame range covered by the clip
    int64_t clip_start_frame = (clip.Position() * fps.ToDouble()) + 1;
    int64_t clip_end_frame = clip_start_frame + clip.time.GetLength();
    if (clip.time.GetLength() == 1) {
        clip_end_frame = clip_start_frame + (clip.Duration() * fps.ToDouble());
    }

    // Every timeline frame must carry the expected number of audio samples
    auto verify_sample_counts = [&]() {
        for (int64_t frame = clip_start_frame; frame <= clip_end_frame; frame++) {
            int expected_sample_count = Frame::GetSamplesPerFrame(frame, t1.info.fps,
                                                                  t1.info.sample_rate,
                                                                  t1.info.channels);

            std::shared_ptr<Frame> f = t1.GetFrame(frame);
            if (expected_sample_count != f->GetAudioSamplesCount()) {
                CHECK(expected_sample_count == f->GetAudioSamplesCount());
            }
        }
    };

    // First pass through all frames
    verify_sample_counts();

    // Clear all caches, then loop again; time-remapping should start
    // over cleanly after detecting the gap
    t1.ClearAllCache(true);
    verify_sample_counts();

    t1.Close();
}
|
|
|
|
|
|
2023-03-10 01:15:14 -06:00
|
|
|
TEST_CASE( "resample_audio_8000_to_48000_reverse", "[libopenshot][clip]" )
{
    // Open a small sine-wave audio file
    std::stringstream media_path;
    media_path << TEST_MEDIA_PATH << "sine.wav";
    openshot::FFmpegReader reader(media_path.str(), true);

    // Map to 24 fps, 2 channels stereo, 48000 sample rate
    FrameMapper map(&reader, Fraction(24,1), PULLDOWN_NONE, 48000, 2, LAYOUT_STEREO);
    map.Open();

    // Wrap the mapper in a clip
    Clip clip;
    clip.Reader(&map);
    clip.Open();
    int original_video_length = clip.Reader()->info.video_length;

    clip.Position(0.0);
    clip.Start(0.0);

    // Time keyframe: play the whole clip in REVERSE, easing with a bezier curve
    clip.time.AddPoint(1, original_video_length, openshot::LINEAR);
    clip.time.AddPoint(original_video_length, 1.0, openshot::BEZIER);

    // First pass: every frame should resample to the expected sample count
    for (int64_t frame = 1; frame <= original_video_length; frame++) {
        int expected_sample_count = Frame::GetSamplesPerFrame(frame, map.info.fps,
                                                              map.info.sample_rate,
                                                              map.info.channels);

        std::shared_ptr<Frame> f = clip.GetFrame(frame);
        if (expected_sample_count != f->GetAudioSamplesCount()) {
            CHECK(expected_sample_count == f->GetAudioSamplesCount());
        }
    }

    // Clear the clip's cache
    clip.GetCache()->Clear();

    // Second pass: time-remapping should start over (detect a gap) and
    // still produce the expected sample counts
    for (int64_t frame = 1; frame < original_video_length; frame++) {
        int expected_sample_count = Frame::GetSamplesPerFrame(frame, map.info.fps,
                                                              map.info.sample_rate,
                                                              map.info.channels);

        std::shared_ptr<Frame> f = clip.GetFrame(frame);
        CHECK(expected_sample_count == f->GetAudioSamplesCount());
    }

    // Tear everything down
    map.Close();
    reader.Close();
    clip.Close();
}
|