/**
 * @file
 * @brief Unit tests for openshot::Timeline
 * @author Jonathan Thomas <jonathan@openshot.org>
 *
 * @ref License
 */

// Copyright (c) 2008-2019 OpenShot Studios, LLC
//
// SPDX-License-Identifier: LGPL-3.0-or-later
2013-09-12 23:41:49 -05:00
2021-04-09 04:09:36 -04:00
#include <algorithm>
#include <list>
#include <memory>
#include <sstream>
#include <string>

#include <omp.h>

#include "openshot_catch.h"

#include "Clip.h"
#include "Fraction.h"
#include "Frame.h"
#include "FrameMapper.h"
#include "Timeline.h"
#include "effects/Blur.h"
#include "effects/Negate.h"
2012-10-05 01:58:27 -05:00
using namespace openshot ;
2021-04-09 04:09:36 -04:00
TEST_CASE("constructor", "[libopenshot][timeline]")
{
	// Two timelines built with different dimensions must each
	// report the width/height they were constructed with.
	const Fraction fps(30000, 1000);

	Timeline t1(640, 480, fps, 44100, 2, LAYOUT_STEREO);
	CHECK(t1.info.width == 640);
	CHECK(t1.info.height == 480);

	Timeline t2(300, 240, fps, 44100, 2, LAYOUT_STEREO);
	CHECK(t2.info.width == 300);
	CHECK(t2.info.height == 240);
}
2022-10-06 15:07:31 -05:00
TEST_CASE ( " Set Json and clear clips " , " [libopenshot][timeline] " )
{
2023-02-13 16:42:21 -06:00
// Standard 30 fps / stereo test timeline
Fraction fps ( 30000 , 1000 ) ;
Timeline t ( 640 , 480 , fps , 44100 , 2 , LAYOUT_STEREO ) ;
2022-10-06 15:07:31 -05:00
2023-02-13 16:42:21 -06:00
// Large ugly JSON project (4 clips + 3 transitions)
// NOTE(review): the JSON stream-insertion below appears truncated in this copy
// of the source (the string literal is cut off mid-value before the SetJson()
// call) — confirm against the upstream test file before relying on it.
std : : stringstream project_json ;
project_json < < " { \" id \" : \" CQA0YW6I2Q \" , \" fps \" :{ \" num \" :30, \" den \" :1}, \" display_ratio \" :{ \" num \" :16, \" den \" :9}, \" pixel_ratio \" :{ \" num \" :1, \" den \" :1}, \" width \" :1280, \" height \" :720, \" sample_rate \" :48000, \" channels \" :2, \" channel_layout \" :3, \" settings \" :{}, \" clips \" :[{ \" alpha \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" anchor \" :0, \" channel_filter \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" channel_mapping \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" display \" :0, \" duration \" :51.9466667175293, \" effects \" :[], \" end \" :10.666666666666666, \" gravity \" :4, \" has_audio \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" has_video \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" id \" : \" QHESI4ZW0E \" , \" layer \" :5000000, \" location_x \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :0}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" location_y \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :0}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ 
\" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" mixing \" :0, \" origin_x \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :0.5}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" origin_y \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :0.5}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" parentObjectId \" : \" \" , \" perspective_c1_x \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c1_y \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c2_x \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c2_y \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c3_x \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c3_y \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c4_x \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y 
\" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c4_y \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" position \" :0, \" reader \" :{ \" acodec \" : \" aac \" , \" audio_bit_rate \" :126694, \" audio_stream_index \" :1, \" audio_timebase \" :{ \" den \" :48000, \" num \" :1}, \" channel_layout \" :3, \" channels \" :2, \" display_ratio \" :{ \" den \" :9, \" num \" :16}, \" duration \" :51.9466667175293, \" file_size \" : \" 7608204 \" , \" fps \" :{ \" den \" :1, \" num \" :24}, \" has_audio \" :true, \" has_single_image \" :false, \" has_video \" :true, \" height \" :720, \" interlaced_frame \" :false, \" metadata \" :{ \" artist \" : \" Durian Open Movie Team \" , \" compatible_brands \" : \" isomiso2avc1mp41 \" , \" copyright \" : \" (c) copyright Blender Foundation | durian.blender.org \" , \" creation_time \" : \" 1970-01-01T00:00:00.000000Z \" , \" description \" : \" Trailer for the Sintel open movie project \" , \" encoder \" : \" Lavf52.62.0 \" ,
// Load the whole project from the JSON string
t . SetJson ( project_json . str ( ) ) ;
2022-10-06 15:07:31 -05:00
2023-02-13 16:42:21 -06:00
// Count clips & effects loaded from JSON
CHECK ( t . Clips ( ) . size ( ) = = 4 ) ;
CHECK ( t . Effects ( ) . size ( ) = = 3 ) ;
2022-10-06 15:07:31 -05:00
2023-02-13 16:42:21 -06:00
// Clear timeline and clear allocated clips, effects, and frame mappers
t . Clear ( ) ;
2022-10-06 15:07:31 -05:00
2023-02-13 16:42:21 -06:00
// Count clips & effects — everything must be gone after Clear()
CHECK ( t . Clips ( ) . size ( ) = = 0 ) ;
CHECK ( t . Effects ( ) . size ( ) = = 0 ) ;
2022-10-06 15:07:31 -05:00
2023-02-13 16:42:21 -06:00
// Manually add clip object (not using SetJson)
std : : stringstream path ;
path < < TEST_MEDIA_PATH < < " test.mp4 " ;
Clip clip_video ( path . str ( ) ) ;
t . AddClip ( & clip_video ) ;
2022-10-06 15:07:31 -05:00
2023-02-13 16:42:21 -06:00
// Manually add effect object (not using SetJson)
Negate effect_top ;
effect_top . Id ( " C " ) ;
t . AddEffect ( & effect_top ) ;
2022-10-06 15:07:31 -05:00
2023-02-13 16:42:21 -06:00
// Count clips & effects — the manually added objects must be visible
CHECK ( t . Clips ( ) . size ( ) = = 1 ) ;
CHECK ( t . Effects ( ) . size ( ) = = 1 ) ;
2022-10-06 15:07:31 -05:00
2023-02-13 16:42:21 -06:00
// Clear timeline a second time (manually added objects this time)
t . Clear ( ) ;
2022-10-06 15:07:31 -05:00
2023-02-13 16:42:21 -06:00
// Count clips & effects — timeline must be empty again
CHECK ( t . Clips ( ) . size ( ) = = 0 ) ;
CHECK ( t . Effects ( ) . size ( ) = = 0 ) ;
2022-10-06 15:07:31 -05:00
}
2021-04-09 06:30:38 -04:00
TEST_CASE("ReaderInfo constructor", "[libopenshot][timeline]")
{
	// Open a real clip so we can borrow its reader's parameters
	std::stringstream path;
	path << TEST_MEDIA_PATH << "test.mp4";
	Clip clip_video(path.str());
	clip_video.Open();
	const auto r1 = clip_video.Reader();

	// Construct a Timeline directly from the reader's info struct
	Timeline t1(r1->info);

	// Every copied field must match the source reader
	CHECK(t1.info.width == r1->info.width);
	CHECK(t1.info.height == r1->info.height);
	CHECK(t1.info.fps.num == r1->info.fps.num);
	CHECK(t1.info.fps.den == r1->info.fps.den);
	CHECK(t1.info.sample_rate == r1->info.sample_rate);
	CHECK(t1.info.channels == r1->info.channels);
	CHECK(t1.info.channel_layout == r1->info.channel_layout);
}
2021-04-09 04:09:36 -04:00
TEST_CASE("width and height functions", "[libopenshot][timeline]")
{
	const Fraction fps(30000, 1000);
	Timeline t1(640, 480, fps, 44100, 2, LAYOUT_STEREO);

	// Initial dimensions come from the constructor
	CHECK(t1.info.width == 640);
	CHECK(t1.info.height == 480);

	// Changing the width must leave the height untouched
	t1.info.width = 600;
	CHECK(t1.info.width == 600);
	CHECK(t1.info.height == 480);

	// Changing the height must leave the width untouched
	t1.info.height = 400;
	CHECK(t1.info.width == 600);
	CHECK(t1.info.height == 400);
}
2012-10-10 01:07:47 -05:00
2021-04-09 04:09:36 -04:00
TEST_CASE("Framerate", "[libopenshot][timeline]")
{
	// A 24/1 fraction should round-trip through the timeline as 24.0 fps
	const Fraction fps(24, 1);
	Timeline t1(640, 480, fps, 44100, 2, LAYOUT_STEREO);

	CHECK(t1.info.fps.ToFloat() == Approx(24.0f).margin(0.00001));
}
2014-04-03 22:47:21 -05:00
2021-04-09 04:09:36 -04:00
TEST_CASE("two-track video", "[libopenshot][timeline]")
{
	// Lower track: video clip starting at t = 0
	std::stringstream path;
	path << TEST_MEDIA_PATH << "test.mp4";
	Clip clip_video(path.str());
	clip_video.Layer(0);
	clip_video.Position(0.0);

	// Upper track: a short, slightly delayed overlay image
	std::stringstream path_overlay;
	path_overlay << TEST_MEDIA_PATH << "front3.png";
	Clip clip_overlay(path_overlay.str());
	clip_overlay.Layer(1);
	clip_overlay.Position(0.05);  // Delay the overlay by 0.05 seconds
	clip_overlay.End(0.5);        // Make the duration of the overlay 1/2 second

	// Create a timeline and composite both tracks
	Timeline t(1280, 720, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);
	t.AddClip(&clip_video);
	t.AddClip(&clip_overlay);
	t.Open();

	// The pixel we sample on every frame
	const int pixel_row = 200;
	const int pixel_index = 230 * 4;  // pixel 230 (4 bytes per pixel)

	// Helper: assert the RGBA values of the sampled pixel on one frame
	auto check_pixel = [&](const std::shared_ptr<Frame>& frame,
	                       int red, int green, int blue, int alpha) {
		auto pixels = frame->GetPixels(pixel_row);
		CHECK((int) pixels[pixel_index] == Approx(red).margin(5));
		CHECK((int) pixels[pixel_index + 1] == Approx(green).margin(5));
		CHECK((int) pixels[pixel_index + 2] == Approx(blue).margin(5));
		CHECK((int) pixels[pixel_index + 3] == Approx(alpha).margin(5));
	};

	// Frames sampled in the original (deliberately non-sequential) order,
	// alternating between overlay-visible and overlay-hidden moments
	check_pixel(t.GetFrame(1), 21, 191, 0, 255);
	check_pixel(t.GetFrame(2), 176, 0, 186, 255);
	check_pixel(t.GetFrame(3), 23, 190, 0, 255);
	check_pixel(t.GetFrame(24), 176, 0, 186, 255);
	check_pixel(t.GetFrame(5), 23, 190, 0, 255);
	check_pixel(t.GetFrame(25), 20, 190, 0, 255);
	check_pixel(t.GetFrame(4), 176, 0, 186, 255);

	t.Close();
}
2015-02-19 01:03:22 -06:00
2021-04-09 04:09:36 -04:00
TEST_CASE("Clip order", "[libopenshot][timeline]")
{
	Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);

	// Add three clips with deliberately shuffled layer numbers
	std::stringstream path_top;
	path_top << TEST_MEDIA_PATH << "front3.png";
	Clip clip_top(path_top.str());
	clip_top.Layer(2);
	t.AddClip(&clip_top);

	std::stringstream path_middle;
	path_middle << TEST_MEDIA_PATH << "front.png";
	Clip clip_middle(path_middle.str());
	clip_middle.Layer(0);
	t.AddClip(&clip_middle);

	std::stringstream path_bottom;
	path_bottom << TEST_MEDIA_PATH << "back.png";
	Clip clip_bottom(path_bottom.str());
	clip_bottom.Layer(1);
	t.AddClip(&clip_bottom);

	t.Open();

	// The timeline sorts clips bottom layer to top layer, then by position
	std::list<Clip*> clips = t.Clips();
	int index = 0;
	for (auto clip : clips) {
		CHECK(clip->Layer() == index);
		++index;
	}

	// Insert a fourth clip that ties on layer 1 but starts later
	std::stringstream path_middle1;
	path_middle1 << TEST_MEDIA_PATH << "interlaced.png";
	Clip clip_middle1(path_middle1.str());
	clip_middle1.Layer(1);
	clip_middle1.Position(0.5);
	t.AddClip(&clip_middle1);

	// Re-check: equal layers must be ordered by position
	clips = t.Clips();
	index = 0;
	for (auto clip : clips) {
		if (index == 0) {
			CHECK(clip->Layer() == 0);
		} else if (index == 1) {
			CHECK(clip->Layer() == 1);
			CHECK(clip->Position() == Approx(0.0).margin(0.0001));
		} else if (index == 2) {
			CHECK(clip->Layer() == 1);
			CHECK(clip->Position() == Approx(0.5).margin(0.0001));
		} else if (index == 3) {
			CHECK(clip->Layer() == 2);
		}
		++index;
	}

	t.Close();
}
2021-06-10 08:01:16 -04:00
TEST_CASE("TimelineBase", "[libopenshot][timeline]")
{
	Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);

	// Two clips, added in reverse layer order
	std::stringstream path;
	path << TEST_MEDIA_PATH << "front3.png";
	Clip clip1(path.str());
	clip1.Layer(1);
	t.AddClip(&clip1);

	Clip clip2(path.str());
	clip2.Layer(0);
	t.AddClip(&clip2);

	// The clip list must be reachable through the TimelineBase
	// interface, and both clips must appear in it
	TimelineBase* base = &t;
	auto clip_list = base->Clips();
	CHECK(clip_list.size() == 2);
	CHECK(std::find(clip_list.begin(), clip_list.end(), &clip1) != clip_list.end());
	CHECK(std::find(clip_list.begin(), clip_list.end(), &clip2) != clip_list.end());
}
2015-02-19 01:03:22 -06:00
2021-04-09 04:09:36 -04:00
TEST_CASE("Effect order", "[libopenshot][timeline]")
{
	Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);

	// Three effects added with shuffled layers
	Negate effect_top;
	effect_top.Id("C");
	effect_top.Layer(2);
	t.AddEffect(&effect_top);

	Negate effect_middle;
	effect_middle.Id("A");
	effect_middle.Layer(0);
	t.AddEffect(&effect_middle);

	Negate effect_bottom;
	effect_bottom.Id("B");
	effect_bottom.Layer(1);
	t.AddEffect(&effect_bottom);

	t.Open();

	// Effects sort bottom layer to top layer, then position, then order
	std::list<EffectBase*> effects = t.Effects();
	int index = 0;
	for (auto effect : effects) {
		CHECK(effect->Layer() == index);
		CHECK(effect->Order() == 0);
		if (index == 0)
			CHECK(effect->Id() == "A");
		else if (index == 1)
			CHECK(effect->Id() == "B");
		else if (index == 2)
			CHECK(effect->Id() == "C");
		++index;
	}

	// More effects that tie on layer 1, differing by position and order
	Negate effect_top1;
	effect_top1.Id("B-2");
	effect_top1.Layer(1);
	effect_top1.Position(0.5);
	effect_top1.Order(2);
	t.AddEffect(&effect_top1);

	Negate effect_middle1;
	effect_middle1.Id("B-3");
	effect_middle1.Layer(1);
	effect_middle1.Position(0.5);
	effect_middle1.Order(1);
	t.AddEffect(&effect_middle1);

	Negate effect_bottom1;
	effect_bottom1.Id("B-1");
	effect_bottom1.Layer(1);
	effect_bottom1.Position(0);
	effect_bottom1.Order(3);
	t.AddEffect(&effect_bottom1);

	// Helper: verify the common (layer, id, order) triple on one effect
	auto expect = [](EffectBase* e, int layer, const std::string& id, int order) {
		CHECK(e->Layer() == layer);
		CHECK(e->Id() == id);
		CHECK(e->Order() == order);
	};

	// Re-check the full ordering after the additions:
	// layer first, then position, then descending order within a tie
	effects = t.Effects();
	index = 0;
	for (auto effect : effects) {
		if (index == 0) {
			expect(effect, 0, "A", 0);
		} else if (index == 1) {
			expect(effect, 1, "B-1", 3);
			CHECK(effect->Position() == Approx(0.0).margin(0.0001));
		} else if (index == 2) {
			expect(effect, 1, "B", 0);
			CHECK(effect->Position() == Approx(0.0).margin(0.0001));
		} else if (index == 3) {
			expect(effect, 1, "B-2", 2);
			CHECK(effect->Position() == Approx(0.5).margin(0.0001));
		} else if (index == 4) {
			expect(effect, 1, "B-3", 1);
			CHECK(effect->Position() == Approx(0.5).margin(0.0001));
		} else if (index == 5) {
			expect(effect, 2, "C", 0);
		}
		++index;
	}

	t.Close();
}
2015-11-09 00:12:21 -06:00
2021-04-09 04:09:36 -04:00
TEST_CASE("GetClip by id", "[libopenshot][timeline]")
{
	Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);

	std::stringstream path1;
	path1 << TEST_MEDIA_PATH << "interlaced.png";
	auto media_path1 = path1.str();

	std::stringstream path2;
	path2 << TEST_MEDIA_PATH << "front.png";
	auto media_path2 = path2.str();

	Clip clip1(media_path1);
	std::string clip1_id("CLIP00001");
	clip1.Id(clip1_id);
	clip1.Layer(1);

	Clip clip2(media_path2);
	std::string clip2_id("CLIP00002");
	clip2.Id(clip2_id);
	clip2.Layer(2);
	clip2.Waveform(true);

	t.AddClip(&clip1);
	t.AddClip(&clip2);

	// GetClip() must return a Clip* so the full Clip API is available
	Clip* found1 = t.GetClip(clip1_id);
	CHECK(found1->Id() == clip1_id);
	CHECK(found1->Layer() == 1);

	Clip* found2 = t.GetClip(clip2_id);
	CHECK(found2->Id() == clip2_id);
	CHECK_FALSE(found2->Layer() < 2);

	// Unknown ids return null rather than throwing
	Clip* found3 = t.GetClip("BAD_ID");
	CHECK(found3 == nullptr);

	// Clip-specific API still works on the looked-up pointers
	CHECK_FALSE(found1->Waveform());
	CHECK(found2->Waveform() == true);
}
2021-04-09 04:09:36 -04:00
TEST_CASE("GetClipEffect by id", "[libopenshot][timeline]")
{
	Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);

	std::stringstream path1;
	path1 << TEST_MEDIA_PATH << "interlaced.png";
	auto media_path1 = path1.str();

	// clip1 carries a single Blur effect
	Clip clip1(media_path1);
	std::string clip1_id("CLIP00001");
	clip1.Id(clip1_id);
	clip1.Layer(1);

	Keyframe horizontal_radius(5.0);
	Keyframe vertical_radius(5.0);
	Keyframe sigma(3.0);
	Keyframe iterations(3.0);

	Blur blur1(horizontal_radius, vertical_radius, sigma, iterations);
	std::string blur1_id("EFFECT00011");
	blur1.Id(blur1_id);
	clip1.AddEffect(&blur1);

	// clip2 (on another layer) carries a Negate and a second Blur
	Clip clip2(media_path1);
	std::string clip2_id("CLIP00002");
	clip2.Id(clip2_id);
	clip2.Layer(2);

	Negate neg2;
	std::string neg2_id("EFFECT00021");
	neg2.Id(neg2_id);
	neg2.Layer(2);
	clip2.AddEffect(&neg2);

	Blur blur2(horizontal_radius, vertical_radius, sigma, iterations);
	std::string blur2_id("EFFECT00022");
	blur2.Id(blur2_id);
	blur2.Layer(2);
	clip2.AddEffect(&blur2);

	t.AddClip(&clip1);

	// clip1's effect is reachable once clip1 is on the timeline
	auto found = t.GetClipEffect("EFFECT00011");
	CHECK(found->Id() == blur1_id);

	// clip2 hasn't been added yet, so its effects can't be found
	found = t.GetClipEffect(blur2_id);
	CHECK(found == nullptr);

	t.AddClip(&clip2);

	// Now blur2 is reachable through clip2
	found = t.GetClipEffect(blur2_id);
	CHECK(found->Id() == blur2_id);
	CHECK(found->Layer() == 2);
}
2021-04-09 04:09:36 -04:00
TEST_CASE("GetEffect by id", "[libopenshot][timeline]")
{
	Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);

	// One timeline-level Blur effect
	Keyframe horizontal_radius(5.0);
	Keyframe vertical_radius(5.0);
	Keyframe sigma(3.0);
	Keyframe iterations(3.0);
	Blur blur1(horizontal_radius, vertical_radius, sigma, iterations);
	std::string blur1_id("EFFECT00011");
	blur1.Id(blur1_id);
	blur1.Layer(1);
	t.AddEffect(&blur1);

	// Lookup by id succeeds for a known effect...
	auto found = t.GetEffect(blur1_id);
	CHECK(found->Id() == blur1_id);
	CHECK(found->Layer() == 1);

	// ...and returns null for an unknown one
	found = t.GetEffect("NOSUCHNAME");
	CHECK(found == nullptr);
}
2021-04-09 04:09:36 -04:00
TEST_CASE("Effect: Blur", "[libopenshot][timeline]")
{
	Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);

	// One image clip on layer 2
	std::stringstream path_top;
	path_top << TEST_MEDIA_PATH << "interlaced.png";
	Clip clip_top(path_top.str());
	clip_top.Layer(2);
	t.AddClip(&clip_top);

	// A Blur effect on the same layer
	Keyframe horizontal_radius(5.0);
	Keyframe vertical_radius(5.0);
	Keyframe sigma(3.0);
	Keyframe iterations(3.0);
	Blur blur(horizontal_radius, vertical_radius, sigma, iterations);
	blur.Id("B");
	blur.Layer(2);
	t.AddEffect(&blur);

	t.Open();

	// Rendering frame 1 through the blur must produce a valid frame
	std::shared_ptr<Frame> f = t.GetFrame(1);
	REQUIRE(f != nullptr);
	CHECK(f->number == 1);

	t.Close();
}
2020-09-01 22:57:32 -04:00
2021-04-09 04:09:36 -04:00
TEST_CASE ( " GetMaxFrame and GetMaxTime " , " [libopenshot][timeline] " )
2020-09-01 22:57:32 -04:00
{
// Create a timeline
Timeline t ( 640 , 480 , Fraction ( 30 , 1 ) , 44100 , 2 , LAYOUT_STEREO ) ;
2021-04-09 04:09:36 -04:00
std : : stringstream path1 ;
2020-09-01 22:57:32 -04:00
path1 < < TEST_MEDIA_PATH < < " interlaced.png " ;
Clip clip1 ( path1 . str ( ) ) ;
2022-10-28 19:01:27 -05:00
clip1 . Id ( " C1 " ) ;
2020-09-01 22:57:32 -04:00
clip1 . Layer ( 1 ) ;
clip1 . Position ( 50 ) ;
clip1 . End ( 45 ) ;
t . AddClip ( & clip1 ) ;
2021-04-09 04:09:36 -04:00
CHECK ( t . GetMaxTime ( ) = = Approx ( 95.0 ) . margin ( 0.001 ) ) ;
CHECK ( t . GetMaxFrame ( ) = = 95 * 30 + 1 ) ;
2020-09-01 22:57:32 -04:00
Clip clip2 ( path1 . str ( ) ) ;
2022-10-28 15:25:30 -05:00
clip2 . Id ( " C2 " ) ;
2020-09-01 22:57:32 -04:00
clip2 . Layer ( 2 ) ;
clip2 . Position ( 0 ) ;
clip2 . End ( 55 ) ;
t . AddClip ( & clip2 ) ;
2021-04-09 04:09:36 -04:00
CHECK ( t . GetMaxFrame ( ) = = 95 * 30 + 1 ) ;
CHECK ( t . GetMaxTime ( ) = = Approx ( 95.0 ) . margin ( 0.001 ) ) ;
2020-09-01 22:57:32 -04:00
clip1 . Position ( 80 ) ;
- Protect AddClip(), RemoveClip(), update_open_clips(), sort_clips(), sort_effects() methods with mutex, making them thread safe
- Refactor sorting of clips & effects, and only sort these arrays when the arrays change (instead of each call to GetFrame)
- Cache max timeline duration, and make Timeline::GetMaxTime() thread safe
- New multi-threaded unit tests, which are designed to verify no seg faults on multi-threaded calls to Timeline::GetFrame(), Timeline::AddClip(), and Timeline::RemoveClip()
- New public Timeline::SortTimeline() method which is called by child Clips automatically, when certain properties are changed
2022-10-22 22:55:40 -05:00
clip2 . Position ( 100 ) ;
2021-04-09 04:09:36 -04:00
CHECK ( t . GetMaxFrame ( ) = = 155 * 30 + 1 ) ;
CHECK ( t . GetMaxTime ( ) = = Approx ( 155.0 ) . margin ( 0.001 ) ) ;
- Protect AddClip(), RemoveClip(), update_open_clips(), sort_clips(), sort_effects() methods with mutex, making them thread safe
- Refactor sorting of clips & effects, and only sort these arrays when the arrays change (instead of each call to GetFrame)
- Cache max timeline duration, and make Timeline::GetMaxTime() thread safe
- New multi-threaded unit tests, which are designed to verify no seg faults on multi-threaded calls to Timeline::GetFrame(), Timeline::AddClip(), and Timeline::RemoveClip()
- New public Timeline::SortTimeline() method which is called by child Clips automatically, when certain properties are changed
2022-10-22 22:55:40 -05:00
clip2 . Start ( 20 ) ;
CHECK ( t . GetMaxFrame ( ) = = 135 * 30 + 1 ) ;
CHECK ( t . GetMaxTime ( ) = = Approx ( 135.0 ) . margin ( 0.001 ) ) ;
clip2 . End ( 35 ) ;
2021-04-09 04:09:36 -04:00
CHECK ( t . GetMaxFrame ( ) = = 125 * 30 + 1 ) ;
CHECK ( t . GetMaxTime ( ) = = Approx ( 125.0 ) . margin ( 0.001 ) ) ;
- Protect AddClip(), RemoveClip(), update_open_clips(), sort_clips(), sort_effects() methods with mutex, making them thread safe
- Refactor sorting of clips & effects, and only sort these arrays when the arrays change (instead of each call to GetFrame)
- Cache max timeline duration, and make Timeline::GetMaxTime() thread safe
- New multi-threaded unit tests, which are designed to verify no seg faults on multi-threaded calls to Timeline::GetFrame(), Timeline::AddClip(), and Timeline::RemoveClip()
- New public Timeline::SortTimeline() method which is called by child Clips automatically, when certain properties are changed
2022-10-22 22:55:40 -05:00
t . RemoveClip ( & clip1 ) ;
CHECK ( t . GetMaxFrame ( ) = = 115 * 30 + 1 ) ;
CHECK ( t . GetMaxTime ( ) = = Approx ( 115.0 ) . margin ( 0.001 ) ) ;
2022-10-28 15:25:30 -05:00
2022-10-28 19:01:27 -05:00
// Update Clip's basic properties with JSON Diff
std : : stringstream json_change1 ;
json_change1 < < " [{ \" type \" : \" update \" , \" key \" :[ \" clips \" ,{ \" id \" : \" C2 \" }], \" value \" :{ \" id \" : \" C2 \" , \" layer \" :4000000, \" position \" :0.0, \" start \" :0, \" end \" :10}, \" partial \" :false}] " ;
t . ApplyJsonDiff ( json_change1 . str ( ) ) ;
2022-10-28 15:25:30 -05:00
2022-10-28 19:01:27 -05:00
CHECK ( t . GetMaxFrame ( ) = = 10 * 30 + 1 ) ;
CHECK ( t . GetMaxTime ( ) = = Approx ( 10.0 ) . margin ( 0.001 ) ) ;
2022-10-28 15:25:30 -05:00
2022-10-28 19:01:27 -05:00
// Insert NEW Clip with JSON Diff
std : : stringstream json_change2 ;
json_change2 < < " [{ \" type \" : \" insert \" , \" key \" :[ \" clips \" ], \" value \" :{ \" id \" : \" C3 \" , \" layer \" :4000000, \" position \" :10.0, \" start \" :0, \" end \" :10, \" reader \" :{ \" acodec \" : \" \" , \" audio_bit_rate \" :0, \" audio_stream_index \" :-1, \" audio_timebase \" :{ \" den \" :1, \" num \" :1}, \" channel_layout \" :4, \" channels \" :0, \" display_ratio \" :{ \" den \" :1, \" num \" :1}, \" duration \" :3600.0, \" file_size \" : \" 160000 \" , \" fps \" :{ \" den \" :1, \" num \" :30}, \" has_audio \" :false, \" has_single_image \" :true, \" has_video \" :true, \" height \" :200, \" interlaced_frame \" :false, \" metadata \" :{}, \" path \" : \" " < < path1 . str ( ) < < " \" , \" pixel_format \" :-1, \" pixel_ratio \" :{ \" den \" :1, \" num \" :1}, \" sample_rate \" :0, \" top_field_first \" :true, \" type \" : \" QtImageReader \" , \" vcodec \" : \" \" , \" video_bit_rate \" :0, \" video_length \" : \" 108000 \" , \" video_stream_index \" :-1, \" video_timebase \" :{ \" den \" :30, \" num \" :1}, \" width \" :200}}, \" partial \" :false}] " ;
t . ApplyJsonDiff ( json_change2 . str ( ) ) ;
2022-10-28 15:25:30 -05:00
2022-10-28 19:01:27 -05:00
CHECK ( t . GetMaxFrame ( ) = = 20 * 30 + 1 ) ;
CHECK ( t . GetMaxTime ( ) = = Approx ( 20.0 ) . margin ( 0.001 ) ) ;
2020-09-01 22:57:32 -04:00
}
2022-10-13 00:01:03 -05:00
// Multi-threaded stress test for Timeline::GetFrame(): every OpenMP thread
// requests the same 60 frames from one shared Timeline while repeatedly
// clearing the frame cache, then clearing all clips. Success is simply
// the absence of a crash/segfault — it exercises the mutexes guarding
// GetFrame(), ClearAllCache() and Clear().
//
// NOTE(review): this copy of the file contains interleaved changelog text
// (date stamps and "- ..." bullet lines) and the project JSON string
// literal below is cut off mid-string, so this block cannot compile as-is;
// restore the full literal and strip the changelog lines from the upstream
// repository before building.
TEST_CASE ( " Multi-threaded Timeline GetFrame " , " [libopenshot][timeline] " )
{
- Protect AddClip(), RemoveClip(), update_open_clips(), sort_clips(), sort_effects() methods with mutex, making them thread safe
- Refactor sorting of clips & effects, and only sort these arrays when the arrays change (instead of each call to GetFrame)
- Cache max timeline duration, and make Timeline::GetMaxTime() thread safe
- New multi-threaded unit tests, which are designed to verify no seg faults on multi-threaded calls to Timeline::GetFrame(), Timeline::AddClip(), and Timeline::RemoveClip()
- New public Timeline::SortTimeline() method which is called by child Clips automatically, when certain properties are changed
2022-10-22 22:55:40 -05:00
Timeline * t = new Timeline ( 1280 , 720 , Fraction ( 24 , 1 ) , 48000 , 2 , LAYOUT_STEREO ) ;
2022-10-13 00:01:03 -05:00
- Protect AddClip(), RemoveClip(), update_open_clips(), sort_clips(), sort_effects() methods with mutex, making them thread safe
- Refactor sorting of clips & effects, and only sort these arrays when the arrays change (instead of each call to GetFrame)
- Cache max timeline duration, and make Timeline::GetMaxTime() thread safe
- New multi-threaded unit tests, which are designed to verify no seg faults on multi-threaded calls to Timeline::GetFrame(), Timeline::AddClip(), and Timeline::RemoveClip()
- New public Timeline::SortTimeline() method which is called by child Clips automatically, when certain properties are changed
2022-10-22 22:55:40 -05:00
// Large ugly JSON project (4 clips + 3 transitions)
std : : stringstream project_json ;
// NOTE(review): the string literal below appears truncated in this copy of
// the source (it ends mid-JSON three lines down) — do not edit by hand.
project_json < < " { \" id \" : \" CQA0YW6I2Q \" , \" fps \" :{ \" num \" :30, \" den \" :1}, \" display_ratio \" :{ \" num \" :16, \" den \" :9}, \" pixel_ratio \" :{ \" num \" :1, \" den \" :1}, \" width \" :1280, \" height \" :720, \" sample_rate \" :48000, \" channels \" :2, \" channel_layout \" :3, \" settings \" :{}, \" clips \" :[{ \" alpha \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" anchor \" :0, \" channel_filter \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" channel_mapping \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" display \" :0, \" duration \" :51.9466667175293, \" effects \" :[], \" end \" :10.666666666666666, \" gravity \" :4, \" has_audio \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" has_video \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" id \" : \" QHESI4ZW0E \" , \" layer \" :5000000, \" location_x \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :0}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" location_y \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :0}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ 
\" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" mixing \" :0, \" origin_x \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :0.5}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" origin_y \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :0.5}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" parentObjectId \" : \" \" , \" perspective_c1_x \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c1_y \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c2_x \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c2_y \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c3_x \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c3_y \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c4_x \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y 
\" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c4_y \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" position \" :0, \" reader \" :{ \" acodec \" : \" aac \" , \" audio_bit_rate \" :126694, \" audio_stream_index \" :1, \" audio_timebase \" :{ \" den \" :48000, \" num \" :1}, \" channel_layout \" :3, \" channels \" :2, \" display_ratio \" :{ \" den \" :9, \" num \" :16}, \" duration \" :51.9466667175293, \" file_size \" : \" 7608204 \" , \" fps \" :{ \" den \" :1, \" num \" :24}, \" has_audio \" :true, \" has_single_image \" :false, \" has_video \" :true, \" height \" :720, \" interlaced_frame \" :false, \" metadata \" :{ \" artist \" : \" Durian Open Movie Team \" , \" compatible_brands \" : \" isomiso2avc1mp41 \" , \" copyright \" : \" (c) copyright Blender Foundation | durian.blender.org \" , \" creation_time \" : \" 1970-01-01T00:00:00.000000Z \" , \" description \" : \" Trailer for the Sintel open movie project \" , \" encoder \" : \" Lavf52.62.0 \" ,
t - > SetJson ( project_json . str ( ) ) ;
t - > Open ( ) ;
2022-10-13 00:01:03 -05:00
- Protect AddClip(), RemoveClip(), update_open_clips(), sort_clips(), sort_effects() methods with mutex, making them thread safe
- Refactor sorting of clips & effects, and only sort these arrays when the arrays change (instead of each call to GetFrame)
- Cache max timeline duration, and make Timeline::GetMaxTime() thread safe
- New multi-threaded unit tests, which are designed to verify no seg faults on multi-threaded calls to Timeline::GetFrame(), Timeline::AddClip(), and Timeline::RemoveClip()
- New public Timeline::SortTimeline() method which is called by child Clips automatically, when certain properties are changed
2022-10-22 22:55:40 -05:00
// A successful test will NOT crash - since this causes many threads to
// call the same Timeline methods asynchronously, to verify mutexes and multi-threaded
// access does not seg fault or crash this test.
2022-10-13 00:01:03 -05:00
# pragma omp parallel
- Protect AddClip(), RemoveClip(), update_open_clips(), sort_clips(), sort_effects() methods with mutex, making them thread safe
- Refactor sorting of clips & effects, and only sort these arrays when the arrays change (instead of each call to GetFrame)
- Cache max timeline duration, and make Timeline::GetMaxTime() thread safe
- New multi-threaded unit tests, which are designed to verify no seg faults on multi-threaded calls to Timeline::GetFrame(), Timeline::AddClip(), and Timeline::RemoveClip()
- New public Timeline::SortTimeline() method which is called by child Clips automatically, when certain properties are changed
2022-10-22 22:55:40 -05:00
{
// Run the following loop in all threads
int64_t frame_count = 60 ;
for ( long int frame = 1 ; frame < = frame_count ; frame + + ) {
std : : shared_ptr < Frame > f = t - > GetFrame ( frame ) ;
2022-10-13 00:01:03 -05:00
- Protect AddClip(), RemoveClip(), update_open_clips(), sort_clips(), sort_effects() methods with mutex, making them thread safe
- Refactor sorting of clips & effects, and only sort these arrays when the arrays change (instead of each call to GetFrame)
- Cache max timeline duration, and make Timeline::GetMaxTime() thread safe
- New multi-threaded unit tests, which are designed to verify no seg faults on multi-threaded calls to Timeline::GetFrame(), Timeline::AddClip(), and Timeline::RemoveClip()
- New public Timeline::SortTimeline() method which is called by child Clips automatically, when certain properties are changed
2022-10-22 22:55:40 -05:00
// Clear cache after every frame
// This is designed to test the mutex for ClearAllCache()
t - > ClearAllCache ( ) ;
}
// Clear all clips after loop is done
// This is designed to test the mutex for Clear()
t - > Clear ( ) ;
}
2022-10-13 00:01:03 -05:00
- Protect AddClip(), RemoveClip(), update_open_clips(), sort_clips(), sort_effects() methods with mutex, making them thread safe
- Refactor sorting of clips & effects, and only sort these arrays when the arrays change (instead of each call to GetFrame)
- Cache max timeline duration, and make Timeline::GetMaxTime() thread safe
- New multi-threaded unit tests, which are designed to verify no seg faults on multi-threaded calls to Timeline::GetFrame(), Timeline::AddClip(), and Timeline::RemoveClip()
- New public Timeline::SortTimeline() method which is called by child Clips automatically, when certain properties are changed
2022-10-22 22:55:40 -05:00
// Close and delete timeline object
t - > Close ( ) ;
delete t ;
t = NULL ;
}
// Multi-threaded stress test for Timeline::AddClip() / RemoveClip():
// every OpenMP thread repeatedly adds its own Clip, reads frames, and
// removes the Clip again, all on one shared Timeline. A successful test
// will NOT crash — it verifies the mutexes protecting AddClip(),
// RemoveClip(), GetFrame(), GetMaxFrame() and Clear() under concurrent
// access (no exact frame contents are asserted).
TEST_CASE( "Multi-threaded Timeline Add/Remove Clip", "[libopenshot][timeline]" )
{
	// Create timeline
	Timeline* t = new Timeline(1280, 720, Fraction(24, 1), 48000, 2, LAYOUT_STEREO);
	t->Open();

	// Calculate test video path
	std::stringstream path;
	path << TEST_MEDIA_PATH << "test.mp4";

	// A successful test will NOT crash - since this causes many threads to
	// call the same Timeline methods asynchronously, to verify mutexes and
	// multi-threaded access does not seg fault or crash this test.
#pragma omp parallel
	{
		// Run the following loop in all threads
		int64_t clip_count = 10;
		for (int clip_index = 1; clip_index <= clip_count; clip_index++) {
			// Create clip (each thread uses its own layer, based on thread number)
			Clip* clip_video = new Clip(path.str());
			clip_video->Layer(omp_get_thread_num());

			// Add clip to timeline
			t->AddClip(clip_video);

			// Loop through all timeline frames - each new clip makes the timeline longer
			for (long int frame = 10; frame >= 1; frame--) {
				std::shared_ptr<Frame> f = t->GetFrame(frame);
				t->GetMaxFrame();
			}

			// Remove clip (timeline does not own it, so delete it here)
			t->RemoveClip(clip_video);
			delete clip_video;
			clip_video = NULL;
		}

		// Clear all clips after loop is done
		// This is designed to test the mutex for Clear()
		t->Clear();
	}

	// Close and delete timeline object
	t->Close();
	delete t;
	t = NULL;
}
2022-10-28 11:00:47 -05:00
// Verify the interaction between Timeline::ApplyJsonDiff() and the
// auto-created FrameMapper wrappers: a clip added while AutoMapClips(true)
// must be (and stay) wrapped in a FrameMapper across JSON-diff updates of
// its basic properties and of its reader, and must revert to its raw
// reader type once AutoMapClips(false) and a reader update are applied.
TEST_CASE( "ApplyJSONDiff and FrameMappers", "[libopenshot][timeline]" )
{
	// Create a timeline
	Timeline t(640, 480, Fraction(60, 1), 44100, 2, LAYOUT_STEREO);
	t.Open();

	// Auto create FrameMappers for each clip
	t.AutoMapClips(true);

	// Add clip
	std::stringstream path1;
	path1 << TEST_MEDIA_PATH << "interlaced.png";
	Clip clip1(path1.str());
	clip1.Id("ABC");
	clip1.Layer(1);
	clip1.Position(0);
	clip1.End(10);

	// Verify clip reader type (not wrapped yet, because we have not added clip to timeline)
	CHECK(clip1.Reader()->Name() == "QtImageReader");

	t.AddClip(&clip1);

	// Verify clip was wrapped in FrameMapper
	CHECK(clip1.Reader()->Name() == "FrameMapper");

	// Update Clip's basic properties with JSON Diff (i.e. no reader JSON)
	std::stringstream json_change1;
	json_change1 << "[{\"type\":\"update\",\"key\":[\"clips\",{\"id\":\"" << clip1.Id() << "\"}],\"value\":{\"id\":\"" << clip1.Id() << "\",\"layer\":4000000,\"position\":14.7,\"start\":0,\"end\":10},\"partial\":false}]";
	t.ApplyJsonDiff(json_change1.str());

	// Verify clip is still wrapped in FrameMapper
	CHECK(clip1.Reader()->Name() == "FrameMapper");

	// Update clip's reader back to a QtImageReader
	std::stringstream json_change2;
	json_change2 << "[{\"type\":\"update\",\"key\":[\"clips\",{\"id\":\"" << clip1.Id() << "\"}],\"value\":{\"id\":\"" << clip1.Id() << "\",\"reader\":{\"acodec\":\"\",\"audio_bit_rate\":0,\"audio_stream_index\":-1,\"audio_timebase\":{\"den\":1,\"num\":1},\"channel_layout\":4,\"channels\":0,\"display_ratio\":{\"den\":1,\"num\":1},\"duration\":3600.0,\"file_size\":\"160000\",\"fps\":{\"den\":1,\"num\":30},\"has_audio\":false,\"has_single_image\":true,\"has_video\":true,\"height\":200,\"interlaced_frame\":false,\"metadata\":{},\"path\":\"" << path1.str() << "\",\"pixel_format\":-1,\"pixel_ratio\":{\"den\":1,\"num\":1},\"sample_rate\":0,\"top_field_first\":true,\"type\":\"QtImageReader\",\"vcodec\":\"\",\"video_bit_rate\":0,\"video_length\":\"108000\",\"video_stream_index\":-1,\"video_timebase\":{\"den\":30,\"num\":1},\"width\":200},\"position\":14.7,\"start\":0,\"end\":10},\"partial\":false}]";
	t.ApplyJsonDiff(json_change2.str());

	// Verify clip reader type (still auto-mapped, so still a FrameMapper)
	CHECK(clip1.Reader()->Name() == "FrameMapper");

	// Disable Auto FrameMappers for each clip
	t.AutoMapClips(false);

	// Update clip's reader back to a QtImageReader
	std::stringstream json_change3;
	json_change3 << "[{\"type\":\"update\",\"key\":[\"clips\",{\"id\":\"" << clip1.Id() << "\"}],\"value\":{\"id\":\"" << clip1.Id() << "\",\"reader\":{\"acodec\":\"\",\"audio_bit_rate\":0,\"audio_stream_index\":-1,\"audio_timebase\":{\"den\":1,\"num\":1},\"channel_layout\":4,\"channels\":0,\"display_ratio\":{\"den\":1,\"num\":1},\"duration\":3600.0,\"file_size\":\"160000\",\"fps\":{\"den\":1,\"num\":30},\"has_audio\":false,\"has_single_image\":true,\"has_video\":true,\"height\":200,\"interlaced_frame\":false,\"metadata\":{},\"path\":\"" << path1.str() << "\",\"pixel_format\":-1,\"pixel_ratio\":{\"den\":1,\"num\":1},\"sample_rate\":0,\"top_field_first\":true,\"type\":\"QtImageReader\",\"vcodec\":\"\",\"video_bit_rate\":0,\"video_length\":\"108000\",\"video_stream_index\":-1,\"video_timebase\":{\"den\":30,\"num\":1},\"width\":200},\"position\":14.7,\"start\":0,\"end\":10},\"partial\":false}]";
	t.ApplyJsonDiff(json_change3.str());

	// Verify clip reader type (no longer auto-mapped, so raw reader is used)
	CHECK(clip1.Reader()->Name() == "QtImageReader");
}
// Verify Timeline::ApplyJsonDiff() can update the *info* of a clip's
// underlying reader (FPS, timebase, duration) through the FrameMapper
// wrapper: the inner reader must report the new 60 FPS values, the Clip
// itself keeps the Timeline's 24 FPS mapping (but picks up the shorter
// duration), and re-opening the Clip must not clobber the change.
TEST_CASE( "ApplyJSONDiff Update Reader Info", "[libopenshot][timeline]" )
{
	// Create a timeline
	Timeline t(640, 480, Fraction(24, 1), 44100, 2, LAYOUT_STEREO);
	t.Open();

	// Auto create FrameMappers for each clip
	t.AutoMapClips(true);

	// Add clip
	std::stringstream path1;
	path1 << TEST_MEDIA_PATH << "sintel_trailer-720p.mp4";
	Clip clip1(path1.str());
	clip1.Id("ABC");
	clip1.Layer(1);
	clip1.Position(0);
	clip1.End(10);
	std::string reader_json = clip1.Reader()->Json();

	// Verify clip reader type (not wrapped yet, because we have not added clip to timeline)
	CHECK(clip1.Reader()->Name() == "FFmpegReader");

	t.AddClip(&clip1);

	// Verify clip was wrapped in FrameMapper, mapped to the Timeline's 24 FPS
	CHECK(clip1.Reader()->Name() == "FrameMapper");
	CHECK(clip1.info.fps.num == 24);
	CHECK(clip1.info.fps.den == 1);
	CHECK(clip1.info.video_timebase.num == 1);
	CHECK(clip1.info.video_timebase.den == 24);
	CHECK(clip1.info.duration == Approx(51.94667).margin(0.00001));

	// Create JSON change to increase FPS from 24 to 60
	// (scaling duration by 24/60 = 0.4 to match)
	Json::Value reader_root = openshot::stringToJson(reader_json);
	reader_root["fps"]["num"] = 60;
	reader_root["fps"]["den"] = 1;
	reader_root["video_timebase"]["num"] = 1;
	reader_root["video_timebase"]["den"] = 60;
	reader_root["duration"] = reader_root["duration"].asDouble() * 0.4;
	std::string update_reader = reader_root.toStyledString();

	// Apply JSON changes to clip
	std::stringstream json_change1;
	json_change1 << "[{\"type\":\"update\",\"key\":[\"clips\",{\"id\":\"" << clip1.Id() << "\"}],\"value\":{\"reader\":" << update_reader << "}}]";
	t.ApplyJsonDiff(json_change1.str());

	// Verify clip is still wrapped in FrameMapper
	CHECK(clip1.Reader()->Name() == "FrameMapper");

	// Verify the wrapped (inner) Reader has the updated properties and info struct
	openshot::FrameMapper* mapper = (openshot::FrameMapper*) clip1.Reader();
	CHECK(mapper->Reader()->info.fps.num == 60);
	CHECK(mapper->Reader()->info.fps.den == 1);
	CHECK(mapper->Reader()->info.video_timebase.num == 1);
	CHECK(mapper->Reader()->info.video_timebase.den == 60);
	CHECK(mapper->Reader()->info.duration == Approx(20.77867).margin(0.00001));

	// Verify clip has updated properties and info struct
	// (still mapped to 24 FPS, but with the new, shorter duration)
	CHECK(clip1.info.fps.num == 24);
	CHECK(clip1.info.fps.den == 1);
	CHECK(clip1.info.video_timebase.num == 1);
	CHECK(clip1.info.video_timebase.den == 24);
	CHECK(clip1.info.duration == Approx(20.77867).margin(0.00001));

	// Open Clip object, and verify this does not clobber our 60 FPS change
	clip1.Open();
	CHECK(mapper->Reader()->info.fps.num == 60);
	CHECK(mapper->Reader()->info.fps.den == 1);
	CHECK(mapper->Reader()->info.video_timebase.num == 1);
	CHECK(mapper->Reader()->info.video_timebase.den == 60);
	CHECK(mapper->Reader()->info.duration == Approx(20.77867).margin(0.00001));
}