2013-09-12 23:41:49 -05:00
/**
 * @file
 * @brief Unit tests for openshot::Timeline
 * @author Jonathan Thomas <jonathan@openshot.org>
 *
 * @ref License
 */
2021-10-16 01:26:26 -04:00
// Copyright (c) 2008-2019 OpenShot Studios, LLC
//
// SPDX-License-Identifier: LGPL-3.0-or-later
2013-09-12 23:41:49 -05:00
2021-04-09 04:09:36 -04:00
#include <algorithm>
#include <list>
#include <memory>
#include <sstream>
#include <string>

#include <omp.h>

#include "openshot_catch.h"

#include "Clip.h"
#include "Exceptions.h"
#include "Fraction.h"
#include "Frame.h"
#include "FrameMapper.h"
#include "Timeline.h"
#include "effects/Blur.h"
#include "effects/Negate.h"
2012-10-05 01:58:27 -05:00
using namespace openshot ;
2021-04-09 04:09:36 -04:00
TEST_CASE( "constructor", "[libopenshot][timeline]" )
{
	// A Timeline must report exactly the geometry it was constructed with
	Fraction fps(30000, 1000);

	Timeline t1(640, 480, fps, 44100, 2, LAYOUT_STEREO);
	CHECK(t1.info.width == 640);
	CHECK(t1.info.height == 480);

	Timeline t2(300, 240, fps, 44100, 2, LAYOUT_STEREO);
	CHECK(t2.info.width == 300);
	CHECK(t2.info.height == 240);
}
2026-02-23 16:55:49 -06:00
TEST_CASE( "project constructor invalid path message", "[libopenshot][timeline]" )
{
	// Constructing a Timeline from a missing project file must throw
	// InvalidFile, and the message must identify both the failure and the path.
	const std::string invalid_path = "/tmp/__openshot_missing_test_project__.osp";
	try {
		Timeline t(invalid_path, true);
		FAIL("Expected InvalidFile for missing timeline project path");
	} catch (const InvalidFile& e) {
		const std::string message = e.what();
		CHECK(message.find("Timeline project file could not be opened.") != std::string::npos);
		CHECK(message.find(invalid_path) != std::string::npos);
	}
}
2022-10-06 15:07:31 -05:00
TEST_CASE ( " Set Json and clear clips " , " [libopenshot][timeline] " )
{
2023-02-13 16:42:21 -06:00
Fraction fps ( 30000 , 1000 ) ;
Timeline t ( 640 , 480 , fps , 44100 , 2 , LAYOUT_STEREO ) ;
2022-10-06 15:07:31 -05:00
2023-02-13 16:42:21 -06:00
// Large ugly JSON project (4 clips + 3 transitions)
std : : stringstream project_json ;
project_json < < " { \" id \" : \" CQA0YW6I2Q \" , \" fps \" :{ \" num \" :30, \" den \" :1}, \" display_ratio \" :{ \" num \" :16, \" den \" :9}, \" pixel_ratio \" :{ \" num \" :1, \" den \" :1}, \" width \" :1280, \" height \" :720, \" sample_rate \" :48000, \" channels \" :2, \" channel_layout \" :3, \" settings \" :{}, \" clips \" :[{ \" alpha \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" anchor \" :0, \" channel_filter \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" channel_mapping \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" display \" :0, \" duration \" :51.9466667175293, \" effects \" :[], \" end \" :10.666666666666666, \" gravity \" :4, \" has_audio \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" has_video \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" id \" : \" QHESI4ZW0E \" , \" layer \" :5000000, \" location_x \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :0}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" location_y \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :0}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ 
\" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" mixing \" :0, \" origin_x \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :0.5}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" origin_y \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :0.5}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" parentObjectId \" : \" \" , \" perspective_c1_x \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c1_y \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c2_x \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c2_y \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c3_x \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c3_y \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c4_x \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y 
\" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c4_y \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" position \" :0, \" reader \" :{ \" acodec \" : \" aac \" , \" audio_bit_rate \" :126694, \" audio_stream_index \" :1, \" audio_timebase \" :{ \" den \" :48000, \" num \" :1}, \" channel_layout \" :3, \" channels \" :2, \" display_ratio \" :{ \" den \" :9, \" num \" :16}, \" duration \" :51.9466667175293, \" file_size \" : \" 7608204 \" , \" fps \" :{ \" den \" :1, \" num \" :24}, \" has_audio \" :true, \" has_single_image \" :false, \" has_video \" :true, \" height \" :720, \" interlaced_frame \" :false, \" metadata \" :{ \" artist \" : \" Durian Open Movie Team \" , \" compatible_brands \" : \" isomiso2avc1mp41 \" , \" copyright \" : \" (c) copyright Blender Foundation | durian.blender.org \" , \" creation_time \" : \" 1970-01-01T00:00:00.000000Z \" , \" description \" : \" Trailer for the Sintel open movie project \" , \" encoder \" : \" Lavf52.62.0 \" ,
t . SetJson ( project_json . str ( ) ) ;
2022-10-06 15:07:31 -05:00
2023-02-13 16:42:21 -06:00
// Count clips & effects
CHECK ( t . Clips ( ) . size ( ) = = 4 ) ;
CHECK ( t . Effects ( ) . size ( ) = = 3 ) ;
2022-10-06 15:07:31 -05:00
2023-02-13 16:42:21 -06:00
// Clear timeline and clear allocated clips, effects, and frame mappers
t . Clear ( ) ;
2022-10-06 15:07:31 -05:00
2023-02-13 16:42:21 -06:00
// Count clips & effects
CHECK ( t . Clips ( ) . size ( ) = = 0 ) ;
CHECK ( t . Effects ( ) . size ( ) = = 0 ) ;
2022-10-06 15:07:31 -05:00
2023-02-13 16:42:21 -06:00
// Manually add clip object (not using SetJson)
std : : stringstream path ;
path < < TEST_MEDIA_PATH < < " test.mp4 " ;
Clip clip_video ( path . str ( ) ) ;
t . AddClip ( & clip_video ) ;
2022-10-06 15:07:31 -05:00
2023-02-13 16:42:21 -06:00
// Manually add effect object (not using SetJson)
Negate effect_top ;
effect_top . Id ( " C " ) ;
t . AddEffect ( & effect_top ) ;
2022-10-06 15:07:31 -05:00
2023-02-13 16:42:21 -06:00
// Count clips & effects
CHECK ( t . Clips ( ) . size ( ) = = 1 ) ;
CHECK ( t . Effects ( ) . size ( ) = = 1 ) ;
2022-10-06 15:07:31 -05:00
2023-02-13 16:42:21 -06:00
// Clear timeline
t . Clear ( ) ;
2022-10-06 15:07:31 -05:00
2023-02-13 16:42:21 -06:00
// Count clips & effects
CHECK ( t . Clips ( ) . size ( ) = = 0 ) ;
CHECK ( t . Effects ( ) . size ( ) = = 0 ) ;
2022-10-06 15:07:31 -05:00
}
2021-04-09 06:30:38 -04:00
TEST_CASE( "ReaderInfo constructor", "[libopenshot][timeline]" )
{
	// Open a clip so its reader's info struct is fully populated
	std::stringstream path;
	path << TEST_MEDIA_PATH << "test.mp4";
	Clip clip_video(path.str());
	clip_video.Open();
	const auto r1 = clip_video.Reader();

	// A Timeline built from a ReaderInfo must copy every A/V parameter
	Timeline t1(r1->info);

	CHECK(t1.info.width == r1->info.width);
	CHECK(t1.info.height == r1->info.height);
	CHECK(t1.info.fps.num == r1->info.fps.num);
	CHECK(t1.info.fps.den == r1->info.fps.den);
	CHECK(t1.info.sample_rate == r1->info.sample_rate);
	CHECK(t1.info.channels == r1->info.channels);
	CHECK(t1.info.channel_layout == r1->info.channel_layout);
}
2021-04-09 04:09:36 -04:00
TEST_CASE( "width and height functions", "[libopenshot][timeline]" )
{
	// Geometry starts at the constructed values...
	Fraction fps(30000, 1000);
	Timeline t1(640, 480, fps, 44100, 2, LAYOUT_STEREO);
	CHECK(t1.info.width == 640);
	CHECK(t1.info.height == 480);

	// ...changing the width leaves the height alone...
	t1.info.width = 600;
	CHECK(t1.info.width == 600);
	CHECK(t1.info.height == 480);

	// ...and changing the height leaves the width alone.
	t1.info.height = 400;
	CHECK(t1.info.width == 600);
	CHECK(t1.info.height == 400);
}
2012-10-10 01:07:47 -05:00
2021-04-09 04:09:36 -04:00
TEST_CASE( "Framerate", "[libopenshot][timeline]" )
{
	// A 24/1 fraction must round-trip through Timeline::info as 24.0 fps
	Fraction fps(24, 1);
	Timeline t1(640, 480, fps, 44100, 2, LAYOUT_STEREO);

	CHECK(t1.info.fps.ToFloat() == Approx(24.0f).margin(0.00001));
}
2014-04-03 22:47:21 -05:00
2021-04-09 04:09:36 -04:00
TEST_CASE( "two-track video", "[libopenshot][timeline]" )
{
	// Bottom track: a video clip starting at position 0
	std::stringstream path;
	path << TEST_MEDIA_PATH << "test.mp4";
	Clip clip_video(path.str());
	clip_video.Layer(0);
	clip_video.Position(0.0);

	// Top track: a PNG overlay, delayed by 0.05 seconds, lasting 1/2 second
	std::stringstream path_overlay;
	path_overlay << TEST_MEDIA_PATH << "front3.png";
	Clip clip_overlay(path_overlay.str());
	clip_overlay.Layer(1);
	clip_overlay.Position(0.05);
	clip_overlay.End(0.5);

	// Create a timeline and compose both tracks
	Timeline t(1280, 720, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);
	t.AddClip(&clip_video);
	t.AddClip(&clip_overlay);
	t.Open();

	// Sample pixel 230 of row 200 (RGBA layout, 4 bytes per pixel)
	const int pixel_row = 200;
	const int pixel_index = 230 * 4;

	// Compare the sampled RGBA channels against expected values (+/- 5).
	// Expected colors are empirical values from the composited test media.
	auto check_pixel = [&](std::shared_ptr<Frame> f, int r, int g, int b, int a) {
		auto pixels = f->GetPixels(pixel_row);
		CHECK((int)pixels[pixel_index] == Approx(r).margin(5));
		CHECK((int)pixels[pixel_index + 1] == Approx(g).margin(5));
		CHECK((int)pixels[pixel_index + 2] == Approx(b).margin(5));
		CHECK((int)pixels[pixel_index + 3] == Approx(a).margin(5));
	};

	// Frames are fetched in the same (deliberately non-sequential) order as
	// the original test, to exercise timeline seeking/caching.
	check_pixel(t.GetFrame(1), 21, 191, 0, 255);
	check_pixel(t.GetFrame(2), 176, 0, 186, 255);
	check_pixel(t.GetFrame(3), 23, 190, 0, 255);
	check_pixel(t.GetFrame(24), 176, 0, 186, 255);
	check_pixel(t.GetFrame(5), 23, 190, 0, 255);
	check_pixel(t.GetFrame(25), 20, 190, 0, 255);
	check_pixel(t.GetFrame(4), 176, 0, 186, 255);

	t.Close();
}
2015-02-19 01:03:22 -06:00
2021-04-09 04:09:36 -04:00
TEST_CASE( "Clip order", "[libopenshot][timeline]" )
{
	// Create a timeline
	Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);

	// Add three clips with layers deliberately out of order
	std::stringstream path_top;
	path_top << TEST_MEDIA_PATH << "front3.png";
	Clip clip_top(path_top.str());
	clip_top.Layer(2);
	t.AddClip(&clip_top);

	std::stringstream path_middle;
	path_middle << TEST_MEDIA_PATH << "front.png";
	Clip clip_middle(path_middle.str());
	clip_middle.Layer(0);
	t.AddClip(&clip_middle);

	std::stringstream path_bottom;
	path_bottom << TEST_MEDIA_PATH << "back.png";
	Clip clip_bottom(path_bottom.str());
	clip_bottom.Layer(1);
	t.AddClip(&clip_bottom);
	t.Open();

	// Clips() must come back sorted bottom layer to top layer (then position)
	int expected_layer = 0;
	for (const auto clip : t.Clips()) {
		CHECK(clip->Layer() == expected_layer);
		++expected_layer;
	}

	// Insert a fourth clip on layer 1, positioned after the existing one
	std::stringstream path_middle1;
	path_middle1 << TEST_MEDIA_PATH << "interlaced.png";
	Clip clip_middle1(path_middle1.str());
	clip_middle1.Layer(1);
	clip_middle1.Position(0.5);
	t.AddClip(&clip_middle1);

	// Re-check: within a layer, clips must be ordered by position
	int idx = 0;
	for (const auto clip : t.Clips()) {
		switch (idx) {
		case 0:
			CHECK(clip->Layer() == 0);
			break;
		case 1:
			CHECK(clip->Layer() == 1);
			CHECK(clip->Position() == Approx(0.0).margin(0.0001));
			break;
		case 2:
			CHECK(clip->Layer() == 1);
			CHECK(clip->Position() == Approx(0.5).margin(0.0001));
			break;
		case 3:
			CHECK(clip->Layer() == 2);
			break;
		}
		++idx;
	}
	t.Close();
}
2021-06-10 08:01:16 -04:00
TEST_CASE( "TimelineBase", "[libopenshot][timeline]" )
{
	// Create a timeline
	Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);

	// Add two clips (out of layer order)
	std::stringstream path;
	path << TEST_MEDIA_PATH << "front3.png";
	Clip clip1(path.str());
	clip1.Layer(1);
	t.AddClip(&clip1);

	Clip clip2(path.str());
	clip2.Layer(0);
	t.AddClip(&clip2);

	// The clip list must be reachable through the Clips() method
	// of a TimelineBase* (i.e. via the base-class interface)
	TimelineBase* base = &t;
	auto l = base->Clips();
	CHECK(l.size() == 2);

	// Both clip pointers must be present in the returned list
	auto find1 = std::find(l.begin(), l.end(), &clip1);
	auto find2 = std::find(l.begin(), l.end(), &clip2);
	CHECK(find1 != l.end());
	CHECK(find2 != l.end());
}
2015-02-19 01:03:22 -06:00
2021-04-09 04:09:36 -04:00
TEST_CASE( "Effect order", "[libopenshot][timeline]" )
{
	// Create a timeline
	Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);

	// Add three effects with layers deliberately out of order
	Negate effect_top;
	effect_top.Id("C");
	effect_top.Layer(2);
	t.AddEffect(&effect_top);

	Negate effect_middle;
	effect_middle.Id("A");
	effect_middle.Layer(0);
	t.AddEffect(&effect_middle);

	Negate effect_bottom;
	effect_bottom.Id("B");
	effect_bottom.Layer(1);
	t.AddEffect(&effect_bottom);
	t.Open();

	// Effects() must be sorted bottom layer to top layer,
	// then by position, and then by order.
	const std::string first_pass_ids[] = {"A", "B", "C"};
	int idx = 0;
	for (const auto effect : t.Effects()) {
		CHECK(effect->Layer() == idx);
		CHECK(effect->Order() == 0);
		CHECK(effect->Id() == first_pass_ids[idx]);
		++idx;
	}

	// Add three more effects on layer 1, varying position and order
	Negate effect_top1;
	effect_top1.Id("B-2");
	effect_top1.Layer(1);
	effect_top1.Position(0.5);
	effect_top1.Order(2);
	t.AddEffect(&effect_top1);

	Negate effect_middle1;
	effect_middle1.Id("B-3");
	effect_middle1.Layer(1);
	effect_middle1.Position(0.5);
	effect_middle1.Order(1);
	t.AddEffect(&effect_middle1);

	Negate effect_bottom1;
	effect_bottom1.Id("B-1");
	effect_bottom1.Layer(1);
	effect_bottom1.Position(0);
	effect_bottom1.Order(3);
	t.AddEffect(&effect_bottom1);

	// Re-check full sort: layer asc, then position asc, then order desc
	idx = 0;
	for (const auto effect : t.Effects()) {
		switch (idx) {
		case 0:
			CHECK(effect->Layer() == 0);
			CHECK(effect->Id() == "A");
			CHECK(effect->Order() == 0);
			break;
		case 1:
			CHECK(effect->Layer() == 1);
			CHECK(effect->Id() == "B-1");
			CHECK(effect->Position() == Approx(0.0).margin(0.0001));
			CHECK(effect->Order() == 3);
			break;
		case 2:
			CHECK(effect->Layer() == 1);
			CHECK(effect->Id() == "B");
			CHECK(effect->Position() == Approx(0.0).margin(0.0001));
			CHECK(effect->Order() == 0);
			break;
		case 3:
			CHECK(effect->Layer() == 1);
			CHECK(effect->Id() == "B-2");
			CHECK(effect->Position() == Approx(0.5).margin(0.0001));
			CHECK(effect->Order() == 2);
			break;
		case 4:
			CHECK(effect->Layer() == 1);
			CHECK(effect->Id() == "B-3");
			CHECK(effect->Position() == Approx(0.5).margin(0.0001));
			CHECK(effect->Order() == 1);
			break;
		case 5:
			CHECK(effect->Layer() == 2);
			CHECK(effect->Id() == "C");
			CHECK(effect->Order() == 0);
			break;
		}
		++idx;
	}
	t.Close();
}
2015-11-09 00:12:21 -06:00
2021-04-09 04:09:36 -04:00
TEST_CASE( "GetClip by id", "[libopenshot][timeline]" )
{
	Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);

	std::stringstream path1;
	path1 << TEST_MEDIA_PATH << "interlaced.png";
	auto media_path1 = path1.str();

	std::stringstream path2;
	path2 << TEST_MEDIA_PATH << "front.png";
	auto media_path2 = path2.str();

	// Two clips with distinct ids and layers
	Clip clip1(media_path1);
	std::string clip1_id("CLIP00001");
	clip1.Id(clip1_id);
	clip1.Layer(1);

	Clip clip2(media_path2);
	std::string clip2_id("CLIP00002");
	clip2.Id(clip2_id);
	clip2.Layer(2);
	clip2.Waveform(true);

	t.AddClip(&clip1);
	t.AddClip(&clip2);

	// We explicitly want to get returned a Clip*, here
	Clip* matched = t.GetClip(clip1_id);
	CHECK(matched->Id() == clip1_id);
	CHECK(matched->Layer() == 1);

	Clip* matched2 = t.GetClip(clip2_id);
	CHECK(matched2->Id() == clip2_id);
	CHECK_FALSE(matched2->Layer() < 2);

	// An unknown id must yield nullptr, not a throw
	Clip* matched3 = t.GetClip("BAD_ID");
	CHECK(matched3 == nullptr);

	// Ensure we can access the Clip API interfaces after lookup
	CHECK_FALSE(matched->Waveform());
	CHECK(matched2->Waveform() == true);
}
2021-04-09 04:09:36 -04:00
TEST_CASE( "GetClipEffect by id", "[libopenshot][timeline]" )
{
	Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);

	std::stringstream path1;
	path1 << TEST_MEDIA_PATH << "interlaced.png";
	auto media_path1 = path1.str();

	// First clip carries a single Blur effect
	Clip clip1(media_path1);
	std::string clip1_id("CLIP00001");
	clip1.Id(clip1_id);
	clip1.Layer(1);

	Keyframe horizontal_radius(5.0);
	Keyframe vertical_radius(5.0);
	Keyframe sigma(3.0);
	Keyframe iterations(3.0);

	Blur blur1(horizontal_radius, vertical_radius, sigma, iterations);
	std::string blur1_id("EFFECT00011");
	blur1.Id(blur1_id);
	clip1.AddEffect(&blur1);

	// Second clip, on a different layer, carries two effects
	Clip clip2(media_path1);
	std::string clip2_id("CLIP00002");
	clip2.Id(clip2_id);
	clip2.Layer(2);

	Negate neg2;
	std::string neg2_id("EFFECT00021");
	neg2.Id(neg2_id);
	neg2.Layer(2);
	clip2.AddEffect(&neg2);

	Blur blur2(horizontal_radius, vertical_radius, sigma, iterations);
	std::string blur2_id("EFFECT00022");
	blur2.Id(blur2_id);
	blur2.Layer(2);
	clip2.AddEffect(&blur2);

	t.AddClip(&clip1);

	// clip1's effect is discoverable once clip1 is on the timeline
	auto match1 = t.GetClipEffect("EFFECT00011");
	CHECK(match1->Id() == blur1_id);

	// clip2 hasn't been added yet, so its effects must not be found
	match1 = t.GetClipEffect(blur2_id);
	CHECK(match1 == nullptr);

	t.AddClip(&clip2);

	// Now blur2 is reachable via clip2
	match1 = t.GetClipEffect(blur2_id);
	CHECK(match1->Id() == blur2_id);
	CHECK(match1->Layer() == 2);
}
2021-04-09 04:09:36 -04:00
TEST_CASE( "GetEffect by id", "[libopenshot][timeline]" )
{
	Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);

	// Attach a Blur effect directly to the timeline
	Keyframe horizontal_radius(5.0);
	Keyframe vertical_radius(5.0);
	Keyframe sigma(3.0);
	Keyframe iterations(3.0);

	Blur blur1(horizontal_radius, vertical_radius, sigma, iterations);
	std::string blur1_id("EFFECT00011");
	blur1.Id(blur1_id);
	blur1.Layer(1);
	t.AddEffect(&blur1);

	// Lookup by id returns the effect with its properties intact
	auto match1 = t.GetEffect(blur1_id);
	CHECK(match1->Id() == blur1_id);
	CHECK(match1->Layer() == 1);

	// Unknown ids return nullptr
	match1 = t.GetEffect("NOSUCHNAME");
	CHECK(match1 == nullptr);
}
2021-04-09 04:09:36 -04:00
TEST_CASE( "Effect: Blur", "[libopenshot][timeline]" )
{
	// Create a timeline with a single image clip
	Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);

	std::stringstream path_top;
	path_top << TEST_MEDIA_PATH << "interlaced.png";
	Clip clip_top(path_top.str());
	clip_top.Layer(2);
	t.AddClip(&clip_top);

	// Attach a Blur effect on the same layer
	Keyframe horizontal_radius(5.0);
	Keyframe vertical_radius(5.0);
	Keyframe sigma(3.0);
	Keyframe iterations(3.0);

	Blur blur(horizontal_radius, vertical_radius, sigma, iterations);
	blur.Id("B");
	blur.Layer(2);
	t.AddEffect(&blur);

	// Open Timeline and render one frame through the effect chain
	t.Open();
	std::shared_ptr<Frame> f = t.GetFrame(1);

	// The frame must exist and carry the requested frame number
	REQUIRE(f != nullptr);
	CHECK(f->number == 1);

	// Close reader
	t.Close();
}
2020-09-01 22:57:32 -04:00
2021-04-09 04:09:36 -04:00
TEST_CASE( "GetMaxFrame and GetMaxTime", "[libopenshot][timeline]" )
{
	// Verify that the timeline's cached maximum duration (GetMaxTime, seconds)
	// and maximum frame (GetMaxFrame, 30fps here) track clip adds, moves,
	// trims, removals, and JSON-diff mutations.
	// NOTE: this block was reconstructed after version-control residue
	// (changelog bullets) had been interleaved with the code, breaking it.
	Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);

	std::stringstream path1;
	path1 << TEST_MEDIA_PATH << "interlaced.png";
	Clip clip1(path1.str());
	clip1.Id("C1");
	clip1.Layer(1);
	clip1.Position(50);   // Starts at 50 seconds
	clip1.End(45);        // Ends at 95 seconds
	t.AddClip(&clip1);

	CHECK(t.GetMaxTime() == Approx(95.0).margin(0.001));
	CHECK(t.GetMaxFrame() == 95 * 30);

	Clip clip2(path1.str());
	clip2.Id("C2");
	clip2.Layer(2);
	clip2.Position(0);    // Starts at 0 seconds
	clip2.End(55);        // Ends at 55 seconds (clip1 still defines the max)
	t.AddClip(&clip2);

	CHECK(t.GetMaxFrame() == 95 * 30);
	CHECK(t.GetMaxTime() == Approx(95.0).margin(0.001));

	// Moving clips updates the cached maximum (clip2 now ends at 155s)
	clip1.Position(80);
	clip2.Position(100);
	CHECK(t.GetMaxFrame() == 155 * 30);
	CHECK(t.GetMaxTime() == Approx(155.0).margin(0.001));

	// Trimming the start shortens the displayed duration (155 - 20 = 135s)
	clip2.Start(20);
	CHECK(t.GetMaxFrame() == 135 * 30);
	CHECK(t.GetMaxTime() == Approx(135.0).margin(0.001));

	// Trimming the end shortens it further (100 + (35 - 20) ... -> 125s per timeline math)
	clip2.End(35);
	CHECK(t.GetMaxFrame() == 125 * 30);
	CHECK(t.GetMaxTime() == Approx(125.0).margin(0.001));

	// Removing a clip recomputes the maximum from the remaining clips
	t.RemoveClip(&clip1);
	CHECK(t.GetMaxFrame() == 115 * 30);
	CHECK(t.GetMaxTime() == Approx(115.0).margin(0.001));

	// Update Clip's basic properties with JSON Diff
	std::stringstream json_change1;
	json_change1 << "[{\"type\":\"update\",\"key\":[\"clips\",{\"id\":\"C2\"}],\"value\":{\"id\":\"C2\",\"layer\":4000000,\"position\":0.0,\"start\":0,\"end\":10},\"partial\":false}]";
	t.ApplyJsonDiff(json_change1.str());

	CHECK(t.GetMaxFrame() == 10 * 30);
	CHECK(t.GetMaxTime() == Approx(10.0).margin(0.001));

	// Insert NEW Clip with JSON Diff
	std::stringstream json_change2;
	json_change2 << "[{\"type\":\"insert\",\"key\":[\"clips\"],\"value\":{\"id\":\"C3\",\"layer\":4000000,\"position\":10.0,\"start\":0,\"end\":10,\"reader\":{\"acodec\":\"\",\"audio_bit_rate\":0,\"audio_stream_index\":-1,\"audio_timebase\":{\"den\":1,\"num\":1},\"channel_layout\":4,\"channels\":0,\"display_ratio\":{\"den\":1,\"num\":1},\"duration\":3600.0,\"file_size\":\"160000\",\"fps\":{\"den\":1,\"num\":30},\"has_audio\":false,\"has_single_image\":true,\"has_video\":true,\"height\":200,\"interlaced_frame\":false,\"metadata\":{},\"path\":\"" << path1.str() << "\",\"pixel_format\":-1,\"pixel_ratio\":{\"den\":1,\"num\":1},\"sample_rate\":0,\"top_field_first\":true,\"type\":\"QtImageReader\",\"vcodec\":\"\",\"video_bit_rate\":0,\"video_length\":\"108000\",\"video_stream_index\":-1,\"video_timebase\":{\"den\":30,\"num\":1},\"width\":200}},\"partial\":false}]";
	t.ApplyJsonDiff(json_change2.str());

	CHECK(t.GetMaxFrame() == 20 * 30);
	CHECK(t.GetMaxTime() == Approx(20.0).margin(0.001));
}
2022-10-13 00:01:03 -05:00
2024-09-30 16:23:30 -05:00
TEST_CASE( "GetMinFrame and GetMinTime", "[libopenshot][timeline]" )
{
	// 30fps timeline: the minimum frame is (min_time * 30) + 1, frames are 1-based
	Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);

	std::stringstream media;
	media << TEST_MEDIA_PATH << "interlaced.png";

	Clip clip1(media.str());
	clip1.Id("C1");
	clip1.Layer(1);
	clip1.Position(50);  // Start at 50 seconds
	clip1.End(45);       // Ends at 95 seconds
	t.AddClip(&clip1);
	CHECK(t.GetMinTime() == Approx(50.0).margin(0.001));
	CHECK(t.GetMinFrame() == (50 * 30) + 1);

	Clip clip2(media.str());
	clip2.Id("C2");
	clip2.Layer(2);
	clip2.Position(0);   // Start at 0 seconds
	clip2.End(55);       // Ends at 55 seconds
	t.AddClip(&clip2);
	CHECK(t.GetMinTime() == Approx(0.0).margin(0.001));
	CHECK(t.GetMinFrame() == 1);

	// Repositioning both clips: the minimum tracks the earliest clip
	clip1.Position(80);  // Move clip1 to start at 80 seconds
	clip2.Position(100); // Move clip2 to start at 100 seconds
	CHECK(t.GetMinTime() == Approx(80.0).margin(0.001));
	CHECK(t.GetMinFrame() == (80 * 30) + 1);

	clip2.Position(20);  // Adjust clip2 to start at 20 seconds
	CHECK(t.GetMinTime() == Approx(20.0).margin(0.001));
	CHECK(t.GetMinFrame() == (20 * 30) + 1);

	// Trimming the end of a clip does not move the minimum
	clip2.End(35);
	CHECK(t.GetMinTime() == Approx(20.0).margin(0.001));
	CHECK(t.GetMinFrame() == (20 * 30) + 1);

	// Removing the later clip leaves the earlier one defining the minimum
	t.RemoveClip(&clip1);
	CHECK(t.GetMinTime() == Approx(20.0).margin(0.001));
	CHECK(t.GetMinFrame() == (20 * 30) + 1);

	// Update Clip's basic properties with JSON Diff
	std::stringstream json_change1;
	json_change1 << "[{\"type\":\"update\",\"key\":[\"clips\",{\"id\":\"C2\"}],\"value\":{\"id\":\"C2\",\"layer\":4000000,\"position\":5.0,\"start\":0,\"end\":10},\"partial\":false}]";
	t.ApplyJsonDiff(json_change1.str());
	CHECK(t.GetMinTime() == Approx(5.0).margin(0.001));
	CHECK(t.GetMinFrame() == (5 * 30) + 1);

	// Insert NEW Clip with JSON Diff (starts later, so the minimum is unchanged)
	std::stringstream json_change2;
	json_change2 << "[{\"type\":\"insert\",\"key\":[\"clips\"],\"value\":{\"id\":\"C3\",\"layer\":4000000,\"position\":10.0,\"start\":0,\"end\":10,\"reader\":{\"acodec\":\"\",\"audio_bit_rate\":0,\"audio_stream_index\":-1,\"audio_timebase\":{\"den\":1,\"num\":1},\"channel_layout\":4,\"channels\":0,\"display_ratio\":{\"den\":1,\"num\":1},\"duration\":3600.0,\"file_size\":\"160000\",\"fps\":{\"den\":1,\"num\":30},\"has_audio\":false,\"has_single_image\":true,\"has_video\":true,\"height\":200,\"interlaced_frame\":false,\"metadata\":{},\"path\":\"" << media.str() << "\",\"pixel_format\":-1,\"pixel_ratio\":{\"den\":1,\"num\":1},\"sample_rate\":0,\"top_field_first\":true,\"type\":\"QtImageReader\",\"vcodec\":\"\",\"video_bit_rate\":0,\"video_length\":\"108000\",\"video_stream_index\":-1,\"video_timebase\":{\"den\":30,\"num\":1},\"width\":200}},\"partial\":false}]";
	t.ApplyJsonDiff(json_change2.str());
	CHECK(t.GetMinTime() == Approx(5.0).margin(0.001));
	CHECK(t.GetMinFrame() == (5 * 30) + 1);
}
2026-02-12 23:41:51 -06:00
TEST_CASE( "GetMaxFrame with 24fps clip mapped to 30fps timeline", "[libopenshot][timeline]" )
{
	// 30fps timeline with auto-mapping on, so added clips are wrapped in a FrameMapper
	Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);
	t.AutoMapClips(true);

	std::stringstream media;
	media << TEST_MEDIA_PATH << "sintel_trailer-720p.mp4";
	Clip clip(media.str());

	// The source media must really be 24 fps for this scenario to be meaningful
	REQUIRE(clip.Reader()->info.fps.num == 24);
	REQUIRE(clip.Reader()->info.fps.den == 1);

	t.AddClip(&clip);

	// After AddClip, the clip's reader should be a FrameMapper at the timeline's 30 fps
	REQUIRE(clip.Reader()->Name() == "FrameMapper");
	auto *mapper = static_cast<FrameMapper*>(clip.Reader());
	REQUIRE(mapper->info.fps.num == 30);
	REQUIRE(mapper->info.fps.den == 1);
	REQUIRE(mapper->info.video_length > 0);

	const int64_t timeline_max_frame = t.GetMaxFrame();
	const int64_t mapped_video_length = mapper->info.video_length;

	// Timeline max frame is computed from duration (seconds), while mapper length is
	// rounded frame count. They should stay aligned within one frame at this boundary.
	CHECK(timeline_max_frame >= mapped_video_length);
	CHECK((timeline_max_frame - mapped_video_length) <= 1);

	// Regression guard: fetching the mapped tail frame should not throw.
	t.Open();
	CHECK_NOTHROW(t.GetFrame(mapped_video_length));
	t.Close();
}
2022-10-13 00:01:03 -05:00
// NOTE(review): this test is preserved as found. It is interleaved with
// version-control residue (commit dates and changelog bullets from a corrupted
// merge/extraction), and the project JSON literal below is truncated
// mid-string — the stream insertion never closes. Reconcile this block
// against the upstream tests/Timeline.cpp before building; no safe
// reconstruction is possible from this view alone.
TEST_CASE ( " Multi-threaded Timeline GetFrame " , " [libopenshot][timeline] " )
{
- Protect AddClip(), RemoveClip(), update_open_clips(), sort_clips(), sort_effects() methods with mutex, making them thread safe
- Refactor sorting of clips & effects, and only sort these arrays when the arrays change (instead of each call to GetFrame)
- Cache max timeline duration, and make Timeline::GetMaxTime() thread safe
- New multi-threaded unit tests, which are designed to verify no seg faults on multi-threaded calls to Timeline::GetFrame(), Timeline::AddClip(), and Timeline::RemoveClip()
- New public Timeline::SortTimeline() method which is called by child Clips automatically, when certain properties are changed
2022-10-22 22:55:40 -05:00
// Heap-allocated timeline; closed and deleted at the end of the test
Timeline * t = new Timeline ( 1280 , 720 , Fraction ( 24 , 1 ) , 48000 , 2 , LAYOUT_STEREO ) ;
2022-10-13 00:01:03 -05:00
- Protect AddClip(), RemoveClip(), update_open_clips(), sort_clips(), sort_effects() methods with mutex, making them thread safe
- Refactor sorting of clips & effects, and only sort these arrays when the arrays change (instead of each call to GetFrame)
- Cache max timeline duration, and make Timeline::GetMaxTime() thread safe
- New multi-threaded unit tests, which are designed to verify no seg faults on multi-threaded calls to Timeline::GetFrame(), Timeline::AddClip(), and Timeline::RemoveClip()
- New public Timeline::SortTimeline() method which is called by child Clips automatically, when certain properties are changed
2022-10-22 22:55:40 -05:00
// Large ugly JSON project (4 clips + 3 transitions)
// NOTE(review): the literal below is cut off mid-string in this view — confirm
// the complete JSON (and its closing quote/semicolon) against the original file.
std : : stringstream project_json ;
project_json < < " { \" id \" : \" CQA0YW6I2Q \" , \" fps \" :{ \" num \" :30, \" den \" :1}, \" display_ratio \" :{ \" num \" :16, \" den \" :9}, \" pixel_ratio \" :{ \" num \" :1, \" den \" :1}, \" width \" :1280, \" height \" :720, \" sample_rate \" :48000, \" channels \" :2, \" channel_layout \" :3, \" settings \" :{}, \" clips \" :[{ \" alpha \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" anchor \" :0, \" channel_filter \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" channel_mapping \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" display \" :0, \" duration \" :51.9466667175293, \" effects \" :[], \" end \" :10.666666666666666, \" gravity \" :4, \" has_audio \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" has_video \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" id \" : \" QHESI4ZW0E \" , \" layer \" :5000000, \" location_x \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :0}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" location_y \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :0}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ 
 \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" mixing \" :0, \" origin_x \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :0.5}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" origin_y \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :0.5}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" parentObjectId \" : \" \" , \" perspective_c1_x \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c1_y \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c2_x \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c2_y \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c3_x \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c3_y \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c4_x \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y 
 \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" perspective_c4_y \" :{ \" Points \" :[{ \" co \" :{ \" X \" :1, \" Y \" :-1}, \" handle_left \" :{ \" X \" :0.5, \" Y \" :1}, \" handle_right \" :{ \" X \" :0.5, \" Y \" :0}, \" handle_type \" :0, \" interpolation \" :0}]}, \" position \" :0, \" reader \" :{ \" acodec \" : \" aac \" , \" audio_bit_rate \" :126694, \" audio_stream_index \" :1, \" audio_timebase \" :{ \" den \" :48000, \" num \" :1}, \" channel_layout \" :3, \" channels \" :2, \" display_ratio \" :{ \" den \" :9, \" num \" :16}, \" duration \" :51.9466667175293, \" file_size \" : \" 7608204 \" , \" fps \" :{ \" den \" :1, \" num \" :24}, \" has_audio \" :true, \" has_single_image \" :false, \" has_video \" :true, \" height \" :720, \" interlaced_frame \" :false, \" metadata \" :{ \" artist \" : \" Durian Open Movie Team \" , \" compatible_brands \" : \" isomiso2avc1mp41 \" , \" copyright \" : \" (c) copyright Blender Foundation | durian.blender.org \" , \" creation_time \" : \" 1970-01-01T00:00:00.000000Z \" , \" description \" : \" Trailer for the Sintel open movie project \" , \" encoder \" : \" Lavf52.62.0 \" ,
// Load the project JSON and open the timeline for reading
t - > SetJson ( project_json . str ( ) ) ;
t - > Open ( ) ;
2022-10-13 00:01:03 -05:00
- Protect AddClip(), RemoveClip(), update_open_clips(), sort_clips(), sort_effects() methods with mutex, making them thread safe
- Refactor sorting of clips & effects, and only sort these arrays when the arrays change (instead of each call to GetFrame)
- Cache max timeline duration, and make Timeline::GetMaxTime() thread safe
- New multi-threaded unit tests, which are designed to verify no seg faults on multi-threaded calls to Timeline::GetFrame(), Timeline::AddClip(), and Timeline::RemoveClip()
- New public Timeline::SortTimeline() method which is called by child Clips automatically, when certain properties are changed
2022-10-22 22:55:40 -05:00
// A successful test will NOT crash - since this causes many threads to
// call the same Timeline methods asynchronously, to verify mutexes and multi-threaded
// access does not seg fault or crash this test.
2022-10-13 00:01:03 -05:00
# pragma omp parallel
- Protect AddClip(), RemoveClip(), update_open_clips(), sort_clips(), sort_effects() methods with mutex, making them thread safe
- Refactor sorting of clips & effects, and only sort these arrays when the arrays change (instead of each call to GetFrame)
- Cache max timeline duration, and make Timeline::GetMaxTime() thread safe
- New multi-threaded unit tests, which are designed to verify no seg faults on multi-threaded calls to Timeline::GetFrame(), Timeline::AddClip(), and Timeline::RemoveClip()
- New public Timeline::SortTimeline() method which is called by child Clips automatically, when certain properties are changed
2022-10-22 22:55:40 -05:00
{
// Run the following loop in all threads
int64_t frame_count = 60 ;
for ( long int frame = 1 ; frame < = frame_count ; frame + + ) {
// Every thread hammers GetFrame() on the SAME timeline object
std : : shared_ptr < Frame > f = t - > GetFrame ( frame ) ;
2022-10-13 00:01:03 -05:00
- Protect AddClip(), RemoveClip(), update_open_clips(), sort_clips(), sort_effects() methods with mutex, making them thread safe
- Refactor sorting of clips & effects, and only sort these arrays when the arrays change (instead of each call to GetFrame)
- Cache max timeline duration, and make Timeline::GetMaxTime() thread safe
- New multi-threaded unit tests, which are designed to verify no seg faults on multi-threaded calls to Timeline::GetFrame(), Timeline::AddClip(), and Timeline::RemoveClip()
- New public Timeline::SortTimeline() method which is called by child Clips automatically, when certain properties are changed
2022-10-22 22:55:40 -05:00
// Clear cache after every frame
// This is designed to test the mutex for ClearAllCache()
t - > ClearAllCache ( ) ;
}
// Clear all clips after loop is done
// This is designed to test the mutex for Clear()
t - > Clear ( ) ;
}
2022-10-13 00:01:03 -05:00
- Protect AddClip(), RemoveClip(), update_open_clips(), sort_clips(), sort_effects() methods with mutex, making them thread safe
- Refactor sorting of clips & effects, and only sort these arrays when the arrays change (instead of each call to GetFrame)
- Cache max timeline duration, and make Timeline::GetMaxTime() thread safe
- New multi-threaded unit tests, which are designed to verify no seg faults on multi-threaded calls to Timeline::GetFrame(), Timeline::AddClip(), and Timeline::RemoveClip()
- New public Timeline::SortTimeline() method which is called by child Clips automatically, when certain properties are changed
2022-10-22 22:55:40 -05:00
// Close and delete timeline object
t - > Close ( ) ;
delete t ;
t = NULL ;
}
2025-09-11 23:27:41 -05:00
// ---------------------------------------------------------------------------
// Tests validating removal of timeline-level effects, including behavior while
// the timeline is open and under multi-threaded access (lock correctness)
// ---------------------------------------------------------------------------
- Protect AddClip(), RemoveClip(), update_open_clips(), sort_clips(), sort_effects() methods with mutex, making them thread safe
- Refactor sorting of clips & effects, and only sort these arrays when the arrays change (instead of each call to GetFrame)
- Cache max timeline duration, and make Timeline::GetMaxTime() thread safe
- New multi-threaded unit tests, which are designed to verify no seg faults on multi-threaded calls to Timeline::GetFrame(), Timeline::AddClip(), and Timeline::RemoveClip()
- New public Timeline::SortTimeline() method which is called by child Clips automatically, when certain properties are changed
2022-10-22 22:55:40 -05:00
2025-09-11 23:27:41 -05:00
TEST_CASE( "RemoveEffect basic", "[libopenshot][timeline]" )
{
	// Create a simple timeline
	Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);

	// Two timeline-level effects
	Negate e1; e1.Id("E1"); e1.Layer(0);
	Negate e2; e2.Id("E2"); e2.Layer(1);
	t.AddEffect(&e1);
	t.AddEffect(&e2);

	// Sanity check
	REQUIRE(t.Effects().size() == 2);
	REQUIRE(t.GetEffect("E1") != nullptr);
	REQUIRE(t.GetEffect("E2") != nullptr);

	// Remove one effect and verify it is truly gone
	t.RemoveEffect(&e1);
	auto effects_after = t.Effects();
	CHECK(effects_after.size() == 1);
	CHECK(t.GetEffect("E1") == nullptr);
	CHECK(t.GetEffect("E2") != nullptr);

	// The removed pointer must no longer appear in the effect list.
	// Scan manually instead of std::find: this file does not include
	// <algorithm>, and relying on transitive inclusion is fragile.
	bool still_present = false;
	for (const auto *effect : effects_after)
		if (effect == &e1)
			still_present = true;
	CHECK_FALSE(still_present);

	// Removing the same (already-removed) effect should be a no-op
	t.RemoveEffect(&e1);
	CHECK(t.Effects().size() == 1);
}
TEST_CASE( "RemoveEffect not present is no-op", "[libopenshot][timeline]" )
{
	Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);

	// One effect that is registered, and one that never will be
	Negate existing;
	existing.Id("KEEP");
	existing.Layer(0);

	Negate never_added;
	never_added.Id("GHOST");
	never_added.Layer(1);

	t.AddEffect(&existing);
	REQUIRE(t.Effects().size() == 1);

	// Try to remove an effect pointer that was never added
	t.RemoveEffect(&never_added);

	// State should be unchanged
	CHECK(t.Effects().size() == 1);
	CHECK(t.GetEffect("KEEP") != nullptr);
	CHECK(t.GetEffect("GHOST") == nullptr);
}
TEST_CASE( "RemoveEffect while open (active pipeline safety)", "[libopenshot][timeline]" )
{
	// Timeline with one visible clip so frames can be requested
	Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);
	std::stringstream media;
	media << TEST_MEDIA_PATH << "front3.png";
	Clip clip(media.str());
	clip.Layer(0);
	t.AddClip(&clip);

	// Add a timeline-level effect, then open the timeline
	Negate neg;
	neg.Id("NEG");
	neg.Layer(1);
	t.AddEffect(&neg);
	t.Open();

	// Exercise the pipeline before removal
	auto before = t.GetFrame(1);
	REQUIRE(before != nullptr);

	// Removing while open must be both safe and effective
	t.RemoveEffect(&neg);
	CHECK(t.GetEffect("NEG") == nullptr);
	CHECK(t.Effects().size() == 0);

	// The pipeline must still produce frames afterwards (no crash / deadlock)
	auto after = t.GetFrame(2);
	REQUIRE(after != nullptr);

	// Close reader
	t.Close();
}
TEST_CASE( "RemoveEffect preserves ordering of remaining effects", "[libopenshot][timeline]" )
{
	// Create a timeline
	Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);

	// Add effects out of order (varying Layer / Position / Order)
	Negate a;  a.Id("A");    a.Layer(0);  a.Position(0.0);  a.Order(0);
	Negate b1; b1.Id("B-1"); b1.Layer(1); b1.Position(0.0); b1.Order(3);
	Negate b;  b.Id("B");    b.Layer(1);  b.Position(0.0);  b.Order(0);
	Negate b2; b2.Id("B-2"); b2.Layer(1); b2.Position(0.5); b2.Order(2);
	Negate b3; b3.Id("B-3"); b3.Layer(1); b3.Position(0.5); b3.Order(1);
	Negate c;  c.Id("C");    c.Layer(2);  c.Position(0.0);  c.Order(0);
	t.AddEffect(&c);
	t.AddEffect(&b);
	t.AddEffect(&a);
	t.AddEffect(&b3);
	t.AddEffect(&b2);
	t.AddEffect(&b1);

	// Remove a middle effect and verify ordering is still deterministic
	t.RemoveEffect(&b);
	std::list<EffectBase*> effects = t.Effects();
	REQUIRE(effects.size() == 5);

	// Expected sorted sequence: (layer, id, position, order)
	struct Expected { int layer; const char *id; double position; int order; };
	const Expected expected[] = {
		{0, "A",   0.0, 0},
		{1, "B-1", 0.0, 3},
		{1, "B-2", 0.5, 2},
		{1, "B-3", 0.5, 1},
		{2, "C",   0.0, 0},
	};

	int n = 0;
	for (auto effect : effects) {
		CHECK(effect->Layer() == expected[n].layer);
		CHECK(effect->Id() == expected[n].id);
		CHECK(effect->Position() == Approx(expected[n].position).margin(0.0001));
		CHECK(effect->Order() == expected[n].order);
		++n;
	}
}
TEST_CASE ( " Multi-threaded Timeline Add/Remove Effect " , " [libopenshot][timeline] " )
{
// Create timeline with a clip so frames can be requested
Timeline * t = new Timeline ( 1280 , 720 , Fraction ( 24 , 1 ) , 48000 , 2 , LAYOUT_STEREO ) ;
- Protect AddClip(), RemoveClip(), update_open_clips(), sort_clips(), sort_effects() methods with mutex, making them thread safe
- Refactor sorting of clips & effects, and only sort these arrays when the arrays change (instead of each call to GetFrame)
- Cache max timeline duration, and make Timeline::GetMaxTime() thread safe
- New multi-threaded unit tests, which are designed to verify no seg faults on multi-threaded calls to Timeline::GetFrame(), Timeline::AddClip(), and Timeline::RemoveClip()
- New public Timeline::SortTimeline() method which is called by child Clips automatically, when certain properties are changed
2022-10-22 22:55:40 -05:00
std : : stringstream path ;
path < < TEST_MEDIA_PATH < < " test.mp4 " ;
2025-09-11 23:27:41 -05:00
Clip * clip = new Clip ( path . str ( ) ) ;
clip - > Layer ( 0 ) ;
t - > AddClip ( clip ) ;
t - > Open ( ) ;
- Protect AddClip(), RemoveClip(), update_open_clips(), sort_clips(), sort_effects() methods with mutex, making them thread safe
- Refactor sorting of clips & effects, and only sort these arrays when the arrays change (instead of each call to GetFrame)
- Cache max timeline duration, and make Timeline::GetMaxTime() thread safe
- New multi-threaded unit tests, which are designed to verify no seg faults on multi-threaded calls to Timeline::GetFrame(), Timeline::AddClip(), and Timeline::RemoveClip()
- New public Timeline::SortTimeline() method which is called by child Clips automatically, when certain properties are changed
2022-10-22 22:55:40 -05:00
2025-09-11 23:27:41 -05:00
// A successful test will NOT crash - many threads will add/remove effects
// while also requesting frames, exercising locks around effect mutation.
- Protect AddClip(), RemoveClip(), update_open_clips(), sort_clips(), sort_effects() methods with mutex, making them thread safe
- Refactor sorting of clips & effects, and only sort these arrays when the arrays change (instead of each call to GetFrame)
- Cache max timeline duration, and make Timeline::GetMaxTime() thread safe
- New multi-threaded unit tests, which are designed to verify no seg faults on multi-threaded calls to Timeline::GetFrame(), Timeline::AddClip(), and Timeline::RemoveClip()
- New public Timeline::SortTimeline() method which is called by child Clips automatically, when certain properties are changed
2022-10-22 22:55:40 -05:00
# pragma omp parallel
{
2025-09-11 23:27:41 -05:00
int64_t effect_count = 10 ;
for ( int i = 0 ; i < effect_count ; + + i ) {
// Each thread creates its own effect
Negate * neg = new Negate ( ) ;
std : : stringstream sid ;
sid < < " NEG_T " < < omp_get_thread_num ( ) < < " _I " < < i ;
neg - > Id ( sid . str ( ) ) ;
neg - > Layer ( 1 + omp_get_thread_num ( ) ) ; // spread across layers
- Protect AddClip(), RemoveClip(), update_open_clips(), sort_clips(), sort_effects() methods with mutex, making them thread safe
- Refactor sorting of clips & effects, and only sort these arrays when the arrays change (instead of each call to GetFrame)
- Cache max timeline duration, and make Timeline::GetMaxTime() thread safe
- New multi-threaded unit tests, which are designed to verify no seg faults on multi-threaded calls to Timeline::GetFrame(), Timeline::AddClip(), and Timeline::RemoveClip()
- New public Timeline::SortTimeline() method which is called by child Clips automatically, when certain properties are changed
2022-10-22 22:55:40 -05:00
2025-09-11 23:27:41 -05:00
// Add the effect
t - > AddEffect ( neg ) ;
- Protect AddClip(), RemoveClip(), update_open_clips(), sort_clips(), sort_effects() methods with mutex, making them thread safe
- Refactor sorting of clips & effects, and only sort these arrays when the arrays change (instead of each call to GetFrame)
- Cache max timeline duration, and make Timeline::GetMaxTime() thread safe
- New multi-threaded unit tests, which are designed to verify no seg faults on multi-threaded calls to Timeline::GetFrame(), Timeline::AddClip(), and Timeline::RemoveClip()
- New public Timeline::SortTimeline() method which is called by child Clips automatically, when certain properties are changed
2022-10-22 22:55:40 -05:00
2025-09-11 23:27:41 -05:00
// Touch a few frames to exercise the render pipeline with the effect
for ( long int frame = 1 ; frame < = 6 ; + + frame ) {
- Protect AddClip(), RemoveClip(), update_open_clips(), sort_clips(), sort_effects() methods with mutex, making them thread safe
- Refactor sorting of clips & effects, and only sort these arrays when the arrays change (instead of each call to GetFrame)
- Cache max timeline duration, and make Timeline::GetMaxTime() thread safe
- New multi-threaded unit tests, which are designed to verify no seg faults on multi-threaded calls to Timeline::GetFrame(), Timeline::AddClip(), and Timeline::RemoveClip()
- New public Timeline::SortTimeline() method which is called by child Clips automatically, when certain properties are changed
2022-10-22 22:55:40 -05:00
std : : shared_ptr < Frame > f = t - > GetFrame ( frame ) ;
2025-09-11 23:27:41 -05:00
REQUIRE ( f ! = nullptr ) ;
- Protect AddClip(), RemoveClip(), update_open_clips(), sort_clips(), sort_effects() methods with mutex, making them thread safe
- Refactor sorting of clips & effects, and only sort these arrays when the arrays change (instead of each call to GetFrame)
- Cache max timeline duration, and make Timeline::GetMaxTime() thread safe
- New multi-threaded unit tests, which are designed to verify no seg faults on multi-threaded calls to Timeline::GetFrame(), Timeline::AddClip(), and Timeline::RemoveClip()
- New public Timeline::SortTimeline() method which is called by child Clips automatically, when certain properties are changed
2022-10-22 22:55:40 -05:00
}
2025-09-11 23:27:41 -05:00
// Remove the effect and destroy it
t - > RemoveEffect ( neg ) ;
delete neg ;
neg = nullptr ;
- Protect AddClip(), RemoveClip(), update_open_clips(), sort_clips(), sort_effects() methods with mutex, making them thread safe
- Refactor sorting of clips & effects, and only sort these arrays when the arrays change (instead of each call to GetFrame)
- Cache max timeline duration, and make Timeline::GetMaxTime() thread safe
- New multi-threaded unit tests, which are designed to verify no seg faults on multi-threaded calls to Timeline::GetFrame(), Timeline::AddClip(), and Timeline::RemoveClip()
- New public Timeline::SortTimeline() method which is called by child Clips automatically, when certain properties are changed
2022-10-22 22:55:40 -05:00
}
2025-09-11 23:27:41 -05:00
// Clear all effects at the end from within threads (should be safe)
// This also exercises internal sorting/locking paths
t - > Clear ( ) ;
- Protect AddClip(), RemoveClip(), update_open_clips(), sort_clips(), sort_effects() methods with mutex, making them thread safe
- Refactor sorting of clips & effects, and only sort these arrays when the arrays change (instead of each call to GetFrame)
- Cache max timeline duration, and make Timeline::GetMaxTime() thread safe
- New multi-threaded unit tests, which are designed to verify no seg faults on multi-threaded calls to Timeline::GetFrame(), Timeline::AddClip(), and Timeline::RemoveClip()
- New public Timeline::SortTimeline() method which is called by child Clips automatically, when certain properties are changed
2022-10-22 22:55:40 -05:00
}
t - > Close ( ) ;
delete t ;
2025-09-11 23:27:41 -05:00
t = nullptr ;
delete clip ;
clip = nullptr ;
- Protect AddClip(), RemoveClip(), update_open_clips(), sort_clips(), sort_effects() methods with mutex, making them thread safe
- Refactor sorting of clips & effects, and only sort these arrays when the arrays change (instead of each call to GetFrame)
- Cache max timeline duration, and make Timeline::GetMaxTime() thread safe
- New multi-threaded unit tests, which are designed to verify no seg faults on multi-threaded calls to Timeline::GetFrame(), Timeline::AddClip(), and Timeline::RemoveClip()
- New public Timeline::SortTimeline() method which is called by child Clips automatically, when certain properties are changed
2022-10-22 22:55:40 -05:00
}
2022-10-28 11:00:47 -05:00
TEST_CASE( "ApplyJSONDiff and FrameMappers", "[libopenshot][timeline]" )
{
    // Verify that ApplyJsonDiff() keeps (or removes) the FrameMapper wrapper
    // around a clip's reader, depending on the AutoMapClips() setting.

    // Create a timeline
    Timeline t(640, 480, Fraction(60, 1), 44100, 2, LAYOUT_STEREO);
    t.Open();

    // Auto create FrameMappers for each clip
    t.AutoMapClips(true);

    // Add clip
    std::stringstream path1;
    path1 << TEST_MEDIA_PATH << "interlaced.png";
    Clip clip1(path1.str());
    clip1.Id("ABC");
    clip1.Layer(1);
    clip1.Position(0);
    clip1.End(10);

    // Verify clip reader type (not wrapped yet, because we have not added clip to timeline)
    CHECK(clip1.Reader()->Name() == "QtImageReader");

    t.AddClip(&clip1);

    // Verify clip was wrapped in FrameMapper
    CHECK(clip1.Reader()->Name() == "FrameMapper");

    // Update Clip's basic properties with JSON Diff (i.e. no reader JSON)
    std::stringstream json_change1;
    json_change1 << "[{\"type\":\"update\",\"key\":[\"clips\",{\"id\":\"" << clip1.Id() << "\"}],\"value\":{\"id\":\"" << clip1.Id() << "\",\"layer\":4000000,\"position\":14.7,\"start\":0,\"end\":10},\"partial\":false}]";
    t.ApplyJsonDiff(json_change1.str());

    // Verify clip is still wrapped in FrameMapper
    CHECK(clip1.Reader()->Name() == "FrameMapper");

    // Update clip's reader back to a QtImageReader
    std::stringstream json_change2;
    json_change2 << "[{\"type\":\"update\",\"key\":[\"clips\",{\"id\":\"" << clip1.Id() << "\"}],\"value\":{\"id\":\"" << clip1.Id() << "\",\"reader\":{\"acodec\":\"\",\"audio_bit_rate\":0,\"audio_stream_index\":-1,\"audio_timebase\":{\"den\":1,\"num\":1},\"channel_layout\":4,\"channels\":0,\"display_ratio\":{\"den\":1,\"num\":1},\"duration\":3600.0,\"file_size\":\"160000\",\"fps\":{\"den\":1,\"num\":30},\"has_audio\":false,\"has_single_image\":true,\"has_video\":true,\"height\":200,\"interlaced_frame\":false,\"metadata\":{},\"path\":\"" << path1.str() << "\",\"pixel_format\":-1,\"pixel_ratio\":{\"den\":1,\"num\":1},\"sample_rate\":0,\"top_field_first\":true,\"type\":\"QtImageReader\",\"vcodec\":\"\",\"video_bit_rate\":0,\"video_length\":\"108000\",\"video_stream_index\":-1,\"video_timebase\":{\"den\":30,\"num\":1},\"width\":200},\"position\":14.7,\"start\":0,\"end\":10},\"partial\":false}]";
    t.ApplyJsonDiff(json_change2.str());

    // Verify clip reader type (auto-mapping still on, so it stays wrapped)
    CHECK(clip1.Reader()->Name() == "FrameMapper");

    // Disable Auto FrameMappers for each clip
    t.AutoMapClips(false);

    // Update clip's reader back to a QtImageReader
    std::stringstream json_change3;
    json_change3 << "[{\"type\":\"update\",\"key\":[\"clips\",{\"id\":\"" << clip1.Id() << "\"}],\"value\":{\"id\":\"" << clip1.Id() << "\",\"reader\":{\"acodec\":\"\",\"audio_bit_rate\":0,\"audio_stream_index\":-1,\"audio_timebase\":{\"den\":1,\"num\":1},\"channel_layout\":4,\"channels\":0,\"display_ratio\":{\"den\":1,\"num\":1},\"duration\":3600.0,\"file_size\":\"160000\",\"fps\":{\"den\":1,\"num\":30},\"has_audio\":false,\"has_single_image\":true,\"has_video\":true,\"height\":200,\"interlaced_frame\":false,\"metadata\":{},\"path\":\"" << path1.str() << "\",\"pixel_format\":-1,\"pixel_ratio\":{\"den\":1,\"num\":1},\"sample_rate\":0,\"top_field_first\":true,\"type\":\"QtImageReader\",\"vcodec\":\"\",\"video_bit_rate\":0,\"video_length\":\"108000\",\"video_stream_index\":-1,\"video_timebase\":{\"den\":30,\"num\":1},\"width\":200},\"position\":14.7,\"start\":0,\"end\":10},\"partial\":false}]";
    t.ApplyJsonDiff(json_change3.str());

    // Verify clip reader type (auto-mapping disabled, so the raw reader returns)
    CHECK(clip1.Reader()->Name() == "QtImageReader");
}
2026-02-11 20:11:47 -06:00
TEST_CASE( "ApplyJSONDiff insert invalidates overlapping timeline cache", "[libopenshot][timeline]" )
{
    // Verify that inserting a clip via ApplyJsonDiff() invalidates any
    // previously-cached timeline frames that overlap the new clip's range.

    // Create timeline with no clips so cached frames are black placeholders
    Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);
    t.Open();

    // Cache a frame in the area where we'll insert a new clip
    std::shared_ptr<Frame> cached_before = t.GetFrame(10);
    REQUIRE(cached_before != nullptr);
    REQUIRE(t.GetCache() != nullptr);
    REQUIRE(t.GetCache()->Contains(10));

    // Insert clip via JSON diff overlapping frame 10
    std::stringstream path1;
    path1 << TEST_MEDIA_PATH << "interlaced.png";
    std::stringstream json_change;
    json_change << "[{\"type\":\"insert\",\"key\":[\"clips\"],\"value\":{\"id\":\"INSERT_CACHE_INVALIDATE\",\"layer\":1,\"position\":0.0,\"start\":0,\"end\":10,\"reader\":{\"acodec\":\"\",\"audio_bit_rate\":0,\"audio_stream_index\":-1,\"audio_timebase\":{\"den\":1,\"num\":1},\"channel_layout\":4,\"channels\":0,\"display_ratio\":{\"den\":1,\"num\":1},\"duration\":3600.0,\"file_size\":\"160000\",\"fps\":{\"den\":1,\"num\":30},\"has_audio\":false,\"has_single_image\":true,\"has_video\":true,\"height\":200,\"interlaced_frame\":false,\"metadata\":{},\"path\":\"" << path1.str() << "\",\"pixel_format\":-1,\"pixel_ratio\":{\"den\":1,\"num\":1},\"sample_rate\":0,\"top_field_first\":true,\"type\":\"QtImageReader\",\"vcodec\":\"\",\"video_bit_rate\":0,\"video_length\":\"108000\",\"video_stream_index\":-1,\"video_timebase\":{\"den\":30,\"num\":1},\"width\":200}},\"partial\":false}]";
    t.ApplyJsonDiff(json_change.str());

    // Overlapping cached frame should be invalidated
    CHECK(!t.GetCache()->Contains(10));
}
2023-02-13 16:42:21 -06:00
TEST_CASE( "ApplyJSONDiff Update Reader Info", "[libopenshot][timeline]" )
{
    // Verify that updating a clip's reader info (FPS / timebase / duration)
    // through ApplyJsonDiff() reaches the underlying reader inside the
    // FrameMapper, and that re-opening the clip does not clobber the change.

    // Create a timeline
    Timeline t(640, 480, Fraction(24, 1), 44100, 2, LAYOUT_STEREO);
    t.Open();

    // Auto create FrameMappers for each clip
    t.AutoMapClips(true);

    // Add clip
    std::stringstream path1;
    path1 << TEST_MEDIA_PATH << "sintel_trailer-720p.mp4";
    Clip clip1(path1.str());
    clip1.Id("ABC");
    clip1.Layer(1);
    clip1.Position(0);
    clip1.End(10);

    // Capture the original reader JSON before the clip is wrapped
    std::string reader_json = clip1.Reader()->Json();

    // Verify clip reader type (not wrapped yet, because we have not added clip to timeline)
    CHECK(clip1.Reader()->Name() == "FFmpegReader");

    t.AddClip(&clip1);

    // Verify clip was wrapped in FrameMapper, and mapped to the timeline's 24 FPS
    CHECK(clip1.Reader()->Name() == "FrameMapper");
    CHECK(clip1.info.fps.num == 24);
    CHECK(clip1.info.fps.den == 1);
    CHECK(clip1.info.video_timebase.num == 1);
    CHECK(clip1.info.video_timebase.den == 24);
    CHECK(clip1.info.duration == Approx(52.20833).margin(0.00001));

    // Create JSON change to increase FPS from 24 to 60
    Json::Value reader_root = openshot::stringToJson(reader_json);
    reader_root["fps"]["num"] = 60;
    reader_root["fps"]["den"] = 1;
    reader_root["video_timebase"]["num"] = 1;
    reader_root["video_timebase"]["den"] = 60;
    // 24 -> 60 FPS shortens the duration proportionally (24/60 == 0.4)
    reader_root["duration"] = reader_root["duration"].asDouble() * 0.4;
    std::string update_reader = reader_root.toStyledString();

    // Apply JSON changes to clip
    std::stringstream json_change1;
    json_change1 << "[{\"type\":\"update\",\"key\":[\"clips\",{\"id\":\"" << clip1.Id() << "\"}],\"value\":{\"reader\":" << update_reader << "}}]";
    t.ApplyJsonDiff(json_change1.str());

    // Verify clip is still wrapped in FrameMapper
    CHECK(clip1.Reader()->Name() == "FrameMapper");

    // Verify the wrapped reader has the updated properties and info struct
    openshot::FrameMapper* mapper = static_cast<openshot::FrameMapper*>(clip1.Reader());
    CHECK(mapper->Reader()->info.fps.num == 60);
    CHECK(mapper->Reader()->info.fps.den == 1);
    CHECK(mapper->Reader()->info.video_timebase.num == 1);
    CHECK(mapper->Reader()->info.video_timebase.den == 60);
    CHECK(mapper->Reader()->info.duration == Approx(20.88333).margin(0.00001));

    // Verify clip has updated properties and info struct
    // (clip FPS stays at the timeline's 24, but duration reflects the update)
    CHECK(clip1.info.fps.num == 24);
    CHECK(clip1.info.fps.den == 1);
    CHECK(clip1.info.video_timebase.num == 1);
    CHECK(clip1.info.video_timebase.den == 24);
    CHECK(clip1.info.duration == Approx(20.88333).margin(0.00001));

    // Open Clip object, and verify this does not clobber our 60 FPS change
    clip1.Open();
    CHECK(mapper->Reader()->info.fps.num == 60);
    CHECK(mapper->Reader()->info.fps.den == 1);
    CHECK(mapper->Reader()->info.video_timebase.num == 1);
    CHECK(mapper->Reader()->info.video_timebase.den == 60);
    CHECK(mapper->Reader()->info.duration == Approx(20.88333).margin(0.00001));
}