/**
 * @file
 * @brief Source file for Timeline class
 * @author Jonathan Thomas <jonathan@openshot.org>
 *
 * @ref License
 */

// Copyright (c) 2008-2019 OpenShot Studios, LLC
//
// SPDX-License-Identifier: LGPL-3.0-or-later
|
2013-09-12 17:52:10 -05:00
|
|
|
|
2020-10-18 07:43:37 -04:00
|
|
|
#include "Timeline.h"
|
2021-03-31 19:35:58 -04:00
|
|
|
|
|
|
|
|
#include "CacheBase.h"
|
|
|
|
|
#include "CacheDisk.h"
|
|
|
|
|
#include "CacheMemory.h"
|
|
|
|
|
#include "CrashHandler.h"
|
|
|
|
|
#include "FrameMapper.h"
|
2021-01-26 10:52:04 -05:00
|
|
|
#include "Exceptions.h"
|
2012-10-03 01:55:24 -05:00
|
|
|
|
2021-03-31 19:35:58 -04:00
|
|
|
#include <QDir>
|
|
|
|
|
#include <QFileInfo>
|
|
|
|
|
|
2012-10-03 01:55:24 -05:00
|
|
|
using namespace openshot;
|
|
|
|
|
|
|
|
|
|
// Default Constructor for the timeline (which sets the canvas width and height)
// Builds an empty, closed timeline: animatable viewport/background keyframes,
// a ReaderInfo describing the output format, and a memory-backed frame cache.
Timeline::Timeline(int width, int height, Fraction fps, int sample_rate, int channels, ChannelLayout channel_layout) :
	is_open(false), auto_map_clips(true), managed_cache(true), path(""),
	max_concurrent_frames(OPEN_MP_NUM_PROCESSORS)
{
	// Create CrashHandler and Attach (incase of errors)
	CrashHandler::Instance();

	// Init viewport size (curve based, because it can be animated)
	viewport_scale = Keyframe(100.0);
	viewport_x = Keyframe(0.0);
	viewport_y = Keyframe(0.0);

	// Init background color (defaults to black: all channels 0.0)
	color.red = Keyframe(0.0);
	color.green = Keyframe(0.0);
	color.blue = Keyframe(0.0);

	// Init FileInfo struct (clear all values)
	info.width = width;
	info.height = height;
	// Preview size starts equal to output size; can be changed later for faster previews
	preview_width = info.width;
	preview_height = info.height;
	info.fps = fps;
	info.sample_rate = sample_rate;
	info.channels = channels;
	info.channel_layout = channel_layout;
	info.video_timebase = fps.Reciprocal();
	info.duration = 60 * 30; // 30 minute default duration
	info.has_audio = true;
	info.has_video = true;
	info.video_length = info.fps.ToFloat() * info.duration;
	info.display_ratio = openshot::Fraction(width, height);
	info.display_ratio.Reduce();
	info.pixel_ratio = openshot::Fraction(1, 1);
	// Codec names are placeholders identifying this reader as a timeline
	info.acodec = "openshot::timeline";
	info.vcodec = "openshot::timeline";

	// Init max image size
	SetMaxSize(info.width, info.height);

	// Init cache (owned by this timeline because managed_cache == true)
	final_cache = new CacheMemory();
	final_cache->SetMaxBytesFromInfo(max_concurrent_frames * 4, info.width, info.height, info.sample_rate, info.channels);
}
|
|
|
|
|
|
2021-03-31 19:35:58 -04:00
|
|
|
// Delegating constructor that copies parameters from a provided ReaderInfo
// (forwards the format fields to the main width/height/fps constructor)
Timeline::Timeline(const ReaderInfo info) : Timeline::Timeline(
	info.width, info.height, info.fps, info.sample_rate,
	info.channels, info.channel_layout) {}
|
2021-03-31 19:35:58 -04:00
|
|
|
|
2020-03-09 16:49:06 -05:00
|
|
|
// Constructor for the timeline (which loads a JSON structure from a file path, and initializes a timeline)
// Steps: validate project + install paths, optionally rewrite relative asset
// paths to absolute ones inside the raw JSON text, then SetJson() and derive
// duration/audio/video flags from the loaded clips.
// @throws InvalidFile when the project file or the OpenShot install path is missing
Timeline::Timeline(const std::string& projectPath, bool convert_absolute_paths) :
	is_open(false), auto_map_clips(true), managed_cache(true), path(projectPath),
	max_concurrent_frames(OPEN_MP_NUM_PROCESSORS) {

	// Create CrashHandler and Attach (incase of errors)
	CrashHandler::Instance();

	// Init final cache as NULL (will be created after loading json)
	final_cache = NULL;

	// Init viewport size (curve based, because it can be animated)
	viewport_scale = Keyframe(100.0);
	viewport_x = Keyframe(0.0);
	viewport_y = Keyframe(0.0);

	// Init background color
	color.red = Keyframe(0.0);
	color.green = Keyframe(0.0);
	color.blue = Keyframe(0.0);

	// Check if path exists
	QFileInfo filePath(QString::fromStdString(path));
	if (!filePath.exists()) {
		throw InvalidFile("File could not be opened.", path);
	}

	// Check OpenShot Install Path exists
	Settings *s = Settings::Instance();
	QDir openshotPath(QString::fromStdString(s->PATH_OPENSHOT_INSTALL));
	if (!openshotPath.exists()) {
		throw InvalidFile("PATH_OPENSHOT_INSTALL could not be found.", s->PATH_OPENSHOT_INSTALL);
	}
	QDir openshotTransPath(openshotPath.filePath("transitions"));
	if (!openshotTransPath.exists()) {
		throw InvalidFile("PATH_OPENSHOT_INSTALL/transitions could not be found.", openshotTransPath.path().toStdString());
	}

	// Determine asset path ("<project>_assets" folder next to the project file,
	// project basename truncated to 30 chars)
	QString asset_name = filePath.baseName().left(30) + "_assets";
	QDir asset_folder(filePath.dir().filePath(asset_name));
	if (!asset_folder.exists()) {
		// Create directory if needed
		asset_folder.mkpath(".");
	}

	// Load UTF-8 project file into QString
	// NOTE(review): the open() return value is not checked; existence was
	// verified above, but a permission failure would silently yield empty
	// contents -- confirm whether that is acceptable here.
	QFile projectFile(QString::fromStdString(path));
	projectFile.open(QFile::ReadOnly);
	QString projectContents = QString::fromUtf8(projectFile.readAll());

	// Convert all relative paths into absolute paths (if requested)
	if (convert_absolute_paths) {

		// Find all "image" or "path" references in JSON (using regex). Must loop through match results
		// due to our path matching needs, which are not possible with the QString::replace() function.
		QRegularExpression allPathsRegex(QStringLiteral("\"(image|path)\":.*?\"(.*?)\""));
		std::vector<QRegularExpressionMatch> matchedPositions;
		QRegularExpressionMatchIterator i = allPathsRegex.globalMatch(projectContents);
		while (i.hasNext()) {
			QRegularExpressionMatch match = i.next();
			if (match.hasMatch()) {
				// Push all match objects into a vector (so we can reverse them later)
				matchedPositions.push_back(match);
			}
		}

		// Reverse the matches (bottom of file to top, so our replacements don't break our match positions)
		std::vector<QRegularExpressionMatch>::reverse_iterator itr;
		for (itr = matchedPositions.rbegin(); itr != matchedPositions.rend(); itr++) {
			QRegularExpressionMatch match = *itr;
			QString relativeKey = match.captured(1); // image or path
			QString relativePath = match.captured(2); // relative file path
			QString absolutePath = "";

			// Find absolute path of all path, image (including special replacements of @assets and @transitions)
			if (relativePath.startsWith("@assets")) {
				absolutePath = QFileInfo(asset_folder.absoluteFilePath(relativePath.replace("@assets", "."))).canonicalFilePath();
			} else if (relativePath.startsWith("@transitions")) {
				absolutePath = QFileInfo(openshotTransPath.absoluteFilePath(relativePath.replace("@transitions", "."))).canonicalFilePath();
			} else {
				absolutePath = QFileInfo(filePath.absoluteDir().absoluteFilePath(relativePath)).canonicalFilePath();
			}

			// Replace path in JSON content, if an absolute path was successfully found
			// (canonicalFilePath() returns empty when the target file does not exist)
			if (!absolutePath.isEmpty()) {
				projectContents.replace(match.capturedStart(0), match.capturedLength(0), "\"" + relativeKey + "\": \"" + absolutePath + "\"");
			}
		}
		// Clear matches
		matchedPositions.clear();
	}

	// Set JSON of project (populates clips, effects, info, etc...)
	SetJson(projectContents.toStdString());

	// Calculate valid duration and set has_audio and has_video
	// based on content inside this Timeline's clips.
	float calculated_duration = 0.0;
	for (auto clip : clips)
	{
		float clip_last_frame = clip->Position() + clip->Duration();
		if (clip_last_frame > calculated_duration)
			calculated_duration = clip_last_frame;
		if (clip->Reader() && clip->Reader()->info.has_audio)
			info.has_audio = true;
		if (clip->Reader() && clip->Reader()->info.has_video)
			info.has_video = true;

	}
	info.video_length = calculated_duration * info.fps.ToFloat();
	info.duration = calculated_duration;

	// Init FileInfo settings (placeholder codec names; both stream flags forced on)
	info.acodec = "openshot::timeline";
	info.vcodec = "openshot::timeline";
	info.video_timebase = info.fps.Reciprocal();
	info.has_video = true;
	info.has_audio = true;

	// Init max image size
	SetMaxSize(info.width, info.height);

	// Init cache (owned by this timeline because managed_cache == true)
	final_cache = new CacheMemory();
	final_cache->SetMaxBytesFromInfo(max_concurrent_frames * 4, info.width, info.height, info.sample_rate, info.channels);
}
|
|
|
|
|
|
2019-05-08 14:53:23 -07:00
|
|
|
// Destructor: close the timeline, release all owned children, then free the
// cache -- in that order, since Close()/Clear() may still touch the cache.
Timeline::~Timeline() {
	if (is_open) {
		// Auto Close if not already
		Close();
	}

	// Remove all clips, effects, and frame mappers
	Clear();

	// Destroy previous cache (if managed by timeline)
	if (managed_cache && final_cache) {
		delete final_cache;
		final_cache = NULL;
	}
}
|
|
|
|
|
|
2021-06-10 08:00:30 -04:00
|
|
|
// Add to the tracked_objects map a pointer to a tracked object (TrackedObjectBBox)
|
2021-01-18 14:52:01 -03:00
|
|
|
void Timeline::AddTrackedObject(std::shared_ptr<openshot::TrackedObjectBase> trackedObject){
|
2020-12-22 21:32:36 -03:00
|
|
|
|
|
|
|
|
// Search for the tracked object on the map
|
|
|
|
|
auto iterator = tracked_objects.find(trackedObject->Id());
|
|
|
|
|
|
|
|
|
|
if (iterator != tracked_objects.end()){
|
|
|
|
|
// Tracked object's id already present on the map, overwrite it
|
|
|
|
|
iterator->second = trackedObject;
|
2021-06-10 08:00:30 -04:00
|
|
|
}
|
2020-12-22 21:32:36 -03:00
|
|
|
else{
|
|
|
|
|
// Tracked object's id not present -> insert it on the map
|
|
|
|
|
tracked_objects[trackedObject->Id()] = trackedObject;
|
|
|
|
|
}
|
2021-01-18 14:52:01 -03:00
|
|
|
|
2020-12-22 21:32:36 -03:00
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Return tracked object pointer by it's id
|
2021-01-18 14:52:01 -03:00
|
|
|
std::shared_ptr<openshot::TrackedObjectBase> Timeline::GetTrackedObject(std::string id) const{
|
2020-12-22 21:32:36 -03:00
|
|
|
|
|
|
|
|
// Search for the tracked object on the map
|
|
|
|
|
auto iterator = tracked_objects.find(id);
|
|
|
|
|
|
|
|
|
|
if (iterator != tracked_objects.end()){
|
|
|
|
|
// Id found, return the pointer to the tracked object
|
2021-01-18 14:52:01 -03:00
|
|
|
std::shared_ptr<openshot::TrackedObjectBase> trackedObject = iterator->second;
|
2020-12-22 21:32:36 -03:00
|
|
|
return trackedObject;
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
// Id not found, return a null pointer
|
|
|
|
|
return nullptr;
|
2021-06-10 08:00:30 -04:00
|
|
|
}
|
2020-12-22 21:32:36 -03:00
|
|
|
}
|
|
|
|
|
|
2021-01-18 14:52:01 -03:00
|
|
|
// Return the ID's of the tracked objects as a list of strings
|
|
|
|
|
std::list<std::string> Timeline::GetTrackedObjectsIds() const{
|
2020-12-22 21:32:36 -03:00
|
|
|
|
2021-01-18 14:52:01 -03:00
|
|
|
// Create a list of strings
|
|
|
|
|
std::list<std::string> trackedObjects_ids;
|
2020-12-22 21:32:36 -03:00
|
|
|
|
|
|
|
|
// Iterate through the tracked_objects map
|
|
|
|
|
for (auto const& it: tracked_objects){
|
2021-01-18 14:52:01 -03:00
|
|
|
// Add the IDs to the list
|
2020-12-22 21:32:36 -03:00
|
|
|
trackedObjects_ids.push_back(it.first);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return trackedObjects_ids;
|
|
|
|
|
}
|
|
|
|
|
|
2021-04-15 22:05:35 -03:00
|
|
|
#ifdef USE_OPENCV
|
2021-01-27 16:52:15 -03:00
|
|
|
// Return the trackedObject's properties as a JSON string
|
|
|
|
|
std::string Timeline::GetTrackedObjectValues(std::string id, int64_t frame_number) const {
|
2021-01-22 19:28:16 -03:00
|
|
|
|
|
|
|
|
// Initialize the JSON object
|
|
|
|
|
Json::Value trackedObjectJson;
|
|
|
|
|
|
|
|
|
|
// Search for the tracked object on the map
|
|
|
|
|
auto iterator = tracked_objects.find(id);
|
|
|
|
|
|
|
|
|
|
if (iterator != tracked_objects.end())
|
|
|
|
|
{
|
|
|
|
|
// Id found, Get the object pointer and cast it as a TrackedObjectBBox
|
|
|
|
|
std::shared_ptr<TrackedObjectBBox> trackedObject = std::static_pointer_cast<TrackedObjectBBox>(iterator->second);
|
|
|
|
|
|
|
|
|
|
// Get the trackedObject values for it's first frame
|
2021-01-27 16:52:15 -03:00
|
|
|
if (trackedObject->ExactlyContains(frame_number)){
|
|
|
|
|
BBox box = trackedObject->GetBox(frame_number);
|
|
|
|
|
float x1 = box.cx - (box.width/2);
|
|
|
|
|
float y1 = box.cy - (box.height/2);
|
|
|
|
|
float x2 = box.cx + (box.width/2);
|
|
|
|
|
float y2 = box.cy + (box.height/2);
|
|
|
|
|
float rotation = box.angle;
|
2021-01-22 19:28:16 -03:00
|
|
|
|
2021-01-27 16:52:15 -03:00
|
|
|
trackedObjectJson["x1"] = x1;
|
|
|
|
|
trackedObjectJson["y1"] = y1;
|
|
|
|
|
trackedObjectJson["x2"] = x2;
|
|
|
|
|
trackedObjectJson["y2"] = y2;
|
|
|
|
|
trackedObjectJson["rotation"] = rotation;
|
2021-06-10 08:00:30 -04:00
|
|
|
|
2021-01-27 16:52:15 -03:00
|
|
|
} else {
|
|
|
|
|
BBox box = trackedObject->BoxVec.begin()->second;
|
|
|
|
|
float x1 = box.cx - (box.width/2);
|
|
|
|
|
float y1 = box.cy - (box.height/2);
|
|
|
|
|
float x2 = box.cx + (box.width/2);
|
|
|
|
|
float y2 = box.cy + (box.height/2);
|
|
|
|
|
float rotation = box.angle;
|
|
|
|
|
|
|
|
|
|
trackedObjectJson["x1"] = x1;
|
|
|
|
|
trackedObjectJson["y1"] = y1;
|
|
|
|
|
trackedObjectJson["x2"] = x2;
|
|
|
|
|
trackedObjectJson["y2"] = y2;
|
|
|
|
|
trackedObjectJson["rotation"] = rotation;
|
|
|
|
|
}
|
2021-01-22 19:28:16 -03:00
|
|
|
|
|
|
|
|
}
|
|
|
|
|
else {
|
|
|
|
|
// Id not found, return all 0 values
|
|
|
|
|
trackedObjectJson["x1"] = 0;
|
|
|
|
|
trackedObjectJson["y1"] = 0;
|
|
|
|
|
trackedObjectJson["x2"] = 0;
|
|
|
|
|
trackedObjectJson["y2"] = 0;
|
2021-01-27 16:52:15 -03:00
|
|
|
trackedObjectJson["rotation"] = 0;
|
2021-06-10 08:00:30 -04:00
|
|
|
}
|
2021-01-22 19:28:16 -03:00
|
|
|
|
|
|
|
|
return trackedObjectJson.toStyledString();
|
|
|
|
|
}
|
2021-04-15 22:05:35 -03:00
|
|
|
#endif
|
2021-01-22 19:28:16 -03:00
|
|
|
|
2012-10-05 17:05:33 -05:00
|
|
|
// Add an openshot::Clip to the timeline
// The clip is parented to this timeline, its caches are flushed, it is
// (optionally) wrapped in a FrameMapper matching this timeline's format,
// and the clip list is re-sorted.
// NOTE(review): the clip pointer is NOT inserted into allocated_clips here,
// so RemoveClip() will only delete clips the timeline itself allocated.
void Timeline::AddClip(Clip* clip)
{
	// Assign timeline to clip
	clip->ParentTimeline(this);

	// Clear cache of clip and nested reader (if any)
	if (clip->Reader() && clip->Reader()->GetCache())
		clip->Reader()->GetCache()->Clear();

	// All clips should be converted to the frame rate of this timeline
	if (auto_map_clips)
		// Apply framemapper (or update existing framemapper)
		apply_mapper_to_clip(clip);

	// Add clip to list
	clips.push_back(clip);

	// Sort clips (by layer/position, so rendering composites in order)
	sort_clips();
}
|
|
|
|
|
|
2013-09-28 22:00:52 -05:00
|
|
|
// Add an effect to the timeline
// The effect is parented to this timeline, appended to the effect list,
// and the list is re-sorted so effects apply in the correct order.
void Timeline::AddEffect(EffectBase* effect)
{
	// Assign timeline to effect
	effect->ParentTimeline(this);

	// Add effect to list
	effects.push_back(effect);

	// Sort effects
	sort_effects();
}
|
|
|
|
|
|
|
|
|
|
// Remove an effect from the timeline
|
|
|
|
|
void Timeline::RemoveEffect(EffectBase* effect)
|
|
|
|
|
{
|
|
|
|
|
effects.remove(effect);
|
2022-10-06 15:07:31 -05:00
|
|
|
|
|
|
|
|
// Delete effect object (if timeline allocated it)
|
|
|
|
|
bool allocated = allocated_effects.count(effect);
|
|
|
|
|
if (allocated) {
|
|
|
|
|
delete effect;
|
|
|
|
|
effect = NULL;
|
2022-10-10 11:17:53 -05:00
|
|
|
allocated_effects.erase(effect);
|
2022-10-06 15:07:31 -05:00
|
|
|
}
|
2013-09-28 22:00:52 -05:00
|
|
|
}
|
|
|
|
|
|
2013-02-13 02:46:55 -06:00
|
|
|
// Remove an openshot::Clip to the timeline
|
|
|
|
|
void Timeline::RemoveClip(Clip* clip)
|
|
|
|
|
{
|
|
|
|
|
clips.remove(clip);
|
2022-10-06 15:07:31 -05:00
|
|
|
|
|
|
|
|
// Delete clip object (if timeline allocated it)
|
|
|
|
|
bool allocated = allocated_clips.count(clip);
|
|
|
|
|
if (allocated) {
|
|
|
|
|
delete clip;
|
|
|
|
|
clip = NULL;
|
2022-10-10 11:17:53 -05:00
|
|
|
allocated_clips.erase(clip);
|
2022-10-06 15:07:31 -05:00
|
|
|
}
|
2013-02-13 02:46:55 -06:00
|
|
|
}
|
|
|
|
|
|
2020-09-01 22:56:33 -04:00
|
|
|
// Look up a clip
|
2020-12-03 10:52:27 -06:00
|
|
|
openshot::Clip* Timeline::GetClip(const std::string& id)
|
2020-09-01 22:56:33 -04:00
|
|
|
{
|
|
|
|
|
// Find the matching clip (if any)
|
|
|
|
|
for (const auto& clip : clips) {
|
|
|
|
|
if (clip->Id() == id) {
|
|
|
|
|
return clip;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return nullptr;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Look up a timeline effect
|
|
|
|
|
openshot::EffectBase* Timeline::GetEffect(const std::string& id)
|
|
|
|
|
{
|
|
|
|
|
// Find the matching effect (if any)
|
|
|
|
|
for (const auto& effect : effects) {
|
|
|
|
|
if (effect->Id() == id) {
|
|
|
|
|
return effect;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return nullptr;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Look up an effect attached to any clip on the timeline, by effect id
openshot::EffectBase* Timeline::GetClipEffect(const std::string& id)
{
	// Ask each clip in turn; the first clip owning the id wins
	for (const auto& candidate : clips) {
		auto* match = candidate->GetEffect(id);
		if (match != nullptr)
			return match;
	}

	return nullptr;
}
|
|
|
|
|
|
2021-02-04 16:11:27 -03:00
|
|
|
// Return the list of effects on all clips
|
|
|
|
|
std::list<openshot::EffectBase*> Timeline::ClipEffects() const {
|
|
|
|
|
|
|
|
|
|
// Initialize the list
|
2021-04-12 21:18:21 -03:00
|
|
|
std::list<EffectBase*> timelineEffectsList;
|
2021-02-04 16:11:27 -03:00
|
|
|
|
|
|
|
|
// Loop through all clips
|
|
|
|
|
for (const auto& clip : clips) {
|
2021-06-10 08:00:30 -04:00
|
|
|
|
2021-02-04 16:11:27 -03:00
|
|
|
// Get the clip's list of effects
|
2021-04-12 21:18:21 -03:00
|
|
|
std::list<EffectBase*> clipEffectsList = clip->Effects();
|
2021-02-04 16:11:27 -03:00
|
|
|
|
|
|
|
|
// Append the clip's effects to the list
|
2021-04-12 21:18:21 -03:00
|
|
|
timelineEffectsList.insert(timelineEffectsList.end(), clipEffectsList.begin(), clipEffectsList.end());
|
2021-02-04 16:11:27 -03:00
|
|
|
}
|
|
|
|
|
|
2021-04-12 21:18:21 -03:00
|
|
|
return timelineEffectsList;
|
2021-02-04 16:11:27 -03:00
|
|
|
}
|
|
|
|
|
|
2020-09-02 01:03:06 -04:00
|
|
|
// Compute the end time of the latest timeline element
// Returns the larger of (position + duration), in seconds, over all clips
// and all timeline effects; 0.0 when the timeline is empty.
double Timeline::GetMaxTime() {
	double last_clip = 0.0;
	double last_effect = 0.0;

	if (!clips.empty()) {
		// Comparator orders clips by their end frame; the max element's
		// position + duration is the latest clip end time
		const auto max_clip = std::max_element(
			clips.begin(), clips.end(), CompareClipEndFrames());
		last_clip = (*max_clip)->Position() + (*max_clip)->Duration();
	}
	if (!effects.empty()) {
		const auto max_effect = std::max_element(
			effects.begin(), effects.end(), CompareEffectEndFrames());
		last_effect = (*max_effect)->Position() + (*max_effect)->Duration();
	}
	return std::max(last_clip, last_effect);
}
|
|
|
|
|
|
2020-09-02 01:03:06 -04:00
|
|
|
// Compute the highest frame# based on the latest time and FPS
|
|
|
|
|
int64_t Timeline::GetMaxFrame() {
|
|
|
|
|
double fps = info.fps.ToDouble();
|
|
|
|
|
auto max_time = GetMaxTime();
|
|
|
|
|
return std::round(max_time * fps) + 1;
|
|
|
|
|
}
|
|
|
|
|
|
2015-06-01 00:20:14 -07:00
|
|
|
// Apply a FrameMapper to a clip which matches the settings of this timeline
// If the clip's reader is already a FrameMapper it is re-targeted in place;
// otherwise the reader is wrapped in a new FrameMapper (tracked in
// allocated_frame_mappers so the timeline can free it later).
void Timeline::apply_mapper_to_clip(Clip* clip)
{
	// Determine type of reader
	ReaderBase* clip_reader = NULL;
	if (clip->Reader()->Name() == "FrameMapper")
	{
		// Get the existing reader
		clip_reader = (ReaderBase*) clip->Reader();

		// Update the mapping to this timeline's fps / audio format
		FrameMapper* clip_mapped_reader = (FrameMapper*) clip_reader;
		clip_mapped_reader->ChangeMapping(info.fps, PULLDOWN_NONE, info.sample_rate, info.channels, info.channel_layout);

	} else {

		// Create a new FrameMapper to wrap the current reader
		FrameMapper* mapper = new FrameMapper(clip->Reader(), info.fps, PULLDOWN_NONE, info.sample_rate, info.channels, info.channel_layout);
		// Track ownership so the timeline can delete the mapper on Clear()
		allocated_frame_mappers.insert(mapper);
		clip_reader = (ReaderBase*) mapper;
	}

	// Update clip reader
	clip->Reader(clip_reader);
}
|
|
|
|
|
|
|
|
|
|
// Apply the timeline's framerate and samplerate to all clips
// Flushes every cache first, since remapping changes the frames readers produce.
void Timeline::ApplyMapperToClips()
{
	// Clear all cached frames
	ClearAllCache();

	// Loop through all clips
	for (auto clip : clips)
	{
		// Apply framemapper (or update existing framemapper)
		apply_mapper_to_clip(clip);
	}
}
|
|
|
|
|
|
2012-10-05 17:05:33 -05:00
|
|
|
// Calculate time of a frame number, based on a framerate
|
2017-09-28 16:03:01 -05:00
|
|
|
double Timeline::calculate_time(int64_t number, Fraction rate)
|
2012-10-05 17:05:33 -05:00
|
|
|
{
|
|
|
|
|
// Get float version of fps fraction
|
2017-05-26 01:08:20 -05:00
|
|
|
double raw_fps = rate.ToFloat();
|
2012-10-05 17:05:33 -05:00
|
|
|
|
|
|
|
|
// Return the time (in seconds) of this frame
|
2017-05-26 01:08:20 -05:00
|
|
|
return double(number - 1) / raw_fps;
|
2012-10-05 17:05:33 -05:00
|
|
|
}
|
|
|
|
|
|
2013-10-06 18:11:33 -05:00
|
|
|
// Apply effects to the source frame (if any)
// Walks the (sorted) timeline effect list and applies every effect whose
// frame range and layer intersect the requested timeline frame; each
// effect receives the frame number local to its own start.
std::shared_ptr<Frame> Timeline::apply_effects(std::shared_ptr<Frame> frame, int64_t timeline_frame_number, int layer)
{
	// Debug output
	ZmqLogger::Instance()->AppendDebugMethod(
		"Timeline::apply_effects",
		"frame->number", frame->number,
		"timeline_frame_number", timeline_frame_number,
		"layer", layer);

	// Find Effects at this position and layer
	for (auto effect : effects)
	{
		// Does clip intersect the current requested time
		// (positions are in seconds; convert to 1-based frame numbers)
		long effect_start_position = round(effect->Position() * info.fps.ToDouble()) + 1;
		long effect_end_position = round((effect->Position() + (effect->Duration())) * info.fps.ToDouble());

		bool does_effect_intersect = (effect_start_position <= timeline_frame_number && effect_end_position >= timeline_frame_number && effect->Layer() == layer);

		// Debug output
		ZmqLogger::Instance()->AppendDebugMethod(
			"Timeline::apply_effects (Does effect intersect)",
			"effect->Position()", effect->Position(),
			"does_effect_intersect", does_effect_intersect,
			"timeline_frame_number", timeline_frame_number,
			"layer", layer);

		// Clip is visible
		if (does_effect_intersect)
		{
			// Determine the frame needed for this clip (based on the position on the timeline)
			long effect_start_frame = (effect->Start() * info.fps.ToDouble()) + 1;
			long effect_frame_number = timeline_frame_number - effect_start_position + effect_start_frame;

			// Debug output
			ZmqLogger::Instance()->AppendDebugMethod(
				"Timeline::apply_effects (Process Effect)",
				"effect_frame_number", effect_frame_number,
				"does_effect_intersect", does_effect_intersect);

			// Apply the effect to this frame
			frame = effect->GetFrame(frame, effect_frame_number);
		}

	} // end effect loop

	// Return modified frame
	return frame;
}
|
|
|
|
|
|
2016-01-30 17:12:41 -06:00
|
|
|
// Get or generate a blank frame
// Asks the clip to composite itself onto background_frame for the given
// timeline frame number. Reader races (ReaderClosed / OutOfBoundsFrame)
// are swallowed deliberately so a closing reader cannot crash playback.
std::shared_ptr<Frame> Timeline::GetOrCreateFrame(std::shared_ptr<Frame> background_frame, Clip* clip, int64_t number, openshot::TimelineInfoStruct* options)
{
	std::shared_ptr<Frame> new_frame;

	// Init some basic properties about this frame
	int samples_in_frame = Frame::GetSamplesPerFrame(number, info.fps, info.sample_rate, info.channels);

	try {
		// Debug output
		ZmqLogger::Instance()->AppendDebugMethod(
			"Timeline::GetOrCreateFrame (from reader)",
			"number", number,
			"samples_in_frame", samples_in_frame);

		// Attempt to get a frame (but this could fail if a reader has just been closed)
		new_frame = std::shared_ptr<Frame>(clip->GetFrame(background_frame, number, options));

		// Return real frame
		return new_frame;

	} catch (const ReaderClosed & e) {
		// ...
	} catch (const OutOfBoundsFrame & e) {
		// ...
	}

	// Debug output
	ZmqLogger::Instance()->AppendDebugMethod(
		"Timeline::GetOrCreateFrame (create blank)",
		"number", number,
		"samples_in_frame", samples_in_frame);

	// Create blank frame
	// NOTE(review): on the exception path this returns the still-empty (null)
	// shared_ptr rather than an actual blank Frame; callers such as add_layer
	// guard with `if (!source_frame) return;` -- confirm this is intended.
	return new_frame;
}
|
|
|
|
|
|
2012-11-07 17:45:13 -06:00
|
|
|
// Process a new layer of video or audio
|
2021-05-20 13:15:13 -05:00
|
|
|
// Composite one clip's frame (video and audio) onto the in-progress timeline frame.
// @param new_frame         The timeline frame being assembled (mutated in place)
// @param source_clip       The clip whose frame is composited on top
// @param clip_frame_number The frame number within the clip's own timebase
// @param is_top_clip       True when this clip is the top-most overlapping clip on its layer
// @param max_volume        Sum of volumes of all overlapping audible clips (used for mix strategies)
void Timeline::add_layer(std::shared_ptr<Frame> new_frame, Clip* source_clip, int64_t clip_frame_number, bool is_top_clip, float max_volume)
{
	// Create timeline options (with details about this current frame request)
	TimelineInfoStruct* options = new TimelineInfoStruct();
	options->is_top_clip = is_top_clip;

	// Get the clip's frame, composited on top of the current timeline frame
	std::shared_ptr<Frame> source_frame;
	source_frame = GetOrCreateFrame(new_frame, source_clip, clip_frame_number, options);
	// Options are only needed for the duration of the GetOrCreateFrame call
	delete options;

	// No frame found... so bail
	if (!source_frame)
		return;

	// Debug output
	ZmqLogger::Instance()->AppendDebugMethod(
		"Timeline::add_layer",
		"new_frame->number", new_frame->number,
		"clip_frame_number", clip_frame_number);

	/* COPY AUDIO - with correct volume */
	if (source_clip->Reader()->info.has_audio) {
		// Debug output
		ZmqLogger::Instance()->AppendDebugMethod(
			"Timeline::add_layer (Copy Audio)",
			"source_clip->Reader()->info.has_audio", source_clip->Reader()->info.has_audio,
			"source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(),
			"info.channels", info.channels,
			"clip_frame_number", clip_frame_number);

		// Only mix audio when channel counts match and the clip is audible on this frame
		if (source_frame->GetAudioChannelsCount() == info.channels && source_clip->has_audio.GetInt(clip_frame_number) != 0)
			for (int channel = 0; channel < source_frame->GetAudioChannelsCount(); channel++)
			{
				// Get volume from previous frame and this frame
				float previous_volume = source_clip->volume.GetValue(clip_frame_number - 1);
				float volume = source_clip->volume.GetValue(clip_frame_number);
				int channel_filter = source_clip->channel_filter.GetInt(clip_frame_number); // optional channel to filter (if not -1)
				int channel_mapping = source_clip->channel_mapping.GetInt(clip_frame_number); // optional channel to map this channel to (if not -1)

				// Apply volume mixing strategy
				if (source_clip->mixing == VOLUME_MIX_AVERAGE && max_volume > 1.0) {
					// Don't allow this clip to exceed 100% (divide volume equally between all overlapping clips with volume
					previous_volume = previous_volume / max_volume;
					volume = volume / max_volume;
				}
				else if (source_clip->mixing == VOLUME_MIX_REDUCE && max_volume > 1.0) {
					// Reduce clip volume by a bit, hoping it will prevent exceeding 100% (but it is very possible it will)
					previous_volume = previous_volume * 0.77;
					volume = volume * 0.77;
				}

				// If channel filter enabled, check for correct channel (and skip non-matching channels)
				if (channel_filter != -1 && channel_filter != channel)
					continue; // skip to next channel

				// If no volume on this frame or previous frame, do nothing
				if (previous_volume == 0.0 && volume == 0.0)
					continue; // skip to next channel

				// If channel mapping disabled, just use the current channel
				if (channel_mapping == -1)
					channel_mapping = channel;

				// Apply ramp to source frame (if needed).
				// NOTE: the ramp is applied to the mapped (destination) channel index of the
				// source frame, interpolating from previous_volume to volume across the frame.
				if (!isEqual(previous_volume, 1.0) || !isEqual(volume, 1.0))
					source_frame->ApplyGainRamp(channel_mapping, 0, source_frame->GetAudioSamplesCount(), previous_volume, volume);

				// TODO: Improve FrameMapper (or Timeline) to always get the correct number of samples per frame.
				// Currently, the ResampleContext sometimes leaves behind a few samples for the next call, and the
				// number of samples returned is variable... and does not match the number expected.
				// This is a crude solution at best. =)
				if (new_frame->GetAudioSamplesCount() != source_frame->GetAudioSamplesCount()){
					// Force timeline frame to match the source frame
					new_frame->ResizeAudio(info.channels, source_frame->GetAudioSamplesCount(), info.sample_rate, info.channel_layout);
				}
				// Copy audio samples (and set initial volume). Mix samples with existing audio samples. The gains are added together, to
				// be sure to set the gain's correctly, so the sum does not exceed 1.0 (or audio distortion will happen).
				new_frame->AddAudio(false, channel_mapping, 0, source_frame->GetAudioSamples(channel), source_frame->GetAudioSamplesCount(), 1.0);
			}
		else
			// Debug output
			ZmqLogger::Instance()->AppendDebugMethod(
				"Timeline::add_layer (No Audio Copied - Wrong # of Channels)",
				"source_clip->Reader()->info.has_audio",
				source_clip->Reader()->info.has_audio,
				"source_frame->GetAudioChannelsCount()",
				source_frame->GetAudioChannelsCount(),
				"info.channels", info.channels,
				"clip_frame_number", clip_frame_number);
	}

	// Debug output
	ZmqLogger::Instance()->AppendDebugMethod(
		"Timeline::add_layer (Transform: Composite Image Layer: Completed)",
		"source_frame->number", source_frame->number,
		"new_frame->GetImage()->width()", new_frame->GetImage()->width(),
		"new_frame->GetImage()->height()", new_frame->GetImage()->height());
}
|
|
|
|
|
|
2012-10-09 02:09:44 -05:00
|
|
|
// Update the list of 'opened' clips
|
2015-03-15 02:28:28 -05:00
|
|
|
// Keep the 'open_clips' bookkeeping in sync with whether a clip intersects the
// current request: opens clips that became visible, closes those that left view.
void Timeline::update_open_clips(Clip *clip, bool does_clip_intersect)
{
	// Log the state prior to any change
	ZmqLogger::Instance()->AppendDebugMethod(
		"Timeline::update_open_clips (before)",
		"does_clip_intersect", does_clip_intersect,
		"closing_clips.size()", closing_clips.size(),
		"open_clips.size()", open_clips.size());

	// Is this clip currently tracked as open?
	bool already_open = open_clips.count(clip);

	if (already_open && !does_clip_intersect) {
		// Clip left the visible range: stop tracking it and release its reader
		open_clips.erase(clip);
		clip->Close();
	} else if (!already_open && does_clip_intersect) {
		// Clip became visible: track it and attempt to open its reader
		open_clips[clip] = clip;
		try {
			clip->Open();
		} catch (const InvalidFile & e) {
			// Swallow: an unreadable file must not abort timeline processing
		}
	}

	// Log the state after the change
	ZmqLogger::Instance()->AppendDebugMethod(
		"Timeline::update_open_clips (after)",
		"does_clip_intersect", does_clip_intersect,
		"clip_found", already_open,
		"closing_clips.size()", closing_clips.size(),
		"open_clips.size()", open_clips.size());
}
|
|
|
|
|
|
2012-10-05 17:05:33 -05:00
|
|
|
// Sort clips by position on the timeline
|
2015-03-14 01:36:13 -05:00
|
|
|
void Timeline::sort_clips()
|
2012-10-05 17:05:33 -05:00
|
|
|
{
|
2015-02-07 18:06:11 -06:00
|
|
|
// Debug output
|
2022-01-12 10:52:05 -05:00
|
|
|
ZmqLogger::Instance()->AppendDebugMethod(
|
|
|
|
|
"Timeline::SortClips",
|
|
|
|
|
"clips.size()", clips.size());
|
2015-02-07 18:06:11 -06:00
|
|
|
|
2012-10-05 17:05:33 -05:00
|
|
|
// sort clips
|
2013-09-10 12:59:06 -05:00
|
|
|
clips.sort(CompareClips());
|
2012-10-05 17:05:33 -05:00
|
|
|
}
|
|
|
|
|
|
2013-10-01 17:19:53 -05:00
|
|
|
// Sort effects by position on the timeline
|
2015-03-14 01:36:13 -05:00
|
|
|
void Timeline::sort_effects()
|
2013-10-01 17:19:53 -05:00
|
|
|
{
|
|
|
|
|
// sort clips
|
|
|
|
|
effects.sort(CompareEffects());
|
|
|
|
|
}
|
|
|
|
|
|
2022-10-06 15:07:31 -05:00
|
|
|
// Clear all clips from timeline
|
|
|
|
|
// Clear all clips, effects, and frame mappers from the timeline.
// Objects that the timeline itself allocated (tracked in allocated_clips /
// allocated_effects / allocated_frame_mappers) are also deleted here;
// caller-owned objects are only closed, never freed.
void Timeline::Clear()
{
	ZmqLogger::Instance()->AppendDebugMethod("Timeline::Clear");

	// Get lock (prevent getting frames while this happens)
	const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

	// Close all open clips
	for (auto clip : clips)
	{
		// Closes the clip's reader via the open-clip bookkeeping
		update_open_clips(clip, false);

		// Delete clip object (if timeline allocated it)
		bool allocated = allocated_clips.count(clip);
		if (allocated) {
			delete clip;
			clip = NULL; // local copy only; the list itself is cleared below
		}
	}
	// Clear all clips
	clips.clear();
	allocated_clips.clear();

	// Close all effects
	for (auto effect : effects)
	{
		// Delete effect object (if timeline allocated it)
		bool allocated = allocated_effects.count(effect);
		if (allocated) {
			delete effect;
			effect = NULL; // local copy only; the list itself is cleared below
		}
	}
	// Clear all effects
	effects.clear();
	allocated_effects.clear();

	// Delete all FrameMappers (detach reader first so the mapper does not
	// touch the underlying reader while closing)
	for (auto mapper : allocated_frame_mappers)
	{
		mapper->Reader(NULL);
		mapper->Close();
		delete mapper;
	}
	allocated_frame_mappers.clear();
}
|
|
|
|
|
|
2012-10-08 16:22:18 -05:00
|
|
|
// Close the reader (and any resources it was consuming)
|
|
|
|
|
void Timeline::Close()
|
|
|
|
|
{
|
2019-07-03 14:14:02 -04:00
|
|
|
ZmqLogger::Instance()->AppendDebugMethod("Timeline::Close");
|
2016-04-24 15:37:47 -05:00
|
|
|
|
2012-10-10 00:52:47 -05:00
|
|
|
// Close all open clips
|
2019-12-27 01:01:48 -05:00
|
|
|
for (auto clip : clips)
|
2012-10-10 00:52:47 -05:00
|
|
|
{
|
|
|
|
|
// Open or Close this clip, based on if it's intersecting or not
|
|
|
|
|
update_open_clips(clip, false);
|
|
|
|
|
}
|
2012-12-03 22:55:46 -06:00
|
|
|
|
2015-03-15 02:28:28 -05:00
|
|
|
// Mark timeline as closed
|
2013-12-18 21:55:43 -06:00
|
|
|
is_open = false;
|
2014-01-05 22:37:11 -06:00
|
|
|
|
2022-09-15 18:33:06 -05:00
|
|
|
// Clear all cache (deep clear, including nested Readers)
|
|
|
|
|
ClearAllCache(true);
|
2012-10-08 16:22:18 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Open the reader (and start consuming resources)
|
|
|
|
|
void Timeline::Open()
|
|
|
|
|
{
|
2013-12-18 21:55:43 -06:00
|
|
|
is_open = true;
|
2012-10-08 16:22:18 -05:00
|
|
|
}
|
|
|
|
|
|
2012-11-16 17:29:12 -06:00
|
|
|
// Compare 2 floating point numbers for equality
|
|
|
|
|
// Approximate floating-point equality: true when the two values differ
// by less than a small fixed epsilon.
bool Timeline::isEqual(double a, double b)
{
	const double epsilon = 0.000001;
	return fabs(a - b) < epsilon;
}
|
2012-11-07 17:45:13 -06:00
|
|
|
|
2012-10-05 01:58:27 -05:00
|
|
|
// Get an openshot::Frame object for a specific frame number of this reader.
|
2017-10-26 18:44:35 -05:00
|
|
|
// Get an openshot::Frame object for a specific frame number of this reader.
// Returns the cached frame when available; otherwise composites a new frame
// (background color + every intersecting clip, bottom to top), caches it,
// and returns it.
std::shared_ptr<Frame> Timeline::GetFrame(int64_t requested_frame)
{
	// Adjust out of bounds frame number
	if (requested_frame < 1)
		requested_frame = 1;

	// Check cache
	std::shared_ptr<Frame> frame;
	frame = final_cache->GetFrame(requested_frame);
	if (frame) {
		// Debug output
		ZmqLogger::Instance()->AppendDebugMethod(
			"Timeline::GetFrame (Cached frame found)",
			"requested_frame", requested_frame);

		// Return cached frame
		return frame;
	}
	else
	{
		// Prevent async calls to the following code
		const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);

		// Get a list of clips that intersect with the requested section of timeline
		// This also opens the readers for intersecting clips, and marks non-intersecting clips as 'needs closing'
		std::vector<Clip*> nearby_clips;
		nearby_clips = find_intersecting_clips(requested_frame, 1, true);

		// Debug output
		ZmqLogger::Instance()->AppendDebugMethod(
			"Timeline::GetFrame (processing frame)",
			"requested_frame", requested_frame,
			"omp_get_thread_num()", omp_get_thread_num());

		// Init some basic properties about this frame
		int samples_in_frame = Frame::GetSamplesPerFrame(requested_frame, info.fps, info.sample_rate, info.channels);

		// Create blank frame (which will become the requested frame)
		std::shared_ptr<Frame> new_frame(std::make_shared<Frame>(requested_frame, preview_width, preview_height, "#000000", samples_in_frame, info.channels));
		new_frame->AddAudioSilence(samples_in_frame);
		new_frame->SampleRate(info.sample_rate);
		new_frame->ChannelsLayout(info.channel_layout);

		// Debug output
		ZmqLogger::Instance()->AppendDebugMethod(
			"Timeline::GetFrame (Adding solid color)",
			"requested_frame", requested_frame,
			"info.width", info.width,
			"info.height", info.height);

		// Add Background Color to 1st layer (if animated or not black)
		if ((color.red.GetCount() > 1 || color.green.GetCount() > 1 || color.blue.GetCount() > 1) ||
			(color.red.GetValue(requested_frame) != 0.0 || color.green.GetValue(requested_frame) != 0.0 || color.blue.GetValue(requested_frame) != 0.0))
			new_frame->AddColor(preview_width, preview_height, color.GetColorHex(requested_frame));

		// Debug output
		ZmqLogger::Instance()->AppendDebugMethod(
			"Timeline::GetFrame (Loop through clips)",
			"requested_frame", requested_frame,
			"clips.size()", clips.size(),
			"nearby_clips.size()", nearby_clips.size());

		// Find Clips near this time
		for (auto clip : nearby_clips)
		{
			// Convert the clip's position/duration (seconds) into timeline frame numbers
			long clip_start_position = round(clip->Position() * info.fps.ToDouble()) + 1;
			long clip_end_position = round((clip->Position() + clip->Duration()) * info.fps.ToDouble());
			// NOTE(review): clip_end_position has no "+ 1" here, but the nearby_clip
			// loop below and find_intersecting_clips() both add "+ 1" — confirm this
			// off-by-one difference at clip boundaries is intentional.
			bool does_clip_intersect = (clip_start_position <= requested_frame && clip_end_position >= requested_frame);

			// Debug output
			ZmqLogger::Instance()->AppendDebugMethod(
				"Timeline::GetFrame (Does clip intersect)",
				"requested_frame", requested_frame,
				"clip->Position()", clip->Position(),
				"clip->Duration()", clip->Duration(),
				"does_clip_intersect", does_clip_intersect);

			// Clip is visible
			if (does_clip_intersect)
			{
				// Determine if clip is "top" clip on this layer (only happens when multiple clips are overlapping)
				bool is_top_clip = true;
				float max_volume = 0.0;
				for (auto nearby_clip : nearby_clips)
				{
					long nearby_clip_start_position = round(nearby_clip->Position() * info.fps.ToDouble()) + 1;
					long nearby_clip_end_position = round((nearby_clip->Position() + nearby_clip->Duration()) * info.fps.ToDouble()) + 1;
					long nearby_clip_start_frame = (nearby_clip->Start() * info.fps.ToDouble()) + 1;
					long nearby_clip_frame_number = requested_frame - nearby_clip_start_position + nearby_clip_start_frame;

					// Determine if top clip: a later-starting overlapping clip on the
					// same layer takes precedence over this one
					if (clip->Id() != nearby_clip->Id() && clip->Layer() == nearby_clip->Layer() &&
						nearby_clip_start_position <= requested_frame && nearby_clip_end_position >= requested_frame &&
						nearby_clip_start_position > clip_start_position && is_top_clip == true) {
						is_top_clip = false;
					}

					// Determine max volume of overlapping clips (sum of audible volumes)
					if (nearby_clip->Reader() && nearby_clip->Reader()->info.has_audio &&
						nearby_clip->has_audio.GetInt(nearby_clip_frame_number) != 0 &&
						nearby_clip_start_position <= requested_frame && nearby_clip_end_position >= requested_frame) {
						max_volume += nearby_clip->volume.GetValue(nearby_clip_frame_number);
					}
				}

				// Determine the frame needed for this clip (based on the position on the timeline)
				long clip_start_frame = (clip->Start() * info.fps.ToDouble()) + 1;
				long clip_frame_number = requested_frame - clip_start_position + clip_start_frame;

				// Debug output
				ZmqLogger::Instance()->AppendDebugMethod(
					"Timeline::GetFrame (Calculate clip's frame #)",
					"clip->Position()", clip->Position(),
					"clip->Start()", clip->Start(),
					"info.fps.ToFloat()", info.fps.ToFloat(),
					"clip_frame_number", clip_frame_number);

				// Add clip's frame as layer
				add_layer(new_frame, clip, clip_frame_number, is_top_clip, max_volume);

			} else {
				// Debug output
				ZmqLogger::Instance()->AppendDebugMethod(
					"Timeline::GetFrame (clip does not intersect)",
					"requested_frame", requested_frame,
					"does_clip_intersect", does_clip_intersect);
			}

		} // end clip loop

		// Debug output
		ZmqLogger::Instance()->AppendDebugMethod(
			"Timeline::GetFrame (Add frame to cache)",
			"requested_frame", requested_frame,
			"info.width", info.width,
			"info.height", info.height);

		// Set frame # on mapped frame
		new_frame->SetFrameNumber(requested_frame);

		// Add final frame to cache
		final_cache->Add(new_frame);

		// Return frame (or blank frame)
		return new_frame;
	}
}
|
2013-12-07 21:09:55 -06:00
|
|
|
|
|
|
|
|
|
2015-02-19 01:03:22 -06:00
|
|
|
// Find intersecting clips (or non intersecting clips)
|
2019-08-04 22:23:06 -04:00
|
|
|
std::vector<Clip*> Timeline::find_intersecting_clips(int64_t requested_frame, int number_of_frames, bool include)
|
2015-02-19 01:03:22 -06:00
|
|
|
{
|
|
|
|
|
// Find matching clips
|
2019-08-04 22:23:06 -04:00
|
|
|
std::vector<Clip*> matching_clips;
|
2015-02-19 01:03:22 -06:00
|
|
|
|
|
|
|
|
// Calculate time of frame
|
2017-03-10 00:51:08 -06:00
|
|
|
float min_requested_frame = requested_frame;
|
|
|
|
|
float max_requested_frame = requested_frame + (number_of_frames - 1);
|
2015-02-19 01:03:22 -06:00
|
|
|
|
|
|
|
|
// Re-Sort Clips (since they likely changed)
|
2015-03-14 01:36:13 -05:00
|
|
|
sort_clips();
|
2015-02-19 01:03:22 -06:00
|
|
|
|
|
|
|
|
// Find Clips at this time
|
2019-12-27 01:01:48 -05:00
|
|
|
for (auto clip : clips)
|
2015-02-19 01:03:22 -06:00
|
|
|
{
|
|
|
|
|
// Does clip intersect the current requested time
|
2017-03-10 00:51:08 -06:00
|
|
|
long clip_start_position = round(clip->Position() * info.fps.ToDouble()) + 1;
|
2017-03-11 00:51:43 -06:00
|
|
|
long clip_end_position = round((clip->Position() + clip->Duration()) * info.fps.ToDouble()) + 1;
|
2017-03-10 00:51:08 -06:00
|
|
|
|
|
|
|
|
bool does_clip_intersect =
|
|
|
|
|
(clip_start_position <= min_requested_frame || clip_start_position <= max_requested_frame) &&
|
|
|
|
|
(clip_end_position >= min_requested_frame || clip_end_position >= max_requested_frame);
|
2015-02-19 01:03:22 -06:00
|
|
|
|
|
|
|
|
// Debug output
|
2022-01-12 10:52:05 -05:00
|
|
|
ZmqLogger::Instance()->AppendDebugMethod(
|
|
|
|
|
"Timeline::find_intersecting_clips (Is clip near or intersecting)",
|
|
|
|
|
"requested_frame", requested_frame,
|
|
|
|
|
"min_requested_frame", min_requested_frame,
|
|
|
|
|
"max_requested_frame", max_requested_frame,
|
|
|
|
|
"clip->Position()", clip->Position(),
|
|
|
|
|
"does_clip_intersect", does_clip_intersect);
|
2015-02-19 01:03:22 -06:00
|
|
|
|
|
|
|
|
// Open (or schedule for closing) this clip, based on if it's intersecting or not
|
|
|
|
|
update_open_clips(clip, does_clip_intersect);
|
|
|
|
|
|
|
|
|
|
// Clip is visible
|
|
|
|
|
if (does_clip_intersect && include)
|
|
|
|
|
// Add the intersecting clip
|
|
|
|
|
matching_clips.push_back(clip);
|
|
|
|
|
|
|
|
|
|
else if (!does_clip_intersect && !include)
|
|
|
|
|
// Add the non-intersecting clip
|
|
|
|
|
matching_clips.push_back(clip);
|
|
|
|
|
|
|
|
|
|
} // end clip loop
|
|
|
|
|
|
|
|
|
|
// return list
|
|
|
|
|
return matching_clips;
|
|
|
|
|
}
|
|
|
|
|
|
2019-05-14 00:20:32 -05:00
|
|
|
// Set the cache object used by this reader
|
2016-09-07 00:40:01 -05:00
|
|
|
// Replace the timeline's frame cache. If the current cache was allocated by
// the timeline itself, it is freed first; the new cache is caller-owned.
void Timeline::SetCache(CacheBase* new_cache) {
	// Free the previous cache only when the timeline owns it
	if (managed_cache && final_cache) {
		delete final_cache;
		final_cache = NULL;
		managed_cache = false;
	}

	// Adopt the caller-provided cache (not managed by the timeline)
	final_cache = new_cache;
}
|
|
|
|
|
|
2013-12-07 21:09:55 -06:00
|
|
|
// Generate JSON string of this object
|
2019-12-27 08:51:51 -05:00
|
|
|
std::string Timeline::Json() const {
|
2013-12-07 21:09:55 -06:00
|
|
|
|
|
|
|
|
// Return formatted string
|
|
|
|
|
return JsonValue().toStyledString();
|
|
|
|
|
}
|
|
|
|
|
|
2019-12-27 08:51:51 -05:00
|
|
|
// Generate Json::Value for this object
|
|
|
|
|
// Build the Json::Value tree describing this timeline, including viewport
// keyframes, background color, project path, and every clip and effect.
Json::Value Timeline::JsonValue() const {
	// Start from the parent reader's properties
	Json::Value root = ReaderBase::JsonValue();
	root["type"] = "Timeline";
	root["viewport_scale"] = viewport_scale.JsonValue();
	root["viewport_x"] = viewport_x.JsonValue();
	root["viewport_y"] = viewport_y.JsonValue();
	root["color"] = color.JsonValue();
	root["path"] = path;

	// Serialize every clip into the "clips" array
	root["clips"] = Json::Value(Json::arrayValue);
	for (const auto existing_clip : clips)
		root["clips"].append(existing_clip->JsonValue());

	// Serialize every effect into the "effects" array
	root["effects"] = Json::Value(Json::arrayValue);
	for (const auto existing_effect : effects)
		root["effects"].append(existing_effect->JsonValue());

	return root;
}
|
|
|
|
|
|
|
|
|
|
// Load JSON string into this object
|
2019-12-27 08:51:51 -05:00
|
|
|
void Timeline::SetJson(const std::string value) {
|
2013-12-07 21:09:55 -06:00
|
|
|
|
|
|
|
|
// Parse JSON string into JSON objects
|
|
|
|
|
try
|
|
|
|
|
{
|
2019-12-27 08:51:51 -05:00
|
|
|
const Json::Value root = openshot::stringToJson(value);
|
2013-12-07 21:09:55 -06:00
|
|
|
// Set all values that match
|
|
|
|
|
SetJsonValue(root);
|
|
|
|
|
}
|
2019-07-03 12:58:02 -04:00
|
|
|
catch (const std::exception& e)
|
2013-12-07 21:09:55 -06:00
|
|
|
{
|
|
|
|
|
// Error parsing JSON (or missing keys)
|
2019-08-27 15:47:39 -04:00
|
|
|
throw InvalidJSON("JSON is invalid (missing keys or invalid data types)");
|
2013-12-07 21:09:55 -06:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-12-27 08:51:51 -05:00
|
|
|
// Load Json::Value into this object
|
|
|
|
|
// Load a Json::Value into this timeline: replaces clips/effects, updates
// duration and preview size, and restores the previous open/closed state.
void Timeline::SetJsonValue(const Json::Value root) {

	// Get lock (prevent getting frames while this happens)
	const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);

	// Close timeline before we do anything (this closes all clips)
	bool was_open = is_open;
	Close();

	// Set parent data
	ReaderBase::SetJsonValue(root);

	// Set data from Json (if key is found)
	if (!root["path"].isNull())
		path = root["path"].asString();

	if (!root["clips"].isNull()) {
		// Clear existing clips
		// NOTE(review): this drops clip pointers without deleting timeline-allocated
		// clips (allocated_clips is not purged here, unlike Clear()) — confirm the
		// previous clips are owned/freed elsewhere.
		clips.clear();

		// loop through clips
		for (const Json::Value existing_clip : root["clips"]) {
			// Create Clip
			Clip *c = new Clip();

			// Keep track of allocated clip objects
			allocated_clips.insert(c);

			// When a clip is attached to an object, it searches for the object
			// on it's parent timeline. Setting the parent timeline of the clip here
			// allows attaching it to an object when exporting the project (because)
			// the exporter script initializes the clip and it's effects
			// before setting its parent timeline.
			c->ParentTimeline(this);

			// Load Json into Clip
			c->SetJsonValue(existing_clip);

			// Add Clip to Timeline
			AddClip(c);
		}
	}

	if (!root["effects"].isNull()) {
		// Clear existing effects
		effects.clear();

		// loop through effects
		for (const Json::Value existing_effect :root["effects"]) {
			// Create Effect
			EffectBase *e = NULL;

			if (!existing_effect["type"].isNull()) {
				// Create instance of effect (by its registered type name)
				if ( (e = EffectInfo().CreateEffect(existing_effect["type"].asString())) ) {

					// Keep track of allocated effect objects
					allocated_effects.insert(e);

					// Load Json into Effect
					e->SetJsonValue(existing_effect);

					// Add Effect to Timeline
					AddEffect(e);
				}
			}
		}
	}

	if (!root["duration"].isNull()) {
		// Update duration of timeline (and derive the video length in frames)
		info.duration = root["duration"].asDouble();
		info.video_length = info.fps.ToFloat() * info.duration;
	}

	// Update preview settings to match the (possibly new) output size
	preview_width = info.width;
	preview_height = info.height;

	// Re-open if needed (restore the state captured before Close() above)
	if (was_open)
		Open();
}
|
2014-01-08 01:43:58 -06:00
|
|
|
|
|
|
|
|
// Apply a special formatted JSON object, which represents a change to the timeline (insert, update, delete)
|
2019-08-04 22:23:06 -04:00
|
|
|
void Timeline::ApplyJsonDiff(std::string value) {
|
2014-01-08 01:43:58 -06:00
|
|
|
|
2017-03-05 04:34:32 -06:00
|
|
|
// Get lock (prevent getting frames while this happens)
|
2021-10-27 14:34:05 -04:00
|
|
|
const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);
|
2017-03-05 04:34:32 -06:00
|
|
|
|
2014-01-08 01:43:58 -06:00
|
|
|
// Parse JSON string into JSON objects
|
|
|
|
|
try
|
|
|
|
|
{
|
2019-12-27 08:51:51 -05:00
|
|
|
const Json::Value root = openshot::stringToJson(value);
|
2014-01-08 01:43:58 -06:00
|
|
|
// Process the JSON change array, loop through each item
|
2019-12-27 01:01:48 -05:00
|
|
|
for (const Json::Value change : root) {
|
|
|
|
|
std::string change_key = change["key"][(uint)0].asString();
|
2014-01-08 01:43:58 -06:00
|
|
|
|
|
|
|
|
// Process each type of change
|
2019-12-27 01:01:48 -05:00
|
|
|
if (change_key == "clips")
|
2014-01-08 01:43:58 -06:00
|
|
|
// Apply to CLIPS
|
|
|
|
|
apply_json_to_clips(change);
|
|
|
|
|
|
2019-12-27 01:01:48 -05:00
|
|
|
else if (change_key == "effects")
|
2014-01-08 01:43:58 -06:00
|
|
|
// Apply to EFFECTS
|
|
|
|
|
apply_json_to_effects(change);
|
|
|
|
|
|
|
|
|
|
else
|
|
|
|
|
// Apply to TIMELINE
|
|
|
|
|
apply_json_to_timeline(change);
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
}
|
2019-07-03 12:58:02 -04:00
|
|
|
catch (const std::exception& e)
|
2014-01-08 01:43:58 -06:00
|
|
|
{
|
|
|
|
|
// Error parsing JSON (or missing keys)
|
2019-08-27 15:47:39 -04:00
|
|
|
throw InvalidJSON("JSON is invalid (missing keys or invalid data types)");
|
2014-01-08 01:43:58 -06:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Apply JSON diff to clips
|
2017-10-26 18:44:35 -05:00
|
|
|
void Timeline::apply_json_to_clips(Json::Value change) {
|
2014-01-08 01:43:58 -06:00
|
|
|
|
|
|
|
|
// Get key and type of change
|
2019-08-04 22:23:06 -04:00
|
|
|
std::string change_type = change["type"].asString();
|
|
|
|
|
std::string clip_id = "";
|
2014-01-08 01:43:58 -06:00
|
|
|
Clip *existing_clip = NULL;
|
|
|
|
|
|
|
|
|
|
// Find id of clip (if any)
|
2019-12-27 01:01:48 -05:00
|
|
|
for (auto key_part : change["key"]) {
|
2014-01-08 01:43:58 -06:00
|
|
|
// Get each change
|
|
|
|
|
if (key_part.isObject()) {
|
|
|
|
|
// Check for id
|
|
|
|
|
if (!key_part["id"].isNull()) {
|
|
|
|
|
// Set the id
|
|
|
|
|
clip_id = key_part["id"].asString();
|
|
|
|
|
|
|
|
|
|
// Find matching clip in timeline (if any)
|
2019-12-27 01:01:48 -05:00
|
|
|
for (auto c : clips)
|
2014-01-08 01:43:58 -06:00
|
|
|
{
|
|
|
|
|
if (c->Id() == clip_id) {
|
|
|
|
|
existing_clip = c;
|
|
|
|
|
break; // clip found, exit loop
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
break; // id found, exit loop
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2015-08-07 23:11:03 -05:00
|
|
|
// Check for a more specific key (targetting this clip's effects)
|
|
|
|
|
// For example: ["clips", {"id:123}, "effects", {"id":432}]
|
|
|
|
|
if (existing_clip && change["key"].size() == 4 && change["key"][2] == "effects")
|
|
|
|
|
{
|
|
|
|
|
// This change is actually targetting a specific effect under a clip (and not the clip)
|
|
|
|
|
Json::Value key_part = change["key"][3];
|
|
|
|
|
|
|
|
|
|
if (key_part.isObject()) {
|
|
|
|
|
// Check for id
|
|
|
|
|
if (!key_part["id"].isNull())
|
|
|
|
|
{
|
|
|
|
|
// Set the id
|
2019-08-04 22:23:06 -04:00
|
|
|
std::string effect_id = key_part["id"].asString();
|
2015-08-07 23:11:03 -05:00
|
|
|
|
|
|
|
|
// Find matching effect in timeline (if any)
|
2019-08-04 22:23:06 -04:00
|
|
|
std::list<EffectBase*> effect_list = existing_clip->Effects();
|
2019-12-27 01:01:48 -05:00
|
|
|
for (auto e : effect_list)
|
2015-08-07 23:11:03 -05:00
|
|
|
{
|
|
|
|
|
if (e->Id() == effect_id) {
|
|
|
|
|
// Apply the change to the effect directly
|
2016-08-16 22:40:51 -05:00
|
|
|
apply_json_to_effects(change, e);
|
2017-03-10 00:51:08 -06:00
|
|
|
|
|
|
|
|
// Calculate start and end frames that this impacts, and remove those frames from the cache
|
2017-09-28 16:03:01 -05:00
|
|
|
int64_t new_starting_frame = (existing_clip->Position() * info.fps.ToDouble()) + 1;
|
|
|
|
|
int64_t new_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
|
2017-03-14 11:42:05 -05:00
|
|
|
final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);
|
2017-03-10 00:51:08 -06:00
|
|
|
|
2015-08-07 23:11:03 -05:00
|
|
|
return; // effect found, don't update clip
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2016-09-07 00:40:01 -05:00
|
|
|
// Calculate start and end frames that this impacts, and remove those frames from the cache
|
2016-09-15 17:43:46 -05:00
|
|
|
if (!change["value"].isArray() && !change["value"]["position"].isNull()) {
|
2017-09-28 16:03:01 -05:00
|
|
|
int64_t new_starting_frame = (change["value"]["position"].asDouble() * info.fps.ToDouble()) + 1;
|
|
|
|
|
int64_t new_ending_frame = ((change["value"]["position"].asDouble() + change["value"]["end"].asDouble() - change["value"]["start"].asDouble()) * info.fps.ToDouble()) + 1;
|
2017-03-14 11:42:05 -05:00
|
|
|
final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);
|
2016-09-15 17:43:46 -05:00
|
|
|
}
|
2016-09-07 00:40:01 -05:00
|
|
|
|
2014-01-08 01:43:58 -06:00
|
|
|
// Determine type of change operation
|
|
|
|
|
if (change_type == "insert") {
|
|
|
|
|
|
2022-10-06 15:07:31 -05:00
|
|
|
// Create clip
|
2014-01-08 01:43:58 -06:00
|
|
|
Clip *clip = new Clip();
|
2022-10-06 15:07:31 -05:00
|
|
|
|
|
|
|
|
// Keep track of allocated clip objects
|
|
|
|
|
allocated_clips.insert(clip);
|
|
|
|
|
|
|
|
|
|
// Set properties of clip from JSON
|
|
|
|
|
clip->SetJsonValue(change["value"]);
|
2014-01-08 01:43:58 -06:00
|
|
|
AddClip(clip); // Add clip to timeline
|
|
|
|
|
|
2018-01-06 01:55:42 -06:00
|
|
|
// Apply framemapper (or update existing framemapper)
|
|
|
|
|
apply_mapper_to_clip(clip);
|
|
|
|
|
|
2014-01-08 01:43:58 -06:00
|
|
|
} else if (change_type == "update") {
|
|
|
|
|
|
|
|
|
|
// Update existing clip
|
2016-09-07 00:40:01 -05:00
|
|
|
if (existing_clip) {
|
|
|
|
|
|
|
|
|
|
// Calculate start and end frames that this impacts, and remove those frames from the cache
|
2017-09-28 16:03:01 -05:00
|
|
|
int64_t old_starting_frame = (existing_clip->Position() * info.fps.ToDouble()) + 1;
|
|
|
|
|
int64_t old_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
|
2017-03-14 11:42:05 -05:00
|
|
|
final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);
|
2016-09-07 00:40:01 -05:00
|
|
|
|
2017-07-27 02:25:20 -05:00
|
|
|
// Remove cache on clip's Reader (if found)
|
|
|
|
|
if (existing_clip->Reader() && existing_clip->Reader()->GetCache())
|
|
|
|
|
existing_clip->Reader()->GetCache()->Remove(old_starting_frame - 8, old_ending_frame + 8);
|
|
|
|
|
|
2016-09-07 00:40:01 -05:00
|
|
|
// Update clip properties from JSON
|
|
|
|
|
existing_clip->SetJsonValue(change["value"]);
|
2017-07-27 02:25:20 -05:00
|
|
|
|
2018-01-06 01:55:42 -06:00
|
|
|
// Apply framemapper (or update existing framemapper)
|
|
|
|
|
apply_mapper_to_clip(existing_clip);
|
2016-09-07 00:40:01 -05:00
|
|
|
}
|
2014-01-08 01:43:58 -06:00
|
|
|
|
|
|
|
|
} else if (change_type == "delete") {
|
|
|
|
|
|
|
|
|
|
// Remove existing clip
|
2016-09-07 00:40:01 -05:00
|
|
|
if (existing_clip) {
|
|
|
|
|
|
|
|
|
|
// Calculate start and end frames that this impacts, and remove those frames from the cache
|
2017-09-28 16:03:01 -05:00
|
|
|
int64_t old_starting_frame = (existing_clip->Position() * info.fps.ToDouble()) + 1;
|
|
|
|
|
int64_t old_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
|
2017-03-14 11:42:05 -05:00
|
|
|
final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);
|
2016-09-07 00:40:01 -05:00
|
|
|
|
|
|
|
|
// Remove clip from timeline
|
|
|
|
|
RemoveClip(existing_clip);
|
|
|
|
|
}
|
2014-01-08 01:43:58 -06:00
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Apply JSON diff to effects
|
2017-10-26 18:44:35 -05:00
|
|
|
void Timeline::apply_json_to_effects(Json::Value change) {
|
2014-01-08 01:43:58 -06:00
|
|
|
|
|
|
|
|
// Get key and type of change
|
2019-08-04 22:23:06 -04:00
|
|
|
std::string change_type = change["type"].asString();
|
2014-01-08 01:43:58 -06:00
|
|
|
EffectBase *existing_effect = NULL;
|
|
|
|
|
|
|
|
|
|
// Find id of an effect (if any)
|
2019-12-27 01:01:48 -05:00
|
|
|
for (auto key_part : change["key"]) {
|
2014-01-08 01:43:58 -06:00
|
|
|
|
|
|
|
|
if (key_part.isObject()) {
|
|
|
|
|
// Check for id
|
|
|
|
|
if (!key_part["id"].isNull())
|
|
|
|
|
{
|
|
|
|
|
// Set the id
|
2019-08-04 22:23:06 -04:00
|
|
|
std::string effect_id = key_part["id"].asString();
|
2014-01-08 01:43:58 -06:00
|
|
|
|
|
|
|
|
// Find matching effect in timeline (if any)
|
2019-12-27 01:01:48 -05:00
|
|
|
for (auto e : effects)
|
2014-01-08 01:43:58 -06:00
|
|
|
{
|
|
|
|
|
if (e->Id() == effect_id) {
|
|
|
|
|
existing_effect = e;
|
|
|
|
|
break; // effect found, exit loop
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
break; // id found, exit loop
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2015-08-07 23:11:03 -05:00
|
|
|
// Now that we found the effect, apply the change to it
|
|
|
|
|
if (existing_effect || change_type == "insert")
|
|
|
|
|
// Apply change to effect
|
|
|
|
|
apply_json_to_effects(change, existing_effect);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Apply JSON diff to effects (if you already know which effect needs to be updated)
|
2017-10-26 18:44:35 -05:00
|
|
|
void Timeline::apply_json_to_effects(Json::Value change, EffectBase* existing_effect) {
|
2015-08-07 23:11:03 -05:00
|
|
|
|
|
|
|
|
// Get key and type of change
|
2019-08-04 22:23:06 -04:00
|
|
|
std::string change_type = change["type"].asString();
|
2015-08-07 23:11:03 -05:00
|
|
|
|
2016-09-07 00:40:01 -05:00
|
|
|
// Calculate start and end frames that this impacts, and remove those frames from the cache
|
2016-09-15 17:43:46 -05:00
|
|
|
if (!change["value"].isArray() && !change["value"]["position"].isNull()) {
|
2017-09-28 16:03:01 -05:00
|
|
|
int64_t new_starting_frame = (change["value"]["position"].asDouble() * info.fps.ToDouble()) + 1;
|
|
|
|
|
int64_t new_ending_frame = ((change["value"]["position"].asDouble() + change["value"]["end"].asDouble() - change["value"]["start"].asDouble()) * info.fps.ToDouble()) + 1;
|
2017-03-14 11:42:05 -05:00
|
|
|
final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);
|
2016-09-15 17:43:46 -05:00
|
|
|
}
|
2016-09-07 00:40:01 -05:00
|
|
|
|
2014-01-08 01:43:58 -06:00
|
|
|
// Determine type of change operation
|
|
|
|
|
if (change_type == "insert") {
|
|
|
|
|
|
|
|
|
|
// Determine type of effect
|
2019-08-04 22:23:06 -04:00
|
|
|
std::string effect_type = change["value"]["type"].asString();
|
2014-01-08 01:43:58 -06:00
|
|
|
|
|
|
|
|
// Create Effect
|
|
|
|
|
EffectBase *e = NULL;
|
|
|
|
|
|
|
|
|
|
// Init the matching effect object
|
2019-12-02 10:45:06 -05:00
|
|
|
if ( (e = EffectInfo().CreateEffect(effect_type)) ) {
|
2015-11-25 23:54:10 -06:00
|
|
|
|
2022-10-06 15:07:31 -05:00
|
|
|
// Keep track of allocated effect objects
|
|
|
|
|
allocated_effects.insert(e);
|
|
|
|
|
|
2018-08-11 18:22:18 -05:00
|
|
|
// Load Json into Effect
|
|
|
|
|
e->SetJsonValue(change["value"]);
|
2014-01-08 01:43:58 -06:00
|
|
|
|
2018-08-11 18:22:18 -05:00
|
|
|
// Add Effect to Timeline
|
|
|
|
|
AddEffect(e);
|
|
|
|
|
}
|
2014-01-08 01:43:58 -06:00
|
|
|
|
|
|
|
|
} else if (change_type == "update") {
|
|
|
|
|
|
|
|
|
|
// Update existing effect
|
2016-09-07 00:40:01 -05:00
|
|
|
if (existing_effect) {
|
|
|
|
|
|
|
|
|
|
// Calculate start and end frames that this impacts, and remove those frames from the cache
|
2017-09-28 16:03:01 -05:00
|
|
|
int64_t old_starting_frame = (existing_effect->Position() * info.fps.ToDouble()) + 1;
|
|
|
|
|
int64_t old_ending_frame = ((existing_effect->Position() + existing_effect->Duration()) * info.fps.ToDouble()) + 1;
|
2017-03-14 11:42:05 -05:00
|
|
|
final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);
|
2016-09-07 00:40:01 -05:00
|
|
|
|
|
|
|
|
// Update effect properties from JSON
|
|
|
|
|
existing_effect->SetJsonValue(change["value"]);
|
|
|
|
|
}
|
2014-01-08 01:43:58 -06:00
|
|
|
|
|
|
|
|
} else if (change_type == "delete") {
|
|
|
|
|
|
|
|
|
|
// Remove existing effect
|
2016-09-07 00:40:01 -05:00
|
|
|
if (existing_effect) {
|
|
|
|
|
|
|
|
|
|
// Calculate start and end frames that this impacts, and remove those frames from the cache
|
2017-09-28 16:03:01 -05:00
|
|
|
int64_t old_starting_frame = (existing_effect->Position() * info.fps.ToDouble()) + 1;
|
|
|
|
|
int64_t old_ending_frame = ((existing_effect->Position() + existing_effect->Duration()) * info.fps.ToDouble()) + 1;
|
2017-03-14 11:42:05 -05:00
|
|
|
final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);
|
2016-09-07 00:40:01 -05:00
|
|
|
|
|
|
|
|
// Remove effect from timeline
|
|
|
|
|
RemoveEffect(existing_effect);
|
|
|
|
|
}
|
2014-01-08 01:43:58 -06:00
|
|
|
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Apply JSON diff to timeline properties
|
2017-10-26 18:44:35 -05:00
|
|
|
void Timeline::apply_json_to_timeline(Json::Value change) {
|
2014-01-08 01:43:58 -06:00
|
|
|
|
|
|
|
|
// Get key and type of change
|
2019-08-04 22:23:06 -04:00
|
|
|
std::string change_type = change["type"].asString();
|
|
|
|
|
std::string root_key = change["key"][(uint)0].asString();
|
|
|
|
|
std::string sub_key = "";
|
2016-01-10 16:50:54 -06:00
|
|
|
if (change["key"].size() >= 2)
|
|
|
|
|
sub_key = change["key"][(uint)1].asString();
|
2014-01-08 01:43:58 -06:00
|
|
|
|
2016-09-07 00:40:01 -05:00
|
|
|
// Clear entire cache
|
Large refactor of Timeline, TimelineBase, ClipBase, and Clip, to allow a Clip access to the parent timeline instance (if available), and thus, certain properties (preview size, timeline FPS, etc...). This allows for a simpler rendering of Clip keyframes (during the Clip::GetFrame method), and a simpler Timeline class, that can change the preview window size dynamically and no longer requires a Singleton Settings class.
- Also removed "crop" from Clip class, as it was never implmeneted correctly, and we have a fully functional "crop" effect when needed
- Added caching to Clip class, to optimize previewing of cached frames (much faster than previous)
2020-10-04 16:59:21 -05:00
|
|
|
ClearAllCache();
|
2016-09-07 00:40:01 -05:00
|
|
|
|
2014-01-08 01:43:58 -06:00
|
|
|
// Determine type of change operation
|
|
|
|
|
if (change_type == "insert" || change_type == "update") {
|
|
|
|
|
|
|
|
|
|
// INSERT / UPDATE
|
|
|
|
|
// Check for valid property
|
|
|
|
|
if (root_key == "color")
|
|
|
|
|
// Set color
|
|
|
|
|
color.SetJsonValue(change["value"]);
|
|
|
|
|
else if (root_key == "viewport_scale")
|
|
|
|
|
// Set viewport scale
|
|
|
|
|
viewport_scale.SetJsonValue(change["value"]);
|
|
|
|
|
else if (root_key == "viewport_x")
|
|
|
|
|
// Set viewport x offset
|
|
|
|
|
viewport_x.SetJsonValue(change["value"]);
|
|
|
|
|
else if (root_key == "viewport_y")
|
|
|
|
|
// Set viewport y offset
|
|
|
|
|
viewport_y.SetJsonValue(change["value"]);
|
2016-08-15 00:44:51 -05:00
|
|
|
else if (root_key == "duration") {
|
|
|
|
|
// Update duration of timeline
|
|
|
|
|
info.duration = change["value"].asDouble();
|
|
|
|
|
info.video_length = info.fps.ToFloat() * info.duration;
|
|
|
|
|
}
|
Large refactor of Timeline, TimelineBase, ClipBase, and Clip, to allow a Clip access to the parent timeline instance (if available), and thus, certain properties (preview size, timeline FPS, etc...). This allows for a simpler rendering of Clip keyframes (during the Clip::GetFrame method), and a simpler Timeline class, that can change the preview window size dynamically and no longer requires a Singleton Settings class.
- Also removed "crop" from Clip class, as it was never implmeneted correctly, and we have a fully functional "crop" effect when needed
- Added caching to Clip class, to optimize previewing of cached frames (much faster than previous)
2020-10-04 16:59:21 -05:00
|
|
|
else if (root_key == "width") {
|
2015-06-04 17:28:39 -05:00
|
|
|
// Set width
|
|
|
|
|
info.width = change["value"].asInt();
|
Large refactor of Timeline, TimelineBase, ClipBase, and Clip, to allow a Clip access to the parent timeline instance (if available), and thus, certain properties (preview size, timeline FPS, etc...). This allows for a simpler rendering of Clip keyframes (during the Clip::GetFrame method), and a simpler Timeline class, that can change the preview window size dynamically and no longer requires a Singleton Settings class.
- Also removed "crop" from Clip class, as it was never implmeneted correctly, and we have a fully functional "crop" effect when needed
- Added caching to Clip class, to optimize previewing of cached frames (much faster than previous)
2020-10-04 16:59:21 -05:00
|
|
|
preview_width = info.width;
|
|
|
|
|
}
|
|
|
|
|
else if (root_key == "height") {
|
2015-06-04 17:28:39 -05:00
|
|
|
// Set height
|
|
|
|
|
info.height = change["value"].asInt();
|
Large refactor of Timeline, TimelineBase, ClipBase, and Clip, to allow a Clip access to the parent timeline instance (if available), and thus, certain properties (preview size, timeline FPS, etc...). This allows for a simpler rendering of Clip keyframes (during the Clip::GetFrame method), and a simpler Timeline class, that can change the preview window size dynamically and no longer requires a Singleton Settings class.
- Also removed "crop" from Clip class, as it was never implmeneted correctly, and we have a fully functional "crop" effect when needed
- Added caching to Clip class, to optimize previewing of cached frames (much faster than previous)
2020-10-04 16:59:21 -05:00
|
|
|
preview_height = info.height;
|
|
|
|
|
}
|
2016-01-10 16:50:54 -06:00
|
|
|
else if (root_key == "fps" && sub_key == "" && change["value"].isObject()) {
|
|
|
|
|
// Set fps fraction
|
|
|
|
|
if (!change["value"]["num"].isNull())
|
|
|
|
|
info.fps.num = change["value"]["num"].asInt();
|
|
|
|
|
if (!change["value"]["den"].isNull())
|
|
|
|
|
info.fps.den = change["value"]["den"].asInt();
|
|
|
|
|
}
|
|
|
|
|
else if (root_key == "fps" && sub_key == "num")
|
2015-06-04 17:28:39 -05:00
|
|
|
// Set fps.num
|
|
|
|
|
info.fps.num = change["value"].asInt();
|
2016-01-10 16:50:54 -06:00
|
|
|
else if (root_key == "fps" && sub_key == "den")
|
2015-06-04 17:28:39 -05:00
|
|
|
// Set fps.den
|
|
|
|
|
info.fps.den = change["value"].asInt();
|
2019-05-01 18:02:25 -05:00
|
|
|
else if (root_key == "display_ratio" && sub_key == "" && change["value"].isObject()) {
|
|
|
|
|
// Set display_ratio fraction
|
|
|
|
|
if (!change["value"]["num"].isNull())
|
|
|
|
|
info.display_ratio.num = change["value"]["num"].asInt();
|
|
|
|
|
if (!change["value"]["den"].isNull())
|
|
|
|
|
info.display_ratio.den = change["value"]["den"].asInt();
|
|
|
|
|
}
|
|
|
|
|
else if (root_key == "display_ratio" && sub_key == "num")
|
|
|
|
|
// Set display_ratio.num
|
|
|
|
|
info.display_ratio.num = change["value"].asInt();
|
|
|
|
|
else if (root_key == "display_ratio" && sub_key == "den")
|
|
|
|
|
// Set display_ratio.den
|
|
|
|
|
info.display_ratio.den = change["value"].asInt();
|
|
|
|
|
else if (root_key == "pixel_ratio" && sub_key == "" && change["value"].isObject()) {
|
|
|
|
|
// Set pixel_ratio fraction
|
|
|
|
|
if (!change["value"]["num"].isNull())
|
|
|
|
|
info.pixel_ratio.num = change["value"]["num"].asInt();
|
|
|
|
|
if (!change["value"]["den"].isNull())
|
|
|
|
|
info.pixel_ratio.den = change["value"]["den"].asInt();
|
|
|
|
|
}
|
|
|
|
|
else if (root_key == "pixel_ratio" && sub_key == "num")
|
|
|
|
|
// Set pixel_ratio.num
|
|
|
|
|
info.pixel_ratio.num = change["value"].asInt();
|
|
|
|
|
else if (root_key == "pixel_ratio" && sub_key == "den")
|
|
|
|
|
// Set pixel_ratio.den
|
|
|
|
|
info.pixel_ratio.den = change["value"].asInt();
|
|
|
|
|
|
2016-01-10 16:50:54 -06:00
|
|
|
else if (root_key == "sample_rate")
|
|
|
|
|
// Set sample rate
|
|
|
|
|
info.sample_rate = change["value"].asInt();
|
|
|
|
|
else if (root_key == "channels")
|
|
|
|
|
// Set channels
|
|
|
|
|
info.channels = change["value"].asInt();
|
|
|
|
|
else if (root_key == "channel_layout")
|
|
|
|
|
// Set channel layout
|
|
|
|
|
info.channel_layout = (ChannelLayout) change["value"].asInt();
|
2014-01-08 01:43:58 -06:00
|
|
|
else
|
|
|
|
|
// Error parsing JSON (or missing keys)
|
|
|
|
|
throw InvalidJSONKey("JSON change key is invalid", change.toStyledString());
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
} else if (change["type"].asString() == "delete") {
|
|
|
|
|
|
|
|
|
|
// DELETE / RESET
|
|
|
|
|
// Reset the following properties (since we can't delete them)
|
|
|
|
|
if (root_key == "color") {
|
|
|
|
|
color = Color();
|
|
|
|
|
color.red = Keyframe(0.0);
|
|
|
|
|
color.green = Keyframe(0.0);
|
|
|
|
|
color.blue = Keyframe(0.0);
|
|
|
|
|
}
|
|
|
|
|
else if (root_key == "viewport_scale")
|
|
|
|
|
viewport_scale = Keyframe(1.0);
|
|
|
|
|
else if (root_key == "viewport_x")
|
|
|
|
|
viewport_x = Keyframe(0.0);
|
|
|
|
|
else if (root_key == "viewport_y")
|
|
|
|
|
viewport_y = Keyframe(0.0);
|
|
|
|
|
else
|
|
|
|
|
// Error parsing JSON (or missing keys)
|
|
|
|
|
throw InvalidJSONKey("JSON change key is invalid", change.toStyledString());
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2017-03-10 00:51:08 -06:00
|
|
|
// Clear all caches
|
2022-09-15 18:33:06 -05:00
|
|
|
void Timeline::ClearAllCache(bool deep) {
|
2017-05-18 02:48:00 -05:00
|
|
|
|
2017-03-10 00:51:08 -06:00
|
|
|
// Clear primary cache
|
2022-01-26 17:56:33 -06:00
|
|
|
if (final_cache) {
|
|
|
|
|
final_cache->Clear();
|
|
|
|
|
}
|
2014-01-08 01:43:58 -06:00
|
|
|
|
2017-03-10 00:51:08 -06:00
|
|
|
// Loop through all clips
|
2019-12-27 01:01:48 -05:00
|
|
|
for (auto clip : clips)
|
2017-03-10 00:51:08 -06:00
|
|
|
{
|
|
|
|
|
// Clear cache on clip
|
|
|
|
|
clip->Reader()->GetCache()->Clear();
|
2014-01-08 01:43:58 -06:00
|
|
|
|
2022-09-15 18:33:06 -05:00
|
|
|
// Clear nested Reader (if deep clear requested)
|
|
|
|
|
if (deep && clip->Reader()->Name() == "FrameMapper") {
|
2021-09-27 10:56:21 -04:00
|
|
|
FrameMapper* nested_reader = (FrameMapper*) clip->Reader();
|
|
|
|
|
if (nested_reader->Reader() && nested_reader->Reader()->GetCache())
|
|
|
|
|
nested_reader->Reader()->GetCache()->Clear();
|
|
|
|
|
}
|
2017-03-10 00:51:08 -06:00
|
|
|
}
|
2017-11-11 17:16:56 -06:00
|
|
|
}
|
2019-01-19 02:18:52 -06:00
|
|
|
|
|
|
|
|
// Set Max Image Size (used for performance optimization). Convenience function for setting
|
|
|
|
|
// Settings::Instance()->MAX_WIDTH and Settings::Instance()->MAX_HEIGHT.
|
|
|
|
|
void Timeline::SetMaxSize(int width, int height) {
|
2019-05-01 18:02:25 -05:00
|
|
|
// Maintain aspect ratio regardless of what size is passed in
|
2021-10-07 13:45:16 -05:00
|
|
|
QSize display_ratio_size = QSize(info.width, info.height);
|
2019-08-04 23:51:02 -04:00
|
|
|
QSize proposed_size = QSize(std::min(width, info.width), std::min(height, info.height));
|
2019-05-01 18:02:25 -05:00
|
|
|
|
|
|
|
|
// Scale QSize up to proposed size
|
|
|
|
|
display_ratio_size.scale(proposed_size, Qt::KeepAspectRatio);
|
|
|
|
|
|
Large refactor of Timeline, TimelineBase, ClipBase, and Clip, to allow a Clip access to the parent timeline instance (if available), and thus, certain properties (preview size, timeline FPS, etc...). This allows for a simpler rendering of Clip keyframes (during the Clip::GetFrame method), and a simpler Timeline class, that can change the preview window size dynamically and no longer requires a Singleton Settings class.
- Also removed "crop" from Clip class, as it was never implmeneted correctly, and we have a fully functional "crop" effect when needed
- Added caching to Clip class, to optimize previewing of cached frames (much faster than previous)
2020-10-04 16:59:21 -05:00
|
|
|
// Update preview settings
|
|
|
|
|
preview_width = display_ratio_size.width();
|
|
|
|
|
preview_height = display_ratio_size.height();
|
2021-06-10 16:32:53 -04:00
|
|
|
}
|