Merge branch 'develop' into qt6-support

This commit is contained in:
Jonathan Thomas
2026-02-13 23:07:53 -06:00
18 changed files with 761 additions and 236 deletions

View File

@@ -13,83 +13,116 @@
#include <chrono>
#include <iostream>
#include <memory>
#include <string>
#include "Clip.h"
#include "Frame.h"
#include "FFmpegReader.h"
#include "FFmpegWriter.h"
#include "Settings.h"
#include "Timeline.h"
#include "Qt/VideoCacheThread.h" // <— your new header
using namespace openshot;
int main(int argc, char* argv[]) {
using clock = std::chrono::high_resolution_clock;
auto total_start = clock::now();
const std::string output_dir = "/home/jonathan/Downloads";
const std::string input_paths[] = {
"/home/jonathan/Videos/3.4 Release/Screencasts/Timing.mp4",
"/home/jonathan/Downloads/openshot-testing/sintel_trailer-720p.mp4"
};
const int64_t frames_to_fetch[] = {175, 225, 240, 500, 1000};
const bool use_hw_decode = false;
// 1) Open the FFmpegReader as usual
const char* input_path = "/home/jonathan/Downloads/openshot-testing/sintel_trailer-720p.mp4";
FFmpegReader reader(input_path);
reader.Open();
std::cout << "Hardware decode: " << (use_hw_decode ? "ON" : "OFF") << "\n";
openshot::Settings::Instance()->HARDWARE_DECODER = use_hw_decode ? 1 : 0;
const int64_t total_frames = reader.info.video_length;
std::cout << "Total frames: " << total_frames << "\n";
for (const std::string& input_path : input_paths) {
auto file_start = clock::now();
std::string base = input_path;
size_t slash = base.find_last_of('/');
if (slash != std::string::npos) {
base = base.substr(slash + 1);
}
std::cout << "\n=== File: " << base << " ===\n";
auto t0 = clock::now();
FFmpegReader reader(input_path.c_str());
auto t1 = clock::now();
std::cout << "FFmpegReader ctor: "
<< std::chrono::duration_cast<std::chrono::milliseconds>(t1 - t0).count()
<< " ms\n";
Timeline timeline(reader.info.width, reader.info.height, reader.info.fps, reader.info.sample_rate, reader.info.channels, reader.info.channel_layout);
Clip c1(&reader);
timeline.AddClip(&c1);
timeline.Open();
timeline.DisplayInfo();
auto t2 = clock::now();
reader.Open();
auto t3 = clock::now();
std::cout << "FFmpegReader Open(): "
<< std::chrono::duration_cast<std::chrono::milliseconds>(t3 - t2).count()
<< " ms\n";
auto t4 = clock::now();
Timeline timeline(1920, 1080, Fraction(30, 1), reader.info.sample_rate, reader.info.channels, reader.info.channel_layout);
timeline.SetMaxSize(640, 480);
auto t5 = clock::now();
std::cout << "Timeline ctor (1080p30): "
<< std::chrono::duration_cast<std::chrono::milliseconds>(t5 - t4).count()
<< " ms\n";
// 2) Construct a VideoCacheThread around 'reader' and start its background loop
// (VideoCacheThread inherits juce::Thread)
std::shared_ptr<VideoCacheThread> cache = std::make_shared<VideoCacheThread>();
cache->Reader(&timeline); // attaches the FFmpegReader and internally calls Play()
cache->StartThread(); // juce::Thread method, begins run()
auto t6 = clock::now();
Clip c1(&reader);
auto t7 = clock::now();
std::cout << "Clip ctor: "
<< std::chrono::duration_cast<std::chrono::milliseconds>(t7 - t6).count()
<< " ms\n";
// 3) Set up the writer exactly as before
FFmpegWriter writer("/home/jonathan/Downloads/performancecachetest.mp4");
writer.SetAudioOptions("aac", 48000, 192000);
writer.SetVideoOptions("libx264", 1280, 720, Fraction(30, 1), 5000000);
writer.Open();
timeline.AddClip(&c1);
// 4) Forward pass: for each frame 1…N, tell the cache thread to seek to that frame,
// then immediately call cache->GetFrame(frame), which will block only if that frame
// hasnt been decoded into the cache yet.
auto t0 = std::chrono::high_resolution_clock::now();
cache->setSpeed(1);
for (int64_t f = 1; f <= total_frames; ++f) {
float pct = (float(f) / total_frames) * 100.0f;
std::cout << "Forward: requesting frame " << f << " (" << pct << "%)\n";
auto t8 = clock::now();
timeline.Open();
auto t9 = clock::now();
std::cout << "Timeline Open(): "
<< std::chrono::duration_cast<std::chrono::milliseconds>(t9 - t8).count()
<< " ms\n";
cache->Seek(f); // signal “I need frame f now (and please prefetch f+1, f+2, …)”
std::shared_ptr<Frame> framePtr = timeline.GetFrame(f);
writer.WriteFrame(framePtr);
for (int64_t frame_number : frames_to_fetch) {
auto loop_start = clock::now();
std::cout << "Requesting frame " << frame_number << "...\n";
auto t10 = clock::now();
std::shared_ptr<Frame> frame = timeline.GetFrame(frame_number);
auto t11 = clock::now();
std::cout << "Timeline GetFrame(" << frame_number << "): "
<< std::chrono::duration_cast<std::chrono::milliseconds>(t11 - t10).count()
<< " ms\n";
std::string out_path = output_dir + "/frame-" + base + "-" + std::to_string(frame_number) + ".jpg";
auto t12 = clock::now();
frame->Thumbnail(out_path, 200, 80, "", "", "#000000", false, "JPEG", 95, 0.0f);
auto t13 = clock::now();
std::cout << "Frame Thumbnail() JPEG (" << frame_number << "): "
<< std::chrono::duration_cast<std::chrono::milliseconds>(t13 - t12).count()
<< " ms\n";
auto loop_end = clock::now();
std::cout << "Frame loop total (" << frame_number << "): "
<< std::chrono::duration_cast<std::chrono::milliseconds>(loop_end - loop_start).count()
<< " ms\n";
}
reader.Close();
timeline.Close();
auto file_end = clock::now();
std::cout << "File total (" << base << "): "
<< std::chrono::duration_cast<std::chrono::milliseconds>(file_end - file_start).count()
<< " ms\n";
}
auto t1 = std::chrono::high_resolution_clock::now();
auto forward_ms = std::chrono::duration_cast<std::chrono::milliseconds>(t1 - t0).count();
// 5) Backward pass: same idea in reverse
auto t2 = std::chrono::high_resolution_clock::now();
cache->setSpeed(-1);
for (int64_t f = total_frames; f >= 1; --f) {
float pct = (float(total_frames - f + 1) / total_frames) * 100.0f;
std::cout << "Backward: requesting frame " << f << " (" << pct << "%)\n";
cache->Seek(f);
std::shared_ptr<Frame> framePtr = timeline.GetFrame(f);
writer.WriteFrame(framePtr);
}
auto t3 = std::chrono::high_resolution_clock::now();
auto backward_ms = std::chrono::duration_cast<std::chrono::milliseconds>(t3 - t2).count();
std::cout << "\nForward pass elapsed: " << forward_ms << " ms\n";
std::cout << "Backward pass elapsed: " << backward_ms << " ms\n";
// 6) Shut down the cache thread, close everything
cache->StopThread(10000); // politely tells run() to exit, waits up to 10s
reader.Close();
writer.Close();
timeline.Close();
auto total_end = clock::now();
std::cout << "Total elapsed: "
<< std::chrono::duration_cast<std::chrono::milliseconds>(total_end - total_start).count()
<< " ms\n";
return 0;
}

View File

@@ -42,6 +42,8 @@ void AudioReaderSource::getNextAudioBlock(const juce::AudioSourceChannelInfo& in
}
while (remaining_samples > 0) {
const int previous_remaining = remaining_samples;
frame.reset();
try {
// Get current frame object
if (reader) {
@@ -53,9 +55,19 @@ void AudioReaderSource::getNextAudioBlock(const juce::AudioSourceChannelInfo& in
// Get audio samples
if (reader && frame) {
const int frame_samples = frame->GetAudioSamplesCount();
const int frame_channels = frame->GetAudioChannelsCount();
// Corrupt/unsupported streams can yield frames without audio data.
// Avoid a tight loop that never consumes remaining_samples.
if (frame_samples <= 0 || frame_channels <= 0) {
info.buffer->clear(remaining_position, remaining_samples);
break;
}
if (sample_position + remaining_samples <= frame->GetAudioSamplesCount()) {
// Success, we have enough samples
for (int channel = 0; channel < frame->GetAudioChannelsCount(); channel++) {
for (int channel = 0; channel < frame_channels; channel++) {
if (channel < info.buffer->getNumChannels()) {
info.buffer->addFrom(channel, remaining_position, *frame->GetAudioSampleBuffer(),
channel, sample_position, remaining_samples);
@@ -68,7 +80,12 @@ void AudioReaderSource::getNextAudioBlock(const juce::AudioSourceChannelInfo& in
} else if (sample_position + remaining_samples > frame->GetAudioSamplesCount()) {
// Not enough samples, take what we can
int amount_to_copy = frame->GetAudioSamplesCount() - sample_position;
for (int channel = 0; channel < frame->GetAudioChannelsCount(); channel++) {
if (amount_to_copy <= 0) {
info.buffer->clear(remaining_position, remaining_samples);
break;
}
for (int channel = 0; channel < frame_channels; channel++) {
if (channel < info.buffer->getNumChannels()) {
info.buffer->addFrom(channel, remaining_position, *frame->GetAudioSampleBuffer(), channel,
sample_position, amount_to_copy);
@@ -84,7 +101,14 @@ void AudioReaderSource::getNextAudioBlock(const juce::AudioSourceChannelInfo& in
frame_position += speed;
sample_position = 0; // reset for new frame
}
} else {
info.buffer->clear(remaining_position, remaining_samples);
break;
}
if (remaining_samples == previous_remaining) {
info.buffer->clear(remaining_position, remaining_samples);
break;
}
}
}

View File

@@ -19,7 +19,7 @@ using namespace std;
using namespace openshot;
// Default constructor, no max bytes
CacheMemory::CacheMemory() : CacheBase(0), bytes_freed_since_trim(0) {
CacheMemory::CacheMemory() : CacheBase(0) {
// Set cache type name
cache_type = "CacheMemory";
range_version = 0;
@@ -27,7 +27,7 @@ CacheMemory::CacheMemory() : CacheBase(0), bytes_freed_since_trim(0) {
}
// Constructor that sets the max bytes to cache
CacheMemory::CacheMemory(int64_t max_bytes) : CacheBase(max_bytes), bytes_freed_since_trim(0) {
CacheMemory::CacheMemory(int64_t max_bytes) : CacheBase(max_bytes) {
// Set cache type name
cache_type = "CacheMemory";
range_version = 0;
@@ -70,6 +70,9 @@ void CacheMemory::Add(std::shared_ptr<Frame> frame)
// Check if frame is already contained in cache
bool CacheMemory::Contains(int64_t frame_number) {
// Create a scoped lock, to protect the cache from multiple threads
const std::lock_guard<std::recursive_mutex> lock(*cacheMutex);
if (frames.count(frame_number) > 0) {
return true;
} else {
@@ -162,8 +165,6 @@ void CacheMemory::Remove(int64_t start_frame_number, int64_t end_frame_number)
{
// Create a scoped lock, to protect the cache from multiple threads
const std::lock_guard<std::recursive_mutex> lock(*cacheMutex);
int64_t removed_bytes = 0;
// Loop through frame numbers
std::deque<int64_t>::iterator itr;
for(itr = frame_numbers.begin(); itr != frame_numbers.end();)
@@ -182,10 +183,6 @@ void CacheMemory::Remove(int64_t start_frame_number, int64_t end_frame_number)
{
if (*itr_ordered >= start_frame_number && *itr_ordered <= end_frame_number)
{
// Count bytes freed before erasing the frame
if (frames.count(*itr_ordered))
removed_bytes += frames[*itr_ordered]->GetBytes();
// erase frame number
frames.erase(*itr_ordered);
itr_ordered = ordered_frame_numbers.erase(itr_ordered);
@@ -193,17 +190,6 @@ void CacheMemory::Remove(int64_t start_frame_number, int64_t end_frame_number)
itr_ordered++;
}
if (removed_bytes > 0)
{
bytes_freed_since_trim += removed_bytes;
if (bytes_freed_since_trim >= TRIM_THRESHOLD_BYTES)
{
// Periodically return freed arenas to the OS
if (TrimMemoryToOS())
bytes_freed_since_trim = 0;
}
}
// Needs range processing (since cache has changed)
needs_range_processing = true;
}
@@ -246,10 +232,8 @@ void CacheMemory::Clear()
ordered_frame_numbers.clear();
ordered_frame_numbers.shrink_to_fit();
needs_range_processing = true;
bytes_freed_since_trim = 0;
// Trim freed arenas back to OS after large clears
TrimMemoryToOS(true);
// Trim freed arenas back to OS after large clears (debounced)
TrimMemoryToOS();
}
// Count the frames in the queue

View File

@@ -28,10 +28,8 @@ namespace openshot {
*/
class CacheMemory : public CacheBase {
private:
static constexpr int64_t TRIM_THRESHOLD_BYTES = 1024LL * 1024 * 1024; ///< Release memory after freeing this much memory
std::map<int64_t, std::shared_ptr<openshot::Frame> > frames; ///< This map holds the frame number and Frame objects
std::deque<int64_t> frame_numbers; ///< This queue holds a sequential list of cached Frame numbers
int64_t bytes_freed_since_trim; ///< Tracks bytes freed to trigger a heap trim
/// Clean up cached frames that exceed the max number of bytes
void CleanUp();

File diff suppressed because it is too large Load Diff

View File

@@ -128,6 +128,7 @@ namespace openshot {
int64_t pts_total;
int64_t pts_counter;
std::shared_ptr<openshot::Frame> last_video_frame;
std::shared_ptr<openshot::Frame> last_final_video_frame;
bool is_seeking;
int64_t seeking_pts;
@@ -136,6 +137,8 @@ namespace openshot {
int seek_count;
int64_t seek_audio_frame_found;
int64_t seek_video_frame_found;
int64_t last_seek_max_frame;
int seek_stagnant_count;
int64_t last_frame;
int64_t largest_frame_processed;
@@ -172,7 +175,7 @@ namespace openshot {
void CheckFPS();
/// Check the current seek position and determine if we need to seek again
bool CheckSeek(bool is_video);
bool CheckSeek();
/// Check the working queue, and move finished frames to the finished queue
void CheckWorkingFrames(int64_t requested_frame);

View File

@@ -455,9 +455,9 @@ void Frame::SetFrameNumber(int64_t new_number)
// Calculate the # of samples per video frame (for a specific frame number and frame rate)
int Frame::GetSamplesPerFrame(int64_t number, Fraction fps, int sample_rate, int channels)
{
// Directly return 0 if there are no channels
// Directly return 0 for invalid audio/frame-rate parameters
// so that we do not need to deal with NaNs later
if (channels == 0) return 0;
if (channels <= 0 || sample_rate <= 0 || fps.num <= 0 || fps.den <= 0) return 0;
// Get the total # of samples for the previous frame, and the current frame (rounded)
double fps_rate = fps.Reciprocal().ToDouble();

View File

@@ -11,6 +11,7 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
#include <cmath>
#include <algorithm>
#include <iostream>
#include <iomanip>
@@ -49,7 +50,8 @@ FrameMapper::FrameMapper(ReaderBase *reader, Fraction target, PulldownType targe
field_toggle = true;
// Adjust cache size based on size of frame and audio
final_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS, info.width, info.height, info.sample_rate, info.channels);
const int initial_cache_frames = std::max(Settings::Instance()->CACHE_MIN_FRAMES, OPEN_MP_NUM_PROCESSORS);
final_cache.SetMaxBytesFromInfo(initial_cache_frames, info.width, info.height, info.sample_rate, info.channels);
}
// Destructor
@@ -746,9 +748,6 @@ void FrameMapper::Close()
SWR_FREE(&avr);
avr = NULL;
}
// Release freed arenas back to OS after heavy teardown
TrimMemoryToOS(true);
}
@@ -845,7 +844,8 @@ void FrameMapper::ChangeMapping(Fraction target_fps, PulldownType target_pulldow
final_cache.Clear();
// Adjust cache size based on size of frame and audio
final_cache.SetMaxBytesFromInfo(24, info.width, info.height, info.sample_rate, info.channels);
const int reset_cache_frames = std::max(Settings::Instance()->CACHE_MIN_FRAMES, OPEN_MP_NUM_PROCESSORS * 4);
final_cache.SetMaxBytesFromInfo(reset_cache_frames, info.width, info.height, info.sample_rate, info.channels);
// Deallocate resample buffer
if (avr) {

View File

@@ -25,7 +25,7 @@
namespace {
// Limit trim attempts to once per interval to avoid spamming platform calls
constexpr uint64_t kMinTrimIntervalMs = 1000; // 1s debounce
constexpr uint64_t kMinTrimIntervalMs = 30000; // 30s debounce
std::atomic<uint64_t> g_last_trim_ms{0};
std::atomic<bool> g_trim_in_progress{false};
@@ -37,12 +37,12 @@ uint64_t NowMs() {
namespace openshot {
bool TrimMemoryToOS(bool force) noexcept {
bool TrimMemoryToOS() noexcept {
const uint64_t now_ms = NowMs();
const uint64_t last_ms = g_last_trim_ms.load(std::memory_order_relaxed);
// Skip if we recently trimmed (unless forced)
if (!force && now_ms - last_ms < kMinTrimIntervalMs)
// Skip if we recently trimmed
if (now_ms - last_ms < kMinTrimIntervalMs)
return false;
// Only one trim attempt runs at a time
@@ -70,8 +70,9 @@ bool TrimMemoryToOS(bool force) noexcept {
did_trim = false;
#endif
if (did_trim)
if (did_trim) {
g_last_trim_ms.store(now_ms, std::memory_order_relaxed);
}
g_trim_in_progress.store(false, std::memory_order_release);
return did_trim;

View File

@@ -25,6 +25,6 @@ namespace openshot {
* @param force If true, bypass the debounce interval (useful for teardown).
* @return true if a platform-specific trim call was made, false otherwise.
*/
bool TrimMemoryToOS(bool force = false) noexcept;
bool TrimMemoryToOS() noexcept;
} // namespace openshot

View File

@@ -29,6 +29,7 @@ namespace openshot
, last_speed(1)
, last_dir(1) // assume forward (+1) on first launch
, userSeeked(false)
, preroll_on_next_fill(false)
, requested_display_frame(1)
, current_display_frame(1)
, cached_frame_count(0)
@@ -52,21 +53,47 @@ namespace openshot
return false;
}
if (min_frames_ahead < 0) {
const int64_t ready_min = min_frames_ahead.load();
if (ready_min < 0) {
return true;
}
return (cached_frame_count > min_frames_ahead);
const int64_t cached_index = last_cached_index.load();
const int64_t playhead = requested_display_frame.load();
int dir = computeDirection();
// Near timeline boundaries, don't require more pre-roll than can exist.
int64_t max_frame = reader->info.video_length;
if (auto* timeline = dynamic_cast<Timeline*>(reader)) {
const int64_t timeline_max = timeline->GetMaxFrame();
if (timeline_max > 0) {
max_frame = timeline_max;
}
}
if (max_frame < 1) {
return false;
}
int64_t required_ahead = ready_min;
int64_t available_ahead = (dir > 0)
? std::max<int64_t>(0, max_frame - playhead)
: std::max<int64_t>(0, playhead - 1);
required_ahead = std::min(required_ahead, available_ahead);
if (dir > 0) {
return (cached_index >= playhead + required_ahead);
}
return (cached_index <= playhead - required_ahead);
}
void VideoCacheThread::setSpeed(int new_speed)
{
// Only update last_speed and last_dir when new_speed != 0
if (new_speed != 0) {
last_speed = new_speed;
last_dir = (new_speed > 0 ? 1 : -1);
last_speed.store(new_speed);
last_dir.store(new_speed > 0 ? 1 : -1);
}
speed = new_speed;
speed.store(new_speed);
}
// Get the size in bytes of a frame (rough estimate)
@@ -101,24 +128,38 @@ namespace openshot
void VideoCacheThread::Seek(int64_t new_position, bool start_preroll)
{
if (start_preroll) {
userSeeked = true;
bool should_mark_seek = false;
bool should_preroll = false;
int64_t new_cached_count = cached_frame_count.load();
if (start_preroll) {
should_mark_seek = true;
CacheBase* cache = reader ? reader->GetCache() : nullptr;
if (cache && !cache->Contains(new_position))
{
// If user initiated seek, and current frame not found (
Timeline* timeline = static_cast<Timeline*>(reader);
timeline->ClearAllCache();
cached_frame_count = 0;
if (Timeline* timeline = dynamic_cast<Timeline*>(reader)) {
timeline->ClearAllCache();
}
new_cached_count = 0;
should_preroll = true;
}
else if (cache)
{
cached_frame_count = cache->Count();
new_cached_count = cache->Count();
}
}
{
std::lock_guard<std::mutex> guard(seek_state_mutex);
requested_display_frame.store(new_position);
cached_frame_count.store(new_cached_count);
if (start_preroll) {
preroll_on_next_fill.store(should_preroll);
userSeeked.store(should_mark_seek);
}
}
requested_display_frame = new_position;
}
void VideoCacheThread::Seek(int64_t new_position)
@@ -129,13 +170,50 @@ namespace openshot
int VideoCacheThread::computeDirection() const
{
// If speed ≠ 0, use its sign; if speed==0, keep last_dir
return (speed != 0 ? (speed > 0 ? 1 : -1) : last_dir);
const int current_speed = speed.load();
if (current_speed != 0) {
return (current_speed > 0 ? 1 : -1);
}
return last_dir.load();
}
void VideoCacheThread::handleUserSeek(int64_t playhead, int dir)
{
// Place last_cached_index just “behind” playhead in the given dir
last_cached_index = playhead - dir;
last_cached_index.store(playhead - dir);
}
void VideoCacheThread::handleUserSeekWithPreroll(int64_t playhead,
int dir,
int64_t timeline_end,
int64_t preroll_frames)
{
int64_t preroll_start = playhead;
if (preroll_frames > 0) {
if (dir > 0) {
preroll_start = std::max<int64_t>(1, playhead - preroll_frames);
}
else {
preroll_start = std::min<int64_t>(timeline_end, playhead + preroll_frames);
}
}
last_cached_index.store(preroll_start - dir);
}
int64_t VideoCacheThread::computePrerollFrames(const Settings* settings) const
{
if (!settings) {
return 0;
}
int64_t min_frames = settings->VIDEO_CACHE_MIN_PREROLL_FRAMES;
int64_t max_frames = settings->VIDEO_CACHE_MAX_PREROLL_FRAMES;
if (min_frames < 0) {
return 0;
}
if (max_frames > 0 && min_frames > max_frames) {
min_frames = max_frames;
}
return min_frames;
}
bool VideoCacheThread::clearCacheIfPaused(int64_t playhead,
@@ -144,9 +222,10 @@ namespace openshot
{
if (paused && !cache->Contains(playhead)) {
// If paused and playhead not in cache, clear everything
Timeline* timeline = static_cast<Timeline*>(reader);
timeline->ClearAllCache();
cached_frame_count = 0;
if (Timeline* timeline = dynamic_cast<Timeline*>(reader)) {
timeline->ClearAllCache();
}
cached_frame_count.store(0);
return true;
}
return false;
@@ -181,7 +260,7 @@ namespace openshot
ReaderBase* reader)
{
bool window_full = true;
int64_t next_frame = last_cached_index + dir;
int64_t next_frame = last_cached_index.load() + dir;
// Advance from last_cached_index toward window boundary
while ((dir > 0 && next_frame <= window_end) ||
@@ -191,7 +270,7 @@ namespace openshot
break;
}
// If a Seek was requested mid-caching, bail out immediately
if (userSeeked) {
if (userSeeked.load()) {
break;
}
@@ -200,7 +279,7 @@ namespace openshot
try {
auto framePtr = reader->GetFrame(next_frame);
cache->Add(framePtr);
cached_frame_count = cache->Count();
cached_frame_count.store(cache->Count());
}
catch (const OutOfBoundsFrame&) {
break;
@@ -211,7 +290,7 @@ namespace openshot
cache->Touch(next_frame);
}
last_cached_index = next_frame;
last_cached_index.store(next_frame);
next_frame += dir;
}
@@ -229,26 +308,31 @@ namespace openshot
// If caching disabled or no reader, mark cache as ready and sleep briefly
if (!settings->ENABLE_PLAYBACK_CACHING || !cache) {
cached_frame_count = (cache ? cache->Count() : 0);
min_frames_ahead = -1;
cached_frame_count.store(cache ? cache->Count() : 0);
min_frames_ahead.store(-1);
std::this_thread::sleep_for(double_micro_sec(50000));
continue;
}
// init local vars
min_frames_ahead = settings->VIDEO_CACHE_MIN_PREROLL_FRAMES;
min_frames_ahead.store(settings->VIDEO_CACHE_MIN_PREROLL_FRAMES);
Timeline* timeline = static_cast<Timeline*>(reader);
Timeline* timeline = dynamic_cast<Timeline*>(reader);
if (!timeline) {
std::this_thread::sleep_for(double_micro_sec(50000));
continue;
}
int64_t timeline_end = timeline->GetMaxFrame();
int64_t playhead = requested_display_frame;
bool paused = (speed == 0);
int64_t playhead = requested_display_frame.load();
bool paused = (speed.load() == 0);
int64_t preroll_frames = computePrerollFrames(settings);
cached_frame_count = cache->Count();
cached_frame_count.store(cache->Count());
// Compute effective direction (±1)
int dir = computeDirection();
if (speed != 0) {
last_dir = dir;
if (speed.load() != 0) {
last_dir.store(dir);
}
// Compute bytes_per_frame, max_bytes, and capacity once
@@ -269,9 +353,25 @@ namespace openshot
}
// Handle a user-initiated seek
if (userSeeked) {
handleUserSeek(playhead, dir);
userSeeked = false;
bool did_user_seek = false;
bool use_preroll = false;
{
std::lock_guard<std::mutex> guard(seek_state_mutex);
playhead = requested_display_frame.load();
did_user_seek = userSeeked.load();
use_preroll = preroll_on_next_fill.load();
if (did_user_seek) {
userSeeked.store(false);
preroll_on_next_fill.store(false);
}
}
if (did_user_seek) {
if (use_preroll) {
handleUserSeekWithPreroll(playhead, dir, timeline_end, preroll_frames);
}
else {
handleUserSeek(playhead, dir);
}
}
else if (!paused && capacity >= 1) {
// In playback mode, check if last_cached_index drifted outside the new window
@@ -288,8 +388,8 @@ namespace openshot
);
bool outside_window =
(dir > 0 && last_cached_index > window_end) ||
(dir < 0 && last_cached_index < window_begin);
(dir > 0 && last_cached_index.load() > window_end) ||
(dir < 0 && last_cached_index.load() < window_begin);
if (outside_window) {
handleUserSeek(playhead, dir);
}
@@ -311,12 +411,12 @@ namespace openshot
ready_target = 0;
}
int64_t configured_min = settings->VIDEO_CACHE_MIN_PREROLL_FRAMES;
min_frames_ahead = std::min<int64_t>(configured_min, ready_target);
min_frames_ahead.store(std::min<int64_t>(configured_min, ready_target));
// If paused and playhead is no longer in cache, clear everything
bool did_clear = clearCacheIfPaused(playhead, paused, cache);
if (did_clear) {
handleUserSeek(playhead, dir);
handleUserSeekWithPreroll(playhead, dir, timeline_end, preroll_frames);
}
// Compute the current caching window

View File

@@ -17,10 +17,13 @@
#include <AppConfig.h>
#include <juce_audio_basics/juce_audio_basics.h>
#include <atomic>
#include <mutex>
#include <memory>
namespace openshot
{
class Settings;
using juce::Thread;
/**
@@ -56,7 +59,7 @@ namespace openshot
void setSpeed(int new_speed);
/// @return The current speed (1=normal, 2=fast, 1=rewind, etc.)
int getSpeed() const { return speed; }
int getSpeed() const { return speed.load(); }
/// Seek to a specific frame (no preroll).
void Seek(int64_t new_position);
@@ -107,6 +110,21 @@ namespace openshot
*/
void handleUserSeek(int64_t playhead, int dir);
/**
* @brief Reset last_cached_index to start caching with a directional preroll offset.
* @param playhead Current requested_display_frame
* @param dir Effective direction (±1)
* @param timeline_end Last valid frame index
* @param preroll_frames Number of frames to offset the cache start
*/
void handleUserSeekWithPreroll(int64_t playhead,
int dir,
int64_t timeline_end,
int64_t preroll_frames);
/// @brief Compute preroll frame count from settings.
int64_t computePrerollFrames(const Settings* settings) const;
/**
* @brief When paused and playhead is outside current cache, clear all frames.
* @param playhead Current requested_display_frame
@@ -159,22 +177,24 @@ namespace openshot
std::shared_ptr<Frame> last_cached_frame; ///< Last frame pointer added to cache.
int speed; ///< Current playback speed (0=paused, >0 forward, <0 backward).
int last_speed; ///< Last non-zero speed (for tracking).
int last_dir; ///< Last direction sign (+1 forward, 1 backward).
bool userSeeked; ///< True if Seek(..., true) was called (forces a cache reset).
std::atomic<int> speed; ///< Current playback speed (0=paused, >0 forward, <0 backward).
std::atomic<int> last_speed; ///< Last non-zero speed (for tracking).
std::atomic<int> last_dir; ///< Last direction sign (+1 forward, 1 backward).
std::atomic<bool> userSeeked; ///< True if Seek(..., true) was called (forces a cache reset).
std::atomic<bool> preroll_on_next_fill; ///< True if next cache rebuild should include preroll offset.
int64_t requested_display_frame; ///< Frame index the user requested.
std::atomic<int64_t> requested_display_frame; ///< Frame index the user requested.
int64_t current_display_frame; ///< Currently displayed frame (unused here, reserved).
int64_t cached_frame_count; ///< Estimated count of frames currently stored in cache.
std::atomic<int64_t> cached_frame_count; ///< Estimated count of frames currently stored in cache.
int64_t min_frames_ahead; ///< Minimum number of frames considered “ready” (pre-roll).
std::atomic<int64_t> min_frames_ahead; ///< Minimum number of frames considered “ready” (pre-roll).
int64_t timeline_max_frame; ///< Highest valid frame index in the timeline.
ReaderBase* reader; ///< The source reader (e.g., Timeline, FFmpegReader).
bool force_directional_cache; ///< (Reserved for future use).
int64_t last_cached_index; ///< Index of the most recently cached frame.
std::atomic<int64_t> last_cached_index; ///< Index of the most recently cached frame.
mutable std::mutex seek_state_mutex; ///< Protects coherent seek state updates/consumption.
};
} // namespace openshot

View File

@@ -67,11 +67,14 @@ namespace openshot {
/// Number of threads of OpenMP
int OMP_THREADS = 16;
/// Number of threads that ffmpeg uses
int FF_THREADS = 16;
/// Number of threads that ffmpeg uses
int FF_THREADS = 16;
/// Maximum rows that hardware decode can handle
int DE_LIMIT_HEIGHT_MAX = 1100;
/// Minimum number of frames for frame-count-based caches
int CACHE_MIN_FRAMES = 24;
/// Maximum rows that hardware decode can handle
int DE_LIMIT_HEIGHT_MAX = 1100;
/// Maximum columns that hardware decode can handle
int DE_LIMIT_WIDTH_MAX = 1950;
@@ -86,10 +89,10 @@ namespace openshot {
float VIDEO_CACHE_PERCENT_AHEAD = 0.7;
/// Minimum number of frames to cache before playback begins
int VIDEO_CACHE_MIN_PREROLL_FRAMES = 24;
int VIDEO_CACHE_MIN_PREROLL_FRAMES = 30;
/// Max number of frames (ahead of playhead) to cache during playback
int VIDEO_CACHE_MAX_PREROLL_FRAMES = 48;
int VIDEO_CACHE_MAX_PREROLL_FRAMES = 60;
/// Max number of frames (when paused) to cache for playback
int VIDEO_CACHE_MAX_FRAMES = 30 * 10;

View File

@@ -19,6 +19,7 @@
#include "FrameMapper.h"
#include "Exceptions.h"
#include <algorithm>
#include <QDir>
#include <QFileInfo>
#include <unordered_map>
@@ -69,7 +70,8 @@ Timeline::Timeline(int width, int height, Fraction fps, int sample_rate, int cha
// Init cache
final_cache = new CacheMemory();
final_cache->SetMaxBytesFromInfo(24, info.width, info.height, info.sample_rate, info.channels);
const int cache_frames = std::max(Settings::Instance()->CACHE_MIN_FRAMES, OPEN_MP_NUM_PROCESSORS * 4);
final_cache->SetMaxBytesFromInfo(cache_frames, info.width, info.height, info.sample_rate, info.channels);
}
// Delegating constructor that copies parameters from a provided ReaderInfo
@@ -201,7 +203,8 @@ Timeline::Timeline(const std::string& projectPath, bool convert_absolute_paths)
// Init cache
final_cache = new CacheMemory();
final_cache->SetMaxBytesFromInfo(24, info.width, info.height, info.sample_rate, info.channels);
const int cache_frames = std::max(Settings::Instance()->CACHE_MIN_FRAMES, OPEN_MP_NUM_PROCESSORS * 4);
final_cache->SetMaxBytesFromInfo(cache_frames, info.width, info.height, info.sample_rate, info.channels);
}
Timeline::~Timeline() {
@@ -1458,6 +1461,11 @@ void Timeline::apply_json_to_clips(Json::Value change) {
// Add clip to timeline
AddClip(clip);
// Calculate start and end frames that this impacts, and remove those frames from the cache
int64_t new_starting_frame = (clip->Position() * info.fps.ToDouble()) + 1;
int64_t new_ending_frame = ((clip->Position() + clip->Duration()) * info.fps.ToDouble()) + 1;
final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);
} else if (change_type == "update") {
// Update existing clip
@@ -1744,6 +1752,8 @@ void Timeline::apply_json_to_timeline(Json::Value change) {
// Clear all caches
void Timeline::ClearAllCache(bool deep) {
// Get lock (prevent getting frames while this happens)
const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);
// Clear primary cache
if (final_cache) {

View File

@@ -74,13 +74,13 @@ namespace openshot {
/// the Clip with the highest end-frame number using std::max_element
struct CompareClipEndFrames {
bool operator()(const openshot::Clip* lhs, const openshot::Clip* rhs) {
return (lhs->Position() + lhs->Duration()) <= (rhs->Position() + rhs->Duration());
return (lhs->Position() + lhs->Duration()) < (rhs->Position() + rhs->Duration());
}};
/// Like CompareClipEndFrames, but for effects
struct CompareEffectEndFrames {
bool operator()(const openshot::EffectBase* lhs, const openshot::EffectBase* rhs) {
return (lhs->Position() + lhs->Duration()) <= (rhs->Position() + rhs->Duration());
return (lhs->Position() + lhs->Duration()) < (rhs->Position() + rhs->Duration());
}};
/**

View File

@@ -138,6 +138,14 @@ TEST_CASE( "Copy_Constructor", "[libopenshot][frame]" )
CHECK(f1.GetAudioSamplesCount() == f2.GetAudioSamplesCount());
}
TEST_CASE( "GetSamplesPerFrame invalid rate inputs", "[libopenshot][frame]" )
{
CHECK(Frame::GetSamplesPerFrame(/*frame_number=*/1, Fraction(0, 1), /*sample_rate=*/44100, /*channels=*/2) == 0);
CHECK(Frame::GetSamplesPerFrame(/*frame_number=*/1, Fraction(30, 0), /*sample_rate=*/44100, /*channels=*/2) == 0);
CHECK(Frame::GetSamplesPerFrame(/*frame_number=*/1, Fraction(30, 1), /*sample_rate=*/0, /*channels=*/2) == 0);
CHECK(Frame::GetSamplesPerFrame(/*frame_number=*/1, Fraction(30, 1), /*sample_rate=*/44100, /*channels=*/0) == 0);
}
#ifdef USE_OPENCV
TEST_CASE( "Convert_Image", "[libopenshot][opencv][frame]" )
{

View File

@@ -722,6 +722,40 @@ TEST_CASE( "GetMinFrame and GetMinTime", "[libopenshot][timeline]" )
CHECK(t.GetMinFrame() == (5 * 30) + 1);
}
TEST_CASE( "GetMaxFrame with 24fps clip mapped to 30fps timeline", "[libopenshot][timeline]" )
{
	// 30 fps timeline; AutoMapClips(true) makes AddClip wrap each clip's
	// reader in a FrameMapper so the clip's rate matches the timeline's.
	Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);
	t.AutoMapClips(true);
	std::stringstream path;
	path << TEST_MEDIA_PATH << "sintel_trailer-720p.mp4";
	Clip clip(path.str());
	// Precondition: the source media really is 24 fps, so this test
	// exercises a genuine 24 -> 30 fps mapping.
	REQUIRE(clip.Reader()->info.fps.num == 24);
	REQUIRE(clip.Reader()->info.fps.den == 1);
	t.AddClip(&clip);
	// AddClip should have swapped the clip's reader for a FrameMapper
	// retargeted to the timeline's 30 fps.
	REQUIRE(clip.Reader()->Name() == "FrameMapper");
	auto* mapper = static_cast<FrameMapper*>(clip.Reader());
	REQUIRE(mapper->info.fps.num == 30);
	REQUIRE(mapper->info.fps.den == 1);
	REQUIRE(mapper->info.video_length > 0);
	const int64_t timeline_max_frame = t.GetMaxFrame();
	const int64_t mapped_video_length = mapper->info.video_length;
	// Timeline max frame is computed from duration (seconds), while mapper length is
	// rounded frame count. They should stay aligned within one frame at this boundary.
	CHECK(timeline_max_frame >= mapped_video_length);
	CHECK((timeline_max_frame - mapped_video_length) <= 1);
	// Regression guard: fetching the mapped tail frame should not throw.
	t.Open();
	CHECK_NOTHROW(t.GetFrame(mapped_video_length));
	t.Close();
}
TEST_CASE( "Multi-threaded Timeline GetFrame", "[libopenshot][timeline]" )
{
Timeline *t = new Timeline(1280, 720, Fraction(24, 1), 48000, 2, LAYOUT_STEREO);
@@ -1010,6 +1044,29 @@ TEST_CASE( "ApplyJSONDiff and FrameMappers", "[libopenshot][timeline]" )
CHECK(clip1.Reader()->Name() == "QtImageReader");
}
TEST_CASE( "ApplyJSONDiff insert invalidates overlapping timeline cache", "[libopenshot][timeline]" )
{
	// Create timeline with no clips so cached frames are black placeholders
	Timeline t(640, 480, Fraction(30, 1), 44100, 2, LAYOUT_STEREO);
	t.Open();
	// Cache a frame in the area where we'll insert a new clip
	std::shared_ptr<Frame> cached_before = t.GetFrame(10);
	REQUIRE(cached_before != nullptr);
	REQUIRE(t.GetCache() != nullptr);
	REQUIRE(t.GetCache()->Contains(10));
	// Insert clip via JSON diff overlapping frame 10. The clip sits at
	// position 0.0 with end=10 seconds, so at 30 fps it covers frame 10.
	// The reader JSON describes a QtImageReader for interlaced.png; the
	// path is spliced in so the diff works from any test media location.
	std::stringstream path1;
	path1 << TEST_MEDIA_PATH << "interlaced.png";
	std::stringstream json_change;
	json_change << "[{\"type\":\"insert\",\"key\":[\"clips\"],\"value\":{\"id\":\"INSERT_CACHE_INVALIDATE\",\"layer\":1,\"position\":0.0,\"start\":0,\"end\":10,\"reader\":{\"acodec\":\"\",\"audio_bit_rate\":0,\"audio_stream_index\":-1,\"audio_timebase\":{\"den\":1,\"num\":1},\"channel_layout\":4,\"channels\":0,\"display_ratio\":{\"den\":1,\"num\":1},\"duration\":3600.0,\"file_size\":\"160000\",\"fps\":{\"den\":1,\"num\":30},\"has_audio\":false,\"has_single_image\":true,\"has_video\":true,\"height\":200,\"interlaced_frame\":false,\"metadata\":{},\"path\":\"" << path1.str() << "\",\"pixel_format\":-1,\"pixel_ratio\":{\"den\":1,\"num\":1},\"sample_rate\":0,\"top_field_first\":true,\"type\":\"QtImageReader\",\"vcodec\":\"\",\"video_bit_rate\":0,\"video_length\":\"108000\",\"video_stream_index\":-1,\"video_timebase\":{\"den\":30,\"num\":1},\"width\":200}},\"partial\":false}]";
	t.ApplyJsonDiff(json_change.str());
	// Overlapping cached frame should be invalidated (the stale black
	// placeholder must not be served now that a clip covers frame 10)
	CHECK(!t.GetCache()->Contains(10));
}
TEST_CASE( "ApplyJSONDiff Update Reader Info", "[libopenshot][timeline]" )
{
// Create a timeline

View File

@@ -33,11 +33,15 @@ public:
using VideoCacheThread::clearCacheIfPaused;
using VideoCacheThread::prefetchWindow;
using VideoCacheThread::handleUserSeek;
using VideoCacheThread::handleUserSeekWithPreroll;
using VideoCacheThread::computePrerollFrames;
// Test-only accessors for the cache thread's internal state. The members
// are std::atomic, so reads/writes go through explicit load()/store().
//
// NOTE: the merge had left BOTH the old plain-member accessors and the new
// atomic accessors in place, producing duplicate member-function
// definitions (a compile error). Only the atomic versions are kept.
int64_t getLastCachedIndex() const { return last_cached_index.load(); }
void setLastCachedIndex(int64_t v) { last_cached_index.store(v); }
void setPlayhead(int64_t v) { requested_display_frame.store(v); }
void setMinFramesAhead(int64_t v) { min_frames_ahead.store(v); }
void setLastDir(int d) { last_dir.store(d); }
void forceUserSeekFlag() { userSeeked.store(true); }
};
// ----------------------------------------------------------------------------
@@ -95,6 +99,66 @@ TEST_CASE("computeWindowBounds: forward and backward bounds, clamped", "[VideoCa
CHECK(we == 3);
}
TEST_CASE("isReady: requires cached frames ahead of playhead", "[VideoCacheThread]") {
	TestableVideoCacheThread thread;
	// 60 fps timeline just to give the thread a reader; resolution and
	// audio settings are irrelevant to readiness.
	Timeline timeline(/*width=*/1280, /*height=*/720, /*fps=*/Fraction(60,1),
		/*sample_rate=*/48000, /*channels=*/2, ChannelLayout::LAYOUT_STEREO);
	thread.Reader(&timeline);
	// Require 30 frames of preroll beyond the playhead (frame 200).
	thread.setMinFramesAhead(30);
	thread.setPlayhead(200);
	// Forward playback: ready only once cache reaches playhead + 30 = 230.
	thread.setSpeed(1);
	thread.setLastCachedIndex(200);
	CHECK(!thread.isReady());
	thread.setLastCachedIndex(229);   // one frame short — still not ready
	CHECK(!thread.isReady());
	thread.setLastCachedIndex(230);   // exact boundary counts as ready
	CHECK(thread.isReady());
	// Reverse playback: preroll is measured behind the playhead, so the
	// threshold mirrors to playhead - 30 = 170.
	thread.setSpeed(-1);
	thread.setLastCachedIndex(200);
	CHECK(!thread.isReady());
	thread.setLastCachedIndex(171);   // one frame short — still not ready
	CHECK(!thread.isReady());
	thread.setLastCachedIndex(170);   // exact boundary counts as ready
	CHECK(thread.isReady());
}
TEST_CASE("isReady: clamps preroll requirement at timeline boundaries", "[VideoCacheThread]") {
	TestableVideoCacheThread thread;
	// 30 fps timeline; its video_length supplies the end boundary used below.
	Timeline timeline(/*width=*/1280, /*height=*/720, /*fps=*/Fraction(30,1),
		/*sample_rate=*/48000, /*channels=*/2, ChannelLayout::LAYOUT_STEREO);
	thread.Reader(&timeline);
	const int64_t end = timeline.info.video_length;
	// Sanity: the boundary scenarios below need at least a few frames of room.
	REQUIRE(end > 10);
	// Forward near end: only a few frames remain, so don't require full preroll.
	// Ready once everything up to the final frame is cached, even though
	// that is fewer than min_frames_ahead frames past the playhead.
	thread.setMinFramesAhead(30);
	thread.setSpeed(1);
	thread.setPlayhead(end - 5);
	thread.setLastCachedIndex(end - 4);
	CHECK(!thread.isReady());
	thread.setLastCachedIndex(end);
	CHECK(thread.isReady());
	// Backward near start: only a few frames exist behind playhead.
	// Ready once frame 1 (the first frame) is cached.
	thread.setMinFramesAhead(30);
	thread.setSpeed(-1);
	thread.setPlayhead(3);
	thread.setLastCachedIndex(2);
	CHECK(!thread.isReady());
	thread.setLastCachedIndex(1);
	CHECK(thread.isReady());
}
TEST_CASE("clearCacheIfPaused: clears only when paused and not in cache", "[VideoCacheThread]") {
TestableVideoCacheThread thread;
CacheMemory cache(/*max_bytes=*/100000000);
@@ -139,6 +203,22 @@ TEST_CASE("handleUserSeek: sets last_cached_index to playhead - dir", "[VideoCac
CHECK(thread.getLastCachedIndex() == 51);
}
TEST_CASE("handleUserSeekWithPreroll: offsets start by preroll frames", "[VideoCacheThread]") {
	TestableVideoCacheThread thread;
	// Each row seeks with timeline_end=200 and preroll_frames=30, then
	// checks where caching will resume (last_cached_index).
	struct SeekCase {
		int64_t playhead;
		int dir;
		int64_t expected_index;
	};
	const SeekCase seek_cases[] = {
		{60, 1, 29},    // forward: start preroll_frames before the playhead
		{10, 1, 0},     // preroll exceeds playhead: clamp to frame 0
		{1, 1, 0},      // seek to the very first frame: clamp to frame 0
		{60, -1, 91},   // backward: preroll extends past the playhead
	};
	for (const auto& sc : seek_cases) {
		thread.handleUserSeekWithPreroll(sc.playhead, sc.dir,
			/*timeline_end=*/200, /*preroll_frames=*/30);
		CHECK(thread.getLastCachedIndex() == sc.expected_index);
	}
}
TEST_CASE("prefetchWindow: forward caching with FFmpegReader & CacheMemory", "[VideoCacheThread]") {
TestableVideoCacheThread thread;
CacheMemory cache(/*max_bytes=*/100000000);