You've already forked libopenshot
mirror of
https://github.com/OpenShot/libopenshot.git
synced 2026-03-02 08:53:52 -08:00
Added new CacheDisk class, which caches frames to the hard drive, dramatically speeding up preview speeds, at the expense of IO operations. New unittests for caching framework. Fixed a few bugs with Frame constructor, which was causing invalid # width & height. Integrated JSON into the cache framework, to quickly share the state of the cache (including ranges of cached frame numbers). Fixed a bug where some Timeline frames could have no audio samples.
This commit is contained in:
@@ -32,12 +32,18 @@ using namespace openshot;
|
||||
|
||||
// Default constructor, no max bytes
|
||||
CacheMemory::CacheMemory() : CacheBase(0) {
|
||||
|
||||
// Set cache type name
|
||||
cache_type = "CacheMemory";
|
||||
range_version = 0;
|
||||
needs_range_processing = false;
|
||||
};
|
||||
|
||||
// Constructor that sets the max bytes to cache
|
||||
CacheMemory::CacheMemory(int64 max_bytes) : CacheBase(max_bytes) {
|
||||
|
||||
CacheMemory::CacheMemory(long long int max_bytes) : CacheBase(max_bytes) {
|
||||
// Set cache type name
|
||||
cache_type = "CacheMemory";
|
||||
range_version = 0;
|
||||
needs_range_processing = false;
|
||||
};
|
||||
|
||||
// Default destructor: releases all cached frames and the lock object.
// NOTE(review): signature reconstructed from the diff hunk header — confirm
// against the full file.
CacheMemory::~CacheMemory()
{
	// Drop all cached frames (shared_ptr refs released) and both
	// bookkeeping containers (recency queue + ordered list)
	frames.clear();
	frame_numbers.clear();
	ordered_frame_numbers.clear();

	// remove critical section (allocated by CacheBase; owned by this object)
	delete cacheCriticalSection;
	cacheCriticalSection = NULL;
}
||||
// Calculate ranges of frames
|
||||
void CacheMemory::CalculateRanges() {
|
||||
// Only calculate when something has changed
|
||||
if (needs_range_processing) {
|
||||
|
||||
// Create a scoped lock, to protect the cache from multiple threads
|
||||
const GenericScopedLock<CriticalSection> lock(*cacheCriticalSection);
|
||||
|
||||
// Sort ordered frame #s, and calculate JSON ranges
|
||||
std::sort(ordered_frame_numbers.begin(), ordered_frame_numbers.end());
|
||||
|
||||
// Clear existing JSON variable
|
||||
ranges.clear();
|
||||
ranges = Json::Value(Json::arrayValue);
|
||||
|
||||
// Increment range version
|
||||
range_version++;
|
||||
|
||||
vector<long int>::iterator itr_ordered;
|
||||
long int starting_frame = *ordered_frame_numbers.begin();
|
||||
long int ending_frame = *ordered_frame_numbers.begin();
|
||||
|
||||
// Loop through all known frames (in sequential order)
|
||||
for (itr_ordered = ordered_frame_numbers.begin(); itr_ordered != ordered_frame_numbers.end(); ++itr_ordered) {
|
||||
long int frame_number = *itr_ordered;
|
||||
if (frame_number - ending_frame > 1) {
|
||||
// End of range detected
|
||||
Json::Value range;
|
||||
|
||||
// Add JSON object with start/end attributes
|
||||
// Use strings, since long ints are supported in JSON
|
||||
stringstream start_str;
|
||||
start_str << starting_frame;
|
||||
stringstream end_str;
|
||||
end_str << ending_frame;
|
||||
range["start"] = start_str.str();
|
||||
range["end"] = end_str.str();
|
||||
ranges.append(range);
|
||||
|
||||
// Set new starting range
|
||||
starting_frame = frame_number;
|
||||
}
|
||||
|
||||
// Set current frame as end of range, and keep looping
|
||||
ending_frame = frame_number;
|
||||
}
|
||||
|
||||
// APPEND FINAL VALUE
|
||||
Json::Value range;
|
||||
|
||||
// Add JSON object with start/end attributes
|
||||
// Use strings, since long ints are supported in JSON
|
||||
stringstream start_str;
|
||||
start_str << starting_frame;
|
||||
stringstream end_str;
|
||||
end_str << ending_frame;
|
||||
range["start"] = start_str.str();
|
||||
range["end"] = end_str.str();
|
||||
ranges.append(range);
|
||||
|
||||
// Reset needs_range_processing
|
||||
needs_range_processing = false;
|
||||
}
|
||||
}
|
||||
|
||||
// Add a Frame to the cache
|
||||
void CacheMemory::Add(tr1::shared_ptr<Frame> frame)
|
||||
{
|
||||
@@ -68,6 +141,8 @@ void CacheMemory::Add(tr1::shared_ptr<Frame> frame)
|
||||
// Add frame to queue and map
|
||||
frames[frame_number] = frame;
|
||||
frame_numbers.push_front(frame_number);
|
||||
ordered_frame_numbers.push_back(frame_number);
|
||||
needs_range_processing = true;
|
||||
|
||||
// Clean up old frames
|
||||
CleanUp();
|
||||
@@ -113,12 +188,12 @@ tr1::shared_ptr<Frame> CacheMemory::GetSmallestFrame()
|
||||
}
|
||||
|
||||
// Gets the maximum bytes value
|
||||
int64 CacheMemory::GetBytes()
|
||||
long long int CacheMemory::GetBytes()
|
||||
{
|
||||
// Create a scoped lock, to protect the cache from multiple threads
|
||||
const GenericScopedLock<CriticalSection> lock(*cacheCriticalSection);
|
||||
|
||||
int64 total_bytes = 0;
|
||||
long long int total_bytes = 0;
|
||||
|
||||
// Loop through frames, and calculate total bytes
|
||||
deque<long int>::reverse_iterator itr;
|
||||
@@ -132,24 +207,43 @@ int64 CacheMemory::GetBytes()
|
||||
|
||||
// Remove a specific frame
|
||||
void CacheMemory::Remove(long int frame_number)
|
||||
{
|
||||
Remove(frame_number, frame_number);
|
||||
}
|
||||
|
||||
// Remove range of frames
|
||||
void CacheMemory::Remove(long int start_frame_number, long int end_frame_number)
|
||||
{
|
||||
// Create a scoped lock, to protect the cache from multiple threads
|
||||
const GenericScopedLock<CriticalSection> lock(*cacheCriticalSection);
|
||||
|
||||
// Loop through frame numbers
|
||||
deque<long int>::iterator itr;
|
||||
for(itr = frame_numbers.begin(); itr != frame_numbers.end(); ++itr)
|
||||
deque<long int>::iterator itr = frame_numbers.begin();
|
||||
while (itr != frame_numbers.end())
|
||||
{
|
||||
if (*itr == frame_number)
|
||||
if (*itr >= start_frame_number && *itr <= end_frame_number)
|
||||
{
|
||||
// erase frame number
|
||||
frame_numbers.erase(itr);
|
||||
break;
|
||||
}
|
||||
itr = frame_numbers.erase(itr++);
|
||||
}else
|
||||
++itr;
|
||||
}
|
||||
|
||||
// Remove frame from map. If frame_number doesn't exist, frames.erase returns zero.
|
||||
frames.erase(frame_number);
|
||||
// Loop through ordered frame numbers
|
||||
vector<long int>::iterator itr_ordered = ordered_frame_numbers.begin();
|
||||
while (itr_ordered != ordered_frame_numbers.end())
|
||||
{
|
||||
if (*itr_ordered >= start_frame_number && *itr_ordered <= end_frame_number)
|
||||
{
|
||||
// erase frame number
|
||||
frames.erase(*itr_ordered);
|
||||
itr_ordered = ordered_frame_numbers.erase(itr_ordered++);
|
||||
}else
|
||||
++itr_ordered;
|
||||
}
|
||||
|
||||
// Needs range processing (since cache has changed)
|
||||
needs_range_processing = true;
|
||||
}
|
||||
|
||||
// Move frame to front of queue (so it lasts longer)
|
||||
@@ -161,7 +255,7 @@ void CacheMemory::MoveToFront(long int frame_number)
|
||||
// Does frame exists in cache?
|
||||
/* FIXME if the frame number isn't present, the loop will do nothing, so why protect it?
|
||||
* Is it to save time by avoiding a loop?
|
||||
* Do we really need to optmize the case where we've been given a nonexisting frame_number? */
|
||||
* Do we really need to optimize the case where we've been given a nonexisting frame_number? */
|
||||
if (frames.count(frame_number))
|
||||
{
|
||||
// Loop through frame numbers
|
||||
@@ -189,6 +283,7 @@ void CacheMemory::Clear()
|
||||
|
||||
frames.clear();
|
||||
frame_numbers.clear();
|
||||
ordered_frame_numbers.clear();
|
||||
}
|
||||
|
||||
// Count the frames in the queue
|
||||
@@ -220,3 +315,67 @@ void CacheMemory::CleanUp()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Generate JSON string of this object
|
||||
string CacheMemory::Json() {
|
||||
|
||||
// Return formatted string
|
||||
return JsonValue().toStyledString();
|
||||
}
|
||||
|
||||
// Generate Json::JsonValue for this object
|
||||
Json::Value CacheMemory::JsonValue() {
|
||||
|
||||
// Proccess range data (if anything has changed)
|
||||
CalculateRanges();
|
||||
|
||||
// Create root json object
|
||||
Json::Value root = CacheBase::JsonValue(); // get parent properties
|
||||
root["type"] = cache_type;
|
||||
root["ranges"] = ranges;
|
||||
|
||||
Json::Value version;
|
||||
stringstream range_version_str;
|
||||
range_version_str << range_version;
|
||||
root["version"] = range_version_str.str();
|
||||
|
||||
// return JsonValue
|
||||
return root;
|
||||
}
|
||||
|
||||
// Load JSON string into this object
|
||||
void CacheMemory::SetJson(string value) throw(InvalidJSON) {
|
||||
|
||||
// Parse JSON string into JSON objects
|
||||
Json::Value root;
|
||||
Json::Reader reader;
|
||||
bool success = reader.parse( value, root );
|
||||
if (!success)
|
||||
// Raise exception
|
||||
throw InvalidJSON("JSON could not be parsed (or is invalid)", "");
|
||||
|
||||
try
|
||||
{
|
||||
// Set all values that match
|
||||
SetJsonValue(root);
|
||||
}
|
||||
catch (exception e)
|
||||
{
|
||||
// Error parsing JSON (or missing keys)
|
||||
throw InvalidJSON("JSON is invalid (missing keys or invalid data types)", "");
|
||||
}
|
||||
}
|
||||
|
||||
// Load a Json::Value into this object.
// @param root  parsed JSON tree previously produced by JsonValue()
void CacheMemory::SetJsonValue(Json::Value root) throw(InvalidFile, ReaderClosed) {

	// Empty the cache before applying new settings.
	// NOTE(review): the original comment ("Close timeline ... clips") was a
	// copy-paste from Timeline::SetJsonValue; this only clears cached frames.
	Clear();

	// Set parent data (max bytes, etc. — handled by CacheBase)
	CacheBase::SetJsonValue(root);

	// Restore the cache type name, if present in the JSON
	if (!root["type"].isNull())
		cache_type = root["type"].asString();
}
||||
Reference in New Issue
Block a user