Merge remote-tracking branch 'upstream/master'

This commit is contained in:
Jeff Shillitto
2017-06-21 21:42:54 +10:00
8 changed files with 87 additions and 46 deletions

View File

@@ -34,6 +34,8 @@
#include <fstream>
#include <QtCore/qstring.h>
#include <QtCore/qstringlist.h>
#include <QtCore/qfile.h>
#include <QTextStream>
#include <stdio.h>
#include <stdlib.h>
#include "Exceptions.h"

View File

@@ -165,7 +165,7 @@ namespace openshot {
void apply_json_to_timeline(Json::Value change) throw(InvalidJSONKey); ///<Apply JSON diff to timeline properties
/// Calculate time of a frame number, based on a framerate
float calculate_time(long int number, Fraction rate);
double calculate_time(long int number, Fraction rate);
/// Find intersecting (or non-intersecting) openshot::Clip objects
///

View File

@@ -34,10 +34,10 @@
#define STRINGIZE(x) STRINGIZE_(x)
#endif

// Version number components.
// NOTE: no trailing semicolons on these defines — the values are stringized
// below (via STRINGIZE) and also used inside expressions; a trailing ';'
// would be captured into the generated version strings (e.g. "0;.1;") and
// would break any use of these macros mid-expression.
#define OPENSHOT_VERSION_MAJOR 0	/// Major version number is incremented when huge features are added or improved.
#define OPENSHOT_VERSION_MINOR 1	/// Minor version is incremented when smaller (but still very important) improvements are added.
#define OPENSHOT_VERSION_BUILD 7	/// Build number is incremented when minor bug fixes and less important improvements are added.
#define OPENSHOT_VERSION_SO 12		/// Shared object version number. This increments any time the API and ABI changes (so old apps will no longer link)
#define OPENSHOT_VERSION_MAJOR_MINOR STRINGIZE(OPENSHOT_VERSION_MAJOR) "." STRINGIZE(OPENSHOT_VERSION_MINOR)	/// A string of the "Major.Minor" version
#define OPENSHOT_VERSION_ALL STRINGIZE(OPENSHOT_VERSION_MAJOR) "." STRINGIZE(OPENSHOT_VERSION_MINOR) "." STRINGIZE(OPENSHOT_VERSION_BUILD)	/// A string of the entire version "Major.Minor.Build"

View File

@@ -274,19 +274,11 @@ tr1::shared_ptr<Frame> Clip::GetFrame(long int requested_frame) throw(ReaderClos
else if (enabled_video == -1 && reader && !reader->info.has_audio)
enabled_video = 0;
// Adjust parent reader with same settings (for performance gains)
if (reader) {
// Override parent reader
reader->info.has_audio = enabled_audio;
reader->info.has_video = enabled_video;
}
// Is a time map detected
long int new_frame_number = requested_frame;
if (time.Values.size() > 1)
new_frame_number = time.GetLong(requested_frame);
// Now that we have re-mapped what frame number is needed, go and get the frame pointer
tr1::shared_ptr<Frame> original_frame = GetOrCreateFrame(new_frame_number);

View File

@@ -486,6 +486,7 @@ tr1::shared_ptr<Frame> FFmpegReader::ReadStream(long int requested_frame)
// Minimum number of packets to process (for performance reasons)
int packets_processed = 0;
int minimum_packets = OPEN_MP_NUM_PROCESSORS;
int max_packets = 4096;
// Set the number of threads in OpenMP
omp_set_num_threads(OPEN_MP_NUM_PROCESSORS);
@@ -518,7 +519,7 @@ tr1::shared_ptr<Frame> FFmpegReader::ReadStream(long int requested_frame)
}
// Debug output
ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (GetNextPacket)", "requested_frame", requested_frame, "processing_video_frames.size()", processing_video_frames.size(), "processing_audio_frames.size()", processing_audio_frames.size(), "minimum_packets", minimum_packets, "packets_processed", packets_processed, "", -1);
ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream (GetNextPacket)", "requested_frame", requested_frame, "processing_video_frames.size()", processing_video_frames.size(), "processing_audio_frames.size()", processing_audio_frames.size(), "minimum_packets", minimum_packets, "packets_processed", packets_processed, "is_seeking", is_seeking);
// Video packet
if (info.has_video && packet->stream_index == videoStream)
@@ -603,7 +604,7 @@ tr1::shared_ptr<Frame> FFmpegReader::ReadStream(long int requested_frame)
packets_processed++;
// Break once the frame is found
if ((is_cache_found && packets_processed >= minimum_packets))
if ((is_cache_found && packets_processed >= minimum_packets) || packets_processed > max_packets)
break;
} // end while
@@ -1220,6 +1221,8 @@ void FFmpegReader::Seek(long int requested_frame) throw(TooManySeeks)
num_checks_since_final = 0;
num_packets_since_video_frame = 0;
has_missing_frames = false;
bool has_audio_override = info.has_audio;
bool has_video_override = info.has_video;
// Increment seek count
seek_count++;
@@ -1232,6 +1235,10 @@ void FFmpegReader::Seek(long int requested_frame) throw(TooManySeeks)
Close();
Open();
// Update overrides (since closing and re-opening might update these)
info.has_audio = has_audio_override;
info.has_video = has_video_override;
// Not actually seeking, so clear these flags
is_seeking = false;
if (seek_count == 1) {
@@ -1316,6 +1323,10 @@ void FFmpegReader::Seek(long int requested_frame) throw(TooManySeeks)
// Close and re-open file (basically seeking to frame 1)
Close();
Open();
// Update overrides (since closing and re-opening might update these)
info.has_audio = has_audio_override;
info.has_video = has_video_override;
}
}
}
@@ -1339,15 +1350,25 @@ void FFmpegReader::UpdatePTSOffset(bool is_video)
{
// VIDEO PACKET
if (video_pts_offset == 99999) // Has the offset been set yet?
// Find the difference between PTS and frame number
video_pts_offset = 0 - GetVideoPTS();
{
// Find the difference between PTS and frame number (no more than 10 timebase units allowed)
video_pts_offset = 0 - max(GetVideoPTS(), (long) info.video_timebase.ToInt() * 10);
// debug output
ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::UpdatePTSOffset (Video)", "video_pts_offset", video_pts_offset, "is_video", is_video, "", -1, "", -1, "", -1, "", -1);
}
}
else
{
// AUDIO PACKET
if (audio_pts_offset == 99999) // Has the offset been set yet?
// Find the difference between PTS and frame number
audio_pts_offset = 0 - packet->pts;
{
// Find the difference between PTS and frame number (no more than 10 timebase units allowed)
audio_pts_offset = 0 - max(packet->pts, (int64_t) info.audio_timebase.ToInt() * 10);
// debug output
ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::UpdatePTSOffset (Audio)", "audio_pts_offset", audio_pts_offset, "is_video", is_video, "", -1, "", -1, "", -1, "", -1);
}
}
}
@@ -1623,6 +1644,9 @@ bool FFmpegReader::CheckMissingFrame(long int requested_frame)
void FFmpegReader::CheckWorkingFrames(bool end_of_stream, long int requested_frame)
{
// Loop through all working queue frames
bool checked_count_tripped = false;
int max_checked_count = 80;
while (true)
{
// Get the front frame of working cache
@@ -1647,7 +1671,11 @@ void FFmpegReader::CheckWorkingFrames(bool end_of_stream, long int requested_fra
is_audio_ready = processed_audio_frames.count(f->number);
// Get check count for this frame
checked_count = checked_frames[f->number];
if (!checked_count_tripped || f->number >= requested_frame)
checked_count = checked_frames[f->number];
else
// Force checked count over the limit
checked_count = max_checked_count;
}
if (previous_packet_location.frame == f->number && !end_of_stream)
@@ -1659,10 +1687,13 @@ void FFmpegReader::CheckWorkingFrames(bool end_of_stream, long int requested_fra
if (!info.has_audio) is_audio_ready = true;
// Make final any frames that get stuck (for whatever reason)
if (checked_count > 80 && (!is_video_ready || !is_audio_ready)) {
if (checked_count >= max_checked_count && (!is_video_ready || !is_audio_ready)) {
// Debug output
ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckWorkingFrames (exceeded checked_count)", "requested_frame", requested_frame, "frame_number", f->number, "is_video_ready", is_video_ready, "is_audio_ready", is_audio_ready, "checked_count", checked_count, "checked_frames.size()", checked_frames.size());
// Trigger checked count tripped mode (clear out all frames before requested frame)
checked_count_tripped = true;
if (info.has_video && !is_video_ready && last_video_frame) {
// Copy image from last frame
f->AddImage(tr1::shared_ptr<QImage>(new QImage(*last_video_frame->GetImage())));
@@ -1701,6 +1732,9 @@ void FFmpegReader::CheckWorkingFrames(bool end_of_stream, long int requested_fra
ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckWorkingFrames (add frame to missing cache)", "f->number", f->number, "is_seek_trash", is_seek_trash, "Missing Cache Count", missing_frames.Count(), "Working Cache Count", working_cache.Count(), "Final Cache Count", final_cache.Count(), "", -1);
missing_frames.Add(f);
}
// Remove from 'checked' count
checked_frames.erase(f->number);
}
// Remove frame from working cache
@@ -1709,9 +1743,6 @@ void FFmpegReader::CheckWorkingFrames(bool end_of_stream, long int requested_fra
// Update last frame processed
last_frame = f->number;
// Remove from 'checked' count
checked_frames.erase(f->number);
} else {
// Seek trash, so delete the frame from the working cache, and never add it to the final cache.
working_cache.Remove(f->number);

View File

@@ -207,7 +207,7 @@ void FrameMapper::Init()
} else {
// Map the remaining framerates using a simple Keyframe curve
// Calculate the difference (to be used as a multiplier)
float rate_diff = target.ToFloat() / original.ToFloat();
double rate_diff = target.ToDouble() / original.ToDouble();
long int new_length = reader->info.video_length * rate_diff;
// Build curve for framerate mapping
@@ -726,13 +726,18 @@ void FrameMapper::SetJsonValue(Json::Value root) throw(InvalidFile) {
// Change frame rate or audio mapping details
void FrameMapper::ChangeMapping(Fraction target_fps, PulldownType target_pulldown, int target_sample_rate, int target_channels, ChannelLayout target_channel_layout)
{
ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::ChangeMapping", "target_fps.num", target_fps.num, "target_fps.den", target_fps.num, "target_pulldown", target_pulldown, "target_sample_rate", target_sample_rate, "target_channels", target_channels, "target_channel_layout", target_channel_layout);
ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::ChangeMapping", "target_fps.num", target_fps.num, "target_fps.den", target_fps.den, "target_pulldown", target_pulldown, "target_sample_rate", target_sample_rate, "target_channels", target_channels, "target_channel_layout", target_channel_layout);
// Mark as dirty
is_dirty = true;
// Update mapping details
target = target_fps;
target.num = target_fps.num;
target.den = target_fps.den;
info.fps.num = target_fps.num;
info.fps.den = target_fps.den;
info.video_timebase.num = target_fps.den;
info.video_timebase.den = target_fps.num;
pulldown = target_pulldown;
info.sample_rate = target_sample_rate;
info.channels = target_channels;
@@ -750,6 +755,9 @@ void FrameMapper::ChangeMapping(Fraction target_fps, PulldownType target_pulldow
avresample_free(&avr);
avr = NULL;
}
// Re-init mapping
Init();
}
// Set offset relative to parent timeline

View File

@@ -51,24 +51,19 @@ Profile::Profile(string path) throw(InvalidFile, InvalidJSON) {
info.display_ratio.den = 0;
info.interlaced_frame = false;
// Read the profile file
ifstream myfile (path.c_str());
if (myfile.is_open())
QFile inputFile(path.c_str());
if (inputFile.open(QIODevice::ReadOnly))
{
// Loop through each line
while (myfile.good())
QTextStream in(&inputFile);
while (!in.atEnd())
{
// read current line of file
read_file = true;
string line = "";
getline (myfile, line);
QString line = in.readLine();
if (line.length() <= 0)
continue;
// Split current line
QString qline(line.c_str());
QStringList parts = qline.split( "=" );
QStringList parts = line.split( "=" );
string setting = parts[0].toStdString();
string value = parts[1].toStdString();
int value_int = 0;
@@ -117,7 +112,8 @@ Profile::Profile(string path) throw(InvalidFile, InvalidJSON) {
info.pixel_format = value_int;
}
}
myfile.close();
read_file = true;
inputFile.close();
}
}

View File

@@ -155,13 +155,13 @@ void Timeline::ApplyMapperToClips()
}
// Calculate time of a frame number, based on a framerate
float Timeline::calculate_time(long int number, Fraction rate)
double Timeline::calculate_time(long int number, Fraction rate)
{
// Get float version of fps fraction
float raw_fps = rate.ToFloat();
double raw_fps = rate.ToFloat();
// Return the time (in seconds) of this frame
return float(number - 1) / raw_fps;
return double(number - 1) / raw_fps;
}
// Apply effects to the source frame (if any)
@@ -631,10 +631,6 @@ bool Timeline::isEqual(double a, double b)
// Get an openshot::Frame object for a specific frame number of this reader.
tr1::shared_ptr<Frame> Timeline::GetFrame(long int requested_frame) throw(ReaderClosed, OutOfBoundsFrame)
{
// Check for open reader (or throw exception)
if (!is_open)
throw ReaderClosed("The Timeline is closed. Call Open() before calling this method.", "");
// Adjust out of bounds frame number
if (requested_frame < 1)
requested_frame = 1;
@@ -653,6 +649,10 @@ tr1::shared_ptr<Frame> Timeline::GetFrame(long int requested_frame) throw(Reader
// Create a scoped lock, allowing only a single thread to run the following code at one time
const GenericScopedLock<CriticalSection> lock(getFrameCriticalSection);
// Check for open reader (or throw exception)
if (!is_open)
throw ReaderClosed("The Timeline is closed. Call Open() before calling this method.", "");
// Check cache again (due to locking)
frame = final_cache->GetFrame(requested_frame);
if (frame) {
@@ -904,6 +904,9 @@ Json::Value Timeline::JsonValue() {
// Load JSON string into this object
void Timeline::SetJson(string value) throw(InvalidJSON) {
// Get lock (prevent getting frames while this happens)
const GenericScopedLock<CriticalSection> lock(getFrameCriticalSection);
// Parse JSON string into JSON objects
Json::Value root;
Json::Reader reader;
@@ -928,6 +931,7 @@ void Timeline::SetJson(string value) throw(InvalidJSON) {
void Timeline::SetJsonValue(Json::Value root) throw(InvalidFile, ReaderClosed) {
// Close timeline before we do anything (this also removes all open and closing clips)
bool was_open = is_open;
Close();
// Set parent data
@@ -983,6 +987,10 @@ void Timeline::SetJsonValue(Json::Value root) throw(InvalidFile, ReaderClosed) {
info.duration = root["duration"].asDouble();
info.video_length = info.fps.ToFloat() * info.duration;
}
// Re-open if needed
if (was_open)
Open();
}
// Apply a special formatted JSON object, which represents a change to the timeline (insert, update, delete)
@@ -1347,6 +1355,10 @@ void Timeline::apply_json_to_timeline(Json::Value change) throw(InvalidJSONKey)
// Clear all caches
void Timeline::ClearAllCache() {
// Get lock (prevent getting frames while this happens)
const GenericScopedLock<CriticalSection> lock(getFrameCriticalSection);
// Clear primary cache
final_cache->Clear();