diff --git a/include/Clip.h b/include/Clip.h
index c534d805..333df3c4 100644
--- a/include/Clip.h
+++ b/include/Clip.h
@@ -249,6 +249,13 @@ namespace openshot {
Keyframe perspective_c4_x; ///< Curves representing X for coordinate 4
Keyframe perspective_c4_y; ///< Curves representing Y for coordinate 4
+ /// Audio channel filter and mappings
+ Keyframe channel_filter; ///< A number representing an audio channel to filter (clears all other channels)
+ Keyframe channel_mapping; ///< A number representing an audio channel to output (only works when filtering a channel)
+
+ /// Override has_video and has_audio properties of clip (and their readers)
+ Keyframe has_audio; ///< An optional override to determine if this clip has audio (-1=undefined, 0=no, 1=yes)
+ Keyframe has_video; ///< An optional override to determine if this clip has video (-1=undefined, 0=no, 1=yes)
};
diff --git a/src/Clip.cpp b/src/Clip.cpp
index 333c7210..98ebcd98 100644
--- a/src/Clip.cpp
+++ b/src/Clip.cpp
@@ -81,6 +81,14 @@ void Clip::init_settings()
perspective_c4_x = Keyframe(-1.0);
perspective_c4_y = Keyframe(-1.0);
+ // Init audio channel filter and mappings
+ channel_filter = Keyframe(-1.0);
+ channel_mapping = Keyframe(-1.0);
+
+ // Init audio and video overrides
+ has_audio = Keyframe(-1.0);
+ has_video = Keyframe(-1.0);
+
// Default pointers
reader = NULL;
resampler = NULL;
@@ -213,6 +221,8 @@ void Clip::Open() throw(InvalidFile, ReaderClosed)
void Clip::Close() throw(ReaderClosed)
{
if (reader) {
+ ZmqLogger::Instance()->AppendDebugMethod("Clip::Close", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1);
+
// Close the reader
reader->Close();
}
@@ -251,6 +261,25 @@ tr1::shared_ptr Clip::GetFrame(long int requested_frame) throw(ReaderClos
// Adjust out of bounds frame number
requested_frame = adjust_frame_number_minimum(requested_frame);
+ // Adjust has_video and has_audio overrides
+ int enabled_audio = has_audio.GetInt(requested_frame);
+ if (enabled_audio == -1 && reader && reader->info.has_audio)
+ enabled_audio = 1;
+ else if (enabled_audio == -1 && reader && !reader->info.has_audio)
+ enabled_audio = 0;
+ int enabled_video = has_video.GetInt(requested_frame);
+ if (enabled_video == -1 && reader && reader->info.has_video)
+ enabled_video = 1;
+	else if (enabled_video == -1 && reader && !reader->info.has_video)
+ enabled_video = 0;
+
+ // Adjust parent reader with same settings (for performance gains)
+ if (reader) {
+ // Override parent reader
+ reader->info.has_audio = enabled_audio;
+ reader->info.has_video = enabled_video;
+ }
+
// Is a time map detected
long int new_frame_number = requested_frame;
if (time.Values.size() > 1)
@@ -266,10 +295,11 @@ tr1::shared_ptr Clip::GetFrame(long int requested_frame) throw(ReaderClos
frame->ChannelsLayout(original_frame->ChannelsLayout());
// Copy the image from the odd field
-	frame->AddImage(tr1::shared_ptr<QImage>(new QImage(*original_frame->GetImage())));
+	if (enabled_video)
+		frame->AddImage(tr1::shared_ptr<QImage>(new QImage(*original_frame->GetImage())));
// Loop through each channel, add audio
- if (reader->info.has_audio)
+ if (enabled_audio && reader->info.has_audio)
for (int channel = 0; channel < original_frame->GetAudioChannelsCount(); channel++)
frame->AddAudio(true, channel, 0, original_frame->GetAudioSamples(channel), original_frame->GetAudioSamplesCount(), 1.0);
@@ -563,6 +593,9 @@ tr1::shared_ptr Clip::GetOrCreateFrame(long int number)
int samples_in_frame = Frame::GetSamplesPerFrame(number, reader->info.fps, reader->info.sample_rate, reader->info.channels);
try {
+ // Debug output
+ ZmqLogger::Instance()->AppendDebugMethod("Clip::GetOrCreateFrame (from reader)", "number", number, "samples_in_frame", samples_in_frame, "", -1, "", -1, "", -1, "", -1);
+
// Attempt to get a frame (but this could fail if a reader has just been closed)
new_frame = reader->GetFrame(number);
@@ -577,6 +610,9 @@ tr1::shared_ptr Clip::GetOrCreateFrame(long int number)
// ...
}
+ // Debug output
+ ZmqLogger::Instance()->AppendDebugMethod("Clip::GetOrCreateFrame (create blank)", "number", number, "samples_in_frame", samples_in_frame, "", -1, "", -1, "", -1, "", -1);
+
// Create blank frame
	new_frame = tr1::shared_ptr<Frame>(new Frame(number, reader->info.width, reader->info.height, "#000000", samples_in_frame, reader->info.channels));
new_frame->SampleRate(reader->info.sample_rate);
@@ -644,6 +680,10 @@ string Clip::PropertiesJSON(long int requested_frame) {
root["rotation"] = add_property_json("Rotation", rotation.GetValue(requested_frame), "float", "", rotation.Contains(requested_point), rotation.GetCount(), -10000, 10000, rotation.GetClosestPoint(requested_point).interpolation, rotation.GetClosestPoint(requested_point).co.X, false);
root["volume"] = add_property_json("Volume", volume.GetValue(requested_frame), "float", "", volume.Contains(requested_point), volume.GetCount(), 0.0, 1.0, volume.GetClosestPoint(requested_point).interpolation, volume.GetClosestPoint(requested_point).co.X, false);
root["time"] = add_property_json("Time", time.GetValue(requested_frame), "float", "", time.Contains(requested_point), time.GetCount(), 0.0, 1000 * 60 * 30, time.GetClosestPoint(requested_point).interpolation, time.GetClosestPoint(requested_point).co.X, false);
+	root["channel_filter"] = add_property_json("Channel Filter", channel_filter.GetValue(requested_frame), "int", "", channel_filter.Contains(requested_point), channel_filter.GetCount(), -1, 16, channel_filter.GetClosestPoint(requested_point).interpolation, channel_filter.GetClosestPoint(requested_point).co.X, false);
+	root["channel_mapping"] = add_property_json("Channel Mapping", channel_mapping.GetValue(requested_frame), "int", "", channel_mapping.Contains(requested_point), channel_mapping.GetCount(), -1, 16, channel_mapping.GetClosestPoint(requested_point).interpolation, channel_mapping.GetClosestPoint(requested_point).co.X, false);
+	root["has_audio"] = add_property_json("Enable Audio", has_audio.GetValue(requested_frame), "int", "", has_audio.Contains(requested_point), has_audio.GetCount(), -1, 1, has_audio.GetClosestPoint(requested_point).interpolation, has_audio.GetClosestPoint(requested_point).co.X, false);
+	root["has_video"] = add_property_json("Enable Video", has_video.GetValue(requested_frame), "int", "", has_video.Contains(requested_point), has_video.GetCount(), -1, 1, has_video.GetClosestPoint(requested_point).interpolation, has_video.GetClosestPoint(requested_point).co.X, false);
root["wave_color"] = add_property_json("Wave Color", 0.0, "color", "", wave_color.red.Contains(requested_point), wave_color.red.GetCount(), -10000, 10000, wave_color.red.GetClosestPoint(requested_point).interpolation, wave_color.red.GetClosestPoint(requested_point).co.X, false);
root["wave_color"]["red"] = add_property_json("Red", wave_color.red.GetValue(requested_frame), "float", "", wave_color.red.Contains(requested_point), wave_color.red.GetCount(), -10000, 10000, wave_color.red.GetClosestPoint(requested_point).interpolation, wave_color.red.GetClosestPoint(requested_point).co.X, false);
@@ -679,6 +719,10 @@ Json::Value Clip::JsonValue() {
root["crop_y"] = crop_y.JsonValue();
root["shear_x"] = shear_x.JsonValue();
root["shear_y"] = shear_y.JsonValue();
+ root["channel_filter"] = channel_filter.JsonValue();
+ root["channel_mapping"] = channel_mapping.JsonValue();
+ root["has_audio"] = has_audio.JsonValue();
+ root["has_video"] = has_video.JsonValue();
root["perspective_c1_x"] = perspective_c1_x.JsonValue();
root["perspective_c1_y"] = perspective_c1_y.JsonValue();
root["perspective_c2_x"] = perspective_c2_x.JsonValue();
@@ -775,6 +819,14 @@ void Clip::SetJsonValue(Json::Value root) {
shear_x.SetJsonValue(root["shear_x"]);
if (!root["shear_y"].isNull())
shear_y.SetJsonValue(root["shear_y"]);
+ if (!root["channel_filter"].isNull())
+ channel_filter.SetJsonValue(root["channel_filter"]);
+ if (!root["channel_mapping"].isNull())
+ channel_mapping.SetJsonValue(root["channel_mapping"]);
+ if (!root["has_audio"].isNull())
+ has_audio.SetJsonValue(root["has_audio"]);
+ if (!root["has_video"].isNull())
+ has_video.SetJsonValue(root["has_video"]);
if (!root["perspective_c1_x"].isNull())
perspective_c1_x.SetJsonValue(root["perspective_c1_x"]);
if (!root["perspective_c1_y"].isNull())
diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp
index b27cea13..e46dc9bb 100644
--- a/src/FFmpegReader.cpp
+++ b/src/FFmpegReader.cpp
@@ -124,6 +124,9 @@ void FFmpegReader::Open() throw(InvalidFile, NoStreamsFound, InvalidCodec)
// Open reader if not already open
if (!is_open)
{
+ // Create a scoped lock, allowing only a single thread to run the following code at one time
+		const GenericScopedLock<CriticalSection> lock(getFrameCriticalSection);
+
// Initialize format context
pFormatCtx = NULL;
@@ -225,6 +228,11 @@ void FFmpegReader::Close()
// Close all objects, if reader is 'open'
if (is_open)
{
+ // Create a scoped lock, allowing only a single thread to run the following code at one time
+		const GenericScopedLock<CriticalSection> lock(getFrameCriticalSection);
+
+ ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::Close", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1);
+
// Mark as "closed"
is_open = false;
diff --git a/src/FrameMapper.cpp b/src/FrameMapper.cpp
index 52b081fa..7c9b4c2e 100644
--- a/src/FrameMapper.cpp
+++ b/src/FrameMapper.cpp
@@ -320,6 +320,9 @@ MappedFrame FrameMapper::GetMappedFrame(long int TargetFrameNumber) throw(OutOfB
// frame too large, set to end frame
TargetFrameNumber = frames.size();
+ // Debug output
+ ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::GetMappedFrame", "TargetFrameNumber", TargetFrameNumber, "frames.size()", frames.size(), "frames[...].Odd", frames[TargetFrameNumber - 1].Odd.Frame, "frames[...].Even", frames[TargetFrameNumber - 1].Even.Frame, "", -1, "", -1);
+
// Return frame
return frames[TargetFrameNumber - 1];
}
@@ -333,6 +336,9 @@ tr1::shared_ptr FrameMapper::GetOrCreateFrame(long int number)
int samples_in_frame = Frame::GetSamplesPerFrame(number, target, info.sample_rate, info.channels);
try {
+ // Debug output
+ ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::GetOrCreateFrame (from reader)", "number", number, "samples_in_frame", samples_in_frame, "", -1, "", -1, "", -1, "", -1);
+
// Attempt to get a frame (but this could fail if a reader has just been closed)
new_frame = reader->GetFrame(number);
@@ -347,6 +353,9 @@ tr1::shared_ptr FrameMapper::GetOrCreateFrame(long int number)
// ...
}
+ // Debug output
+ ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::GetOrCreateFrame (create blank)", "number", number, "samples_in_frame", samples_in_frame, "", -1, "", -1, "", -1, "", -1);
+
// Create blank frame
	new_frame = tr1::shared_ptr<Frame>(new Frame(number, info.width, info.height, "#000000", samples_in_frame, info.channels));
new_frame->SampleRate(info.sample_rate);
@@ -387,14 +396,18 @@ tr1::shared_ptr FrameMapper::GetFrame(long int requested_frame) throw(Rea
#pragma omp parallel
{
// Loop through all requested frames, each frame gets it's own thread
- #pragma omp for ordered
+ #pragma omp for ordered firstprivate(requested_frame, minimum_frames)
for (long int frame_number = requested_frame; frame_number < requested_frame + minimum_frames; frame_number++)
{
+
+ // Debug output
+ ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::GetFrame (inside omp for loop)", "frame_number", frame_number, "minimum_frames", minimum_frames, "requested_frame", requested_frame, "", -1, "", -1, "", -1);
+
// Get the mapped frame
MappedFrame mapped = GetMappedFrame(frame_number);
			tr1::shared_ptr<Frame> mapped_frame;

-			#pragma omp ordered
+			// Get or create the frame from the source reader
			mapped_frame = GetOrCreateFrame(mapped.Odd.Frame);
// Get # of channels in the actual frame
@@ -562,7 +575,7 @@ void FrameMapper::Close()
// Create a scoped lock, allowing only a single thread to run the following code at one time
	const GenericScopedLock<CriticalSection> lock(getFrameCriticalSection);
- ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::Open", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1);
+ ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::Close", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1);
// Close internal reader
reader->Close();
diff --git a/src/Qt/PlayerPrivate.cpp b/src/Qt/PlayerPrivate.cpp
index 2e78670a..b66b4227 100644
--- a/src/Qt/PlayerPrivate.cpp
+++ b/src/Qt/PlayerPrivate.cpp
@@ -110,6 +110,9 @@ namespace openshot
// Calculate the amount of time to sleep (by subtracting the render time)
int sleep_time = int(frame_time - render_time);
+ // Debug
+ ZmqLogger::Instance()->AppendDebugMethod("PlayerPrivate::run (determine sleep)", "video_frame_diff", video_frame_diff, "video_position", video_position, "audio_position", audio_position, "speed", speed, "render_time", render_time, "sleep_time", sleep_time);
+
// Adjust drift (if more than a few frames off between audio and video)
if (video_frame_diff > 0 && reader->info.has_audio && reader->info.has_video)
// Since the audio and video threads are running independently, they will quickly get out of sync.
@@ -122,8 +125,7 @@ namespace openshot
else if (video_frame_diff < -4 && reader->info.has_audio && reader->info.has_video) {
// Skip frame(s) to catch up to the audio (if more than 4 frames behind)
- video_position++;
- sleep_time = 0;
+ video_position += abs(video_frame_diff);
}
// Sleep (leaving the video frame on the screen for the correct amount of time)
diff --git a/src/Qt/VideoCacheThread.cpp b/src/Qt/VideoCacheThread.cpp
index 49c20b26..e0e41b8a 100644
--- a/src/Qt/VideoCacheThread.cpp
+++ b/src/Qt/VideoCacheThread.cpp
@@ -89,9 +89,12 @@ namespace openshot
// Only cache up till the max_frames amount... then sleep
try
{
- if (reader)
+ if (reader) {
+ ZmqLogger::Instance()->AppendDebugMethod("VideoCacheThread::run (cache frame)", "position", position, "current_display_frame", current_display_frame, "max_frames", max_frames, "needed_frames", (position - current_display_frame), "", -1, "", -1);
+
// Force the frame to be generated
reader->GetFrame(position);
+ }
}
catch (const OutOfBoundsFrame & e)
diff --git a/src/Timeline.cpp b/src/Timeline.cpp
index ea9b178f..100619e3 100644
--- a/src/Timeline.cpp
+++ b/src/Timeline.cpp
@@ -201,8 +201,10 @@ tr1::shared_ptr Timeline::GetOrCreateFrame(Clip* clip, long int number)
int samples_in_frame = Frame::GetSamplesPerFrame(number, info.fps, info.sample_rate, info.channels);
try {
+ // Debug output
+ ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetOrCreateFrame (from reader)", "number", number, "samples_in_frame", samples_in_frame, "", -1, "", -1, "", -1, "", -1);
+
// Attempt to get a frame (but this could fail if a reader has just been closed)
- //new_frame = tr1::shared_ptr(clip->GetFrame(number));
+		new_frame = tr1::shared_ptr<Frame>(clip->GetFrame(number));
// Return real frame
@@ -216,6 +218,9 @@ tr1::shared_ptr Timeline::GetOrCreateFrame(Clip* clip, long int number)
// ...
}
+ // Debug output
+ ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetOrCreateFrame (create blank)", "number", number, "samples_in_frame", samples_in_frame, "", -1, "", -1, "", -1, "", -1);
+
// Create blank frame
	new_frame = tr1::shared_ptr<Frame>(new Frame(number, info.width, info.height, "#000000", samples_in_frame, info.channels));
new_frame->SampleRate(info.sample_rate);
@@ -273,6 +278,16 @@ void Timeline::add_layer(tr1::shared_ptr new_frame, Clip* source_clip, lo
float initial_volume = 1.0f;
float previous_volume = source_clip->volume.GetValue(clip_frame_number - 1); // previous frame's percentage of volume (0 to 1)
float volume = source_clip->volume.GetValue(clip_frame_number); // percentage of volume (0 to 1)
+ int channel_filter = source_clip->channel_filter.GetInt(clip_frame_number); // optional channel to filter (if not -1)
+ int channel_mapping = source_clip->channel_mapping.GetInt(clip_frame_number); // optional channel to map this channel to (if not -1)
+
+ // If channel filter enabled, check for correct channel (and skip non-matching channels)
+ if (channel_filter != -1 && channel_filter != channel)
+ continue; // skip to next channel
+
+ // If channel mapping disabled, just use the current channel
+ if (channel_mapping == -1)
+ channel_mapping = channel;
// If no ramp needed, set initial volume = clip's volume
if (isEqual(previous_volume, volume))
@@ -280,7 +295,7 @@ void Timeline::add_layer(tr1::shared_ptr new_frame, Clip* source_clip, lo
// Apply ramp to source frame (if needed)
if (!isEqual(previous_volume, volume))
- source_frame->ApplyGainRamp(channel, 0, source_frame->GetAudioSamplesCount(), previous_volume, volume);
+ source_frame->ApplyGainRamp(channel_mapping, 0, source_frame->GetAudioSamplesCount(), previous_volume, volume);
// TODO: Improve FrameMapper (or Timeline) to always get the correct number of samples per frame.
// Currently, the ResampleContext sometimes leaves behind a few samples for the next call, and the
@@ -292,7 +307,7 @@ void Timeline::add_layer(tr1::shared_ptr new_frame, Clip* source_clip, lo
// Copy audio samples (and set initial volume). Mix samples with existing audio samples. The gains are added together, to
// be sure to set the gain's correctly, so the sum does not exceed 1.0 (of audio distortion will happen).
- new_frame->AddAudio(false, channel, 0, source_frame->GetAudioSamples(channel), source_frame->GetAudioSamplesCount(), initial_volume);
+ new_frame->AddAudio(false, channel_mapping, 0, source_frame->GetAudioSamples(channel), source_frame->GetAudioSamplesCount(), initial_volume);
}
else
@@ -550,6 +565,8 @@ void Timeline::sort_effects()
// Close the reader (and any resources it was consuming)
void Timeline::Close()
{
+ ZmqLogger::Instance()->AppendDebugMethod("Timeline::Close", "", -1, "", -1, "", -1, "", -1, "", -1, "", -1);
+
// Close all open clips
	list<Clip*>::iterator clip_itr;
for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
diff --git a/src/ZmqLogger.cpp b/src/ZmqLogger.cpp
index 4370de5a..db0abb42 100644
--- a/src/ZmqLogger.cpp
+++ b/src/ZmqLogger.cpp
@@ -162,35 +162,37 @@ void ZmqLogger::AppendDebugMethod(string method_name, string arg1_name, float ar
// Don't do anything
return;
- // Create a scoped lock, allowing only a single thread to run the following code at one time
-	const GenericScopedLock<CriticalSection> lock(loggerCriticalSection);
+	{
+		// Create a scoped lock, allowing only a single thread to run the following code at one time
+		const GenericScopedLock<CriticalSection> lock(loggerCriticalSection);
- stringstream message;
- message << fixed << setprecision(4);
- message << method_name << " (";
+ stringstream message;
+ message << fixed << setprecision(4);
+ message << method_name << " (";
- // Add attributes to method JSON
- if (arg1_name.length() > 0)
- message << arg1_name << "=" << arg1_value;
+ // Add attributes to method JSON
+ if (arg1_name.length() > 0)
+ message << arg1_name << "=" << arg1_value;
- if (arg2_name.length() > 0)
- message << ", " << arg2_name << "=" << arg2_value;
+ if (arg2_name.length() > 0)
+ message << ", " << arg2_name << "=" << arg2_value;
- if (arg3_name.length() > 0)
- message << ", " << arg3_name << "=" << arg3_value;
+ if (arg3_name.length() > 0)
+ message << ", " << arg3_name << "=" << arg3_value;
- if (arg4_name.length() > 0)
- message << ", " << arg4_name << "=" << arg4_value;
+ if (arg4_name.length() > 0)
+ message << ", " << arg4_name << "=" << arg4_value;
- if (arg5_name.length() > 0)
- message << ", " << arg5_name << "=" << arg5_value;
+ if (arg5_name.length() > 0)
+ message << ", " << arg5_name << "=" << arg5_value;
- if (arg6_name.length() > 0)
- message << ", " << arg6_name << "=" << arg6_value;
+ if (arg6_name.length() > 0)
+ message << ", " << arg6_name << "=" << arg6_value;
- // Output to standard output
- message << ")" << endl;
+ // Output to standard output
+ message << ")" << endl;
- // Send message through ZMQ
- Log(message.str());
+ // Send message through ZMQ
+ Log(message.str());
+ }
}
\ No newline at end of file