Fixed a regression with audio files and seeking, added a few new methods, and more accurately detect the End() of clips.

This commit is contained in:
Jonathan Thomas
2012-10-14 02:36:05 -05:00
parent eace9f5efd
commit b647ff77c8
12 changed files with 309 additions and 269 deletions

View File

@@ -137,7 +137,7 @@ namespace openshot {
float Position() { return position; } ///<Get position on timeline
int Layer() { return layer; } ///<Get layer of clip on timeline (lower number is covered by higher numbers)
float Start() { return start; } ///<Get start position of clip (trim start of video)
float End() { return end; } ///<Get end position of clip (trim end of video)
float End(); ///<Get end position of clip (trim end of video), which can be affected by the time curve.
/// Set basic properties
void Position(float value) { position = value; } ///<Get position on timeline

View File

@@ -35,7 +35,6 @@ namespace openshot
{
private:
Magick::Image *image;
Magick::Image *small_image;
Magick::Image *wave_image;
juce::AudioSampleBuffer *audio;
Fraction pixel_ratio;
@@ -132,9 +131,6 @@ namespace openshot
/// Get pixel data (for only a single scan-line)
const Magick::PixelPacket* GetPixels(int row);
/// Get pixel data (for a resized image)
const Magick::PixelPacket* GetPixels(unsigned int width, unsigned int height, int frame);
/// Get height of image
int GetHeight();

View File

@@ -106,6 +106,18 @@ namespace openshot
/// Get a frame based on the target frame rate and the new frame number of a frame
MappedFrame GetFrame(int TargetFrameNumber) throw(OutOfBoundsFrame);
/// Get the target framerate
Framerate TargetFPS() { return m_target; };
/// Get the source framerate
Framerate SourceFPS() { return m_original; };
/// Set the target framerate
void TargetFPS(Framerate new_fps) { m_target = new_fps; };
/// Set the source framerate
void SourceFPS(Framerate new_fps) { m_original = new_fps; };
/**
* \brief Re-map time to slow down, speed up, or reverse a clip based on a Keyframe.
*

View File

@@ -8,6 +8,7 @@
*/
#include <math.h>
#include "Fraction.h"
namespace openshot
{
@@ -43,6 +44,9 @@ namespace openshot
/// Return a float of the frame rate (for example 30000/1001 returns 29.97...)
float GetFPS();
/// Return a Fraction of the framerate
Fraction GetFraction();
};
}

View File

@@ -10,6 +10,7 @@
#include <list>
#include "Clip.h"
#include "FileReaderBase.h"
#include "Fraction.h"
#include "Frame.h"
#include "FrameRate.h"
#include "KeyFrame.h"

View File

@@ -149,6 +149,26 @@ void Clip::Close()
file_reader->Close();
}
// Get end position of clip (trim end of video), which can be affected by the time curve.
// Returns the end position in seconds. When a time-remapping curve is active
// (more than one keyframe point), the clip's effective length is derived from
// the number of remapped frames divided by the FPS; otherwise the reader's
// detected duration ('end') is returned unchanged.
float Clip::End()
{
// Determine the FPS for this clip (fall back to 24.0 when no mapper or reader is available)
float fps = 24.0;
if (frame_map)
// frame mapper: use the mapper's target frame rate
fps = frame_map->TargetFPS().GetFPS();
else if (file_reader)
// file reader: use the source file's detected frame rate
fps = file_reader->info.fps.ToFloat();
// if a time curve is present, use its length
// NOTE(review): assumes time.Values holds one entry per remapped frame — confirm against Keyframe implementation
if (time.Points.size() > 1)
return float(time.Values.size()) / fps;
else
// just use the duration (as detected by the reader)
return end;
}
// Get an openshot::Frame object for a specific frame number of this reader.
Frame* Clip::GetFrame(int requested_frame) throw(ReaderClosed)
{

View File

@@ -461,10 +461,8 @@ Frame* FFmpegReader::ReadStream(int requested_frame)
// Return requested frame (if found)
if (final_cache.Exists(requested_frame))
{
// Return prepared frame
return final_cache.GetFrame(requested_frame);
}
else
// Return blank frame
return CreateFrame(requested_frame);
@@ -941,10 +939,12 @@ void FFmpegReader::Seek(int requested_frame) throw(TooManySeeks)
if (seek_worked)
{
// Flush audio buffer
avcodec_flush_buffers(aCodecCtx);
if (info.has_audio)
avcodec_flush_buffers(aCodecCtx);
// Flush video buffer
avcodec_flush_buffers(pCodecCtx);
if (info.has_video)
avcodec_flush_buffers(pCodecCtx);
// init seek flags
is_seeking = true;

View File

@@ -106,6 +106,9 @@ void Frame::DeepCopy(const Frame& other)
pixel_ratio = Fraction(other.pixel_ratio.num, other.pixel_ratio.den);
sample_rate = other.sample_rate;
channels = other.channels;
if (other.wave_image)
wave_image = new Magick::Image(*(other.wave_image));
}
// Deallocate image and audio memory
@@ -116,6 +119,8 @@ void Frame::DeletePointers()
image = NULL;
delete audio;
audio = NULL;
delete wave_image;
wave_image = NULL;
}
// Display the frame image to the screen (primarily used for debugging reasons)
@@ -272,7 +277,7 @@ const Magick::PixelPacket* Frame::GetWaveformPixels(int width, int height)
void Frame::DisplayWaveform()
{
// Get audio wave form image
Magick::Image *wave_image = GetWaveform(720, 480);
GetWaveform(720, 480);
// Display Image
wave_image->display();
@@ -385,24 +390,6 @@ const Magick::PixelPacket* Frame::GetPixels(int row)
return image->getConstPixels(0,row, image->columns(), 1);
}
// Get pixel data (for a resized image)
// Creates a resized, colorized, and blurred copy of the frame image and
// returns a pointer to its pixel packets. The 'frame' parameter is only
// referenced by the commented-out debug write below.
// NOTE(review): small_image is reallocated with 'new' on every call without
// freeing the previous allocation — potential memory leak if called repeatedly; confirm ownership elsewhere.
const Magick::PixelPacket* Frame::GetPixels(unsigned int width, unsigned int height, int frame)
{
// Create a new resized image
//Magick::Image newImage = *image;
small_image = new Magick::Image(*(image));
small_image->resize(Magick::Geometry(width, height));
small_image->colorize(255, 0, 0, Magick::Color(0,0,255));
small_image->blur(5.0, 5.0);
// stringstream file;
// file << "frame" << frame << ".png";
// small_image->write(file.str());
// Return array of pixel packets (valid until small_image is modified or freed)
return small_image->getConstPixels(0,0, small_image->columns(), small_image->rows());
}
// Set Pixel Aspect Ratio
void Frame::SetPixelRatio(int num, int den)
{

View File

@@ -27,3 +27,9 @@ int Framerate::GetRoundedFPS() {
float Framerate::GetFPS() {
return (float) m_numerator / m_denominator;
}
// Return the frame rate expressed as a Fraction (numerator over denominator)
Fraction Framerate::GetFraction()
{
	Fraction rate(m_numerator, m_denominator);
	return rate;
}

View File

@@ -14,61 +14,70 @@ void FrameReady(int number)
int main()
{
// Create timeline
Timeline t(640, 360, Framerate(24,1));
// Add some clips
Clip c1("/home/jonathan/Videos/sintel-1024-stereo.mp4");
c1.Position(0.0);
//c1.time.AddPoint(500, 500, LINEAR);
c1.time.AddPoint(1, 300);
c1.time.AddPoint(200, 500, LINEAR);
c1.time.AddPoint(400, 100);
c1.time.AddPoint(500, 500);
// Add clips
t.AddClip(&c1);
// Create a writer
FFmpegWriter w("/home/jonathan/output.webm");
w.DisplayInfo();
// Set options
w.SetAudioOptions(true, "libvorbis", 44100, 2, 128000, false);
w.SetVideoOptions(true, "libvpx", Fraction(24, 1), 640, 360, Fraction(1,1), false, false, 2000000);
// Prepare Streams
w.PrepareStreams();
// Write header
w.WriteHeader();
// Output stream info
w.OutputStreamInfo();
for (int frame = 1; frame <= 500; frame++)
{
Frame *f = t.GetFrame(frame);
f->AddOverlayNumber(0);
// Write frame
cout << "queue frame " << frame << " (" << f << ")" << endl;
w.WriteFrame(f);
}
// Write Footer
w.WriteTrailer();
// Close writer & reader
w.Close();
// Close timeline
t.Close();
cout << "Successfully Finished Timeline DEMO" << endl;
return 0;
// // Create timeline
// Timeline t(640, 360, Framerate(24,1));
//
// // Add some clips
// Clip c1("../../src/examples/piano.wav");
// c1.Position(0.0);
//
// c1.time.AddPoint(1, 50);
// c1.time.AddPoint(100, 1);
// c1.time.AddPoint(200, 90);
// c1.time.PrintValues();
//
// //c1.time.AddPoint(500, 500, LINEAR);
//// c1.time.AddPoint(1, 300);
//// c1.time.AddPoint(200, 500, LINEAR);
//// c1.time.AddPoint(400, 100);
//// c1.time.AddPoint(500, 500);
//
// // Add clips
// t.AddClip(&c1);
//
//
// // Create a writer
// FFmpegWriter w("/home/jonathan/output.webm");
// w.DisplayInfo();
//
// // Set options
// w.SetAudioOptions(true, "libvorbis", 44100, 2, 128000, false);
// //w.SetVideoOptions(true, "libvpx", Fraction(24, 1), 640, 360, Fraction(1,1), false, false, 2000000);
//
// // Prepare Streams
// w.PrepareStreams();
//
// // Write header
// w.WriteHeader();
//
// // Output stream info
// w.OutputStreamInfo();
//
// for (int frame = 1; frame <= 162; frame++)
// {
// Frame *f = t.GetFrame(frame);
// if (f)
// {
// //f->AddOverlayNumber(0);
//
// // Write frame
// //cout << "queue frame " << frame << endl;
// cout << "queue frame " << frame << " (" << f->number << ", " << f << ")" << endl;
// w.WriteFrame(f);
// }
// }
//
// // Write Footer
// w.WriteTrailer();
//
// // Close writer & reader
// w.Close();
//
// // Close timeline
// t.Close();
//
// cout << "Successfully Finished Timeline DEMO" << endl;
// return 0;
@@ -104,10 +113,10 @@ int main()
// openshot::FFmpegReader r("../../src/examples/test.mp4");
// openshot::FFmpegReader r("../../src/examples/test1.mp4");
// openshot::FFmpegReader r("../../src/examples/piano.wav");
openshot::FFmpegReader r("../../src/examples/piano.wav");
// openshot::FFmpegReader r("/home/jonathan/Videos/big-buck-bunny_trailer.webm");
openshot::FFmpegReader r("/home/jonathan/Videos/sintel-1024-stereo.mp4");
// openshot::FFmpegReader r("/home/jonathan/Videos/sintel-1024-stereo.mp4");
// openshot::FFmpegReader r("/home/jonathan/Videos/OpenShot_Now_In_3d.mp4");
// openshot::FFmpegReader r("/home/jonathan/Videos/sintel_trailer-720p.mp4");
// openshot::FFmpegReader r("/home/jonathan/Aptana Studio Workspace/OpenShotLibrary/src/examples/piano.wav");
@@ -116,63 +125,63 @@ int main()
// openshot::FFmpegReader r("/home/jonathan/Videos/60fps.mp4");
// openshot::FFmpegReader r("/home/jonathan/Aptana Studio Workspace/OpenShotLibrary/src/examples/asdf.wdf");
// // Display debug info
// r.Open();
// r.DisplayInfo();
//
// // Create a writer
// FFmpegWriter w("/home/jonathan/output.webm");
// w.DisplayInfo();
//
// // Set options
// w.SetAudioOptions(true, "libvorbis", 44100, 2, 128000, false);
// w.SetVideoOptions(true, "libvpx", Fraction(24, 1), 640, 360, Fraction(1,1), false, false, 2000000);
//
// // Prepare Streams
// w.PrepareStreams();
//
// // Set Options
//// w.SetOption(VIDEO_STREAM, "quality", "good");
//// w.SetOption(VIDEO_STREAM, "g", "120");
//// w.SetOption(VIDEO_STREAM, "qmin", "11");
//// w.SetOption(VIDEO_STREAM, "qmax", "51");
//// w.SetOption(VIDEO_STREAM, "profile", "0");
//// w.SetOption(VIDEO_STREAM, "speed", "0");
//// w.SetOption(VIDEO_STREAM, "level", "216");
//// w.SetOption(VIDEO_STREAM, "rc_lookahead", "16");
//// w.SetOption(VIDEO_STREAM, "rc_min_rate", "100000");
//// w.SetOption(VIDEO_STREAM, "rc_max_rate", "24000000");
//// w.SetOption(VIDEO_STREAM, "slices", "4");
//// w.SetOption(VIDEO_STREAM, "arnr_max_frames", "7");
//// w.SetOption(VIDEO_STREAM, "arnr_strength", "5");
//// w.SetOption(VIDEO_STREAM, "arnr_type", "3");
//
// // Write header
// w.WriteHeader();
//
// // Output stream info
// w.OutputStreamInfo();
//
// //Frame *f = r.GetFrame(1);
//
// //for (int frame = 800; frame >= 600; frame--)
// for (int frame = 1; frame <= 200; frame++)
// {
// Frame *f = r.GetFrame(frame);
// f->AddOverlayNumber(0);
// //f->Display();
//
// // Write frame
// cout << "queue frame " << frame << endl;
// w.WriteFrame(f);
// }
//
// // Write Footer
// w.WriteTrailer();
//
// // Close writer & reader
// w.Close();
// r.Close();
// Display debug info
r.Open();
r.DisplayInfo();
// Create a writer
FFmpegWriter w("/home/jonathan/output.webm");
w.DisplayInfo();
// Set options
w.SetAudioOptions(true, "libvorbis", 44100, 2, 128000, false);
//w.SetVideoOptions(true, "libvpx", Fraction(24, 1), 640, 360, Fraction(1,1), false, false, 2000000);
// Prepare Streams
w.PrepareStreams();
// Set Options
// w.SetOption(VIDEO_STREAM, "quality", "good");
// w.SetOption(VIDEO_STREAM, "g", "120");
// w.SetOption(VIDEO_STREAM, "qmin", "11");
// w.SetOption(VIDEO_STREAM, "qmax", "51");
// w.SetOption(VIDEO_STREAM, "profile", "0");
// w.SetOption(VIDEO_STREAM, "speed", "0");
// w.SetOption(VIDEO_STREAM, "level", "216");
// w.SetOption(VIDEO_STREAM, "rc_lookahead", "16");
// w.SetOption(VIDEO_STREAM, "rc_min_rate", "100000");
// w.SetOption(VIDEO_STREAM, "rc_max_rate", "24000000");
// w.SetOption(VIDEO_STREAM, "slices", "4");
// w.SetOption(VIDEO_STREAM, "arnr_max_frames", "7");
// w.SetOption(VIDEO_STREAM, "arnr_strength", "5");
// w.SetOption(VIDEO_STREAM, "arnr_type", "3");
// Write header
w.WriteHeader();
// Output stream info
w.OutputStreamInfo();
//Frame *f = r.GetFrame(1);
//for (int frame = 131; frame >= 1; frame--)
for (int frame = 1; frame <= 131; frame++)
{
Frame *f = r.GetFrame(frame);
//f->AddOverlayNumber(0);
//f->Display();
// Write frame
cout << "queue frame " << frame << endl;
w.WriteFrame(f);
}
// Write Footer
w.WriteTrailer();
// Close writer & reader
w.Close();
r.Close();
cout << "Successfully executed Main.cpp!" << endl;

View File

@@ -76,133 +76,133 @@ void Player::Play()
cout << setprecision(6);
cout << "START PREPARING SURFACES..." << endl;
for (int stuff = 0; stuff < number_of_cycles; stuff++)
{
// Get pointer to pixels of image.
Frame *f = reader->GetFrame(300 + stuff);
// Create YUV Overlay
SDL_Overlay *bmp;
bmp = SDL_CreateYUVOverlay(reader->info.width, reader->info.height, SDL_YV12_OVERLAY, screen);
SDL_LockYUVOverlay(bmp);
// Get pixels for resized frame (for reduced color needed by YUV420p)
int divider = 2;
const Magick::PixelPacket *reduced_color = f->GetPixels(reader->info.width / divider, reader->info.height / divider, f->number);
int number_of_colors = (reader->info.width / divider) * (reader->info.height / divider);
int pixel_index = 0;
int biggest_y = 0;
int smallest_y = 512;
for (int row = 0; row < screen->h; row++) {
// Get array of pixels for this row
//cout << "row: " << row << endl;
const Magick::PixelPacket *imagepixels = f->GetPixels(row);
// Loop through pixels on this row
for (int column = 0; column < screen->w; column++) {
// Get a pixel from this row
const Magick::PixelPacket *pixel = imagepixels;
// Get the RGB colors
float r = pixel[column].red / 255.0;
float b = pixel[column].blue / 255.0;
float g = pixel[column].green / 255.0;
// Calculate the Y value (brightness or luminance)
float y = (0.299 * r) + (0.587 * g) + (0.114 * b);
// if (y > biggest_y)
// biggest_y = y;
// if (y < smallest_y)
// smallest_y = y;
// Update the Y value for every pixel
bmp->pixels[0][pixel_index] = y;
//bmp->pixels[1][pixel_index] = 0;
//bmp->pixels[2][pixel_index] = 0;
// Increment counter
pixel_index++;
}
}
// cout << "Biggest Y: " << biggest_y << ", Smallest Y: " << smallest_y << endl;
// cout << "ADD COLOR TO YUV OVERLAY" << endl;
// Loop through the UV (color info)
//int color_counter = 511;
//number_of_colors = bmp->pitches[1] * 218;
// int biggest_v = 0;
// int smallest_v = 512;
// int biggest_u = 0;
// int smallest_u = 512;
for (int pixel_index = 0; pixel_index < number_of_colors; pixel_index++)
{
// Get a pixel from this row
const Magick::PixelPacket *pixel = reduced_color;
// Get the RGB colors
float r = pixel[pixel_index].red / 255.0;
float b = pixel[pixel_index].blue / 255.0;
float g = pixel[pixel_index].green / 255.0;
// float r = 100.0;
// float g = 100.0;
// float b = 100.0;
// Calculate UV colors
float v = (0.439 * r) - (0.368 * g) - (0.071 * b) + 128;
float u = (-0.148 * r) - (0.291 * g) + (0.439 * b) + 128;
// // Grey pixel
// if (pixel_index == 40650)
// {
// cout << "GREY FOUND!!!" << endl;
// cout << "r: " << int(r) << ", g: " << int(g) << ", b: " << int(b) << " v: " << int(v) << ", u: " << int(u) << endl;
// }
//for (int stuff = 0; stuff < number_of_cycles; stuff++)
//{
// // Get pointer to pixels of image.
// Frame *f = reader->GetFrame(300 + stuff);
//
// // Pink pixel
// if (pixel_index == 42698)
// {
// cout << "PINK FOUND!!!" << endl;
// cout << "r: " << int(r) << ", g: " << int(g) << ", b: " << int(b) << " v: " << int(v) << ", u: " << int(u) << endl;
// }
// if (v > 255.0 || v <= 0.0)
// cout << "TOO BIG v!!!!" << endl;
// if (u > 255.0 || u <= 0.0)
// cout << "TOO BIG u!!!!" << endl;
// if (v > biggest_v)
// biggest_v = v;
// if (v < smallest_v)
// smallest_v = v;
// // Create YUV Overlay
// SDL_Overlay *bmp;
// bmp = SDL_CreateYUVOverlay(reader->info.width, reader->info.height, SDL_YV12_OVERLAY, screen);
// SDL_LockYUVOverlay(bmp);
//
// if (u > biggest_u)
// biggest_u = u;
// if (u < smallest_u)
// smallest_u = u;
// Update the UV values for every pixel
bmp->pixels[1][pixel_index] = v * 1.0;
bmp->pixels[2][pixel_index] = u * 1.0;
//color_counter++;
}
//cout << "Biggest V: " << biggest_v << ", Smallest V: " << smallest_v << endl;
//cout << "Biggest U: " << biggest_u << ", Smallest U: " << smallest_u << endl;
SDL_UnlockYUVOverlay(bmp);
// Add to vector
overlays.push_back(bmp);
// Update surface.
//SDL_UpdateRect(screen, 0, 0, reader->info.width, reader->info.height);
}
// // Get pixels for resized frame (for reduced color needed by YUV420p)
// int divider = 2;
// //const Magick::PixelPacket *reduced_color = f->GetPixels(reader->info.width / divider, reader->info.height / divider, f->number);
// int number_of_colors = (reader->info.width / divider) * (reader->info.height / divider);
//
// int pixel_index = 0;
// int biggest_y = 0;
// int smallest_y = 512;
// for (int row = 0; row < screen->h; row++) {
// // Get array of pixels for this row
// //cout << "row: " << row << endl;
// const Magick::PixelPacket *imagepixels = f->GetPixels(row);
//
// // Loop through pixels on this row
// for (int column = 0; column < screen->w; column++) {
//
// // Get a pixel from this row
// const Magick::PixelPacket *pixel = imagepixels;
//
// // Get the RGB colors
// float r = pixel[column].red / 255.0;
// float b = pixel[column].blue / 255.0;
// float g = pixel[column].green / 255.0;
//
// // Calculate the Y value (brightness or luminance)
// float y = (0.299 * r) + (0.587 * g) + (0.114 * b);
//
//// if (y > biggest_y)
//// biggest_y = y;
//// if (y < smallest_y)
//// smallest_y = y;
//
// // Update the Y value for every pixel
// bmp->pixels[0][pixel_index] = y;
// //bmp->pixels[1][pixel_index] = 0;
// //bmp->pixels[2][pixel_index] = 0;
//
// // Increment counter
// pixel_index++;
//
// }
// }
//
//// cout << "Biggest Y: " << biggest_y << ", Smallest Y: " << smallest_y << endl;
//// cout << "ADD COLOR TO YUV OVERLAY" << endl;
//
// // Loop through the UV (color info)
// //int color_counter = 511;
// //number_of_colors = bmp->pitches[1] * 218;
//// int biggest_v = 0;
//// int smallest_v = 512;
//// int biggest_u = 0;
//// int smallest_u = 512;
// for (int pixel_index = 0; pixel_index < number_of_colors; pixel_index++)
// {
// // Get a pixel from this row
// const Magick::PixelPacket *pixel = reduced_color;
//
// // Get the RGB colors
// float r = pixel[pixel_index].red / 255.0;
// float b = pixel[pixel_index].blue / 255.0;
// float g = pixel[pixel_index].green / 255.0;
//// float r = 100.0;
//// float g = 100.0;
//// float b = 100.0;
//
// // Calculate UV colors
// float v = (0.439 * r) - (0.368 * g) - (0.071 * b) + 128;
// float u = (-0.148 * r) - (0.291 * g) + (0.439 * b) + 128;
//
//// // Grey pixel
//// if (pixel_index == 40650)
//// {
//// cout << "GREY FOUND!!!" << endl;
//// cout << "r: " << int(r) << ", g: " << int(g) << ", b: " << int(b) << " v: " << int(v) << ", u: " << int(u) << endl;
//// }
////
//// // Pink pixel
//// if (pixel_index == 42698)
//// {
//// cout << "PINK FOUND!!!" << endl;
//// cout << "r: " << int(r) << ", g: " << int(g) << ", b: " << int(b) << " v: " << int(v) << ", u: " << int(u) << endl;
//// }
//
//// if (v > 255.0 || v <= 0.0)
//// cout << "TOO BIG v!!!!" << endl;
//// if (u > 255.0 || u <= 0.0)
//// cout << "TOO BIG u!!!!" << endl;
//
//// if (v > biggest_v)
//// biggest_v = v;
//// if (v < smallest_v)
//// smallest_v = v;
////
//// if (u > biggest_u)
//// biggest_u = u;
//// if (u < smallest_u)
//// smallest_u = u;
//
// // Update the UV values for every pixel
// bmp->pixels[1][pixel_index] = v * 1.0;
// bmp->pixels[2][pixel_index] = u * 1.0;
//
// //color_counter++;
// }
//
// //cout << "Biggest V: " << biggest_v << ", Smallest V: " << smallest_v << endl;
// //cout << "Biggest U: " << biggest_u << ", Smallest U: " << smallest_u << endl;
//
// SDL_UnlockYUVOverlay(bmp);
//
// // Add to vector
// overlays.push_back(bmp);
//
// // Update surface.
// //SDL_UpdateRect(screen, 0, 0, reader->info.width, reader->info.height);
//}
cout << "START DISPLAYING SURFACES..." << endl;

View File

@@ -15,6 +15,10 @@ Timeline::Timeline(int width, int height, Framerate fps) :
// Add an openshot::Clip to the timeline
void Timeline::AddClip(Clip* clip)
{
// All clips must be converted to the frame rate of this timeline,
// so assign the same frame rate to each clip.
clip->Reader()->info.fps = fps.GetFraction();
// Add clip to list
clips.push_back(clip);
@@ -102,7 +106,7 @@ Frame* Timeline::GetFrame(int requested_frame) throw(ReaderClosed)
Clip *clip = (*clip_itr);
// Does clip intersect the current requested time
bool does_clip_intersect = (clip->Position() <= requested_time && clip->Position() + clip->Reader()->info.duration >= requested_time);
bool does_clip_intersect = (clip->Position() <= requested_time && clip->Position() + clip->End() >= requested_time);
// Open or Close this clip, based on if it's intersecting or not
update_open_clips(clip, does_clip_intersect);
@@ -112,7 +116,8 @@ Frame* Timeline::GetFrame(int requested_frame) throw(ReaderClosed)
{
// Display the clip (DEBUG)
return clip->GetFrame(requested_frame);
}
} else
cout << "FRAME NOT IN CLIP DURATION: frame: " << requested_frame << ", pos: " << clip->Position() << ", end: " << clip->End() << endl;
}
// No clips found