The timeline classes are finally working correctly, and have the following features:

1) Layers (unlimited # of layers)
2) IN, OUT, and Position (time)
3) X, Y offset
4) Alpha
5) Rotation
6) Transparency + compositing + overlays
This commit is contained in:
Jonathan Thomas
2012-11-08 04:35:21 -06:00
parent 42d7565ba1
commit 5b77abdf27
8 changed files with 82 additions and 53 deletions

View File

@@ -147,6 +147,7 @@ namespace openshot {
/// Get the layer this clip sits on; clips on higher-numbered layers
/// are composited over (cover) clips on lower-numbered layers.
int Layer() {
	return layer;
}
/// Get the start position of this clip, i.e. how many seconds are
/// trimmed from the beginning of the source video.
float Start() {
	return start;
}
float End(); ///<Get end position of clip (trim end of video), which can be affected by the time curve.
/// Get the length of this clip (in seconds): the trimmed end position
/// minus the trimmed start position.
/// NOTE: the original body was missing the `return` keyword, so the
/// computed value was discarded and the function fell off the end —
/// undefined behavior for a value-returning function.
float Duration() { return End() - Start(); }
/// Set basic properties
void Position(float value) { position = value; } ///<Set position on timeline (in seconds)

View File

@@ -32,7 +32,7 @@ namespace openshot
{
private:
string path;
tr1::shared_ptr<Frame> image_frame;
tr1::shared_ptr<Magick::Image> image;
bool is_open;
public:

View File

@@ -23,7 +23,7 @@ void Clip::init_settings()
location_y = Keyframe(0.0);
// Init alpha & rotation
alpha = Keyframe(100.0);
alpha = Keyframe(0.0);
rotation = Keyframe(0.0);
// Init time & volume
@@ -143,7 +143,8 @@ void Clip::Open() throw(InvalidFile)
file_reader->Open();
// Set some clip properties from the file reader
End(file_reader->info.duration);
if (end == 0)
End(file_reader->info.duration);
}
}
@@ -157,15 +158,17 @@ void Clip::Close()
// Get end position of clip (trim end of video), which can be affected by the time curve.
float Clip::End()
{
// Determine the FPS of this clip
float fps = 24.0;
if (file_reader)
// file reader
fps = file_reader->info.fps.ToFloat();
// if a time curve is present, use its length
if (time.Points.size() > 1)
{
// Determine the FPS of this clip
float fps = 24.0;
if (file_reader)
// file reader
fps = file_reader->info.fps.ToFloat();
return float(time.GetLength()) / fps;
}
else
// just use the duration (as detected by the reader)
return end;
@@ -233,7 +236,6 @@ void Clip::reverse_buffer(juce::AudioSampleBuffer* buffer)
// Adjust the audio and image of a time mapped frame
tr1::shared_ptr<Frame> Clip::get_time_mapped_frame(tr1::shared_ptr<Frame> frame, int frame_number)
{
cout << "TIME MAPPER: " << frame_number << endl;
tr1::shared_ptr<Frame> new_frame;
// Check for a valid time map curve

View File

@@ -442,23 +442,6 @@ void FFmpegWriter::WriteTrailer()
if (info.has_audio && audio_st)
write_audio_packets(true);
// Experimental: Repeat last frame many times, to pad
// the end of the video, to ensure the codec does not
// ignore the final frames.
// if (last_frame)
// {
// // Create black frame
// tr1::shared_ptr<Frame> padding_frame(new Frame(999999, last_frame->GetWidth(), last_frame->GetHeight(), "#000000", last_frame->GetAudioSamplesCount(), last_frame->GetAudioChannelsCount()));
// padding_frame->AddColor(last_frame->GetWidth(), last_frame->GetHeight(), "#000000");
//
// // Add the black frame many times
// for (int p = 0; p < 100; p++)
// WriteFrame(padding_frame);
//
// // Write these blank frames
// write_queued_frames();
// }
// Flush encoders (who sometimes hold on to frames)
flush_encoders();

View File

@@ -430,6 +430,9 @@ void Frame::AddColor(int width, int height, string color)
// Create new image object, and fill with pixel data
image = tr1::shared_ptr<Magick::Image>(new Magick::Image(Magick::Geometry(width, height), Magick::Color(color)));
// Give image a transparent background color
image->backgroundColor(Magick::Color("none"));
// Update height and width
width = image->columns();
height = image->rows();
@@ -441,6 +444,9 @@ void Frame::AddImage(int width, int height, const string map, const Magick::Stor
// Create new image object, and fill with pixel data
image = tr1::shared_ptr<Magick::Image>(new Magick::Image(width, height, map, type, pixels));
// Give image a transparent background color
image->backgroundColor(Magick::Color("none"));
// Update height and width
width = image->columns();
height = image->rows();
@@ -520,6 +526,10 @@ void Frame::AddImage(tr1::shared_ptr<Magick::Image> new_image, float alpha)
// Add audio samples to a specific channel
void Frame::AddAudio(int destChannel, int destStartSample, const float* source, int numSamples, float gainToApplyToSource = 1.0f)
{
// Extend audio buffer (if needed)
if (destStartSample + numSamples > audio->getNumSamples())
audio->setSize(audio->getNumChannels(), destStartSample + numSamples, true, true, false);
// Always clear the range of samples first
audio->clear(destChannel, destStartSample, numSamples);

View File

@@ -19,30 +19,26 @@ void ImageReader::Open() throw(InvalidFile)
if (!is_open)
{
// Attempt to open file
Magick::Image* source = NULL;
try
{
// load image
source = new Magick::Image(path);
image = tr1::shared_ptr<Magick::Image>(new Magick::Image(path));
// Give image a transparent background color
image->backgroundColor(Magick::Color("none"));
}
catch (Magick::Exception e) {
// raise exception
throw InvalidFile("File could not be opened.", path);
}
// Create or get frame object
image_frame = tr1::shared_ptr<Frame>(new Frame(1, source->size().width(), source->size().height(), "#000000", 0, 2));
// Add Image data to frame
image_frame->AddImage(tr1::shared_ptr<Magick::Image>(source));
// Update image properties
info.has_audio = false;
info.has_video = true;
info.file_size = source->fileSize();
info.vcodec = source->format();
info.width = source->size().width();
info.height = source->size().height();
info.file_size = image->fileSize();
info.vcodec = image->format();
info.width = image->size().width();
info.height = image->size().height();
info.pixel_ratio.num = 1;
info.pixel_ratio.den = 1;
info.duration = 60 * 60 * 24; // 24 hour duration
@@ -85,10 +81,16 @@ tr1::shared_ptr<Frame> ImageReader::GetFrame(int requested_frame) throw(ReaderCl
if (!is_open)
throw ReaderClosed("The ImageReader is closed. Call Open() before calling this method.", path);
if (image_frame)
if (image)
{
// Always return same frame (regardless of which frame number was requested)
image_frame->number = requested_frame;
// Create or get frame object
tr1::shared_ptr<Frame> image_frame(new Frame(requested_frame, image->size().width(), image->size().height(), "#000000", 0, 2));
// Add Image data to frame
tr1::shared_ptr<Magick::Image> copy_image(new Magick::Image(*image.get()));
image_frame->AddImage(copy_image);
// return frame object
return image_frame;
}
else

View File

@@ -31,8 +31,33 @@ int main()
Timeline t(640, 360, Framerate(24,1), 44100, 2);
// Add some clips
Clip c1(new FFmpegReader("/home/jonathan/Videos/sintel_trailer-720p.mp4"));
Clip c1(new FFmpegReader("/home/jonathan/Videos/sintel-1024-stereo.mp4"));
Clip c2(new ImageReader("/home/jonathan/Apps/videcho_site/media/logos/watermark.png"));
c1.Position(1.0);
c2.Position(1.0);
c2.Layer(1);
c1.rotation.AddPoint(1, 1);
c1.rotation.AddPoint(300, 360);
c1.alpha.AddPoint(1, 0);
c1.alpha.AddPoint(300, 1);
c2.alpha.AddPoint(1, 1);
c2.alpha.AddPoint(30, 0);
c2.alpha.AddPoint(100, 0, LINEAR);
c2.alpha.AddPoint(150, 1);
c2.End(6.25);
c2.location_x.AddPoint(1, 0);
c2.location_x.AddPoint(300, 530);
c2.location_y.AddPoint(1, 0);
c2.location_y.AddPoint(300, 300);
c2.rotation.AddPoint(60, 0, LINEAR);
c2.rotation.AddPoint(150, 360);
// LINEAR Reverse
//c1.time.AddPoint(1, 500, LINEAR);
@@ -79,6 +104,7 @@ int main()
// Add clips
t.AddClip(&c1);
t.AddClip(&c2);
// Create a writer
@@ -99,7 +125,7 @@ int main()
// Output stream info
w.OutputStreamInfo();
for (int frame = 1; frame <= 30; frame++)
for (int frame = 1; frame <= 300; frame++)
{
tr1::shared_ptr<Frame> f = t.GetFrame(frame);
if (f)
@@ -159,7 +185,7 @@ int main()
// openshot::FFmpegReader r("../../src/examples/piano.wav");
// openshot::FFmpegReader r("/home/jonathan/Videos/big-buck-bunny_trailer.webm");
openshot::FFmpegReader r("/home/jonathan/Desktop/test2.flv");
openshot::FFmpegReader r("/home/jonathan/Videos/sintel-1024-stereo.mp4");
// openshot::FFmpegReader r("/home/jonathan/Videos/OpenShot_Now_In_3d.mp4");
// openshot::FFmpegReader r("/home/jonathan/Videos/sintel_trailer-720p.mp4");
// openshot::FFmpegReader r("/home/jonathan/Aptana Studio Workspace/OpenShotLibrary/src/examples/piano.wav");

View File

@@ -48,8 +48,15 @@ void Timeline::add_layer(tr1::shared_ptr<Frame> new_frame, Clip* source_clip, in
new_frame->AddColor(width, height, "#000000");
// Apply image effects
source_image->rotate(source_clip->rotation.GetValue(clip_frame_number));
source_image->opacity(1 - source_clip->alpha.GetValue(clip_frame_number));
if (source_clip->rotation.GetValue(clip_frame_number) != 0)
source_image->rotate(source_clip->rotation.GetValue(clip_frame_number));
if (source_clip->alpha.GetValue(clip_frame_number) != 0)
{
// Calculate opacity of new image
int new_opacity = 65535.0f * source_clip->alpha.GetValue(clip_frame_number);
if (new_opacity < 0) new_opacity = 0; // completely invisible
source_image->opacity(new_opacity);
}
// Copy audio from source frame
for (int channel = 0; channel < source_frame->GetAudioChannelsCount(); channel++)
@@ -57,9 +64,7 @@ void Timeline::add_layer(tr1::shared_ptr<Frame> new_frame, Clip* source_clip, in
// Composite images together
tr1::shared_ptr<Magick::Image> new_image = new_frame->GetImage();
new_image->composite(*source_image.get(), source_clip->location_x.GetInt(clip_frame_number), source_clip->location_y.GetInt(clip_frame_number), Magick::InCompositeOp);
new_image->composite(*source_image.get(), source_clip->location_x.GetInt(clip_frame_number), source_clip->location_y.GetInt(clip_frame_number), Magick::BlendCompositeOp);
}
// Update the list of 'opened' clips
@@ -137,7 +142,7 @@ tr1::shared_ptr<Frame> Timeline::GetFrame(int requested_frame) throw(ReaderClose
requested_frame = 1;
// Create blank frame (which will become the requested frame)
tr1::shared_ptr<Frame> new_frame = tr1::shared_ptr<Frame>(new Frame(requested_frame, width, height, "#000000", GetSamplesPerFrame(requested_frame), channels));
tr1::shared_ptr<Frame> new_frame(tr1::shared_ptr<Frame>(new Frame(requested_frame, width, height, "#000000", GetSamplesPerFrame(requested_frame), channels)));
// Calculate time of frame
float requested_time = calculate_time(requested_frame, fps);
@@ -150,7 +155,8 @@ tr1::shared_ptr<Frame> Timeline::GetFrame(int requested_frame) throw(ReaderClose
Clip *clip = (*clip_itr);
// Does clip intersect the current requested time
bool does_clip_intersect = (clip->Position() <= requested_time && clip->Position() + clip->End() >= requested_time);
float clip_duration = clip->End() - clip->Start();
bool does_clip_intersect = (clip->Position() <= requested_time && clip->Position() + clip_duration >= requested_time);
// Open or Close this clip, based on if it's intersecting or not
update_open_clips(clip, does_clip_intersect);
@@ -161,7 +167,6 @@ tr1::shared_ptr<Frame> Timeline::GetFrame(int requested_frame) throw(ReaderClose
// Determine the frame needed for this clip (based on the position on the timeline)
float time_diff = (requested_time - clip->Position()) + clip->Start();
int clip_frame_number = round(time_diff * fps.GetFPS()) + 1;
cout << "CLIP #: " << clip_frame_number << endl;
// Add clip's frame as layer
add_layer(new_frame, clip, clip_frame_number);