diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp
index dc84bae9..57c9171e 100644
--- a/src/FFmpegReader.cpp
+++ b/src/FFmpegReader.cpp
@@ -45,9 +45,9 @@ FFmpegReader::FFmpegReader(string path) throw(InvalidFile, NoStreamsFound, Inval
avcodec_register_all();
// Init cache
- working_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 60, info.width, info.height, info.sample_rate, info.channels);
- missing_frames.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 4, info.width, info.height, info.sample_rate, info.channels);
- final_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 4, info.width, info.height, info.sample_rate, info.channels);
+ working_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 30, info.width, info.height, info.sample_rate, info.channels);
+ missing_frames.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels);
+ final_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels);
// Open and Close the reader, to populate it's attributes (such as height, width, etc...)
Open();
@@ -213,9 +213,9 @@ void FFmpegReader::Open() throw(InvalidFile, NoStreamsFound, InvalidCodec)
previous_packet_location.sample_start = 0;
// Adjust cache size based on size of frame and audio
- working_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 60, info.width, info.height, info.sample_rate, info.channels);
- missing_frames.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 4, info.width, info.height, info.sample_rate, info.channels);
- final_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 4, info.width, info.height, info.sample_rate, info.channels);
+ working_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 30, info.width, info.height, info.sample_rate, info.channels);
+ missing_frames.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels);
+ final_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels);
// Mark as "open"
is_open = true;
diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp
index 86a67f82..4f298b33 100644
--- a/src/FFmpegWriter.cpp
+++ b/src/FFmpegWriter.cpp
@@ -476,7 +476,7 @@ void FFmpegWriter::write_queued_frames() throw (ErrorEncodingVideo)
AVFrame *av_frame = av_frames[frame];
// Deallocate AVPicture and AVFrame
- free(av_frame->data[0]); // TODO: Determine why av_free crashes on Windows
+ av_freep(&(av_frame->data[0]));
AV_FREE_FRAME(&av_frame);
av_frames.erase(frame);
}
@@ -1048,7 +1048,7 @@ void FFmpegWriter::write_audio_packets(bool final)
ChannelLayout channel_layout_in_frame = LAYOUT_MONO; // default channel layout
// Create a new array (to hold all S16 audio samples, for the current queued frames
- int16_t* all_queued_samples = new int16_t[(queued_audio_frames.size() * AVCODEC_MAX_AUDIO_FRAME_SIZE)];
+ int16_t* all_queued_samples = (int16_t*)av_malloc((sizeof(int16_t)*(queued_audio_frames.size() * AVCODEC_MAX_AUDIO_FRAME_SIZE)));
int16_t* all_resampled_samples = NULL;
int16_t* final_samples_planar = NULL;
int16_t* final_samples = NULL;
@@ -1183,9 +1183,9 @@ void FFmpegWriter::write_audio_packets(bool final)
memcpy(all_resampled_samples, audio_converted->data[0], nb_samples * info.channels * av_get_bytes_per_sample(output_sample_fmt));
// Remove converted audio
- free(audio_frame->data[0]); // TODO: Determine why av_free crashes on Windows
+ av_freep(&(audio_frame->data[0]));
AV_FREE_FRAME(&audio_frame);
- av_free(audio_converted->data[0]);
+ av_freep(&audio_converted->data[0]);
AV_FREE_FRAME(&audio_converted);
all_queued_samples = NULL; // this array cleared with above call
@@ -1247,7 +1247,7 @@ void FFmpegWriter::write_audio_packets(bool final)
audio_frame->nb_samples = audio_input_position / info.channels;
// Create a new array
- final_samples_planar = new int16_t[audio_frame->nb_samples * info.channels * (av_get_bytes_per_sample(output_sample_fmt) / av_get_bytes_per_sample(AV_SAMPLE_FMT_S16))];
+ final_samples_planar = (int16_t*)av_malloc(sizeof(int16_t) * audio_frame->nb_samples * info.channels * (av_get_bytes_per_sample(output_sample_fmt) / av_get_bytes_per_sample(AV_SAMPLE_FMT_S16)));
// Copy audio into buffer for frame
memcpy(final_samples_planar, samples, audio_frame->nb_samples * info.channels * av_get_bytes_per_sample(output_sample_fmt));
@@ -1274,8 +1274,9 @@ void FFmpegWriter::write_audio_packets(bool final)
memcpy(samples, frame_final->data[0], nb_samples * av_get_bytes_per_sample(audio_codec->sample_fmt) * info.channels);
// deallocate AVFrame
- free(audio_frame->data[0]); // TODO: Determine why av_free crashes on Windows
- AV_FREE_FRAME(&audio_frame);
+ av_freep(&(audio_frame->data[0]));
+ AV_FREE_FRAME(&audio_frame);
+ all_queued_samples = NULL; // this array cleared with above call
ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_audio_packets (Successfully completed 2nd resampling for Planar formats)", "nb_samples", nb_samples, "", -1, "", -1, "", -1, "", -1, "", -1);
@@ -1344,7 +1345,7 @@ void FFmpegWriter::write_audio_packets(bool final)
}
// deallocate AVFrame
- //free(frame_final->data[0]); // TODO: Determine why av_free crashes on Windows
+ av_freep(&(frame_final->data[0]));
AV_FREE_FRAME(&frame_final);
// deallocate memory for packet
@@ -1361,7 +1362,7 @@ void FFmpegWriter::write_audio_packets(bool final)
all_resampled_samples = NULL;
}
if (all_queued_samples) {
- delete[] all_queued_samples;
+ av_freep(&all_queued_samples);
all_queued_samples = NULL;
}
@@ -1386,7 +1387,7 @@ AVFrame* FFmpegWriter::allocate_avframe(PixelFormat pix_fmt, int width, int heig
if (!new_buffer)
{
// New Buffer
- new_buffer = new uint8_t[*buffer_size];
+ new_buffer = (uint8_t*)av_malloc(*buffer_size * sizeof(uint8_t));
// Attach buffer to AVFrame
avpicture_fill((AVPicture *)new_av_frame, new_buffer, pix_fmt, width, height);
new_av_frame->width = width;
diff --git a/src/Frame.cpp b/src/Frame.cpp
index 436be554..278b7cac 100644
--- a/src/Frame.cpp
+++ b/src/Frame.cpp
@@ -952,7 +952,7 @@ void Frame::cleanUpBuffer(void *info)
{
// Remove buffer since QImage tells us to
unsigned char* ptr_to_qbuffer = (unsigned char*) info;
- delete ptr_to_qbuffer;
+ delete[] ptr_to_qbuffer;
}
}
diff --git a/src/FrameMapper.cpp b/src/FrameMapper.cpp
index 7c9b4c2e..7a82806f 100644
--- a/src/FrameMapper.cpp
+++ b/src/FrameMapper.cpp
@@ -31,7 +31,7 @@ using namespace std;
using namespace openshot;
FrameMapper::FrameMapper(ReaderBase *reader, Fraction target, PulldownType target_pulldown, int target_sample_rate, int target_channels, ChannelLayout target_channel_layout) :
- reader(reader), target(target), pulldown(target_pulldown), final_cache(820 * 1024), is_dirty(true), avr(NULL)
+ reader(reader), target(target), pulldown(target_pulldown), is_dirty(true), avr(NULL)
{
// Set the original frame rate from the reader
original = Fraction(reader->info.fps.num, reader->info.fps.den);
@@ -52,6 +52,9 @@ FrameMapper::FrameMapper(ReaderBase *reader, Fraction target, PulldownType targe
// Used to toggle odd / even fields
field_toggle = true;
+ // Adjust cache size based on size of frame and audio
+ final_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, reader->info.width, reader->info.height, reader->info.sample_rate, reader->info.channels);
+
// init mapping between original and target frames
Init();
}
@@ -103,8 +106,8 @@ void FrameMapper::Init()
// Some framerates are handled special, and some use a generic Keyframe curve to
// map the framerates. These are the special framerates:
- if ((original.ToInt() == 24 || original.ToInt() == 25 || original.ToInt() == 30) &&
- (target.ToInt() == 24 || target.ToInt() == 25 || target.ToInt() == 30)) {
+ if ((fabs(original.ToFloat() - 24.0) < 1e-7 || fabs(original.ToFloat() - 25.0) < 1e-7 || fabs(original.ToFloat() - 30.0) < 1e-7) &&
+ (fabs(target.ToFloat() - 24.0) < 1e-7 || fabs(target.ToFloat() - 25.0) < 1e-7 || fabs(target.ToFloat() - 30.0) < 1e-7)) {
// Get the difference (in frames) between the original and target frame rates
float difference = target.ToInt() - original.ToInt();
@@ -332,8 +335,8 @@ tr1::shared_ptr FrameMapper::GetOrCreateFrame(long int number)
{
tr1::shared_ptr new_frame;
- // Init some basic properties about this frame
- int samples_in_frame = Frame::GetSamplesPerFrame(number, target, info.sample_rate, info.channels);
+ // Init some basic properties about this frame (keep sample rate and # channels the same as the original reader for now)
+ int samples_in_frame = Frame::GetSamplesPerFrame(number, target, reader->info.sample_rate, reader->info.channels);
try {
// Debug output
@@ -357,8 +360,8 @@ tr1::shared_ptr FrameMapper::GetOrCreateFrame(long int number)
ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::GetOrCreateFrame (create blank)", "number", number, "samples_in_frame", samples_in_frame, "", -1, "", -1, "", -1, "", -1);
// Create blank frame
- new_frame = tr1::shared_ptr(new Frame(number, info.width, info.height, "#000000", samples_in_frame, info.channels));
- new_frame->SampleRate(info.sample_rate);
+ new_frame = tr1::shared_ptr(new Frame(number, info.width, info.height, "#000000", samples_in_frame, reader->info.channels));
+ new_frame->SampleRate(reader->info.sample_rate);
new_frame->ChannelsLayout(info.channel_layout);
return new_frame;
}
@@ -407,7 +410,7 @@ tr1::shared_ptr FrameMapper::GetFrame(long int requested_frame) throw(Rea
MappedFrame mapped = GetMappedFrame(frame_number);
tr1::shared_ptr mapped_frame;
- // Get the mapped frame
+ // Get the mapped frame (keeping the sample rate and channels the same as the original... for the moment)
mapped_frame = GetOrCreateFrame(mapped.Odd.Frame);
// Get # of channels in the actual frame
@@ -520,6 +523,11 @@ tr1::shared_ptr FrameMapper::GetFrame(long int requested_frame) throw(Rea
void FrameMapper::PrintMapping()
{
+ // Check if mappings are dirty (and need to be recalculated)
+ if (is_dirty)
+ // Recalculate mappings
+ Init();
+
// Get the difference (in frames) between the original and target frame rates
float difference = target.ToInt() - original.ToInt();
@@ -663,6 +671,9 @@ void FrameMapper::ChangeMapping(Fraction target_fps, PulldownType target_pulldow
// Clear cache
final_cache.Clear();
+ // Adjust cache size based on size of frame and audio
+ final_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels);
+
// Deallocate resample buffer
if (avr) {
avresample_close(avr);
@@ -692,7 +703,7 @@ void FrameMapper::ResampleMappedAudio(tr1::shared_ptr frame, long int ori
total_frame_samples = samples_in_frame * channels_in_frame;
// Create a new array (to hold all S16 audio samples for the current queued frames)
- int16_t* frame_samples = new int16_t[total_frame_samples];
+ int16_t* frame_samples = (int16_t*) av_malloc(sizeof(int16_t)*total_frame_samples);
// Translate audio sample values back to 16 bit integers
for (int s = 0; s < total_frame_samples; s++)
@@ -770,9 +781,9 @@ void FrameMapper::ResampleMappedAudio(tr1::shared_ptr frame, long int ori
memcpy(resampled_samples, audio_converted->data[0], (nb_samples * av_get_bytes_per_sample(AV_SAMPLE_FMT_S16) * info.channels));
// Free frames
- free(audio_frame->data[0]); // TODO: Determine why av_free crashes on Windows
+ av_freep(&audio_frame->data[0]);
AV_FREE_FRAME(&audio_frame);
- av_free(audio_converted->data[0]);
+ av_freep(&audio_converted->data[0]);
AV_FREE_FRAME(&audio_converted);
frame_samples = NULL;
diff --git a/src/Timeline.cpp b/src/Timeline.cpp
index 100619e3..990d260d 100644
--- a/src/Timeline.cpp
+++ b/src/Timeline.cpp
@@ -57,7 +57,7 @@ Timeline::Timeline(int width, int height, Fraction fps, int sample_rate, int cha
info.video_length = info.fps.ToFloat() * info.duration;
// Init cache
- final_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 3, info.width, info.height, info.sample_rate, info.channels);
+ final_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels);
}
// Add an openshot::Clip to the timeline
@@ -987,7 +987,7 @@ void Timeline::ApplyJsonDiff(string value) throw(InvalidJSON, InvalidJSONKey) {
}
// Adjust cache (in case something changed)
- final_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 4, info.width, info.height, info.sample_rate, info.channels);
+ final_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels);
}
// Apply JSON diff to clips
diff --git a/tests/FrameMapper_Tests.cpp b/tests/FrameMapper_Tests.cpp
index 176ce565..6af909a0 100644
--- a/tests/FrameMapper_Tests.cpp
+++ b/tests/FrameMapper_Tests.cpp
@@ -71,8 +71,8 @@ TEST(FrameMapper_24_fps_to_30_fps_Pulldown_Classic)
// Create a reader
DummyReader r(Fraction(24,1), 720, 480, 22000, 2, 5.0);
- // Create mapping 24 fps and 29.97 fps
- FrameMapper mapping(&r, Fraction(30000, 1001), PULLDOWN_CLASSIC, 22000, 2, LAYOUT_STEREO);
+ // Create mapping 24 fps and 30 fps
+ FrameMapper mapping(&r, Fraction(30, 1), PULLDOWN_CLASSIC, 22000, 2, LAYOUT_STEREO);
MappedFrame frame2 = mapping.GetMappedFrame(2);
MappedFrame frame3 = mapping.GetMappedFrame(3);
@@ -88,8 +88,8 @@ TEST(FrameMapper_24_fps_to_30_fps_Pulldown_Advanced)
// Create a reader
DummyReader r(Fraction(24,1), 720, 480, 22000, 2, 5.0);
- // Create mapping 24 fps and 29.97 fps
- FrameMapper mapping(&r, Fraction(30000, 1001), PULLDOWN_ADVANCED, 22000, 2, LAYOUT_STEREO);
+ // Create mapping 24 fps and 30 fps
+ FrameMapper mapping(&r, Fraction(30, 1), PULLDOWN_ADVANCED, 22000, 2, LAYOUT_STEREO);
MappedFrame frame2 = mapping.GetMappedFrame(2);
MappedFrame frame3 = mapping.GetMappedFrame(3);
MappedFrame frame4 = mapping.GetMappedFrame(4);
@@ -108,8 +108,8 @@ TEST(FrameMapper_24_fps_to_30_fps_Pulldown_None)
// Create a reader
DummyReader r(Fraction(24,1), 720, 480, 22000, 2, 5.0);
- // Create mapping 24 fps and 29.97 fps
- FrameMapper mapping(&r, Fraction(30000, 1001), PULLDOWN_NONE, 22000, 2, LAYOUT_STEREO);
+ // Create mapping 24 fps and 30 fps
+ FrameMapper mapping(&r, Fraction(30, 1), PULLDOWN_NONE, 22000, 2, LAYOUT_STEREO);
MappedFrame frame4 = mapping.GetMappedFrame(4);
MappedFrame frame5 = mapping.GetMappedFrame(5);
@@ -123,9 +123,9 @@ TEST(FrameMapper_24_fps_to_30_fps_Pulldown_None)
TEST(FrameMapper_30_fps_to_24_fps_Pulldown_Classic)
{
// Create a reader
- DummyReader r(Fraction(30000, 1001), 720, 480, 22000, 2, 5.0);
+ DummyReader r(Fraction(30, 1), 720, 480, 22000, 2, 5.0);
- // Create mapping between 29.97 fps and 24 fps
+ // Create mapping between 30 fps and 24 fps
FrameMapper mapping(&r, Fraction(24, 1), PULLDOWN_CLASSIC, 22000, 2, LAYOUT_STEREO);
MappedFrame frame3 = mapping.GetMappedFrame(3);
MappedFrame frame4 = mapping.GetMappedFrame(4);
@@ -143,9 +143,9 @@ TEST(FrameMapper_30_fps_to_24_fps_Pulldown_Classic)
TEST(FrameMapper_30_fps_to_24_fps_Pulldown_Advanced)
{
// Create a reader
- DummyReader r(Fraction(30000, 1001), 720, 480, 22000, 2, 5.0);
+ DummyReader r(Fraction(30, 1), 720, 480, 22000, 2, 5.0);
- // Create mapping between 29.97 fps and 24 fps
+ // Create mapping between 30 fps and 24 fps
FrameMapper mapping(&r, Fraction(24, 1), PULLDOWN_ADVANCED, 22000, 2, LAYOUT_STEREO);
MappedFrame frame2 = mapping.GetMappedFrame(2);
MappedFrame frame3 = mapping.GetMappedFrame(3);
@@ -163,9 +163,9 @@ TEST(FrameMapper_30_fps_to_24_fps_Pulldown_Advanced)
TEST(FrameMapper_30_fps_to_24_fps_Pulldown_None)
{
// Create a reader
- DummyReader r(Fraction(30000, 1001), 720, 480, 22000, 2, 5.0);
+ DummyReader r(Fraction(30, 1), 720, 480, 22000, 2, 5.0);
- // Create mapping between 29.97 fps and 24 fps
+ // Create mapping between 30 fps and 24 fps
FrameMapper mapping(&r, Fraction(24, 1), PULLDOWN_NONE, 22000, 2, LAYOUT_STEREO);
MappedFrame frame4 = mapping.GetMappedFrame(4);
MappedFrame frame5 = mapping.GetMappedFrame(5);