Merge branch 'develop' into clip-refactor-keyframes

# Conflicts:
#	src/CacheDisk.cpp
#	src/Clip.cpp
#	src/Frame.cpp
#	src/QtHtmlReader.cpp
#	src/QtImageReader.cpp
#	src/QtTextReader.cpp
#	src/effects/Bars.cpp
#	src/effects/Crop.cpp
This commit is contained in:
Jonathan Thomas
2020-10-16 15:32:43 -05:00
32 changed files with 521 additions and 421 deletions

View File

@@ -2,6 +2,9 @@ stages:
- build-libopenshot
- trigger-openshot-qt
variables:
GIT_LOG_FORMAT: "- %h %ad %s [%aN]"
linux-builder:
stage: build-libopenshot
artifacts:
@@ -22,7 +25,7 @@ linux-builder:
- make doc
- ~/auto-update-docs "$CI_PROJECT_DIR/build" "$CI_COMMIT_REF_NAME"
- echo -e "CI_PROJECT_NAME:$CI_PROJECT_NAME\nCI_COMMIT_REF_NAME:$CI_COMMIT_REF_NAME\nCI_COMMIT_SHA:$CI_COMMIT_SHA\nCI_JOB_ID:$CI_JOB_ID" > "install-x64/share/$CI_PROJECT_NAME"
- git log $(git describe --tags --abbrev=0 @^)..@ --oneline --pretty=format:"%C(auto,yellow)%h%C(auto,magenta)% %C(auto,blue)%>(12,trunc)%ad %C(auto,green)%<(25,trunc)%aN%C(auto,reset)%s%C(auto,red)% gD% D" --date=short > "install-x64/share/$CI_PROJECT_NAME.log"
- git log $(git describe --tags --abbrev=0 '@^')..@ --oneline --no-abbrev --date=short --no-merges --pretty="tformat:$GIT_LOG_FORMAT" > "install-x64/share/$CI_PROJECT_NAME.log"
when: always
except:
- tags
@@ -47,7 +50,7 @@ mac-builder:
- make
- make install
- echo -e "CI_PROJECT_NAME:$CI_PROJECT_NAME\nCI_COMMIT_REF_NAME:$CI_COMMIT_REF_NAME\nCI_COMMIT_SHA:$CI_COMMIT_SHA\nCI_JOB_ID:$CI_JOB_ID" > "install-x64/share/$CI_PROJECT_NAME"
- git log $(git describe --tags --abbrev=0 @^)..@ --oneline --pretty=format:"%C(auto,yellow)%h%C(auto,magenta)% %C(auto,blue)%>(12,trunc)%ad %C(auto,green)%<(25,trunc)%aN%C(auto,reset)%s%C(auto,red)% gD% D" --date=short > "install-x64/share/$CI_PROJECT_NAME.log"
- git log $(git describe --tags --abbrev=0 '@^')..@ --oneline --no-abbrev --date=short --no-merges --pretty="tformat:$GIT_LOG_FORMAT" > "install-x64/share/$CI_PROJECT_NAME.log"
when: always
except:
- tags
@@ -74,7 +77,7 @@ windows-builder-x64:
- mingw32-make install
- New-Item -path "install-x64/share/" -Name "$CI_PROJECT_NAME" -Value "CI_PROJECT_NAME:$CI_PROJECT_NAME`nCI_COMMIT_REF_NAME:$CI_COMMIT_REF_NAME`nCI_COMMIT_SHA:$CI_COMMIT_SHA`nCI_JOB_ID:$CI_JOB_ID" -ItemType file -force
- $PREV_GIT_LABEL=(git describe --tags --abbrev=0 '@^')
- git log "$PREV_GIT_LABEL..@" --oneline --pretty=format:"%C(auto,yellow)%h%C(auto,magenta)% %C(auto,blue)%>(12,trunc)%ad %C(auto,green)%<(25,trunc)%aN%C(auto,reset)%s%C(auto,red)% gD% D" --date=short > "install-x64/share/$CI_PROJECT_NAME.log"
- git log "$PREV_GIT_LABEL..@" --oneline --no-abbrev --date=short --no-merges --pretty="tformat:$GIT_LOG_FORMAT" > "install-x64/share/$CI_PROJECT_NAME.log"
when: always
except:
- tags
@@ -101,7 +104,7 @@ windows-builder-x86:
- mingw32-make install
- New-Item -path "install-x86/share/" -Name "$CI_PROJECT_NAME" -Value "CI_PROJECT_NAME:$CI_PROJECT_NAME`nCI_COMMIT_REF_NAME:$CI_COMMIT_REF_NAME`nCI_COMMIT_SHA:$CI_COMMIT_SHA`nCI_JOB_ID:$CI_JOB_ID" -ItemType file -force
- $PREV_GIT_LABEL=(git describe --tags --abbrev=0 '@^')
- git log "$PREV_GIT_LABEL..@" --oneline --pretty=format:"%C(auto,yellow)%h%C(auto,magenta)% %C(auto,blue)%>(12,trunc)%ad %C(auto,green)%<(25,trunc)%aN%C(auto,reset)%s%C(auto,red)% gD% D" --date=short > "install-x86/share/$CI_PROJECT_NAME.log"
- git log "$PREV_GIT_LABEL..@" --oneline --no-abbrev --date=short --no-merges --pretty="tformat:$GIT_LOG_FORMAT" > "install-x86/share/$CI_PROJECT_NAME.log"
when: always
except:
- tags

View File

@@ -69,7 +69,7 @@ namespace openshot
* // Create blank frame (with specific frame #, samples, and channels)
* // Sample count should be 44100 / 30 fps = 1470 samples per frame
* int sample_count = 1470;
* std::shared_ptr<openshot::Frame> f(new openshot::Frame(frame_number, sample_count, 2));
* auto f = std::make_shared<openshot::Frame>(frame_number, sample_count, 2);
*
* // Create test samples with incrementing value
* float *audio_buffer = new float[sample_count];

View File

@@ -233,14 +233,13 @@ namespace openshot {
/// codecs have trouble seeking, and can introduce artifacts or blank images into the video.
bool enable_seek;
/// Constructor for FFmpegReader. This automatically opens the media file and loads
/// frame 1, or it throws one of the following exceptions.
FFmpegReader(std::string path);
/// Constructor for FFmpegReader. This only opens the media file to inspect its properties
/// if inspect_reader=true. When not inspecting the media file, it's much faster, and useful
/// when you are inflating the object using JSON after instantiating it.
FFmpegReader(std::string path, bool inspect_reader);
/// @brief Constructor for FFmpegReader.
///
/// Sets (and possibly opens) the media file path,
/// or throws an exception.
/// @param path The filesystem location to load
/// @param inspect_reader if true (the default), automatically opens the media file and loads frame 1.
FFmpegReader(const std::string& path, bool inspect_reader=true);
/// Destructor
virtual ~FFmpegReader();

View File

@@ -47,8 +47,6 @@
#include <cmath>
#include <ctime>
#include <iostream>
#include <stdio.h>
#include <unistd.h>
#include "CacheMemory.h"
#include "Exceptions.h"
@@ -251,9 +249,11 @@ namespace openshot {
public:
/// @brief Constructor for FFmpegWriter. Throws one of the following exceptions.
/// @brief Constructor for FFmpegWriter.
/// Throws an exception on failure to open path.
///
/// @param path The file path of the video file you want to open and read
FFmpegWriter(std::string path);
FFmpegWriter(const std::string& path);
/// Close the writer
void Close();

View File

@@ -34,16 +34,8 @@
#include <iomanip>
#include <sstream>
#include <queue>
#include <QtWidgets/QApplication>
#include <QtGui/QImage>
#include <QtGui/QColor>
#include <QtGui/QBitmap>
#include <QtCore/QString>
#include <QtCore/QVector>
#include <QtGui/QPainter>
#include <QtWidgets/QHBoxLayout>
#include <QtWidgets/QWidget>
#include <QtWidgets/QLabel>
#include <QApplication>
#include <QImage>
#include <memory>
#include <unistd.h>
#include "ZmqLogger.h"
@@ -73,17 +65,17 @@ namespace openshot
* There are many ways to create an instance of an openshot::Frame:
* @code
*
* // Most basic: a blank frame (300x200 blank image, 48kHz audio silence)
* // Most basic: a blank frame (all default values)
* Frame();
*
* // Image only settings (48kHz audio silence)
* // Image only settings
* Frame(1, // Frame number
* 720, // Width of image
* 480, // Height of image
* "#000000" // HTML color code of background color
* );
*
* // Audio only (300x200 blank image)
* // Audio only
* Frame(number, // Frame number
* 44100, // Sample rate of audio stream
* 2 // Number of audio channels
@@ -99,7 +91,7 @@ namespace openshot
* );
*
* // Some methods require a shared pointer to an openshot::Frame object.
* std::shared_ptr<Frame> f(new Frame(1, 720, 480, "#000000", 44100, 2));
* auto f = std::make_shared<openshot::Frame>(1, 720, 480, "#000000", 44100, 2);
*
* @endcode
*/
@@ -131,13 +123,13 @@ namespace openshot
bool has_image_data; ///< This frame has been loaded with pixel data
/// Constructor - blank frame (300x200 blank image, 48kHz audio silence)
/// Constructor - blank frame
Frame();
/// Constructor - image only (48kHz audio silence)
/// Constructor - image only
Frame(int64_t number, int width, int height, std::string color);
/// Constructor - audio only (300x200 blank image)
/// Constructor - audio only
Frame(int64_t number, int samples, int channels);
/// Constructor - image & audio

View File

@@ -41,5 +41,12 @@
#define OPEN_MP_NUM_PROCESSORS (std::min(omp_get_num_procs(), std::max(2, openshot::Settings::Instance()->OMP_THREADS) ))
#define FF_NUM_PROCESSORS (std::min(omp_get_num_procs(), std::max(2, openshot::Settings::Instance()->FF_THREADS) ))
// Set max-active-levels to the max supported, if possible
// (supported_active_levels is OpenMP 5.0 (November 2018) or later, only.)
#if (_OPENMP >= 201811)
#define OPEN_MP_MAX_ACTIVE openmp_get_supported_active_levels()
#else
#define OPEN_MP_MAX_ACTIVE OPEN_MP_NUM_PROCESSORS
#endif
#endif

View File

@@ -31,11 +31,13 @@
#ifndef OPENSHOT_VIDEO_RENDERER_WIDGET_H
#define OPENSHOT_VIDEO_RENDERER_WIDGET_H
#include <QtWidgets/QWidget>
#include <QtGui/QImage>
#include "../Fraction.h"
#include "VideoRenderer.h"
#include <QWidget>
#include <QImage>
#include <QPaintEvent>
#include <QRect>
class VideoRenderWidget : public QWidget
{

View File

@@ -219,19 +219,23 @@ namespace openshot {
public:
/// @brief Default Constructor for the timeline (which sets the canvas width and height and FPS)
/// @param width The width of the timeline (and thus, the generated openshot::Frame objects)
/// @param height The height of the timeline (and thus, the generated openshot::Frame objects)
/// @param fps The frames rate of the timeline
/// @param sample_rate The sample rate of the timeline's audio
/// @param channels The number of audio channels of the timeline
/// @brief Default Constructor for the timeline (which configures the default frame properties)
/// @param width The image width of generated openshot::Frame objects
/// @param height The image height of generated openshot::Frame objects
/// @param fps The frame rate of the generated video
/// @param sample_rate The audio sample rate
/// @param channels The number of audio channels
/// @param channel_layout The channel layout (i.e. mono, stereo, 3 point surround, etc...)
Timeline(int width, int height, openshot::Fraction fps, int sample_rate, int channels, openshot::ChannelLayout channel_layout);
/// @brief Constructor for the timeline (which loads a JSON structure from a file path, and initializes a timeline)
/// @brief Project-file constructor for the timeline
///
/// Loads a JSON structure from a file path, and
/// initializes the timeline described within.
///
/// @param projectPath The path of the UTF-8 *.osp project file (JSON contents). Contents will be loaded automatically.
/// @param convert_absolute_paths Should all paths be converted to absolute paths (based on the folder of the path provided)
Timeline(std::string projectPath, bool convert_absolute_paths);
/// @param convert_absolute_paths Should all paths be converted to absolute paths (relative to the location of projectPath)
Timeline(const std::string& projectPath, bool convert_absolute_paths);
virtual ~Timeline();

View File

@@ -36,7 +36,6 @@
#include <cmath>
#include <stdio.h>
#include <memory>
#include "../Color.h"
#include "../Json.h"
#include "../KeyFrame.h"

View File

@@ -64,15 +64,21 @@ namespace openshot
void init_effect_details();
public:
Keyframe saturation; ///< The color saturation: 0.0 = black and white, 1.0 = normal, 2.0 = double saturation
Keyframe saturation; ///< Overall color saturation: 0.0 = greyscale, 1.0 = normal, 2.0 = double saturation
Keyframe saturation_R; ///< Red color saturation
Keyframe saturation_G; ///< Green color saturation
Keyframe saturation_B; ///< Blue color saturation
/// Blank constructor, useful when using Json to load the effect properties
Saturation();
/// Default constructor, which takes 1 curve, to adjust the color saturation over time.
/// Default constructor, which takes four curves (one common curve and one curve per color), to adjust the color saturation over time.
///
/// @param new_saturation The curve to adjust the saturation of the frame's image (0.0 = black and white, 1.0 = normal, 2.0 = double saturation)
Saturation(Keyframe new_saturation);
/// @param saturation The curve to adjust the saturation of the frame's image (0.0 = greyscale, 1.0 = normal, 2.0 = double saturation)
/// @param saturation_R The curve to adjust red saturation of the frame's image (0.0 = greyscale, 1.0 = normal, 2.0 = double saturation)
/// @param saturation_G The curve to adjust green saturation of the frame's image (0.0 = greyscale, 1.0 = normal, 2.0 = double saturation)
/// @param saturation_B The curve to adjust blue saturation of the frame's image (0.0 = greyscale, 1.0 = normal, 2.0 = double saturation)
Saturation(Keyframe saturation, Keyframe saturation_R, Keyframe saturation_G, Keyframe saturation_B);
/// @brief This method is required for all derived classes of ClipBase, and returns a
/// new openshot::Frame object. All Clip keyframes and effects are resolved into

View File

@@ -231,14 +231,14 @@ std::shared_ptr<Frame> CacheDisk::GetFrame(int64_t frame_number)
if (path.exists(frame_path)) {
// Load image file
std::shared_ptr<QImage> image = std::shared_ptr<QImage>(new QImage());
auto image = std::make_shared<QImage>();
image->load(frame_path);
// Set pixel format
image = std::shared_ptr<QImage>(new QImage(image->convertToFormat(QImage::Format_RGBA8888_Premultiplied)));
image = std::make_shared<QImage>(image->convertToFormat(QImage::Format_RGBA8888_Premultiplied));
// Create frame object
std::shared_ptr<Frame> frame(new Frame());
auto frame = std::make_shared<Frame>();
frame->number = frame_number;
frame->AddImage(image);

View File

@@ -134,7 +134,9 @@ void ChunkWriter::WriteFrame(std::shared_ptr<Frame> frame)
writer_thumb->WriteFrame(last_frame);
} else {
// Write the 1st frame (of the 1st chunk)... since no previous chunk is available
std::shared_ptr<Frame> blank_frame(new Frame(1, info.width, info.height, "#000000", info.sample_rate, info.channels));
auto blank_frame = std::make_shared<Frame>(
1, info.width, info.height, "#000000",
info.sample_rate, info.channels);
blank_frame->AddColor(info.width, info.height, "#000000");
writer_final->WriteFrame(blank_frame);
writer_preview->WriteFrame(blank_frame);

View File

@@ -387,7 +387,7 @@ std::shared_ptr<Frame> Clip::GetFrame(std::shared_ptr<openshot::Frame> frame, in
// Copy the image from the odd field
if (enabled_video)
frame->AddImage(std::shared_ptr<QImage>(new QImage(*original_frame->GetImage())));
frame->AddImage(std::make_shared<QImage>(*original_frame->GetImage()));
// Loop through each channel, add audio
if (enabled_audio && reader->info.has_audio)

View File

@@ -139,7 +139,7 @@ HRESULT DeckLinkInputDelegate::VideoInputFrameArrived(IDeckLinkVideoInputFrame*
{
// Handle Video Frame
if(videoFrame)
{
{
if (videoFrame->GetFlags() & bmdFrameHasNoInputSource)
{
@@ -245,7 +245,8 @@ omp_set_nested(true);
m_rgbFrame->GetBytes(&frameBytes);
// *********** CREATE OPENSHOT FRAME **********
std::shared_ptr<openshot::Frame> f(new openshot::Frame(copy_frameCount, width, height, "#000000", 2048, 2));
auto f = std::make_shared<openshot::Frame>(
copy_frameCount, width, height, "#000000", 2048, 2);
// Add Image data to openshot frame
// TODO: Fix Decklink support with QImage Upgrade
@@ -289,6 +290,3 @@ HRESULT DeckLinkInputDelegate::VideoInputFormatChanged(BMDVideoInputFormatChange
{
return S_OK;
}

View File

@@ -142,7 +142,7 @@ void DecklinkWriter::Open()
// throw DecklinkError("Failed to enable audio output. Is another application using the card?");
// Begin video preroll by scheduling a second of frames in hardware
//std::shared_ptr<Frame> f(new Frame(1, displayMode->GetWidth(), displayMode->GetHeight(), "Blue"));
//auto f = std::make_shared<Frame>(1, displayMode->GetWidth(), displayMode->GetHeight(), "Blue");
//f->AddColor(displayMode->GetWidth(), displayMode->GetHeight(), "Blue");
// Preroll 1 second of video

View File

@@ -86,7 +86,7 @@ int hw_de_on = 0;
AVHWDeviceType hw_de_av_device_type_global = AV_HWDEVICE_TYPE_NONE;
#endif
FFmpegReader::FFmpegReader(std::string path)
FFmpegReader::FFmpegReader(const std::string& path, bool inspect_reader)
: last_frame(0), is_seeking(0), seeking_pts(0), seeking_frame(0), seek_count(0),
audio_pts_offset(99999), video_pts_offset(99999), path(path), is_video_seek(true), check_interlace(false),
check_fps(false), enable_seek(true), is_open(false), seek_audio_frame_found(0), seek_video_frame_found(0),
@@ -94,27 +94,11 @@ FFmpegReader::FFmpegReader(std::string path)
current_video_frame(0), has_missing_frames(false), num_packets_since_video_frame(0), num_checks_since_final(0),
packet(NULL) {
// Initialize FFMpeg, and register all formats and codecs
AV_REGISTER_ALL
AVCODEC_REGISTER_ALL
// Init cache
working_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * info.fps.ToDouble() * 2, info.width, info.height, info.sample_rate, info.channels);
missing_frames.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels);
final_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels);
// Open and Close the reader, to populate its attributes (such as height, width, etc...)
Open();
Close();
}
FFmpegReader::FFmpegReader(std::string path, bool inspect_reader)
: last_frame(0), is_seeking(0), seeking_pts(0), seeking_frame(0), seek_count(0),
audio_pts_offset(99999), video_pts_offset(99999), path(path), is_video_seek(true), check_interlace(false),
check_fps(false), enable_seek(true), is_open(false), seek_audio_frame_found(0), seek_video_frame_found(0),
prev_samples(0), prev_pts(0), pts_total(0), pts_counter(0), is_duration_known(false), largest_frame_processed(0),
current_video_frame(0), has_missing_frames(false), num_packets_since_video_frame(0), num_checks_since_final(0),
packet(NULL) {
// Configure OpenMP parallelism
// Default number of threads per section
omp_set_num_threads(OPEN_MP_NUM_PROCESSORS);
// Allow nested parallel sections as deeply as supported
omp_set_max_active_levels(OPEN_MP_MAX_ACTIVE);
// Initialize FFMpeg, and register all formats and codecs
AV_REGISTER_ALL
@@ -901,11 +885,6 @@ std::shared_ptr<Frame> FFmpegReader::ReadStream(int64_t requested_frame) {
int minimum_packets = OPEN_MP_NUM_PROCESSORS;
int max_packets = 4096;
// Set the number of threads in OpenMP
omp_set_num_threads(OPEN_MP_NUM_PROCESSORS);
// Allow nested OpenMP sections
omp_set_nested(true);
// Debug output
ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ReadStream", "requested_frame", requested_frame, "OPEN_MP_NUM_PROCESSORS", OPEN_MP_NUM_PROCESSORS);
@@ -2160,7 +2139,7 @@ bool FFmpegReader::CheckMissingFrame(int64_t requested_frame) {
// Add this frame to the processed map (since it's already done)
std::shared_ptr<QImage> parent_image = parent_frame->GetImage();
if (parent_image) {
missing_frame->AddImage(std::shared_ptr<QImage>(new QImage(*parent_image)));
missing_frame->AddImage(std::make_shared<QImage>(*parent_image));
processed_video_frames[missing_frame->number] = missing_frame->number;
}
}
@@ -2250,7 +2229,7 @@ void FFmpegReader::CheckWorkingFrames(bool end_of_stream, int64_t requested_fram
if (info.has_video && !is_video_ready && last_video_frame) {
// Copy image from last frame
f->AddImage(std::shared_ptr<QImage>(new QImage(*last_video_frame->GetImage())));
f->AddImage(std::make_shared<QImage>(*last_video_frame->GetImage()));
is_video_ready = true;
}
@@ -2272,7 +2251,7 @@ void FFmpegReader::CheckWorkingFrames(bool end_of_stream, int64_t requested_fram
// Add missing image (if needed - sometimes end_of_stream causes frames with only audio)
if (info.has_video && !is_video_ready && last_video_frame)
// Copy image from last frame
f->AddImage(std::shared_ptr<QImage>(new QImage(*last_video_frame->GetImage())));
f->AddImage(std::make_shared<QImage>(*last_video_frame->GetImage()));
// Reset counter since last 'final' frame
num_checks_since_final = 0;

File diff suppressed because it is too large Load Diff

View File

@@ -29,6 +29,21 @@
*/
#include "../include/Frame.h"
#include "JuceHeader.h"
#include <QApplication>
#include <QImage>
#include <QPixmap>
#include <QBitmap>
#include <QColor>
#include <QString>
#include <QVector>
#include <QPainter>
#include <QHBoxLayout>
#include <QWidget>
#include <QLabel>
#include <QPointF>
#include <QWidget>
#include <thread> // for std::this_thread::sleep_for
#include <chrono> // for std::chrono::milliseconds
@@ -36,57 +51,31 @@
using namespace std;
using namespace openshot;
// Constructor - blank frame (300x200 blank image, 48kHz audio silence)
Frame::Frame() : number(1), pixel_ratio(1,1), channels(2), width(1), height(1), color("#000000"),
channel_layout(LAYOUT_STEREO), sample_rate(44100), qbuffer(NULL), has_audio_data(false), has_image_data(false),
max_audio_sample(0)
{
// Init the image magic and audio buffer
audio = std::shared_ptr<juce::AudioSampleBuffer>(new juce::AudioSampleBuffer(channels, 0));
// initialize the audio samples to zero (silence)
audio->clear();
}
// Constructor - image only (48kHz audio silence)
Frame::Frame(int64_t number, int width, int height, std::string color)
: number(number), pixel_ratio(1,1), channels(2), width(width), height(height), color(color),
channel_layout(LAYOUT_STEREO), sample_rate(44100), qbuffer(NULL), has_audio_data(false), has_image_data(false),
max_audio_sample(0)
{
// Init the image magic and audio buffer
audio = std::shared_ptr<juce::AudioSampleBuffer>(new juce::AudioSampleBuffer(channels, 0));
// initialize the audio samples to zero (silence)
audio->clear();
}
// Constructor - audio only (300x200 blank image)
Frame::Frame(int64_t number, int samples, int channels) :
number(number), pixel_ratio(1,1), channels(channels), width(1), height(1), color("#000000"),
channel_layout(LAYOUT_STEREO), sample_rate(44100), qbuffer(NULL), has_audio_data(false), has_image_data(false),
max_audio_sample(0)
{
// Init the image magic and audio buffer
audio = std::shared_ptr<juce::AudioSampleBuffer>(new juce::AudioSampleBuffer(channels, samples));
// initialize the audio samples to zero (silence)
audio->clear();
}
// Constructor - image & audio
Frame::Frame(int64_t number, int width, int height, std::string color, int samples, int channels)
: number(number), pixel_ratio(1,1), channels(channels), width(width), height(height), color(color),
channel_layout(LAYOUT_STEREO), sample_rate(44100), qbuffer(NULL), has_audio_data(false), has_image_data(false),
: audio(std::make_shared<juce::AudioSampleBuffer>(channels, samples)),
number(number), width(width), height(height),
pixel_ratio(1,1), color(color), qbuffer(NULL),
channels(channels), channel_layout(LAYOUT_STEREO),
sample_rate(44100),
has_audio_data(false), has_image_data(false),
max_audio_sample(0)
{
// Init the image magic and audio buffer
audio = std::shared_ptr<juce::AudioSampleBuffer>(new juce::AudioSampleBuffer(channels, samples));
// initialize the audio samples to zero (silence)
// zero (fill with silence) the audio buffer
audio->clear();
}
// Delegating Constructor - blank frame
Frame::Frame() : Frame::Frame(1, 1, 1, "#000000", 0, 2) {};
// Delegating Constructor - image only
Frame::Frame(int64_t number, int width, int height, std::string color)
: Frame::Frame(number, width, height, color, 0, 2) {};
// Delegating Constructor - audio only
Frame::Frame(int64_t number, int samples, int channels)
: Frame::Frame(number, 1, 1, "#000000", samples, channels) {};
// Copy constructor
Frame::Frame ( const Frame &other )
@@ -120,11 +109,11 @@ void Frame::DeepCopy(const Frame& other)
max_audio_sample = other.max_audio_sample;
if (other.image)
image = std::shared_ptr<QImage>(new QImage(*(other.image)));
image = std::make_shared<QImage>(*(other.image));
if (other.audio)
audio = std::shared_ptr<juce::AudioSampleBuffer>(new juce::AudioSampleBuffer(*(other.audio)));
audio = std::make_shared<juce::AudioSampleBuffer>(*(other.audio));
if (other.wave_image)
wave_image = std::shared_ptr<QImage>(new QImage(*(other.wave_image)));
wave_image = std::make_shared<QImage>(*(other.wave_image));
}
// Destructor
@@ -141,7 +130,7 @@ void Frame::Display()
// Only create the QApplication once
static int argc = 1;
static char* argv[1] = {NULL};
previewApp = std::shared_ptr<QApplication>(new QApplication(argc, argv));
previewApp = std::make_shared<QApplication>(argc, argv);
}
// Get preview image
@@ -155,7 +144,8 @@ void Frame::Display()
int new_height = previewImage->size().height() * pixel_ratio.Reciprocal().ToDouble();
// Resize to fix DAR
previewImage = std::shared_ptr<QImage>(new QImage(previewImage->scaled(new_width, new_height, Qt::IgnoreAspectRatio, Qt::SmoothTransformation)));
previewImage = std::make_shared<QImage>(previewImage->scaled(
new_width, new_height, Qt::IgnoreAspectRatio, Qt::SmoothTransformation));
}
// Create window
@@ -231,7 +221,8 @@ std::shared_ptr<QImage> Frame::GetWaveform(int width, int height, int Red, int G
}
// Create blank image
wave_image = std::shared_ptr<QImage>(new QImage(total_width, total_height, QImage::Format_RGBA8888_Premultiplied));
wave_image = std::make_shared<QImage>(
total_width, total_height, QImage::Format_RGBA8888_Premultiplied);
wave_image->fill(QColor(0,0,0,0));
// Load QPainter with wave_image device
@@ -256,13 +247,13 @@ std::shared_ptr<QImage> Frame::GetWaveform(int width, int height, int Red, int G
// Resize Image (if requested)
if (width != total_width || height != total_height) {
QImage scaled_wave_image = wave_image->scaled(width, height, Qt::IgnoreAspectRatio, Qt::FastTransformation);
wave_image = std::shared_ptr<QImage>(new QImage(scaled_wave_image));
wave_image = std::make_shared<QImage>(scaled_wave_image);
}
}
else
{
// No audio samples present
wave_image = std::shared_ptr<QImage>(new QImage(width, height, QImage::Format_RGBA8888_Premultiplied));
wave_image = std::make_shared<QImage>(width, height, QImage::Format_RGBA8888_Premultiplied);
wave_image->fill(QColor(QString::fromStdString("#000000")));
}
@@ -297,7 +288,7 @@ void Frame::DisplayWaveform()
// Only create the QApplication once
static int argc = 1;
static char* argv[1] = {NULL};
previewApp = std::shared_ptr<QApplication>(new QApplication(argc, argv));
previewApp = std::make_shared<QApplication>(argc, argv);
}
// Create window
@@ -602,11 +593,15 @@ void Frame::Save(std::string path, float scale, std::string format, int quality)
int new_height = previewImage->size().height() * pixel_ratio.Reciprocal().ToDouble();
// Resize to fix DAR
previewImage = std::shared_ptr<QImage>(new QImage(previewImage->scaled(new_width, new_height, Qt::IgnoreAspectRatio, Qt::SmoothTransformation)));
previewImage = std::make_shared<QImage>(previewImage->scaled(
new_width, new_height,
Qt::IgnoreAspectRatio, Qt::SmoothTransformation));
}
// Resize image
previewImage = std::shared_ptr<QImage>(new QImage(previewImage->scaled(new_width * scale, new_height * scale, Qt::KeepAspectRatio, Qt::SmoothTransformation)));
previewImage = std::make_shared<QImage>(previewImage->scaled(
new_width * scale, new_height * scale,
Qt::KeepAspectRatio, Qt::SmoothTransformation));
}
// Save image
@@ -618,7 +613,8 @@ void Frame::Thumbnail(std::string path, int new_width, int new_height, std::stri
std::string background_color, bool ignore_aspect, std::string format, int quality, float rotate) {
// Create blank thumbnail image & fill background color
std::shared_ptr<QImage> thumbnail = std::shared_ptr<QImage>(new QImage(new_width, new_height, QImage::Format_RGBA8888_Premultiplied));
auto thumbnail = std::make_shared<QImage>(
new_width, new_height, QImage::Format_RGBA8888_Premultiplied);
thumbnail->fill(QColor(QString::fromStdString(background_color)));
// Create painter
@@ -636,16 +632,22 @@ void Frame::Thumbnail(std::string path, int new_width, int new_height, std::stri
int aspect_height = previewImage->size().height() * pixel_ratio.Reciprocal().ToDouble();
// Resize to fix DAR
previewImage = std::shared_ptr<QImage>(new QImage(previewImage->scaled(aspect_width, aspect_height, Qt::IgnoreAspectRatio, Qt::SmoothTransformation)));
previewImage = std::make_shared<QImage>(previewImage->scaled(
aspect_width, aspect_height,
Qt::IgnoreAspectRatio, Qt::SmoothTransformation));
}
// Resize frame image
if (ignore_aspect)
// Ignore aspect ratio
previewImage = std::shared_ptr<QImage>(new QImage(previewImage->scaled(new_width, new_height, Qt::IgnoreAspectRatio, Qt::SmoothTransformation)));
previewImage = std::make_shared<QImage>(previewImage->scaled(
new_width, new_height,
Qt::IgnoreAspectRatio, Qt::SmoothTransformation));
else
// Maintain aspect ratio
previewImage = std::shared_ptr<QImage>(new QImage(previewImage->scaled(new_width, new_height, Qt::KeepAspectRatio, Qt::SmoothTransformation)));
previewImage = std::make_shared<QImage>(previewImage->scaled(
new_width, new_height,
Qt::KeepAspectRatio, Qt::SmoothTransformation));
// Composite frame image onto background (centered)
int x = (new_width - previewImage->size().width()) / 2.0; // center
@@ -669,14 +671,16 @@ void Frame::Thumbnail(std::string path, int new_width, int new_height, std::stri
// Overlay Image (if any)
if (overlay_path != "") {
// Open overlay
std::shared_ptr<QImage> overlay = std::shared_ptr<QImage>(new QImage());
auto overlay = std::make_shared<QImage>();
overlay->load(QString::fromStdString(overlay_path));
// Set pixel format
overlay = std::shared_ptr<QImage>(new QImage(overlay->convertToFormat(QImage::Format_RGBA8888_Premultiplied)));
overlay = std::make_shared<QImage>(
overlay->convertToFormat(QImage::Format_RGBA8888_Premultiplied));
// Resize to fit
overlay = std::shared_ptr<QImage>(new QImage(overlay->scaled(new_width, new_height, Qt::IgnoreAspectRatio, Qt::SmoothTransformation)));
overlay = std::make_shared<QImage>(overlay->scaled(
new_width, new_height, Qt::IgnoreAspectRatio, Qt::SmoothTransformation));
// Composite onto thumbnail
painter.setCompositionMode(QPainter::CompositionMode_SourceOver);
@@ -687,14 +691,16 @@ void Frame::Thumbnail(std::string path, int new_width, int new_height, std::stri
// Mask Image (if any)
if (mask_path != "") {
// Open mask
std::shared_ptr<QImage> mask = std::shared_ptr<QImage>(new QImage());
auto mask = std::make_shared<QImage>();
mask->load(QString::fromStdString(mask_path));
// Set pixel format
mask = std::shared_ptr<QImage>(new QImage(mask->convertToFormat(QImage::Format_RGBA8888_Premultiplied)));
mask = std::make_shared<QImage>(
mask->convertToFormat(QImage::Format_RGBA8888_Premultiplied));
// Resize to fit
mask = std::shared_ptr<QImage>(new QImage(mask->scaled(new_width, new_height, Qt::IgnoreAspectRatio, Qt::SmoothTransformation)));
mask = std::make_shared<QImage>(mask->scaled(
new_width, new_height, Qt::IgnoreAspectRatio, Qt::SmoothTransformation));
// Negate mask
mask->invertPixels();
@@ -747,7 +753,7 @@ void Frame::AddColor(int new_width, int new_height, std::string new_color)
const GenericScopedLock<juce::CriticalSection> lock(addingImageSection);
#pragma omp critical (AddImage)
{
image = std::shared_ptr<QImage>(new QImage(new_width, new_height, QImage::Format_RGBA8888_Premultiplied));
image = std::make_shared<QImage>(new_width, new_height, QImage::Format_RGBA8888_Premultiplied);
// Fill with solid color
image->fill(QColor(QString::fromStdString(color)));
@@ -759,30 +765,31 @@ void Frame::AddColor(int new_width, int new_height, std::string new_color)
}
// Add (or replace) pixel data to the frame
void Frame::AddImage(int new_width, int new_height, int bytes_per_pixel, QImage::Format type, const unsigned char *pixels_)
void Frame::AddImage(
int new_width, int new_height, int bytes_per_pixel,
QImage::Format type, const unsigned char *pixels_)
{
// Create new buffer
const GenericScopedLock<juce::CriticalSection> lock(addingImageSection);
int buffer_size = new_width * new_height * bytes_per_pixel;
qbuffer = new unsigned char[buffer_size]();
// Copy buffer data
memcpy((unsigned char*)qbuffer, pixels_, buffer_size);
// Create new image object, and fill with pixel data
#pragma omp critical (AddImage)
{
image = std::shared_ptr<QImage>(new QImage(qbuffer, new_width, new_height, new_width * bytes_per_pixel, type, (QImageCleanupFunction) &openshot::Frame::cleanUpBuffer, (void*) qbuffer));
const GenericScopedLock<juce::CriticalSection> lock(addingImageSection);
int buffer_size = new_width * new_height * bytes_per_pixel;
qbuffer = new unsigned char[buffer_size]();
// Always convert to RGBA8888 (if different)
if (image->format() != QImage::Format_RGBA8888_Premultiplied)
*image = image->convertToFormat(QImage::Format_RGBA8888_Premultiplied);
// Copy buffer data
memcpy((unsigned char*)qbuffer, pixels_, buffer_size);
// Update height and width
width = image->width();
height = image->height();
has_image_data = true;
}
} // Release addingImageSection lock
// Create new image object from pixel data
auto new_image = std::make_shared<QImage>(
qbuffer,
new_width, new_height,
new_width * bytes_per_pixel,
type,
(QImageCleanupFunction) &openshot::Frame::cleanUpBuffer,
(void*) qbuffer
);
AddImage(new_image);
}
// Add (or replace) pixel data to the frame
@@ -822,7 +829,6 @@ void Frame::AddImage(std::shared_ptr<QImage> new_image, bool only_odd_lines)
AddImage(new_image);
} else {
// Ignore image of different sizes or formats
bool ret=false;
#pragma omp critical (AddImage)
@@ -831,7 +837,8 @@ void Frame::AddImage(std::shared_ptr<QImage> new_image, bool only_odd_lines)
ret = true;
}
else if (new_image->format() != QImage::Format_RGBA8888_Premultiplied) {
new_image = std::shared_ptr<QImage>(new QImage(new_image->convertToFormat(QImage::Format_RGBA8888_Premultiplied)));
new_image = std::make_shared<QImage>(
new_image->convertToFormat(QImage::Format_RGBA8888_Premultiplied));
}
}
if (ret) {
@@ -941,7 +948,8 @@ std::shared_ptr<Magick::Image> Frame::GetMagickImage()
const QRgb *tmpBits = (const QRgb*)image->constBits();
// Create new image object, and fill with pixel data
std::shared_ptr<Magick::Image> magick_image = std::shared_ptr<Magick::Image>(new Magick::Image(image->width(), image->height(),"RGBA", Magick::CharPixel, tmpBits));
auto magick_image = std::make_shared<Magick::Image>(
image->width(), image->height(),"RGBA", Magick::CharPixel, tmpBits);
// Give image a transparent background color
magick_image->backgroundColor(Magick::Color("none"));
@@ -970,7 +978,9 @@ void Frame::AddMagickImage(std::shared_ptr<Magick::Image> new_image)
MagickCore::ExportImagePixels(new_image->constImage(), 0, 0, new_image->columns(), new_image->rows(), "RGBA", Magick::CharPixel, buffer, &exception);
// Create QImage of frame data
image = std::shared_ptr<QImage>(new QImage(qbuffer, width, height, width * BPP, QImage::Format_RGBA8888_Premultiplied, (QImageCleanupFunction) &cleanUpBuffer, (void*) qbuffer));
image = std::make_shared<QImage>(
qbuffer, width, height, width * BPP, QImage::Format_RGBA8888_Premultiplied,
(QImageCleanupFunction) &cleanUpBuffer, (void*) qbuffer);
// Update height and width
width = image->width();

View File

@@ -478,7 +478,8 @@ std::shared_ptr<Frame> FrameMapper::GetFrame(int64_t requested_frame)
}
// Create a new frame
std::shared_ptr<Frame> frame = std::make_shared<Frame>(frame_number, 1, 1, "#000000", samples_in_frame, channels_in_frame);
auto frame = std::make_shared<Frame>(
frame_number, 1, 1, "#000000", samples_in_frame, channels_in_frame);
frame->SampleRate(mapped_frame->SampleRate());
frame->ChannelsLayout(mapped_frame->ChannelsLayout());
@@ -488,13 +489,14 @@ std::shared_ptr<Frame> FrameMapper::GetFrame(int64_t requested_frame)
odd_frame = GetOrCreateFrame(mapped.Odd.Frame);
if (odd_frame)
frame->AddImage(std::shared_ptr<QImage>(new QImage(*odd_frame->GetImage())), true);
frame->AddImage(std::make_shared<QImage>(*odd_frame->GetImage()), true);
if (mapped.Odd.Frame != mapped.Even.Frame) {
// Add even lines (if different than the previous image)
std::shared_ptr<Frame> even_frame;
even_frame = GetOrCreateFrame(mapped.Even.Frame);
if (even_frame)
frame->AddImage(std::shared_ptr<QImage>(new QImage(*even_frame->GetImage())), false);
frame->AddImage(
std::make_shared<QImage>(*even_frame->GetImage()), false);
}
// Resample audio on frame (if needed)

View File

@@ -61,7 +61,7 @@ void ImageReader::Open()
try
{
// load image
image = std::shared_ptr<Magick::Image>(new Magick::Image(path));
image = std::make_shared<Magick::Image>(path);
// Give image a transparent background color
image->backgroundColor(Magick::Color("none"));
@@ -126,7 +126,9 @@ std::shared_ptr<Frame> ImageReader::GetFrame(int64_t requested_frame)
throw ReaderClosed("The FFmpegReader is closed. Call Open() before calling this method.", path);
// Create or get frame object
std::shared_ptr<Frame> image_frame(new Frame(requested_frame, image->size().width(), image->size().height(), "#000000", 0, 2));
auto image_frame = std::make_shared<Frame>(
requested_frame, image->size().width(), image->size().height(),
"#000000", 0, 2);
// Add Image data to frame
image_frame->AddMagickImage(image);

Some files were not shown because too many files have changed in this diff Show More