openshot:: prefixing as needed for SWIG

It turns out SWIG is a lot pickier than GCC about namespaces, and
requires a lot more `openshot::` prefixing in our headers, if we
eliminate the `using namespace...` statements from them.
This commit is contained in:
FeRD (Frank Dana)
2019-08-05 02:17:22 -04:00
parent ce8ff07fd3
commit 3ce85d0fff
19 changed files with 146 additions and 146 deletions

View File

@@ -65,7 +65,7 @@ namespace openshot {
/// @brief Add a Frame to the cache
/// @param frame The openshot::Frame object needing to be cached.
virtual void Add(std::shared_ptr<Frame> frame) = 0;
virtual void Add(std::shared_ptr<openshot::Frame> frame) = 0;
/// Clear the cache of all frames
virtual void Clear() = 0;
@@ -75,13 +75,13 @@ namespace openshot {
/// @brief Get a frame from the cache
/// @param frame_number The frame number of the cached frame
virtual std::shared_ptr<Frame> GetFrame(int64_t frame_number) = 0;
virtual std::shared_ptr<openshot::Frame> GetFrame(int64_t frame_number) = 0;
/// Gets the maximum bytes value
virtual int64_t GetBytes() = 0;
/// Get the smallest frame number
virtual std::shared_ptr<Frame> GetSmallestFrame() = 0;
virtual std::shared_ptr<openshot::Frame> GetSmallestFrame() = 0;
/// @brief Remove a specific frame
/// @param frame_number The frame number of the cached frame

View File

@@ -96,7 +96,7 @@ namespace openshot {
/// @brief Add a Frame to the cache
/// @param frame The openshot::Frame object needing to be cached.
void Add(std::shared_ptr<Frame> frame);
void Add(std::shared_ptr<openshot::Frame> frame);
/// Clear the cache of all frames
void Clear();
@@ -106,13 +106,13 @@ namespace openshot {
/// @brief Get a frame from the cache
/// @param frame_number The frame number of the cached frame
std::shared_ptr<Frame> GetFrame(int64_t frame_number);
std::shared_ptr<openshot::Frame> GetFrame(int64_t frame_number);
/// Gets the maximum bytes value
int64_t GetBytes();
/// Get the smallest frame number
std::shared_ptr<Frame> GetSmallestFrame();
std::shared_ptr<openshot::Frame> GetSmallestFrame();
/// @brief Move frame to front of queue (so it lasts longer)
/// @param frame_number The frame number of the cached frame

View File

@@ -50,7 +50,7 @@ namespace openshot {
*/
class CacheMemory : public CacheBase {
private:
std::map<int64_t, std::shared_ptr<Frame> > frames; ///< This map holds the frame number and Frame objects
std::map<int64_t, std::shared_ptr<openshot::Frame> > frames; ///< This map holds the frame number and Frame objects
std::deque<int64_t> frame_numbers; ///< This queue holds a sequential list of cached Frame numbers
bool needs_range_processing; ///< Something has changed, and the range data needs to be re-calculated
@@ -78,7 +78,7 @@ namespace openshot {
/// @brief Add a Frame to the cache
/// @param frame The openshot::Frame object needing to be cached.
void Add(std::shared_ptr<Frame> frame);
void Add(std::shared_ptr<openshot::Frame> frame);
/// Clear the cache of all frames
void Clear();
@@ -88,13 +88,13 @@ namespace openshot {
/// @brief Get a frame from the cache
/// @param frame_number The frame number of the cached frame
std::shared_ptr<Frame> GetFrame(int64_t frame_number);
std::shared_ptr<openshot::Frame> GetFrame(int64_t frame_number);
/// Gets the maximum bytes value
int64_t GetBytes();
/// Get the smallest frame number
std::shared_ptr<Frame> GetSmallestFrame();
std::shared_ptr<openshot::Frame> GetSmallestFrame();
/// @brief Move frame to front of queue (so it lasts longer)
/// @param frame_number The frame number of the cached frame

View File

@@ -106,10 +106,10 @@ namespace openshot
std::string path;
bool is_open;
int64_t chunk_size;
ReaderBase *local_reader;
openshot::ReaderBase *local_reader;
ChunkLocation previous_location;
ChunkVersion version;
std::shared_ptr<Frame> last_frame;
std::shared_ptr<openshot::Frame> last_frame;
/// Check if folder path existing
bool does_folder_exist(std::string path);
@@ -143,12 +143,12 @@ namespace openshot
void SetChunkSize(int64_t new_size) { chunk_size = new_size; };
/// Get the cache object used by this reader (always return NULL for this reader)
CacheMemory* GetCache() { return NULL; };
openshot::CacheMemory* GetCache() { return NULL; };
/// @brief Get an openshot::Frame object for a specific frame number of this reader.
/// @returns The requested frame (containing the image and audio)
/// @param requested_frame The frame number you want to retrieve
std::shared_ptr<Frame> GetFrame(int64_t requested_frame);
std::shared_ptr<openshot::Frame> GetFrame(int64_t requested_frame);
/// Determine if reader is open or closed
bool IsOpen() { return is_open; };

View File

@@ -87,10 +87,10 @@ namespace openshot
int64_t frame_count;
bool is_open;
bool is_writing;
ReaderBase *local_reader;
FFmpegWriter *writer_thumb;
FFmpegWriter *writer_preview;
FFmpegWriter *writer_final;
openshot::ReaderBase *local_reader;
openshot::FFmpegWriter *writer_thumb;
openshot::FFmpegWriter *writer_preview;
openshot::FFmpegWriter *writer_final;
std::shared_ptr<Frame> last_frame;
bool last_frame_needed;
std::string default_extension;
@@ -114,7 +114,7 @@ namespace openshot
/// @brief Constructor for ChunkWriter. Throws one of the following exceptions.
/// @param path The folder path of the chunk file to be created
/// @param reader The initial reader to base this chunk file's meta data on (such as fps, height, width, etc...)
ChunkWriter(std::string path, ReaderBase *reader);
ChunkWriter(std::string path, openshot::ReaderBase *reader);
/// Close the writer
void Close();
@@ -134,7 +134,7 @@ namespace openshot
/// @brief Add a frame to the stack waiting to be encoded.
/// @param frame The openshot::Frame object that needs to be written to this chunk file.
void WriteFrame(std::shared_ptr<Frame> frame);
void WriteFrame(std::shared_ptr<openshot::Frame> frame);
/// @brief Write a block of frames from a reader
/// @param start The starting frame number to write (of the reader passed into the constructor)
@@ -145,7 +145,7 @@ namespace openshot
/// @param reader The reader containing the frames you need
/// @param start The starting frame number to write
/// @param length The number of frames to write
void WriteFrame(ReaderBase* reader, int64_t start, int64_t length);
void WriteFrame(openshot::ReaderBase* reader, int64_t start, int64_t length);
};

View File

@@ -57,7 +57,7 @@ namespace openshot {
/// from lowest layer to top layer (since that is sequence clips are combined), and then by
/// position, and then by effect order.
struct CompareClipEffects{
bool operator()( EffectBase* lhs, EffectBase* rhs){
bool operator()( openshot::EffectBase* lhs, openshot::EffectBase* rhs){
if( lhs->Layer() < rhs->Layer() ) return true;
if( lhs->Layer() == rhs->Layer() && lhs->Position() < rhs->Position() ) return true;
if( lhs->Layer() == rhs->Layer() && lhs->Position() == rhs->Position() && lhs->Order() > rhs->Order() ) return true;
@@ -76,7 +76,7 @@ namespace openshot {
* Clip c1(new ImageReader("MyAwesomeLogo.jpeg"));
* Clip c2(new FFmpegReader("BackgroundVideo.webm"));
*
* // CLIP 1 (logo) - Set some clip properties (with Keyframes)
* // CLIP 1 (logo) - Set some clip properties (with openshot::Keyframes)
* c1.Position(0.0); // Set the position or location (in seconds) on the timeline
* c1.gravity = GRAVITY_LEFT; // Set the alignment / gravity of the clip (position on the screen)
* c1.scale = SCALE_CROP; // Set the scale mode (how the image is resized to fill the screen)
@@ -87,7 +87,7 @@ namespace openshot {
* c1.alpha.AddPoint(500, 0.0); // Keep the alpha transparent until frame #500
* c1.alpha.AddPoint(565, 1.0); // Animate the alpha from transparent to visible (between frame #501 and #565)
*
* // CLIP 2 (background video) - Set some clip properties (with Keyframes)
* // CLIP 2 (background video) - Set some clip properties (with openshot::Keyframes)
* c2.Position(0.0); // Set the position or location (in seconds) on the timeline
* c2.Start(10.0); // Set the starting position of the video (trim the left side of the video)
* c2.Layer(0); // Set the layer of the timeline (higher layers cover up images of lower layers)
@@ -97,40 +97,40 @@ namespace openshot {
* c2.alpha.AddPoint(384, 1.0); // Animate the alpha to visible (between frame #360 and frame #384)
* @endcode
*/
class Clip : public ClipBase {
class Clip : public openshot::ClipBase {
protected:
/// Section lock for multiple threads
CriticalSection getFrameCriticalSection;
private:
bool waveform; ///< Should a waveform be used instead of the clip's image
std::list<EffectBase*> effects; ///<List of clips on this timeline
std::list<openshot::EffectBase*> effects; ///<List of clips on this timeline
// Audio resampler (if time mapping)
AudioResampler *resampler;
AudioSampleBuffer *audio_cache;
openshot::AudioResampler *resampler;
juce::AudioSampleBuffer *audio_cache;
// File Reader object
ReaderBase* reader;
openshot::ReaderBase* reader;
/// If we allocated a reader, we store it here to free it later
/// (reader member variable itself may have been replaced)
ReaderBase* allocated_reader;
openshot::ReaderBase* allocated_reader;
/// Adjust frame number minimum value
int64_t adjust_frame_number_minimum(int64_t frame_number);
/// Apply effects to the source frame (if any)
std::shared_ptr<Frame> apply_effects(std::shared_ptr<Frame> frame);
std::shared_ptr<openshot::Frame> apply_effects(std::shared_ptr<openshot::Frame> frame);
/// Get file extension
std::string get_file_extension(std::string path);
/// Get a frame object or create a blank one
std::shared_ptr<Frame> GetOrCreateFrame(int64_t number);
std::shared_ptr<openshot::Frame> GetOrCreateFrame(int64_t number);
/// Adjust the audio and image of a time mapped frame
void get_time_mapped_frame(std::shared_ptr<Frame> frame, int64_t frame_number);
void get_time_mapped_frame(std::shared_ptr<openshot::Frame> frame, int64_t frame_number);
/// Init default settings for a clip
void init_settings();
@@ -145,11 +145,11 @@ namespace openshot {
void reverse_buffer(juce::AudioSampleBuffer* buffer);
public:
GravityType gravity; ///< The gravity of a clip determines where it snaps to its parent
ScaleType scale; ///< The scale determines how a clip should be resized to fit its parent
AnchorType anchor; ///< The anchor determines what parent a clip should snap to
FrameDisplayType display; ///< The format to display the frame number (if any)
VolumeMixType mixing; ///< What strategy should be followed when mixing audio with other clips
openshot::GravityType gravity; ///< The gravity of a clip determines where it snaps to its parent
openshot::ScaleType scale; ///< The scale determines how a clip should be resized to fit its parent
openshot::AnchorType anchor; ///< The anchor determines what parent a clip should snap to
openshot::FrameDisplayType display; ///< The format to display the frame number (if any)
openshot::VolumeMixType mixing; ///< What strategy should be followed when mixing audio with other clips
/// Default Constructor
Clip();
@@ -160,36 +160,36 @@ namespace openshot {
/// @brief Constructor with reader
/// @param new_reader The reader to be used by this clip
Clip(ReaderBase* new_reader);
Clip(openshot::ReaderBase* new_reader);
/// Destructor
virtual ~Clip();
/// @brief Add an effect to the clip
/// @param effect Add an effect to the clip. An effect can modify the audio or video of an openshot::Frame.
void AddEffect(EffectBase* effect);
void AddEffect(openshot::EffectBase* effect);
/// Close the internal reader
void Close();
/// Return the list of effects on the timeline
std::list<EffectBase*> Effects() { return effects; };
std::list<openshot::EffectBase*> Effects() { return effects; };
/// @brief Get an openshot::Frame object for a specific frame number of this timeline.
///
/// @returns The requested frame (containing the image)
/// @param requested_frame The frame number that is requested
std::shared_ptr<Frame> GetFrame(int64_t requested_frame);
std::shared_ptr<openshot::Frame> GetFrame(int64_t requested_frame);
/// Open the internal reader
void Open();
/// @brief Set the current reader
/// @param new_reader The reader to be used by this clip
void Reader(ReaderBase* new_reader);
void Reader(openshot::ReaderBase* new_reader);
/// Get the current reader
ReaderBase* Reader();
openshot::ReaderBase* Reader();
/// Override End() method
float End(); ///< Get end position (in seconds) of clip (trim end of video), which can be affected by the time curve.
@@ -207,55 +207,55 @@ namespace openshot {
/// @brief Remove an effect from the clip
/// @param effect Remove an effect from the clip.
void RemoveEffect(EffectBase* effect);
void RemoveEffect(openshot::EffectBase* effect);
/// Waveform property
bool Waveform() { return waveform; } ///< Get the waveform property of this clip
void Waveform(bool value) { waveform = value; } ///< Set the waveform property of this clip
// Scale and Location curves
Keyframe scale_x; ///< Curve representing the horizontal scaling in percent (0 to 1)
Keyframe scale_y; ///< Curve representing the vertical scaling in percent (0 to 1)
Keyframe location_x; ///< Curve representing the relative X position in percent based on the gravity (-1 to 1)
Keyframe location_y; ///< Curve representing the relative Y position in percent based on the gravity (-1 to 1)
openshot::Keyframe scale_x; ///< Curve representing the horizontal scaling in percent (0 to 1)
openshot::Keyframe scale_y; ///< Curve representing the vertical scaling in percent (0 to 1)
openshot::Keyframe location_x; ///< Curve representing the relative X position in percent based on the gravity (-1 to 1)
openshot::Keyframe location_y; ///< Curve representing the relative Y position in percent based on the gravity (-1 to 1)
// Alpha and Rotation curves
Keyframe alpha; ///< Curve representing the alpha (1 to 0)
Keyframe rotation; ///< Curve representing the rotation (0 to 360)
openshot::Keyframe alpha; ///< Curve representing the alpha (1 to 0)
openshot::Keyframe rotation; ///< Curve representing the rotation (0 to 360)
// Time and Volume curves
Keyframe time; ///< Curve representing the frames over time to play (used for speed and direction of video)
Keyframe volume; ///< Curve representing the volume (0 to 1)
openshot::Keyframe time; ///< Curve representing the frames over time to play (used for speed and direction of video)
openshot::Keyframe volume; ///< Curve representing the volume (0 to 1)
/// Curve representing the color of the audio wave form
Color wave_color;
openshot::Color wave_color;
// Crop settings and curves
GravityType crop_gravity; ///< Cropping needs to have a gravity to determine what side we are cropping
Keyframe crop_width; ///< Curve representing width in percent (0.0=0%, 1.0=100%)
Keyframe crop_height; ///< Curve representing height in percent (0.0=0%, 1.0=100%)
Keyframe crop_x; ///< Curve representing X offset in percent (-1.0=-100%, 0.0=0%, 1.0=100%)
Keyframe crop_y; ///< Curve representing Y offset in percent (-1.0=-100%, 0.0=0%, 1.0=100%)
openshot::GravityType crop_gravity; ///< Cropping needs to have a gravity to determine what side we are cropping
openshot::Keyframe crop_width; ///< Curve representing width in percent (0.0=0%, 1.0=100%)
openshot::Keyframe crop_height; ///< Curve representing height in percent (0.0=0%, 1.0=100%)
openshot::Keyframe crop_x; ///< Curve representing X offset in percent (-1.0=-100%, 0.0=0%, 1.0=100%)
openshot::Keyframe crop_y; ///< Curve representing Y offset in percent (-1.0=-100%, 0.0=0%, 1.0=100%)
// Shear and perspective curves
Keyframe shear_x; ///< Curve representing X shear angle in degrees (-45.0=left, 45.0=right)
Keyframe shear_y; ///< Curve representing Y shear angle in degrees (-45.0=down, 45.0=up)
Keyframe perspective_c1_x; ///< Curves representing X for coordinate 1
Keyframe perspective_c1_y; ///< Curves representing Y for coordinate 1
Keyframe perspective_c2_x; ///< Curves representing X for coordinate 2
Keyframe perspective_c2_y; ///< Curves representing Y for coordinate 2
Keyframe perspective_c3_x; ///< Curves representing X for coordinate 3
Keyframe perspective_c3_y; ///< Curves representing Y for coordinate 3
Keyframe perspective_c4_x; ///< Curves representing X for coordinate 4
Keyframe perspective_c4_y; ///< Curves representing Y for coordinate 4
openshot::Keyframe shear_x; ///< Curve representing X shear angle in degrees (-45.0=left, 45.0=right)
openshot::Keyframe shear_y; ///< Curve representing Y shear angle in degrees (-45.0=down, 45.0=up)
openshot::Keyframe perspective_c1_x; ///< Curves representing X for coordinate 1
openshot::Keyframe perspective_c1_y; ///< Curves representing Y for coordinate 1
openshot::Keyframe perspective_c2_x; ///< Curves representing X for coordinate 2
openshot::Keyframe perspective_c2_y; ///< Curves representing Y for coordinate 2
openshot::Keyframe perspective_c3_x; ///< Curves representing X for coordinate 3
openshot::Keyframe perspective_c3_y; ///< Curves representing Y for coordinate 3
openshot::Keyframe perspective_c4_x; ///< Curves representing X for coordinate 4
openshot::Keyframe perspective_c4_y; ///< Curves representing Y for coordinate 4
/// Audio channel filter and mappings
Keyframe channel_filter; ///< A number representing an audio channel to filter (clears all other channels)
Keyframe channel_mapping; ///< A number representing an audio channel to output (only works when filtering a channel)
openshot::Keyframe channel_filter; ///< A number representing an audio channel to filter (clears all other channels)
openshot::Keyframe channel_mapping; ///< A number representing an audio channel to output (only works when filtering a channel)
/// Override has_video and has_audio properties of clip (and their readers)
Keyframe has_audio; ///< An optional override to determine if this clip has audio (-1=undefined, 0=no, 1=yes)
Keyframe has_video; ///< An optional override to determine if this clip has video (-1=undefined, 0=no, 1=yes)
openshot::Keyframe has_audio; ///< An optional override to determine if this clip has audio (-1=undefined, 0=no, 1=yes)
openshot::Keyframe has_video; ///< An optional override to determine if this clip has video (-1=undefined, 0=no, 1=yes)
};

View File

@@ -45,10 +45,10 @@ namespace openshot {
class Color{
public:
Keyframe red; ///<Curve representing the red value (0 - 255)
Keyframe green; ///<Curve representing the green value (0 - 255)
Keyframe blue; ///<Curve representing the blue value (0 - 255)
Keyframe alpha; ///<Curve representing the alpha value (0 - 255)
openshot::Keyframe red; ///<Curve representing the red value (0 - 255)
openshot::Keyframe green; ///<Curve representing the green value (0 - 255)
openshot::Keyframe blue; ///<Curve representing the blue value (0 - 255)
openshot::Keyframe alpha; ///<Curve representing the alpha value (0 - 255)
/// Default constructor
Color() {};
@@ -60,7 +60,7 @@ namespace openshot {
Color(unsigned char Red, unsigned char Green, unsigned char Blue, unsigned char Alpha);
/// Constructor which takes 4 existing Keyframe curves
Color(Keyframe Red, Keyframe Green, Keyframe Blue, Keyframe Alpha);
Color(openshot::Keyframe Red, openshot::Keyframe Green, openshot::Keyframe Blue, openshot::Keyframe Alpha);
/// Get the HEX value of a color at a specific frame
std::string GetColorHex(int64_t frame_number);

View File

@@ -54,7 +54,7 @@ namespace openshot
class DummyReader : public ReaderBase
{
private:
std::shared_ptr<Frame> image_frame;
std::shared_ptr<openshot::Frame> image_frame;
bool is_open;
public:
@@ -63,7 +63,7 @@ namespace openshot
DummyReader();
/// Constructor for DummyReader.
DummyReader(Fraction fps, int width, int height, int sample_rate, int channels, float duration);
DummyReader(openshot::Fraction fps, int width, int height, int sample_rate, int channels, float duration);
virtual ~DummyReader();
@@ -78,7 +78,7 @@ namespace openshot
///
/// @returns The requested frame (containing the image)
/// @param requested_frame The frame number that is requested.
std::shared_ptr<Frame> GetFrame(int64_t requested_frame);
std::shared_ptr<openshot::Frame> GetFrame(int64_t requested_frame);
/// Determine if reader is open or closed
bool IsOpen() { return is_open; };

View File

@@ -88,7 +88,7 @@ namespace openshot
/// @returns The modified openshot::Frame object
/// @param frame The frame object that needs the effect applied to it
/// @param frame_number The frame number (starting at 1) of the effect on the timeline.
virtual std::shared_ptr<Frame> GetFrame(std::shared_ptr<Frame> frame, int64_t frame_number) = 0;
virtual std::shared_ptr<openshot::Frame> GetFrame(std::shared_ptr<openshot::Frame> frame, int64_t frame_number) = 0;
/// Initialize the values of the EffectInfo struct. It is important for derived classes to call
/// this method, or the EffectInfo struct values will not be initialized.

View File

@@ -130,7 +130,7 @@ namespace openshot {
int64_t pts_counter;
int64_t num_packets_since_video_frame;
int64_t num_checks_since_final;
std::shared_ptr<Frame> last_video_frame;
std::shared_ptr<openshot::Frame> last_video_frame;
bool is_seeking;
int64_t seeking_pts;
@@ -176,7 +176,7 @@ namespace openshot {
int64_t ConvertVideoPTStoFrame(int64_t pts);
/// Create a new Frame (or return an existing one) and add it to the working queue.
std::shared_ptr<Frame> CreateFrame(int64_t requested_frame);
std::shared_ptr<openshot::Frame> CreateFrame(int64_t requested_frame);
/// Calculate Starting video frame and sample # for an audio PTS
AudioLocation GetAudioPTSLocation(int64_t pts);
@@ -206,7 +206,7 @@ namespace openshot {
void ProcessAudioPacket(int64_t requested_frame, int64_t target_frame, int starting_sample);
/// Read the stream until we find the requested Frame
std::shared_ptr<Frame> ReadStream(int64_t requested_frame);
std::shared_ptr<openshot::Frame> ReadStream(int64_t requested_frame);
/// Remove AVFrame from cache (and deallocate its memory)
void RemoveAVFrame(AVFrame *);
@@ -256,7 +256,7 @@ namespace openshot {
///
/// @returns The requested frame of video
/// @param requested_frame The frame number that is requested.
std::shared_ptr<Frame> GetFrame(int64_t requested_frame);
std::shared_ptr<openshot::Frame> GetFrame(int64_t requested_frame);
/// Determine if reader is open or closed
bool IsOpen() { return is_open; };

View File

@@ -180,20 +180,20 @@ namespace openshot {
int original_sample_rate;
int original_channels;
std::shared_ptr<Frame> last_frame;
std::deque<std::shared_ptr<Frame> > spooled_audio_frames;
std::deque<std::shared_ptr<Frame> > spooled_video_frames;
std::shared_ptr<openshot::Frame> last_frame;
std::deque<std::shared_ptr<openshot::Frame> > spooled_audio_frames;
std::deque<std::shared_ptr<openshot::Frame> > spooled_video_frames;
std::deque<std::shared_ptr<Frame> > queued_audio_frames;
std::deque<std::shared_ptr<Frame> > queued_video_frames;
std::deque<std::shared_ptr<openshot::Frame> > queued_audio_frames;
std::deque<std::shared_ptr<openshot::Frame> > queued_video_frames;
std::deque<std::shared_ptr<Frame> > processed_frames;
std::deque<std::shared_ptr<Frame> > deallocate_frames;
std::deque<std::shared_ptr<openshot::Frame> > processed_frames;
std::deque<std::shared_ptr<openshot::Frame> > deallocate_frames;
std::map<std::shared_ptr<Frame>, AVFrame *> av_frames;
std::map<std::shared_ptr<openshot::Frame>, AVFrame *> av_frames;
/// Add an AVFrame to the cache
void add_avframe(std::shared_ptr<Frame> frame, AVFrame *av_frame);
void add_avframe(std::shared_ptr<openshot::Frame> frame, AVFrame *av_frame);
/// Add an audio output stream
AVStream *add_audio_stream();
@@ -231,13 +231,13 @@ namespace openshot {
void open_video(AVFormatContext *oc, AVStream *st);
/// process video frame
void process_video_packet(std::shared_ptr<Frame> frame);
void process_video_packet(std::shared_ptr<openshot::Frame> frame);
/// write all queued frames' audio to the video file
void write_audio_packets(bool is_final);
/// write video frame
bool write_video_packet(std::shared_ptr<Frame> frame, AVFrame *frame_final);
bool write_video_packet(std::shared_ptr<openshot::Frame> frame, AVFrame *frame_final);
/// write all queued frames
void write_queued_frames();
@@ -285,7 +285,7 @@ namespace openshot {
/// @param channels The number of audio channels needed in this file
/// @param channel_layout The 'layout' of audio channels (i.e. mono, stereo, surround, etc...)
/// @param bit_rate The audio bit rate used during encoding
void SetAudioOptions(bool has_audio, std::string codec, int sample_rate, int channels, ChannelLayout channel_layout, int bit_rate);
void SetAudioOptions(bool has_audio, std::string codec, int sample_rate, int channels, openshot::ChannelLayout channel_layout, int bit_rate);
/// @brief Set the cache size
/// @param new_size The number of frames to queue before writing to the file
@@ -301,14 +301,14 @@ namespace openshot {
/// @param interlaced Does this video need to be interlaced?
/// @param top_field_first Which frame should be used as the top field?
/// @param bit_rate The video bit rate used during encoding
void SetVideoOptions(bool has_video, std::string codec, Fraction fps, int width, int height, Fraction pixel_ratio, bool interlaced, bool top_field_first, int bit_rate);
void SetVideoOptions(bool has_video, std::string codec, openshot::Fraction fps, int width, int height, openshot::Fraction pixel_ratio, bool interlaced, bool top_field_first, int bit_rate);
/// @brief Set custom options (some codecs accept additional params). This must be called after the
/// PrepareStreams() method, otherwise the streams have not been initialized yet.
/// @param stream The stream (openshot::StreamType) this option should apply to
/// @param name The name of the option you want to set (i.e. qmin, qmax, etc...)
/// @param value The new value of this option
void SetOption(StreamType stream, std::string name, std::string value);
void SetOption(openshot::StreamType stream, std::string name, std::string value);
/// @brief Write the file header (after the options are set). This method is called automatically
/// by the Open() method if this method has not yet been called.
@@ -316,13 +316,13 @@ namespace openshot {
/// @brief Add a frame to the stack waiting to be encoded.
/// @param frame The openshot::Frame object to write to this image
void WriteFrame(std::shared_ptr<Frame> frame);
void WriteFrame(std::shared_ptr<openshot::Frame> frame);
/// @brief Write a block of frames from a reader
/// @param reader A openshot::ReaderBase object which will provide frames to be written
/// @param start The starting frame number of the reader
/// @param length The number of frames to write
void WriteFrame(ReaderBase *reader, int64_t start, int64_t length);
void WriteFrame(openshot::ReaderBase *reader, int64_t start, int64_t length);
/// @brief Write the file trailer (after all frames are written). This is called automatically
/// by the Close() method if this method has not yet been called.

View File

@@ -122,7 +122,7 @@ namespace openshot
CriticalSection addingImageSection;
CriticalSection addingAudioSection;
const unsigned char *qbuffer;
Fraction pixel_ratio;
openshot::Fraction pixel_ratio;
int channels;
ChannelLayout channel_layout;
int width;
@@ -189,10 +189,10 @@ namespace openshot
/// Channel Layout of audio samples. A frame needs to keep track of this, since Writers do not always
/// know the original channel layout of a frame's audio samples (i.e. mono, stereo, 5 point surround, etc...)
ChannelLayout ChannelsLayout();
openshot::ChannelLayout ChannelsLayout();
// Set the channel layout of audio samples (i.e. mono, stereo, 5 point surround, etc...)
void ChannelsLayout(ChannelLayout new_channel_layout) { channel_layout = new_channel_layout; };
void ChannelsLayout(openshot::ChannelLayout new_channel_layout) { channel_layout = new_channel_layout; };
/// Clean up buffer after QImage is deleted
static void cleanUpBuffer(void *info);
@@ -216,10 +216,10 @@ namespace openshot
float* GetAudioSamples(int channel);
/// Get an array of sample data (all channels interleaved together), using any sample rate
float* GetInterleavedAudioSamples(int new_sample_rate, AudioResampler* resampler, int* sample_count);
float* GetInterleavedAudioSamples(int new_sample_rate, openshot::AudioResampler* resampler, int* sample_count);
// Get a planar array of sample data, using any sample rate
float* GetPlanarAudioSamples(int new_sample_rate, AudioResampler* resampler, int* sample_count);
float* GetPlanarAudioSamples(int new_sample_rate, openshot::AudioResampler* resampler, int* sample_count);
/// Get number of audio channels
int GetAudioChannelsCount();
@@ -241,7 +241,7 @@ namespace openshot
#endif
/// Get Pixel Aspect Ratio
Fraction GetPixelRatio() { return pixel_ratio; };
openshot::Fraction GetPixelRatio() { return pixel_ratio; };
/// Get pixel data (as packets)
const unsigned char* GetPixels();
@@ -256,10 +256,10 @@ namespace openshot
int GetHeight();
/// Calculate the # of samples per video frame (for the current frame number)
int GetSamplesPerFrame(Fraction fps, int sample_rate, int channels);
int GetSamplesPerFrame(openshot::Fraction fps, int sample_rate, int channels);
/// Calculate the # of samples per video frame (for a specific frame number and frame rate)
static int GetSamplesPerFrame(int64_t frame_number, Fraction fps, int sample_rate, int channels);
static int GetSamplesPerFrame(int64_t frame_number, openshot::Fraction fps, int sample_rate, int channels);
/// Get an audio waveform image
std::shared_ptr<QImage> GetWaveform(int width, int height, int Red, int Green, int Blue, int Alpha);
@@ -271,7 +271,7 @@ namespace openshot
int GetWidth();
/// Resize audio container to hold more (or less) samples and channels
void ResizeAudio(int channels, int length, int sample_rate, ChannelLayout channel_layout);
void ResizeAudio(int channels, int length, int sample_rate, openshot::ChannelLayout channel_layout);
/// Get the original sample rate of this frame's audio data
int SampleRate();

View File

@@ -61,7 +61,7 @@ namespace openshot
protected:
float speed;
float volume;
ReaderBase *reader;
openshot::ReaderBase *reader;
PlaybackMode mode;
public:
@@ -97,7 +97,7 @@ namespace openshot
virtual ReaderBase* Reader() = 0;
/// Set the current reader, such as a FFmpegReader
virtual void Reader(ReaderBase *new_reader) = 0;
virtual void Reader(openshot::ReaderBase *new_reader) = 0;
/// Get the Volume
virtual float Volume() = 0;

View File

@@ -52,7 +52,7 @@ namespace openshot
public:
/// Default constructor
explicit QtPlayer();
explicit QtPlayer(RendererBase *rb);
explicit QtPlayer(openshot::RendererBase *rb);
/// Default destructor
virtual ~QtPlayer();
@@ -64,7 +64,7 @@ namespace openshot
std::string GetError();
/// Get Audio Devices from JUCE
std::vector<AudioDeviceInfo> GetAudioDeviceNames();
std::vector<openshot::AudioDeviceInfo> GetAudioDeviceNames();
/// Play the video
void Play();
@@ -73,7 +73,7 @@ namespace openshot
void Loading();
/// Get the current mode
PlaybackMode Mode();
openshot::PlaybackMode Mode();
/// Pause the video
void Pause();
@@ -105,10 +105,10 @@ namespace openshot
void Stop();
/// Set the current reader
void Reader(ReaderBase *new_reader);
void Reader(openshot::ReaderBase *new_reader);
/// Get the current reader, such as a FFmpegReader
ReaderBase* Reader();
openshot::ReaderBase* Reader();
/// Get the Volume
float Volume();

View File

@@ -68,23 +68,23 @@ namespace openshot
int height; ///< The height of the video (in pixels)
int width; ///< The width of the video (in pixels)
int pixel_format; ///< The pixel format (i.e. YUV420P, RGB24, etc...)
Fraction fps; ///< Frames per second, as a fraction (i.e. 24/1 = 24 fps)
openshot::Fraction fps; ///< Frames per second, as a fraction (i.e. 24/1 = 24 fps)
int video_bit_rate; ///< The bit rate of the video stream (in bytes)
Fraction pixel_ratio; ///< The pixel ratio of the video stream as a fraction (i.e. some pixels are not square)
Fraction display_ratio; ///< The ratio of width to height of the video stream (i.e. 640x480 has a ratio of 4/3)
openshot::Fraction pixel_ratio; ///< The pixel ratio of the video stream as a fraction (i.e. some pixels are not square)
openshot::Fraction display_ratio; ///< The ratio of width to height of the video stream (i.e. 640x480 has a ratio of 4/3)
std::string vcodec; ///< The name of the video codec used to encode / decode the video stream
int64_t video_length; ///< The number of frames in the video stream
int video_stream_index; ///< The index of the video stream
Fraction video_timebase; ///< The video timebase determines how long each frame stays on the screen
openshot::Fraction video_timebase; ///< The video timebase determines how long each frame stays on the screen
bool interlaced_frame; ///< Are the contents of this frame interlaced
bool top_field_first; ///< Which interlaced field should be displayed first
std::string acodec; ///< The name of the audio codec used to encode / decode the video stream
int audio_bit_rate; ///< The bit rate of the audio stream (in bytes)
int sample_rate; ///< The number of audio samples per second (44100 is a common sample rate)
int channels; ///< The number of audio channels used in the audio stream
ChannelLayout channel_layout; ///< The channel layout (mono, stereo, 5 point surround, etc...)
openshot::ChannelLayout channel_layout; ///< The channel layout (mono, stereo, 5 point surround, etc...)
int audio_stream_index; ///< The index of the audio stream
Fraction audio_timebase; ///< The audio timebase determines how long each audio packet should be played
openshot::Fraction audio_timebase; ///< The audio timebase determines how long each audio packet should be played
std::map<std::string, std::string> metadata; ///< An optional map/dictionary of metadata for this reader
};
@@ -101,7 +101,7 @@ namespace openshot
/// Section lock for multiple threads
CriticalSection getFrameCriticalSection;
CriticalSection processingCriticalSection;
ClipBase* parent;
openshot::ClipBase* parent;
public:
@@ -109,13 +109,13 @@ namespace openshot
ReaderBase();
/// Information about the current media file
ReaderInfo info;
openshot::ReaderInfo info;
/// Parent clip object of this reader (which can be unparented and NULL)
ClipBase* GetClip();
openshot::ClipBase* GetClip();
/// Set parent clip object of this reader
void SetClip(ClipBase* clip);
void SetClip(openshot::ClipBase* clip);
/// Close the reader (and any resources it was consuming)
virtual void Close() = 0;
@@ -124,7 +124,7 @@ namespace openshot
void DisplayInfo();
/// Get the cache object used by this reader (note: not all readers use cache)
virtual CacheBase* GetCache() = 0;
virtual openshot::CacheBase* GetCache() = 0;
/// This method is required for all derived classes of ReaderBase, and returns the
/// openshot::Frame object, which contains the image and audio information for that
@@ -132,7 +132,7 @@ namespace openshot
///
/// @returns The requested frame of video
/// @param[in] number The frame number that is requested.
virtual std::shared_ptr<Frame> GetFrame(int64_t number) = 0;
virtual std::shared_ptr<openshot::Frame> GetFrame(int64_t number) = 0;
/// Determine if reader is open or closed
virtual bool IsOpen() = 0;

View File

@@ -50,7 +50,7 @@ namespace openshot
public:
/// Paint(render) a video Frame.
void paint(const std::shared_ptr<Frame> & frame);
void paint(const std::shared_ptr<openshot::Frame> & frame);
/// Allow manual override of the QWidget that is used to display
virtual void OverrideWidget(int64_t qwidget_address) = 0;
@@ -58,7 +58,7 @@ namespace openshot
protected:
RendererBase();
virtual ~RendererBase();
virtual void render(std::shared_ptr<QImage> image) = 0;
};

View File

@@ -98,7 +98,7 @@ namespace openshot
std::shared_ptr<Magick::Image> image;
MAGICK_DRAWABLE lines;
bool is_open;
GravityType gravity;
openshot::GravityType gravity;
public:
@@ -126,14 +126,14 @@ namespace openshot
void Close();
/// Get the cache object used by this reader (always returns NULL for this object)
CacheMemory* GetCache() { return NULL; };
openshot::CacheMemory* GetCache() { return NULL; };
/// Get an openshot::Frame object for a specific frame number of this reader. All numbers
/// return the same Frame, since they all share the same image data.
///
/// @returns The requested frame (containing the image)
/// @param requested_frame The frame number that is requested.
std::shared_ptr<Frame> GetFrame(int64_t requested_frame);
std::shared_ptr<openshot::Frame> GetFrame(int64_t requested_frame);
/// Determine if reader is open or closed
bool IsOpen() { return is_open; };

View File

@@ -57,23 +57,23 @@ namespace openshot
int height; ///< The height of the video (in pixels)
int width; ///< The width of the video (in pixels)
int pixel_format; ///< The pixel format (i.e. YUV420P, RGB24, etc...)
Fraction fps; ///< Frames per second, as a fraction (i.e. 24/1 = 24 fps)
openshot::Fraction fps; ///< Frames per second, as a fraction (i.e. 24/1 = 24 fps)
int video_bit_rate; ///< The bit rate of the video stream (in bytes)
Fraction pixel_ratio; ///< The pixel ratio of the video stream as a fraction (i.e. some pixels are not square)
Fraction display_ratio; ///< The ratio of width to height of the video stream (i.e. 640x480 has a ratio of 4/3)
openshot::Fraction pixel_ratio; ///< The pixel ratio of the video stream as a fraction (i.e. some pixels are not square)
openshot::Fraction display_ratio; ///< The ratio of width to height of the video stream (i.e. 640x480 has a ratio of 4/3)
std::string vcodec; ///< The name of the video codec used to encode / decode the video stream
int64_t video_length; ///< The number of frames in the video stream
int video_stream_index; ///< The index of the video stream
Fraction video_timebase; ///< The video timebase determines how long each frame stays on the screen
openshot::Fraction video_timebase; ///< The video timebase determines how long each frame stays on the screen
bool interlaced_frame; ///< Are the contents of this frame interlaced
bool top_field_first; ///< Which interlaced field should be displayed first
std::string acodec; ///< The name of the audio codec used to encode / decode the video stream
int audio_bit_rate; ///< The bit rate of the audio stream (in bytes)
int sample_rate; ///< The number of audio samples per second (44100 is a common sample rate)
int channels; ///< The number of audio channels used in the audio stream
ChannelLayout channel_layout; ///< The channel layout (mono, stereo, 5 point surround, etc...)
openshot::ChannelLayout channel_layout; ///< The channel layout (mono, stereo, 5 point surround, etc...)
int audio_stream_index; ///< The index of the audio stream
Fraction audio_timebase; ///< The audio timebase determines how long each audio packet should be played
openshot::Fraction audio_timebase; ///< The audio timebase determines how long each audio packet should be played
std::map<std::string, std::string> metadata; ///< An optional map/dictionary of video & audio metadata
};
@@ -95,16 +95,16 @@ namespace openshot
/// @brief This method copy's the info struct of a reader, and sets the writer with the same info
/// @param reader The source reader to copy
void CopyReaderInfo(ReaderBase* reader);
void CopyReaderInfo(openshot::ReaderBase* reader);
/// Determine if writer is open or closed
virtual bool IsOpen() = 0;
/// This method is required for all derived classes of WriterBase. Write a Frame to the video file.
virtual void WriteFrame(std::shared_ptr<Frame> frame) = 0;
virtual void WriteFrame(std::shared_ptr<openshot::Frame> frame) = 0;
/// This method is required for all derived classes of WriterBase. Write a block of frames from a reader.
virtual void WriteFrame(ReaderBase* reader, int64_t start, int64_t length) = 0;
virtual void WriteFrame(openshot::ReaderBase* reader, int64_t start, int64_t length) = 0;
/// Get and Set JSON methods
std::string Json(); ///< Generate JSON string of this object

View File

@@ -252,11 +252,11 @@ void ReaderBase::SetJsonValue(Json::Value root) {
}
/// Parent clip object of this reader (which can be unparented and NULL)
ClipBase* ReaderBase::GetClip() {
openshot::ClipBase* ReaderBase::GetClip() {
	// May be NULL when the reader has not been attached to a clip.
	return parent;
}
/// Set parent clip object of this reader
void ReaderBase::SetClip(ClipBase* clip) {
void ReaderBase::SetClip(openshot::ClipBase* clip) {
	// Store a non-owning pointer to the parent clip; pass NULL to unparent.
	parent = clip;
}