diff --git a/include/CacheBase.h b/include/CacheBase.h
index d09cb114..31cfad51 100644
--- a/include/CacheBase.h
+++ b/include/CacheBase.h
@@ -65,7 +65,7 @@ namespace openshot {
/// @brief Add a Frame to the cache
/// @param frame The openshot::Frame object needing to be cached.
- virtual void Add(std::shared_ptr<Frame> frame) = 0;
+ virtual void Add(std::shared_ptr<openshot::Frame> frame) = 0;
/// Clear the cache of all frames
virtual void Clear() = 0;
@@ -75,13 +75,13 @@ namespace openshot {
/// @brief Get a frame from the cache
/// @param frame_number The frame number of the cached frame
- virtual std::shared_ptr<Frame> GetFrame(int64_t frame_number) = 0;
+ virtual std::shared_ptr<openshot::Frame> GetFrame(int64_t frame_number) = 0;
/// Gets the maximum bytes value
virtual int64_t GetBytes() = 0;
/// Get the smallest frame number
- virtual std::shared_ptr<Frame> GetSmallestFrame() = 0;
+ virtual std::shared_ptr<openshot::Frame> GetSmallestFrame() = 0;
/// @brief Remove a specific frame
/// @param frame_number The frame number of the cached frame
diff --git a/include/CacheDisk.h b/include/CacheDisk.h
index 82c4c80e..e69cc357 100644
--- a/include/CacheDisk.h
+++ b/include/CacheDisk.h
@@ -96,7 +96,7 @@ namespace openshot {
/// @brief Add a Frame to the cache
/// @param frame The openshot::Frame object needing to be cached.
- void Add(std::shared_ptr<Frame> frame);
+ void Add(std::shared_ptr<openshot::Frame> frame);
/// Clear the cache of all frames
void Clear();
@@ -106,13 +106,13 @@ namespace openshot {
/// @brief Get a frame from the cache
/// @param frame_number The frame number of the cached frame
- std::shared_ptr<Frame> GetFrame(int64_t frame_number);
+ std::shared_ptr<openshot::Frame> GetFrame(int64_t frame_number);
/// Gets the maximum bytes value
int64_t GetBytes();
/// Get the smallest frame number
- std::shared_ptr<Frame> GetSmallestFrame();
+ std::shared_ptr<openshot::Frame> GetSmallestFrame();
/// @brief Move frame to front of queue (so it lasts longer)
/// @param frame_number The frame number of the cached frame
diff --git a/include/CacheMemory.h b/include/CacheMemory.h
index 7ac77537..fb3c75f6 100644
--- a/include/CacheMemory.h
+++ b/include/CacheMemory.h
@@ -50,7 +50,7 @@ namespace openshot {
*/
class CacheMemory : public CacheBase {
private:
- std::map<int64_t, std::shared_ptr<Frame> > frames; ///< This map holds the frame number and Frame objects
+ std::map<int64_t, std::shared_ptr<openshot::Frame> > frames; ///< This map holds the frame number and Frame objects
std::deque<int64_t> frame_numbers; ///< This queue holds a sequential list of cached Frame numbers
bool needs_range_processing; ///< Something has changed, and the range data needs to be re-calculated
@@ -78,7 +78,7 @@ namespace openshot {
/// @brief Add a Frame to the cache
/// @param frame The openshot::Frame object needing to be cached.
- void Add(std::shared_ptr<Frame> frame);
+ void Add(std::shared_ptr<openshot::Frame> frame);
/// Clear the cache of all frames
void Clear();
@@ -88,13 +88,13 @@ namespace openshot {
/// @brief Get a frame from the cache
/// @param frame_number The frame number of the cached frame
- std::shared_ptr<Frame> GetFrame(int64_t frame_number);
+ std::shared_ptr<openshot::Frame> GetFrame(int64_t frame_number);
/// Gets the maximum bytes value
int64_t GetBytes();
/// Get the smallest frame number
- std::shared_ptr<Frame> GetSmallestFrame();
+ std::shared_ptr<openshot::Frame> GetSmallestFrame();
/// @brief Move frame to front of queue (so it lasts longer)
/// @param frame_number The frame number of the cached frame
diff --git a/include/ChunkReader.h b/include/ChunkReader.h
index 4290df81..35e3af4e 100644
--- a/include/ChunkReader.h
+++ b/include/ChunkReader.h
@@ -106,10 +106,10 @@ namespace openshot
std::string path;
bool is_open;
int64_t chunk_size;
- ReaderBase *local_reader;
+ openshot::ReaderBase *local_reader;
ChunkLocation previous_location;
ChunkVersion version;
- std::shared_ptr<Frame> last_frame;
+ std::shared_ptr<openshot::Frame> last_frame;
/// Check if folder path existing
bool does_folder_exist(std::string path);
@@ -143,12 +143,12 @@ namespace openshot
void SetChunkSize(int64_t new_size) { chunk_size = new_size; };
/// Get the cache object used by this reader (always return NULL for this reader)
- CacheMemory* GetCache() { return NULL; };
+ openshot::CacheMemory* GetCache() { return NULL; };
/// @brief Get an openshot::Frame object for a specific frame number of this reader.
/// @returns The requested frame (containing the image and audio)
/// @param requested_frame The frame number you want to retrieve
- std::shared_ptr<Frame> GetFrame(int64_t requested_frame);
+ std::shared_ptr<openshot::Frame> GetFrame(int64_t requested_frame);
/// Determine if reader is open or closed
bool IsOpen() { return is_open; };
diff --git a/include/ChunkWriter.h b/include/ChunkWriter.h
index c96c118a..5fee5a37 100644
--- a/include/ChunkWriter.h
+++ b/include/ChunkWriter.h
@@ -87,10 +87,10 @@ namespace openshot
int64_t frame_count;
bool is_open;
bool is_writing;
- ReaderBase *local_reader;
- FFmpegWriter *writer_thumb;
- FFmpegWriter *writer_preview;
- FFmpegWriter *writer_final;
+ openshot::ReaderBase *local_reader;
+ openshot::FFmpegWriter *writer_thumb;
+ openshot::FFmpegWriter *writer_preview;
+ openshot::FFmpegWriter *writer_final;
std::shared_ptr last_frame;
bool last_frame_needed;
std::string default_extension;
@@ -114,7 +114,7 @@ namespace openshot
/// @brief Constructor for ChunkWriter. Throws one of the following exceptions.
/// @param path The folder path of the chunk file to be created
/// @param reader The initial reader to base this chunk file's meta data on (such as fps, height, width, etc...)
- ChunkWriter(std::string path, ReaderBase *reader);
+ ChunkWriter(std::string path, openshot::ReaderBase *reader);
/// Close the writer
void Close();
@@ -134,7 +134,7 @@ namespace openshot
/// @brief Add a frame to the stack waiting to be encoded.
/// @param frame The openshot::Frame object that needs to be written to this chunk file.
- void WriteFrame(std::shared_ptr<Frame> frame);
+ void WriteFrame(std::shared_ptr<openshot::Frame> frame);
/// @brief Write a block of frames from a reader
/// @param start The starting frame number to write (of the reader passed into the constructor)
@@ -145,7 +145,7 @@ namespace openshot
/// @param reader The reader containing the frames you need
/// @param start The starting frame number to write
/// @param length The number of frames to write
- void WriteFrame(ReaderBase* reader, int64_t start, int64_t length);
+ void WriteFrame(openshot::ReaderBase* reader, int64_t start, int64_t length);
};
diff --git a/include/Clip.h b/include/Clip.h
index 3e978429..76bfb923 100644
--- a/include/Clip.h
+++ b/include/Clip.h
@@ -57,7 +57,7 @@ namespace openshot {
/// from lowest layer to top layer (since that is sequence clips are combined), and then by
/// position, and then by effect order.
struct CompareClipEffects{
- bool operator()( EffectBase* lhs, EffectBase* rhs){
+ bool operator()( openshot::EffectBase* lhs, openshot::EffectBase* rhs){
if( lhs->Layer() < rhs->Layer() ) return true;
if( lhs->Layer() == rhs->Layer() && lhs->Position() < rhs->Position() ) return true;
if( lhs->Layer() == rhs->Layer() && lhs->Position() == rhs->Position() && lhs->Order() > rhs->Order() ) return true;
@@ -76,7 +76,7 @@ namespace openshot {
* Clip c1(new ImageReader("MyAwesomeLogo.jpeg"));
* Clip c2(new FFmpegReader("BackgroundVideo.webm"));
*
- * // CLIP 1 (logo) - Set some clip properties (with Keyframes)
+ * // CLIP 1 (logo) - Set some clip properties (with openshot::Keyframes)
* c1.Position(0.0); // Set the position or location (in seconds) on the timeline
* c1.gravity = GRAVITY_LEFT; // Set the alignment / gravity of the clip (position on the screen)
* c1.scale = SCALE_CROP; // Set the scale mode (how the image is resized to fill the screen)
@@ -87,7 +87,7 @@ namespace openshot {
* c1.alpha.AddPoint(500, 0.0); // Keep the alpha transparent until frame #500
* c1.alpha.AddPoint(565, 1.0); // Animate the alpha from transparent to visible (between frame #501 and #565)
*
- * // CLIP 2 (background video) - Set some clip properties (with Keyframes)
+ * // CLIP 2 (background video) - Set some clip properties (with openshot::Keyframes)
* c2.Position(0.0); // Set the position or location (in seconds) on the timeline
* c2.Start(10.0); // Set the starting position of the video (trim the left side of the video)
* c2.Layer(0); // Set the layer of the timeline (higher layers cover up images of lower layers)
@@ -97,40 +97,40 @@ namespace openshot {
* c2.alpha.AddPoint(384, 1.0); // Animate the alpha to visible (between frame #360 and frame #384)
* @endcode
*/
- class Clip : public ClipBase {
+ class Clip : public openshot::ClipBase {
protected:
/// Section lock for multiple threads
CriticalSection getFrameCriticalSection;
private:
bool waveform; ///< Should a waveform be used instead of the clip's image
- std::list<EffectBase*> effects; ///< List of effects on this clip
+ std::list<openshot::EffectBase*> effects; ///< List of effects on this clip
/// Apply effects to the source frame (if any)
- std::shared_ptr<Frame> apply_effects(std::shared_ptr<Frame> frame);
+ std::shared_ptr<openshot::Frame> apply_effects(std::shared_ptr<openshot::Frame> frame);
/// Get file extension
std::string get_file_extension(std::string path);
/// Get a frame object or create a blank one
- std::shared_ptr<Frame> GetOrCreateFrame(int64_t number);
+ std::shared_ptr<openshot::Frame> GetOrCreateFrame(int64_t number);
/// Adjust the audio and image of a time mapped frame
- void get_time_mapped_frame(std::shared_ptr<Frame> frame, int64_t frame_number);
+ void get_time_mapped_frame(std::shared_ptr<openshot::Frame> frame, int64_t frame_number);
/// Init default settings for a clip
void init_settings();
@@ -145,11 +145,11 @@ namespace openshot {
void reverse_buffer(juce::AudioSampleBuffer* buffer);
public:
- GravityType gravity; ///< The gravity of a clip determines where it snaps to its parent
- ScaleType scale; ///< The scale determines how a clip should be resized to fit its parent
- AnchorType anchor; ///< The anchor determines what parent a clip should snap to
- FrameDisplayType display; ///< The format to display the frame number (if any)
- VolumeMixType mixing; ///< What strategy should be followed when mixing audio with other clips
+ openshot::GravityType gravity; ///< The gravity of a clip determines where it snaps to its parent
+ openshot::ScaleType scale; ///< The scale determines how a clip should be resized to fit its parent
+ openshot::AnchorType anchor; ///< The anchor determines what parent a clip should snap to
+ openshot::FrameDisplayType display; ///< The format to display the frame number (if any)
+ openshot::VolumeMixType mixing; ///< What strategy should be followed when mixing audio with other clips
/// Default Constructor
Clip();
@@ -160,36 +160,36 @@ namespace openshot {
/// @brief Constructor with reader
/// @param new_reader The reader to be used by this clip
- Clip(ReaderBase* new_reader);
+ Clip(openshot::ReaderBase* new_reader);
/// Destructor
virtual ~Clip();
/// @brief Add an effect to the clip
/// @param effect Add an effect to the clip. An effect can modify the audio or video of an openshot::Frame.
- void AddEffect(EffectBase* effect);
+ void AddEffect(openshot::EffectBase* effect);
/// Close the internal reader
void Close();
/// Return the list of effects on the timeline
- std::list<EffectBase*> Effects() { return effects; };
+ std::list<openshot::EffectBase*> Effects() { return effects; };
/// @brief Get an openshot::Frame object for a specific frame number of this timeline.
///
/// @returns The requested frame (containing the image)
/// @param requested_frame The frame number that is requested
- std::shared_ptr<Frame> GetFrame(int64_t requested_frame);
+ std::shared_ptr<openshot::Frame> GetFrame(int64_t requested_frame);
/// Open the internal reader
void Open();
/// @brief Set the current reader
/// @param new_reader The reader to be used by this clip
- void Reader(ReaderBase* new_reader);
+ void Reader(openshot::ReaderBase* new_reader);
/// Get the current reader
- ReaderBase* Reader();
+ openshot::ReaderBase* Reader();
/// Override End() method
float End(); ///< Get end position (in seconds) of clip (trim end of video), which can be affected by the time curve.
@@ -207,55 +207,55 @@ namespace openshot {
/// @brief Remove an effect from the clip
/// @param effect Remove an effect from the clip.
- void RemoveEffect(EffectBase* effect);
+ void RemoveEffect(openshot::EffectBase* effect);
/// Waveform property
bool Waveform() { return waveform; } ///< Get the waveform property of this clip
void Waveform(bool value) { waveform = value; } ///< Set the waveform property of this clip
// Scale and Location curves
- Keyframe scale_x; ///< Curve representing the horizontal scaling in percent (0 to 1)
- Keyframe scale_y; ///< Curve representing the vertical scaling in percent (0 to 1)
- Keyframe location_x; ///< Curve representing the relative X position in percent based on the gravity (-1 to 1)
- Keyframe location_y; ///< Curve representing the relative Y position in percent based on the gravity (-1 to 1)
+ openshot::Keyframe scale_x; ///< Curve representing the horizontal scaling in percent (0 to 1)
+ openshot::Keyframe scale_y; ///< Curve representing the vertical scaling in percent (0 to 1)
+ openshot::Keyframe location_x; ///< Curve representing the relative X position in percent based on the gravity (-1 to 1)
+ openshot::Keyframe location_y; ///< Curve representing the relative Y position in percent based on the gravity (-1 to 1)
// Alpha and Rotation curves
- Keyframe alpha; ///< Curve representing the alpha (1 to 0)
- Keyframe rotation; ///< Curve representing the rotation (0 to 360)
+ openshot::Keyframe alpha; ///< Curve representing the alpha (1 to 0)
+ openshot::Keyframe rotation; ///< Curve representing the rotation (0 to 360)
// Time and Volume curves
- Keyframe time; ///< Curve representing the frames over time to play (used for speed and direction of video)
- Keyframe volume; ///< Curve representing the volume (0 to 1)
+ openshot::Keyframe time; ///< Curve representing the frames over time to play (used for speed and direction of video)
+ openshot::Keyframe volume; ///< Curve representing the volume (0 to 1)
/// Curve representing the color of the audio wave form
- Color wave_color;
+ openshot::Color wave_color;
// Crop settings and curves
- GravityType crop_gravity; ///< Cropping needs to have a gravity to determine what side we are cropping
- Keyframe crop_width; ///< Curve representing width in percent (0.0=0%, 1.0=100%)
- Keyframe crop_height; ///< Curve representing height in percent (0.0=0%, 1.0=100%)
- Keyframe crop_x; ///< Curve representing X offset in percent (-1.0=-100%, 0.0=0%, 1.0=100%)
- Keyframe crop_y; ///< Curve representing Y offset in percent (-1.0=-100%, 0.0=0%, 1.0=100%)
+ openshot::GravityType crop_gravity; ///< Cropping needs to have a gravity to determine what side we are cropping
+ openshot::Keyframe crop_width; ///< Curve representing width in percent (0.0=0%, 1.0=100%)
+ openshot::Keyframe crop_height; ///< Curve representing height in percent (0.0=0%, 1.0=100%)
+ openshot::Keyframe crop_x; ///< Curve representing X offset in percent (-1.0=-100%, 0.0=0%, 1.0=100%)
+ openshot::Keyframe crop_y; ///< Curve representing Y offset in percent (-1.0=-100%, 0.0=0%, 1.0=100%)
// Shear and perspective curves
- Keyframe shear_x; ///< Curve representing X shear angle in degrees (-45.0=left, 45.0=right)
- Keyframe shear_y; ///< Curve representing Y shear angle in degrees (-45.0=down, 45.0=up)
- Keyframe perspective_c1_x; ///< Curves representing X for coordinate 1
- Keyframe perspective_c1_y; ///< Curves representing Y for coordinate 1
- Keyframe perspective_c2_x; ///< Curves representing X for coordinate 2
- Keyframe perspective_c2_y; ///< Curves representing Y for coordinate 2
- Keyframe perspective_c3_x; ///< Curves representing X for coordinate 3
- Keyframe perspective_c3_y; ///< Curves representing Y for coordinate 3
- Keyframe perspective_c4_x; ///< Curves representing X for coordinate 4
- Keyframe perspective_c4_y; ///< Curves representing Y for coordinate 4
+ openshot::Keyframe shear_x; ///< Curve representing X shear angle in degrees (-45.0=left, 45.0=right)
+ openshot::Keyframe shear_y; ///< Curve representing Y shear angle in degrees (-45.0=down, 45.0=up)
+ openshot::Keyframe perspective_c1_x; ///< Curves representing X for coordinate 1
+ openshot::Keyframe perspective_c1_y; ///< Curves representing Y for coordinate 1
+ openshot::Keyframe perspective_c2_x; ///< Curves representing X for coordinate 2
+ openshot::Keyframe perspective_c2_y; ///< Curves representing Y for coordinate 2
+ openshot::Keyframe perspective_c3_x; ///< Curves representing X for coordinate 3
+ openshot::Keyframe perspective_c3_y; ///< Curves representing Y for coordinate 3
+ openshot::Keyframe perspective_c4_x; ///< Curves representing X for coordinate 4
+ openshot::Keyframe perspective_c4_y; ///< Curves representing Y for coordinate 4
/// Audio channel filter and mappings
- Keyframe channel_filter; ///< A number representing an audio channel to filter (clears all other channels)
- Keyframe channel_mapping; ///< A number representing an audio channel to output (only works when filtering a channel)
+ openshot::Keyframe channel_filter; ///< A number representing an audio channel to filter (clears all other channels)
+ openshot::Keyframe channel_mapping; ///< A number representing an audio channel to output (only works when filtering a channel)
/// Override has_video and has_audio properties of clip (and their readers)
- Keyframe has_audio; ///< An optional override to determine if this clip has audio (-1=undefined, 0=no, 1=yes)
- Keyframe has_video; ///< An optional override to determine if this clip has video (-1=undefined, 0=no, 1=yes)
+ openshot::Keyframe has_audio; ///< An optional override to determine if this clip has audio (-1=undefined, 0=no, 1=yes)
+ openshot::Keyframe has_video; ///< An optional override to determine if this clip has video (-1=undefined, 0=no, 1=yes)
};
diff --git a/include/Color.h b/include/Color.h
index 33b05574..47db29a7 100644
--- a/include/Color.h
+++ b/include/Color.h
@@ -45,10 +45,10 @@ namespace openshot {
class Color{
public:
- Keyframe red; /// image_frame;
+ std::shared_ptr<openshot::Frame> image_frame;
bool is_open;
public:
@@ -63,7 +63,7 @@ namespace openshot
DummyReader();
/// Constructor for DummyReader.
- DummyReader(Fraction fps, int width, int height, int sample_rate, int channels, float duration);
+ DummyReader(openshot::Fraction fps, int width, int height, int sample_rate, int channels, float duration);
virtual ~DummyReader();
@@ -78,7 +78,7 @@ namespace openshot
///
/// @returns The requested frame (containing the image)
/// @param requested_frame The frame number that is requested.
- std::shared_ptr<Frame> GetFrame(int64_t requested_frame);
+ std::shared_ptr<openshot::Frame> GetFrame(int64_t requested_frame);
/// Determine if reader is open or closed
bool IsOpen() { return is_open; };
diff --git a/include/EffectBase.h b/include/EffectBase.h
index 8d0326c6..29e98b58 100644
--- a/include/EffectBase.h
+++ b/include/EffectBase.h
@@ -88,7 +88,7 @@ namespace openshot
/// @returns The modified openshot::Frame object
/// @param frame The frame object that needs the effect applied to it
/// @param frame_number The frame number (starting at 1) of the effect on the timeline.
- virtual std::shared_ptr<Frame> GetFrame(std::shared_ptr<Frame> frame, int64_t frame_number) = 0;
+ virtual std::shared_ptr<openshot::Frame> GetFrame(std::shared_ptr<openshot::Frame> frame, int64_t frame_number) = 0;
/// Initialize the values of the EffectInfo struct. It is important for derived classes to call
/// this method, or the EffectInfo struct values will not be initialized.
diff --git a/include/FFmpegReader.h b/include/FFmpegReader.h
index a8b1740b..9faa86a3 100644
--- a/include/FFmpegReader.h
+++ b/include/FFmpegReader.h
@@ -130,7 +130,7 @@ namespace openshot {
int64_t pts_counter;
int64_t num_packets_since_video_frame;
int64_t num_checks_since_final;
- std::shared_ptr<Frame> last_video_frame;
+ std::shared_ptr<openshot::Frame> last_video_frame;
bool is_seeking;
int64_t seeking_pts;
@@ -176,7 +176,7 @@ namespace openshot {
int64_t ConvertVideoPTStoFrame(int64_t pts);
/// Create a new Frame (or return an existing one) and add it to the working queue.
- std::shared_ptr<Frame> CreateFrame(int64_t requested_frame);
+ std::shared_ptr<openshot::Frame> CreateFrame(int64_t requested_frame);
/// Calculate Starting video frame and sample # for an audio PTS
AudioLocation GetAudioPTSLocation(int64_t pts);
@@ -206,7 +206,7 @@ namespace openshot {
void ProcessAudioPacket(int64_t requested_frame, int64_t target_frame, int starting_sample);
/// Read the stream until we find the requested Frame
- std::shared_ptr<Frame> ReadStream(int64_t requested_frame);
+ std::shared_ptr<openshot::Frame> ReadStream(int64_t requested_frame);
/// Remove AVFrame from cache (and deallocate its memory)
void RemoveAVFrame(AVFrame *);
@@ -256,7 +256,7 @@ namespace openshot {
///
/// @returns The requested frame of video
/// @param requested_frame The frame number that is requested.
- std::shared_ptr<Frame> GetFrame(int64_t requested_frame);
+ std::shared_ptr<openshot::Frame> GetFrame(int64_t requested_frame);
/// Determine if reader is open or closed
bool IsOpen() { return is_open; };
diff --git a/include/FFmpegWriter.h b/include/FFmpegWriter.h
index 97fd7726..dc3a2cf7 100644
--- a/include/FFmpegWriter.h
+++ b/include/FFmpegWriter.h
@@ -180,20 +180,20 @@ namespace openshot {
int original_sample_rate;
int original_channels;
- std::shared_ptr<Frame> last_frame;
- std::deque<std::shared_ptr<Frame> > spooled_audio_frames;
- std::deque<std::shared_ptr<Frame> > spooled_video_frames;
+ std::shared_ptr<openshot::Frame> last_frame;
+ std::deque<std::shared_ptr<openshot::Frame> > spooled_audio_frames;
+ std::deque<std::shared_ptr<openshot::Frame> > spooled_video_frames;
- std::deque<std::shared_ptr<Frame> > queued_audio_frames;
- std::deque<std::shared_ptr<Frame> > queued_video_frames;
+ std::deque<std::shared_ptr<openshot::Frame> > queued_audio_frames;
+ std::deque<std::shared_ptr<openshot::Frame> > queued_video_frames;
- std::deque<std::shared_ptr<Frame> > processed_frames;
- std::deque<std::shared_ptr<Frame> > deallocate_frames;
+ std::deque<std::shared_ptr<openshot::Frame> > processed_frames;
+ std::deque<std::shared_ptr<openshot::Frame> > deallocate_frames;
- std::map<std::shared_ptr<Frame>, AVFrame *> av_frames;
+ std::map<std::shared_ptr<openshot::Frame>, AVFrame *> av_frames;
/// Add an AVFrame to the cache
- void add_avframe(std::shared_ptr<Frame> frame, AVFrame *av_frame);
+ void add_avframe(std::shared_ptr<openshot::Frame> frame, AVFrame *av_frame);
/// Add an audio output stream
AVStream *add_audio_stream();
@@ -231,13 +231,13 @@ namespace openshot {
void open_video(AVFormatContext *oc, AVStream *st);
/// process video frame
- void process_video_packet(std::shared_ptr<Frame> frame);
+ void process_video_packet(std::shared_ptr<openshot::Frame> frame);
/// write all queued frames' audio to the video file
void write_audio_packets(bool is_final);
/// write video frame
- bool write_video_packet(std::shared_ptr<Frame> frame, AVFrame *frame_final);
+ bool write_video_packet(std::shared_ptr<openshot::Frame> frame, AVFrame *frame_final);
/// write all queued frames
void write_queued_frames();
@@ -285,7 +285,7 @@ namespace openshot {
/// @param channels The number of audio channels needed in this file
/// @param channel_layout The 'layout' of audio channels (i.e. mono, stereo, surround, etc...)
/// @param bit_rate The audio bit rate used during encoding
- void SetAudioOptions(bool has_audio, std::string codec, int sample_rate, int channels, ChannelLayout channel_layout, int bit_rate);
+ void SetAudioOptions(bool has_audio, std::string codec, int sample_rate, int channels, openshot::ChannelLayout channel_layout, int bit_rate);
/// @brief Set the cache size
/// @param new_size The number of frames to queue before writing to the file
@@ -301,14 +301,14 @@ namespace openshot {
/// @param interlaced Does this video need to be interlaced?
/// @param top_field_first Which frame should be used as the top field?
/// @param bit_rate The video bit rate used during encoding
- void SetVideoOptions(bool has_video, std::string codec, Fraction fps, int width, int height, Fraction pixel_ratio, bool interlaced, bool top_field_first, int bit_rate);
+ void SetVideoOptions(bool has_video, std::string codec, openshot::Fraction fps, int width, int height, openshot::Fraction pixel_ratio, bool interlaced, bool top_field_first, int bit_rate);
/// @brief Set custom options (some codecs accept additional params). This must be called after the
/// PrepareStreams() method, otherwise the streams have not been initialized yet.
/// @param stream The stream (openshot::StreamType) this option should apply to
/// @param name The name of the option you want to set (i.e. qmin, qmax, etc...)
/// @param value The new value of this option
- void SetOption(StreamType stream, std::string name, std::string value);
+ void SetOption(openshot::StreamType stream, std::string name, std::string value);
/// @brief Write the file header (after the options are set). This method is called automatically
/// by the Open() method if this method has not yet been called.
@@ -316,13 +316,13 @@ namespace openshot {
/// @brief Add a frame to the stack waiting to be encoded.
/// @param frame The openshot::Frame object to write to this image
- void WriteFrame(std::shared_ptr<Frame> frame);
+ void WriteFrame(std::shared_ptr<openshot::Frame> frame);
/// @brief Write a block of frames from a reader
/// @param reader A openshot::ReaderBase object which will provide frames to be written
/// @param start The starting frame number of the reader
/// @param length The number of frames to write
- void WriteFrame(ReaderBase *reader, int64_t start, int64_t length);
+ void WriteFrame(openshot::ReaderBase *reader, int64_t start, int64_t length);
/// @brief Write the file trailer (after all frames are written). This is called automatically
/// by the Close() method if this method has not yet been called.
diff --git a/include/Frame.h b/include/Frame.h
index 1048c9cf..e1997485 100644
--- a/include/Frame.h
+++ b/include/Frame.h
@@ -122,7 +122,7 @@ namespace openshot
CriticalSection addingImageSection;
CriticalSection addingAudioSection;
const unsigned char *qbuffer;
- Fraction pixel_ratio;
+ openshot::Fraction pixel_ratio;
int channels;
ChannelLayout channel_layout;
int width;
@@ -189,10 +189,10 @@ namespace openshot
/// Channel Layout of audio samples. A frame needs to keep track of this, since Writers do not always
/// know the original channel layout of a frame's audio samples (i.e. mono, stereo, 5 point surround, etc...)
- ChannelLayout ChannelsLayout();
+ openshot::ChannelLayout ChannelsLayout();
// Set the channel layout of audio samples (i.e. mono, stereo, 5 point surround, etc...)
- void ChannelsLayout(ChannelLayout new_channel_layout) { channel_layout = new_channel_layout; };
+ void ChannelsLayout(openshot::ChannelLayout new_channel_layout) { channel_layout = new_channel_layout; };
/// Clean up buffer after QImage is deleted
static void cleanUpBuffer(void *info);
@@ -216,10 +216,10 @@ namespace openshot
float* GetAudioSamples(int channel);
/// Get an array of sample data (all channels interleaved together), using any sample rate
- float* GetInterleavedAudioSamples(int new_sample_rate, AudioResampler* resampler, int* sample_count);
+ float* GetInterleavedAudioSamples(int new_sample_rate, openshot::AudioResampler* resampler, int* sample_count);
// Get a planar array of sample data, using any sample rate
- float* GetPlanarAudioSamples(int new_sample_rate, AudioResampler* resampler, int* sample_count);
+ float* GetPlanarAudioSamples(int new_sample_rate, openshot::AudioResampler* resampler, int* sample_count);
/// Get number of audio channels
int GetAudioChannelsCount();
@@ -241,7 +241,7 @@ namespace openshot
#endif
/// Set Pixel Aspect Ratio
- Fraction GetPixelRatio() { return pixel_ratio; };
+ openshot::Fraction GetPixelRatio() { return pixel_ratio; };
/// Get pixel data (as packets)
const unsigned char* GetPixels();
@@ -256,10 +256,10 @@ namespace openshot
int GetHeight();
/// Calculate the # of samples per video frame (for the current frame number)
- int GetSamplesPerFrame(Fraction fps, int sample_rate, int channels);
+ int GetSamplesPerFrame(openshot::Fraction fps, int sample_rate, int channels);
/// Calculate the # of samples per video frame (for a specific frame number and frame rate)
- static int GetSamplesPerFrame(int64_t frame_number, Fraction fps, int sample_rate, int channels);
+ static int GetSamplesPerFrame(int64_t frame_number, openshot::Fraction fps, int sample_rate, int channels);
/// Get an audio waveform image
std::shared_ptr<QImage> GetWaveform(int width, int height, int Red, int Green, int Blue, int Alpha);
@@ -271,7 +271,7 @@ namespace openshot
int GetWidth();
/// Resize audio container to hold more (or less) samples and channels
- void ResizeAudio(int channels, int length, int sample_rate, ChannelLayout channel_layout);
+ void ResizeAudio(int channels, int length, int sample_rate, openshot::ChannelLayout channel_layout);
/// Get the original sample rate of this frame's audio data
int SampleRate();
diff --git a/include/PlayerBase.h b/include/PlayerBase.h
index d5170562..bf23f576 100644
--- a/include/PlayerBase.h
+++ b/include/PlayerBase.h
@@ -61,7 +61,7 @@ namespace openshot
protected:
float speed;
float volume;
- ReaderBase *reader;
+ openshot::ReaderBase *reader;
PlaybackMode mode;
public:
@@ -97,7 +97,7 @@ namespace openshot
virtual ReaderBase* Reader() = 0;
/// Set the current reader, such as a FFmpegReader
- virtual void Reader(ReaderBase *new_reader) = 0;
+ virtual void Reader(openshot::ReaderBase *new_reader) = 0;
/// Get the Volume
virtual float Volume() = 0;
diff --git a/include/QtPlayer.h b/include/QtPlayer.h
index b3cd9d05..fc8f2a3d 100644
--- a/include/QtPlayer.h
+++ b/include/QtPlayer.h
@@ -52,7 +52,7 @@ namespace openshot
public:
/// Default constructor
explicit QtPlayer();
- explicit QtPlayer(RendererBase *rb);
+ explicit QtPlayer(openshot::RendererBase *rb);
/// Default destructor
virtual ~QtPlayer();
@@ -64,7 +64,7 @@ namespace openshot
std::string GetError();
/// Get Audio Devices from JUCE
- std::vector GetAudioDeviceNames();
+ std::vector GetAudioDeviceNames();
/// Play the video
void Play();
@@ -73,7 +73,7 @@ namespace openshot
void Loading();
/// Get the current mode
- PlaybackMode Mode();
+ openshot::PlaybackMode Mode();
/// Pause the video
void Pause();
@@ -105,10 +105,10 @@ namespace openshot
void Stop();
/// Set the current reader
- void Reader(ReaderBase *new_reader);
+ void Reader(openshot::ReaderBase *new_reader);
/// Get the current reader, such as a FFmpegReader
- ReaderBase* Reader();
+ openshot::ReaderBase* Reader();
/// Get the Volume
float Volume();
diff --git a/include/ReaderBase.h b/include/ReaderBase.h
index f33158a7..0ec93ede 100644
--- a/include/ReaderBase.h
+++ b/include/ReaderBase.h
@@ -68,23 +68,23 @@ namespace openshot
int height; ///< The height of the video (in pixels)
int width; ///< The width of the video (in pixesl)
int pixel_format; ///< The pixel format (i.e. YUV420P, RGB24, etc...)
- Fraction fps; ///< Frames per second, as a fraction (i.e. 24/1 = 24 fps)
+ openshot::Fraction fps; ///< Frames per second, as a fraction (i.e. 24/1 = 24 fps)
int video_bit_rate; ///< The bit rate of the video stream (in bytes)
- Fraction pixel_ratio; ///< The pixel ratio of the video stream as a fraction (i.e. some pixels are not square)
- Fraction display_ratio; ///< The ratio of width to height of the video stream (i.e. 640x480 has a ratio of 4/3)
+ openshot::Fraction pixel_ratio; ///< The pixel ratio of the video stream as a fraction (i.e. some pixels are not square)
+ openshot::Fraction display_ratio; ///< The ratio of width to height of the video stream (i.e. 640x480 has a ratio of 4/3)
std::string vcodec; ///< The name of the video codec used to encode / decode the video stream
int64_t video_length; ///< The number of frames in the video stream
int video_stream_index; ///< The index of the video stream
- Fraction video_timebase; ///< The video timebase determines how long each frame stays on the screen
+ openshot::Fraction video_timebase; ///< The video timebase determines how long each frame stays on the screen
bool interlaced_frame; // Are the contents of this frame interlaced
bool top_field_first; // Which interlaced field should be displayed first
std::string acodec; ///< The name of the audio codec used to encode / decode the video stream
int audio_bit_rate; ///< The bit rate of the audio stream (in bytes)
int sample_rate; ///< The number of audio samples per second (44100 is a common sample rate)
int channels; ///< The number of audio channels used in the audio stream
- ChannelLayout channel_layout; ///< The channel layout (mono, stereo, 5 point surround, etc...)
+ openshot::ChannelLayout channel_layout; ///< The channel layout (mono, stereo, 5 point surround, etc...)
int audio_stream_index; ///< The index of the audio stream
- Fraction audio_timebase; ///< The audio timebase determines how long each audio packet should be played
+ openshot::Fraction audio_timebase; ///< The audio timebase determines how long each audio packet should be played
std::map metadata; ///< An optional map/dictionary of metadata for this reader
};
@@ -101,7 +101,7 @@ namespace openshot
/// Section lock for multiple threads
CriticalSection getFrameCriticalSection;
CriticalSection processingCriticalSection;
- ClipBase* parent;
+ openshot::ClipBase* parent;
public:
@@ -109,13 +109,13 @@ namespace openshot
ReaderBase();
/// Information about the current media file
- ReaderInfo info;
+ openshot::ReaderInfo info;
/// Parent clip object of this reader (which can be unparented and NULL)
- ClipBase* GetClip();
+ openshot::ClipBase* GetClip();
/// Set parent clip object of this reader
- void SetClip(ClipBase* clip);
+ void SetClip(openshot::ClipBase* clip);
/// Close the reader (and any resources it was consuming)
virtual void Close() = 0;
@@ -124,7 +124,7 @@ namespace openshot
void DisplayInfo();
/// Get the cache object used by this reader (note: not all readers use cache)
- virtual CacheBase* GetCache() = 0;
+ virtual openshot::CacheBase* GetCache() = 0;
/// This method is required for all derived classes of ReaderBase, and returns the
/// openshot::Frame object, which contains the image and audio information for that
@@ -132,7 +132,7 @@ namespace openshot
///
/// @returns The requested frame of video
/// @param[in] number The frame number that is requested.
- virtual std::shared_ptr GetFrame(int64_t number) = 0;
+ virtual std::shared_ptr GetFrame(int64_t number) = 0;
/// Determine if reader is open or closed
virtual bool IsOpen() = 0;
diff --git a/include/RendererBase.h b/include/RendererBase.h
index 2638d336..1c526937 100644
--- a/include/RendererBase.h
+++ b/include/RendererBase.h
@@ -50,7 +50,7 @@ namespace openshot
public:
/// Paint(render) a video Frame.
- void paint(const std::shared_ptr & frame);
+ void paint(const std::shared_ptr & frame);
/// Allow manual override of the QWidget that is used to display
virtual void OverrideWidget(int64_t qwidget_address) = 0;
@@ -58,7 +58,7 @@ namespace openshot
protected:
RendererBase();
virtual ~RendererBase();
-
+
virtual void render(std::shared_ptr image) = 0;
};
diff --git a/include/TextReader.h b/include/TextReader.h
index 59b0aeed..2d54fdc2 100644
--- a/include/TextReader.h
+++ b/include/TextReader.h
@@ -98,7 +98,7 @@ namespace openshot
std::shared_ptr image;
MAGICK_DRAWABLE lines;
bool is_open;
- GravityType gravity;
+ openshot::GravityType gravity;
public:
@@ -126,14 +126,14 @@ namespace openshot
void Close();
/// Get the cache object used by this reader (always returns NULL for this object)
- CacheMemory* GetCache() { return NULL; };
+ openshot::CacheMemory* GetCache() { return NULL; };
/// Get an openshot::Frame object for a specific frame number of this reader. All numbers
/// return the same Frame, since they all share the same image data.
///
/// @returns The requested frame (containing the image)
/// @param requested_frame The frame number that is requested.
- std::shared_ptr GetFrame(int64_t requested_frame);
+ std::shared_ptr GetFrame(int64_t requested_frame);
/// Determine if reader is open or closed
bool IsOpen() { return is_open; };
diff --git a/include/WriterBase.h b/include/WriterBase.h
index 7387d613..503be6bf 100644
--- a/include/WriterBase.h
+++ b/include/WriterBase.h
@@ -57,23 +57,23 @@ namespace openshot
int height; ///< The height of the video (in pixels)
int width; ///< The width of the video (in pixels)
int pixel_format; ///< The pixel format (i.e. YUV420P, RGB24, etc...)
- Fraction fps; ///< Frames per second, as a fraction (i.e. 24/1 = 24 fps)
+ openshot::Fraction fps; ///< Frames per second, as a fraction (i.e. 24/1 = 24 fps)
int video_bit_rate; ///< The bit rate of the video stream (in bytes)
- Fraction pixel_ratio; ///< The pixel ratio of the video stream as a fraction (i.e. some pixels are not square)
- Fraction display_ratio; ///< The ratio of width to height of the video stream (i.e. 640x480 has a ratio of 4/3)
+ openshot::Fraction pixel_ratio; ///< The pixel ratio of the video stream as a fraction (i.e. some pixels are not square)
+ openshot::Fraction display_ratio; ///< The ratio of width to height of the video stream (i.e. 640x480 has a ratio of 4/3)
std::string vcodec; ///< The name of the video codec used to encode / decode the video stream
int64_t video_length; ///< The number of frames in the video stream
int video_stream_index; ///< The index of the video stream
- Fraction video_timebase; ///< The video timebase determines how long each frame stays on the screen
+ openshot::Fraction video_timebase; ///< The video timebase determines how long each frame stays on the screen
bool interlaced_frame; ///< Are the contents of this frame interlaced
bool top_field_first; ///< Which interlaced field should be displayed first
std::string acodec; ///< The name of the audio codec used to encode / decode the video stream
int audio_bit_rate; ///< The bit rate of the audio stream (in bytes)
int sample_rate; ///< The number of audio samples per second (44100 is a common sample rate)
int channels; ///< The number of audio channels used in the audio stream
- ChannelLayout channel_layout; ///< The channel layout (mono, stereo, 5 point surround, etc...)
+ openshot::ChannelLayout channel_layout; ///< The channel layout (mono, stereo, 5 point surround, etc...)
int audio_stream_index; ///< The index of the audio stream
- Fraction audio_timebase; ///< The audio timebase determines how long each audio packet should be played
+ openshot::Fraction audio_timebase; ///< The audio timebase determines how long each audio packet should be played
std::map metadata; ///< An optional map/dictionary of video & audio metadata
};
@@ -95,16 +95,16 @@ namespace openshot
/// @brief This method copy's the info struct of a reader, and sets the writer with the same info
/// @param reader The source reader to copy
- void CopyReaderInfo(ReaderBase* reader);
+ void CopyReaderInfo(openshot::ReaderBase* reader);
/// Determine if writer is open or closed
virtual bool IsOpen() = 0;
/// This method is required for all derived classes of WriterBase. Write a Frame to the video file.
- virtual void WriteFrame(std::shared_ptr frame) = 0;
+ virtual void WriteFrame(std::shared_ptr frame) = 0;
/// This method is required for all derived classes of WriterBase. Write a block of frames from a reader.
- virtual void WriteFrame(ReaderBase* reader, int64_t start, int64_t length) = 0;
+ virtual void WriteFrame(openshot::ReaderBase* reader, int64_t start, int64_t length) = 0;
/// Get and Set JSON methods
std::string Json(); ///< Generate JSON string of this object
diff --git a/src/ReaderBase.cpp b/src/ReaderBase.cpp
index ab651a9f..1966614c 100644
--- a/src/ReaderBase.cpp
+++ b/src/ReaderBase.cpp
@@ -252,11 +252,11 @@ void ReaderBase::SetJsonValue(Json::Value root) {
}
/// Parent clip object of this reader (which can be unparented and NULL)
-ClipBase* ReaderBase::GetClip() {
+openshot::ClipBase* ReaderBase::GetClip() {
return parent;
}
/// Set parent clip object of this reader
-void ReaderBase::SetClip(ClipBase* clip) {
+void ReaderBase::SetClip(openshot::ClipBase* clip) {
parent = clip;
}