diff --git a/.cproject b/.cproject deleted file mode 100644 index 1028fe17..00000000 --- a/.cproject +++ /dev/null @@ -1,405 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - cmake - -G "Unix Makefiles" ../ -D"CMAKE_BUILD_TYPE:STRING=Release" - true - false - true - - - cmake - -G "Unix Makefiles" ../ -D"MAGICKCORE_HDRI_ENABLE=0" -D"MAGICKCORE_QUANTUM_DEPTH=16" -D"OPENSHOT_IMAGEMAGICK_COMPATIBILITY=0" -D"ENABLE_BLACKMAGIC=1" -D"CMAKE_BUILD_TYPE:STRING=Debug" -D"DISABLE_TESTS=0" - - true - false - true - - - make - test - true - false - true - - - make - help - true - false - true - - - make - doc - true - false - true - - - cmake - -DCMAKE_CXX_COMPILER=/usr/local/opt/gcc48/bin/g++-4.8 -DCMAKE_C_COMPILER=/usr/local/opt/gcc48/bin/gcc-4.8 -DCMAKE_PREFIX_PATH=/usr/local/Cellar/qt5/5.1.1/ -DPYTHON_INCLUDE_DIR=/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/Versions/3.3/include/python3.3m/ -DPYTHON_LIBRARY=/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/Versions/3.3/lib/libpython3.3.dylib -DPython_FRAMEWORKS=/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/ ../ -D"CMAKE_BUILD_TYPE:STRING=Debug" - true - false - true - - - cmake - -DCMAKE_CXX_COMPILER=/usr/local/opt/gcc48/bin/g++-4.8 -DCMAKE_C_COMPILER=/usr/local/opt/gcc48/bin/gcc-4.8 -DCMAKE_PREFIX_PATH=/usr/local/Cellar/qt5/5.1.1/ -DPYTHON_INCLUDE_DIR=/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/Versions/3.3/include/python3.3m/ 
-DPYTHON_LIBRARY=/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/Versions/3.3/lib/libpython3.3.dylib -DPython_FRAMEWORKS=/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/ ../ -D"CMAKE_BUILD_TYPE:STRING=Release" - true - false - true - - - cmake - -G "MinGW Makefiles" ../ -D"CMAKE_BUILD_TYPE:STRING=Debug" - true - false - true - - - cmake - -G "MinGW Makefiles" ../ -D"CMAKE_BUILD_TYPE:STRING=Release" - true - false - true - - - - diff --git a/.gitignore b/.gitignore index e7f24925..a11656cf 100644 --- a/.gitignore +++ b/.gitignore @@ -3,4 +3,7 @@ build/* *.DS_Store .pydevproject .settings -.idea/* \ No newline at end of file +.idea/* +.project +.cproject +/.metadata/ diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 3059ee9d..f0f868c9 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -22,7 +22,10 @@ linux-builder: - make install - mv /usr/local/lib/python3.4/dist-packages/*openshot* install-x64/python - echo -e "CI_PROJECT_NAME:$CI_PROJECT_NAME\nCI_COMMIT_REF_NAME:$CI_COMMIT_REF_NAME\nCI_COMMIT_SHA:$CI_COMMIT_SHA\nCI_JOB_ID:$CI_JOB_ID" > "install-x64/share/$CI_PROJECT_NAME" + - git log $(git describe --tags --abbrev=0)..HEAD --oneline --pretty=format:"%C(auto,yellow)%h%C(auto,magenta)% %C(auto,blue)%>(12,trunc)%ad %C(auto,green)%<(25,trunc)%aN%C(auto,reset)%s%C(auto,red)% gD% D" --date=short > "install-x64/share/$CI_PROJECT_NAME.log" when: always + except: + - tags tags: - linux @@ -46,36 +49,13 @@ mac-builder: - make install - mv /Library/Frameworks/Python.framework/Versions/3.6/lib/python3.6/site-packages/*openshot* install-x64/python - echo -e "CI_PROJECT_NAME:$CI_PROJECT_NAME\nCI_COMMIT_REF_NAME:$CI_COMMIT_REF_NAME\nCI_COMMIT_SHA:$CI_COMMIT_SHA\nCI_JOB_ID:$CI_JOB_ID" > "install-x64/share/$CI_PROJECT_NAME" + - git log $(git describe --tags --abbrev=0)..HEAD --oneline --pretty=format:"%C(auto,yellow)%h%C(auto,magenta)% %C(auto,blue)%>(12,trunc)%ad %C(auto,green)%<(25,trunc)%aN%C(auto,reset)%s%C(auto,red)% gD% D" --date=short > 
"install-x64/share/$CI_PROJECT_NAME.log" when: always + except: + - tags tags: - mac -windows-builder-x86: - stage: build-libopenshot - artifacts: - expire_in: 6 months - paths: - - build\install-x86\* - script: - - try { Invoke-WebRequest -Uri "http://gitlab.openshot.org/OpenShot/libopenshot-audio/-/jobs/artifacts/$CI_COMMIT_REF_NAME/download?job=windows-builder-x86" -Headers @{"PRIVATE-TOKEN"="$ACCESS_TOKEN"} -OutFile "artifacts.zip" } catch { $_.Exception.Response.StatusCode.Value__ } - - if (-not (Test-Path "artifacts.zip")) { Invoke-WebRequest -Uri "http://gitlab.openshot.org/OpenShot/libopenshot-audio/-/jobs/artifacts/develop/download?job=windows-builder-x86" -Headers @{"PRIVATE-TOKEN"="$ACCESS_TOKEN"} -OutFile "artifacts.zip" } - - Expand-Archive -Path artifacts.zip -DestinationPath . - - $env:LIBOPENSHOT_AUDIO_DIR = "$CI_PROJECT_DIR\build\install-x86" - - $env:UNITTEST_DIR = "C:\msys32\usr" - - $env:ZMQDIR = "C:\msys32\usr" - - $env:Path = "C:\msys32\mingw32\bin;C:\msys32\mingw32\lib;C:\msys32\usr\lib\cmake\UnitTest++;C:\msys32\home\jonathan\depot_tools;C:\msys32\usr;C:\msys32\usr\lib;" + $env:Path; - - New-Item -ItemType Directory -Force -Path build - - New-Item -ItemType Directory -Force -Path build\install-x86\python - - cd build - - cmake -D"CMAKE_INSTALL_PREFIX:PATH=$CI_PROJECT_DIR\build\install-x86" -G "MinGW Makefiles" -D"CMAKE_BUILD_TYPE:STRING=Release" -D"CMAKE_CXX_FLAGS=-m32" -D"CMAKE_EXE_LINKER_FLAGS=-Wl,--large-address-aware" -D"CMAKE_C_FLAGS=-m32" ../ - - mingw32-make install - - Move-Item -Force -path "C:\msys32\mingw32\lib\python3.6\site-packages\*openshot*" -destination "install-x86\python\" - - cp src\libopenshot.dll install-x86\lib - - New-Item -path "install-x86/share/" -Name "$CI_PROJECT_NAME" -Value "CI_PROJECT_NAME:$CI_PROJECT_NAME`nCI_COMMIT_REF_NAME:$CI_COMMIT_REF_NAME`nCI_COMMIT_SHA:$CI_COMMIT_SHA`nCI_JOB_ID:$CI_JOB_ID" -ItemType file -force - when: always - tags: - - windows - windows-builder-x64: stage: build-libopenshot 
artifacts: @@ -98,7 +78,42 @@ windows-builder-x64: - Move-Item -Force -path "C:\msys64\mingw64\lib\python3.6\site-packages\*openshot*" -destination "install-x64\python\" - cp src\libopenshot.dll install-x64\lib - New-Item -path "install-x64/share/" -Name "$CI_PROJECT_NAME" -Value "CI_PROJECT_NAME:$CI_PROJECT_NAME`nCI_COMMIT_REF_NAME:$CI_COMMIT_REF_NAME`nCI_COMMIT_SHA:$CI_COMMIT_SHA`nCI_JOB_ID:$CI_JOB_ID" -ItemType file -force + - $PREV_GIT_LABEL=(git describe --tags --abbrev=0) + - git log "$PREV_GIT_LABEL..HEAD" --oneline --pretty=format:"%C(auto,yellow)%h%C(auto,magenta)% %C(auto,blue)%>(12,trunc)%ad %C(auto,green)%<(25,trunc)%aN%C(auto,reset)%s%C(auto,red)% gD% D" --date=short > "install-x64/share/$CI_PROJECT_NAME.log" when: always + except: + - tags + tags: + - windows + +windows-builder-x86: + stage: build-libopenshot + artifacts: + expire_in: 6 months + paths: + - build\install-x86\* + script: + - try { Invoke-WebRequest -Uri "http://gitlab.openshot.org/OpenShot/libopenshot-audio/-/jobs/artifacts/$CI_COMMIT_REF_NAME/download?job=windows-builder-x86" -Headers @{"PRIVATE-TOKEN"="$ACCESS_TOKEN"} -OutFile "artifacts.zip" } catch { $_.Exception.Response.StatusCode.Value__ } + - if (-not (Test-Path "artifacts.zip")) { Invoke-WebRequest -Uri "http://gitlab.openshot.org/OpenShot/libopenshot-audio/-/jobs/artifacts/develop/download?job=windows-builder-x86" -Headers @{"PRIVATE-TOKEN"="$ACCESS_TOKEN"} -OutFile "artifacts.zip" } + - Expand-Archive -Path artifacts.zip -DestinationPath . 
+ - $env:LIBOPENSHOT_AUDIO_DIR = "$CI_PROJECT_DIR\build\install-x86" + - $env:UNITTEST_DIR = "C:\msys32\usr" + - $env:RESVGDIR = "C:\msys32\usr\local" + - $env:ZMQDIR = "C:\msys32\usr" + - $env:Path = "C:\msys32\mingw32\bin;C:\msys32\mingw32\lib;C:\msys32\usr\lib\cmake\UnitTest++;C:\msys32\home\jonathan\depot_tools;C:\msys32\usr;C:\msys32\usr\lib;" + $env:Path; + - New-Item -ItemType Directory -Force -Path build + - New-Item -ItemType Directory -Force -Path build\install-x86\python + - cd build + - cmake -D"CMAKE_INSTALL_PREFIX:PATH=$CI_PROJECT_DIR\build\install-x86" -G "MinGW Makefiles" -D"CMAKE_BUILD_TYPE:STRING=Release" -D"CMAKE_CXX_FLAGS=-m32" -D"CMAKE_EXE_LINKER_FLAGS=-Wl,--large-address-aware" -D"CMAKE_C_FLAGS=-m32" ../ + - mingw32-make install + - Move-Item -Force -path "C:\msys32\mingw32\lib\python3.6\site-packages\*openshot*" -destination "install-x86\python\" + - cp src\libopenshot.dll install-x86\lib + - New-Item -path "install-x86/share/" -Name "$CI_PROJECT_NAME" -Value "CI_PROJECT_NAME:$CI_PROJECT_NAME`nCI_COMMIT_REF_NAME:$CI_COMMIT_REF_NAME`nCI_COMMIT_SHA:$CI_COMMIT_SHA`nCI_JOB_ID:$CI_JOB_ID" -ItemType file -force + - $PREV_GIT_LABEL=(git describe --tags --abbrev=0) + - git log "$PREV_GIT_LABEL..HEAD" --oneline --pretty=format:"%C(auto,yellow)%h%C(auto,magenta)% %C(auto,blue)%>(12,trunc)%ad %C(auto,green)%<(25,trunc)%aN%C(auto,reset)%s%C(auto,red)% gD% D" --date=short > "install-x86/share/$CI_PROJECT_NAME.log" + when: always + except: + - tags tags: - windows @@ -108,5 +123,7 @@ trigger-pipeline: - "curl -X POST -F token=$OPENSHOT_QT_PIPELINE_TOKEN -F ref=$CI_COMMIT_REF_NAME http://gitlab.openshot.org/api/v4/projects/3/trigger/pipeline" when: always dependencies: [] + except: + - tags tags: - gitlab diff --git a/.project b/.project deleted file mode 100644 index 5e324854..00000000 --- a/.project +++ /dev/null @@ -1,94 +0,0 @@ - - - libopenshot - - - - - - com.aptana.ide.core.unifiedBuilder - - - - - org.python.pydev.PyDevBuilder - - - - - 
org.eclipse.cdt.managedbuilder.core.genmakebuilder - clean,full,incremental, - - - ?name? - - - - org.eclipse.cdt.make.core.append_environment - true - - - org.eclipse.cdt.make.core.autoBuildTarget - all - - - org.eclipse.cdt.make.core.buildArguments - - - - org.eclipse.cdt.make.core.buildCommand - make - - - org.eclipse.cdt.make.core.buildLocation - ${workspace_loc:/libopenshot/build} - - - org.eclipse.cdt.make.core.cleanBuildTarget - clean - - - org.eclipse.cdt.make.core.contents - org.eclipse.cdt.make.core.activeConfigSettings - - - org.eclipse.cdt.make.core.enableAutoBuild - false - - - org.eclipse.cdt.make.core.enableCleanBuild - true - - - org.eclipse.cdt.make.core.enableFullBuild - true - - - org.eclipse.cdt.make.core.fullBuildTarget - all - - - org.eclipse.cdt.make.core.stopOnError - true - - - org.eclipse.cdt.make.core.useDefaultBuildCmd - true - - - - - org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder - - - - - - org.eclipse.cdt.core.cnature - org.eclipse.cdt.core.ccnature - org.eclipse.cdt.managedbuilder.core.managedBuildNature - org.eclipse.cdt.managedbuilder.core.ScannerConfigNature - org.python.pydev.pythonNature - com.aptana.ruby.core.rubynature - - diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 00000000..879a8190 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,50 @@ +dist: xenial +sudo: required + +matrix: + include: + - language: cpp + name: "FFmpeg 2" + before_script: + - sudo add-apt-repository ppa:openshot.developers/libopenshot-daily -y + - sudo add-apt-repository ppa:beineri/opt-qt-5.10.0-xenial -y + - sudo apt-get update -qq + - sudo apt-get install gcc-4.8 cmake libavcodec-dev libavformat-dev libswscale-dev libavresample-dev libavutil-dev libopenshot-audio-dev libopenshot-dev libfdk-aac-dev libfdk-aac-dev libjsoncpp-dev libmagick++-dev libopenshot-audio-dev libunittest++-dev libzmq3-dev pkg-config python3-dev qtbase5-dev qtmultimedia5-dev swig -y + - sudo apt autoremove -y + script: + - mkdir -p build; cd build; + - 
cmake -D"CMAKE_BUILD_TYPE:STRING=Debug" ../ + - make VERBOSE=1 + - make test + + - language: cpp + name: "FFmpeg 3" + before_script: + - sudo add-apt-repository ppa:openshot.developers/libopenshot-daily -y + - sudo add-apt-repository ppa:beineri/opt-qt-5.10.0-xenial -y + - sudo add-apt-repository ppa:jonathonf/ffmpeg-3 -y + - sudo apt-get update -qq + - sudo apt-get install gcc-4.8 cmake libavcodec-dev libavformat-dev libswscale-dev libavresample-dev libavutil-dev libopenshot-audio-dev libopenshot-dev libfdk-aac-dev libfdk-aac-dev libjsoncpp-dev libmagick++-dev libopenshot-audio-dev libunittest++-dev libzmq3-dev pkg-config python3-dev qtbase5-dev qtmultimedia5-dev swig -y + - sudo apt autoremove -y + script: + - mkdir -p build; cd build; + - cmake -D"CMAKE_BUILD_TYPE:STRING=Debug" ../ + - make VERBOSE=1 + - make test + + - language: cpp + name: "FFmpeg 4" + before_script: + - sudo add-apt-repository ppa:openshot.developers/libopenshot-daily -y + - sudo add-apt-repository ppa:beineri/opt-qt-5.10.0-xenial -y + - sudo add-apt-repository ppa:jonathonf/ffmpeg -y + - sudo add-apt-repository ppa:jonathonf/ffmpeg-4 -y + - sudo add-apt-repository ppa:jonathonf/backports -y + - sudo apt-get update -qq + - sudo apt-get install gcc-4.8 cmake libavcodec58 libavformat58 libavcodec-dev libavformat-dev libswscale-dev libavresample-dev libavutil-dev libopenshot-audio-dev libopenshot-dev libfdk-aac-dev libfdk-aac-dev libjsoncpp-dev libmagick++-dev libopenshot-audio-dev libunittest++-dev libzmq3-dev pkg-config python3-dev qtbase5-dev qtmultimedia5-dev swig -y + - sudo apt autoremove -y + script: + - mkdir -p build; cd build; + - cmake -D"CMAKE_BUILD_TYPE:STRING=Debug" ../ + - make VERBOSE=1 + - make test diff --git a/INSTALL.md b/INSTALL.md new file mode 100644 index 00000000..474fb9c2 --- /dev/null +++ b/INSTALL.md @@ -0,0 +1,153 @@ +## Detailed Install Instructions + +Operating system specific install instructions are located in: + +* doc/INSTALL-LINUX.md +* doc/INSTALL-MAC.md +* 
doc/INSTALL-WINDOWS.md + +## Getting Started + +The best way to get started with libopenshot, is to learn about our build system, obtain all the source code, +install a development IDE and tools, and better understand our dependencies. So, please read through the +following sections, and follow the instructions. And keep in mind, that your computer is likely different +than the one used when writing these instructions. Your file paths and versions of applications might be +slightly different, so keep an eye out for subtle file path differences in the commands you type. + +## Build Tools + +CMake is the backbone of our build system. It is a cross-platform build system, which checks for dependencies, +locates header files and libraries, generates makefiles, and supports the cross-platform compiling of +libopenshot and libopenshot-audio. CMake uses an out-of-source build concept, where all temporary build +files, such as makefiles, object files, and even the final binaries, are created outside of the source +code folder, inside a /build/ sub-folder. This prevents the build process from cluttering up the source +code. These instructions have only been tested with the GNU compiler (including MSYS2/MinGW for Windows). + +## Dependencies + +The following libraries are required to build libopenshot. Instructions on how to install these +dependencies vary for each operating system. Libraries and Executables have been labeled in the +list below to help distinguish between them. + +* ### FFmpeg (libavformat, libavcodec, libavutil, libavdevice, libavresample, libswscale) + * http://www.ffmpeg.org/ `(Library)` + * This library is used to decode and encode video, audio, and image files. It is also used to obtain information about media files, such as frame rate, sample rate, aspect ratio, and other common attributes. 
+ +* ### ImageMagick++ (libMagick++, libMagickWand, libMagickCore) + * http://www.imagemagick.org/script/magick++.php `(Library)` + * This library is **optional**, and used to decode and encode images. + +* ### OpenShot Audio Library (libopenshot-audio) + * https://github.com/OpenShot/libopenshot-audio/ `(Library)` + * This library is used to mix, resample, host plug-ins, and play audio. It is based on the JUCE project, which is an outstanding audio library used by many different applications + +* ### Qt 5 (libqt5) + * http://www.qt.io/qt5/ `(Library)` + * Qt5 is used to display video, store image data, composite images, apply image effects, and many other utility functions, such as file system manipulation, high resolution timers, etc... + +* ### CMake (cmake) + * http://www.cmake.org/ `(Executable)` + * This executable is used to automate the generation of Makefiles, check for dependencies, and is the backbone of libopenshot’s cross-platform build process. + +* ### SWIG (swig) + * http://www.swig.org/ `(Executable)` + * This executable is used to generate the Python and Ruby bindings for libopenshot. It is a simple and powerful wrapper for C++ libraries, and supports many languages. + +* ### Python 3 (libpython) + * http://www.python.org/ `(Executable and Library)` + * This library is used by swig to create the Python (version 3+) bindings for libopenshot. This is also the official language used by OpenShot Video Editor (a graphical interface to libopenshot). + +* ### Doxygen (doxygen) + * http://www.stack.nl/~dimitri/doxygen/ `(Executable)` + * This executable is used to auto-generate the documentation used by libopenshot. + +* ### UnitTest++ (libunittest++) + * https://github.com/unittest-cpp/ `(Library)` + * This library is used to execute unit tests for libopenshot. It contains many macros used to keep our unit testing code very clean and simple. 
+ +* ### ZeroMQ (libzmq) + * http://zeromq.org/ `(Library)` + * This library is used to communicate between libopenshot and other applications (publisher / subscriber). Primarily used to send debug data from libopenshot. + +* ### OpenMP (-fopenmp) + * http://openmp.org/wp/ `(Compiler Flag)` + * If your compiler supports this flag (GCC, Clang, and most other compilers), it provides libopenshot with easy methods of using parallel programming techniques to improve performance and take advantage of multi-core processors. + +## CMake Flags (Optional) +There are many different build flags that can be passed to cmake to adjust how libopenshot is compiled. Some of these flags might be required when compiling on certain OSes, just depending on how your build environment is setup. To add a build flag, follow this general syntax: $ cmake -DMAGICKCORE_HDRI_ENABLE=1 -DENABLE_TESTS=1 ../ + +* MAGICKCORE_HDRI_ENABLE (default 0) +* MAGICKCORE_QUANTUM_DEPTH (default 0) +* OPENSHOT_IMAGEMAGICK_COMPATIBILITY (default 0) +* DISABLE_TESTS (default 0) +* CMAKE_PREFIX_PATH (`/location/to/missing/library/`) +* PYTHON_INCLUDE_DIR (`/location/to/python/include/`) +* PYTHON_LIBRARY (`/location/to/python/lib.a`) +* PYTHON_FRAMEWORKS (`/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/`) +* CMAKE_CXX_COMPILER (`/location/to/mingw/g++`) +* CMAKE_C_COMPILER (`/location/to/mingw/gcc`) + +## Obtaining Source Code + +The first step in installing libopenshot is to obtain the most recent source code. The source code is available on [GitHub](https://github.com/OpenShot/libopenshot). Use the following command to obtain the latest libopenshot source code. + +``` +git clone https://github.com/OpenShot/libopenshot.git +git clone https://github.com/OpenShot/libopenshot-audio.git +``` + +## Folder Structure (libopenshot) + +The source code is divided up into the following folders. 
+ +* ### build/ + * This folder needs to be manually created, and is used by cmake to store the temporary build files, such as makefiles, as well as the final binaries (library and test executables). + +* ### cmake/ + * This folder contains custom modules not included by default in cmake, used to find dependency libraries and headers and determine if these libraries are installed. + +* ### doc/ + * This folder contains documentation and related files, such as logos and images required by the doxygen auto-generated documentation. + +* ### include/ + * This folder contains all headers (*.h) used by libopenshot. + +* ### src/ + * This folder contains all source code (*.cpp) used by libopenshot. + +* ### tests/ + * This folder contains all unit test code. Each class has it’s own test file (*.cpp), and uses UnitTest++ macros to keep the test code simple and manageable. + +* ### thirdparty/ + * This folder contains code not written by the OpenShot team. For example, jsoncpp, an open-source JSON parser. + +## Linux Build Instructions (libopenshot-audio) +To compile libopenshot-audio, we need to go through a few additional steps to manually build and install it. 
Launch a terminal and enter: + +``` +cd [libopenshot-audio repo folder] +mkdir build +cd build +cmake ../ +make +make install +./src/openshot-audio-test-sound (This should play a test sound) +``` + +## Linux Build Instructions (libopenshot) +Run the following commands to compile libopenshot: + +``` +cd [libopenshot repo directory] +mkdir -p build +cd build +cmake ../ +make +make install +``` + +For more detailed instructions, please see: + +* doc/INSTALL-LINUX.md +* doc/INSTALL-MAC.md +* doc/INSTALL-WINDOWS.md diff --git a/README b/README deleted file mode 100644 index 8c05f753..00000000 --- a/README +++ /dev/null @@ -1,66 +0,0 @@ -#################################################################### - OpenShot Library -#################################################################### - -OpenShot Library (libopenshot) is an open-source project dedicated to -delivering high quality video editing, animation, and playback solutions -to the world. For more information visit . - -#################################################################### - License -#################################################################### - -Copyright (c) 2008-2014 OpenShot Studios, LLC -. - -OpenShot Library (libopenshot) is free software: you can redistribute it -and/or modify it under the terms of the GNU Lesser General Public License -as published by the Free Software Foundation, either version 3 of the -License, or (at your option) any later version. - -OpenShot Library (libopenshot) is distributed in the hope that it will be -useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU Lesser General Public License for more details. - -You should have received a copy of the GNU Lesser General Public License -along with OpenShot Library. If not, see . - -#################################################################### - - To release a closed-source product which uses libopenshot (i.e. 
video - editing and playback), commercial licenses are available: contact - sales@openshot.org for more information. - - -#################################################################### - Install -#################################################################### - -Please see /doc/InstallationGuide.pdf for a very detailed -Linux, Mac, and Windows compiling instruction guide. An online version -is also available: -https://docs.google.com/document/d/1V6nq-IuS9zxqO1-OSt8iTS_cw_HMCpsUNofHLYtUNjM/pub - - -#################################################################### - Documentation -#################################################################### - -Documentation is auto-generated by Doxygen, and can be created with -$ make doc (Also available online: ) - - -#################################################################### - Authors -#################################################################### - -Please see AUTHORS file for a full list of authors. - - -#################################################################### - www.openshot.org | www.openshotstudios.com -#################################################################### - - Copyright (c) 2008-2014 OpenShot Studios, LLC - . diff --git a/README.md b/README.md new file mode 100644 index 00000000..8deb86a1 --- /dev/null +++ b/README.md @@ -0,0 +1,85 @@ +OpenShot Video Library (libopenshot) is a free, open-source C++ library dedicated to +delivering high quality video editing, animation, and playback solutions to the +world. 
+ +## Build Status + +[![Build Status](https://img.shields.io/travis/OpenShot/libopenshot/develop.svg?label=libopenshot)](https://travis-ci.org/OpenShot/libopenshot) [![Build Status](https://img.shields.io/travis/OpenShot/libopenshot-audio/develop.svg?label=libopenshot-audio)](https://travis-ci.org/OpenShot/libopenshot-audio) + +## Features + +* Cross-Platform (Linux, Mac, and Windows) +* Multi-Layer Compositing +* Video and Audio Effects (Chroma Key, Color Adjustment, Grayscale, etc…) +* Animation Curves (Bézier, Linear, Constant) +* Time Mapping (Curve-based Slow Down, Speed Up, Reverse) +* Audio Mixing & Resampling (Curve-based) +* Audio Plug-ins (VST & AU) +* Audio Drivers (ASIO, WASAPI, DirectSound, CoreAudio, iPhone Audio, ALSA, JACK, and Android) +* Telecine and Inverse Telecine (Film to TV, TV to Film) +* Frame Rate Conversions +* Multi-Processor Support (Performance) +* Python and Ruby Bindings (All Features Supported) +* Qt Video Player Included (Ability to display video on any QWidget) +* Unit Tests (Stability) +* All FFmpeg Formats and Codecs Supported (Images, Videos, and Audio files) +* Full Documentation with Examples (Doxygen Generated) + +## Install + +Detailed instructions for building libopenshot and libopenshot-audio for each OS. These instructions +are also available in the /docs/ source folder. + + * [Linux](https://github.com/OpenShot/libopenshot/wiki/Linux-Build-Instructions) + * [Mac](https://github.com/OpenShot/libopenshot/wiki/Mac-Build-Instructions) + * [Windows](https://github.com/OpenShot/libopenshot/wiki/Windows-Build-Instructions) + +## Documentation + +Beautiful HTML documentation can be generated using Doxygen. +``` +make doc +``` +(Also available online: http://openshot.org/files/libopenshot/) + +## Developers + +Are you interested in becoming more involved in the development of +OpenShot? Build exciting new features, fix bugs, make friends, and become a hero! 
+Please read the [step-by-step](https://github.com/OpenShot/openshot-qt/wiki/Become-a-Developer) +instructions for getting source code, configuring dependencies, and building OpenShot. + +## Report a bug + +You can report a new libopenshot issue directly on GitHub: + +https://github.com/OpenShot/libopenshot/issues + +## Websites + +- https://www.openshot.org/ (Official website and blog) +- https://github.com/OpenShot/libopenshot/ (source code and issue tracker) +- https://github.com/OpenShot/libopenshot-audio/ (source code for audio library) +- https://github.com/OpenShot/openshot-qt/ (source code for Qt client) +- https://launchpad.net/openshot/ + +### License + +Copyright (c) 2008-2019 OpenShot Studios, LLC. + +OpenShot Library (libopenshot) is free software: you can redistribute it +and/or modify it under the terms of the GNU Lesser General Public License +as published by the Free Software Foundation, either version 3 of the +License, or (at your option) any later version. + +OpenShot Library (libopenshot) is distributed in the hope that it will be +useful, but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU Lesser General Public License for more details. + +You should have received a copy of the GNU Lesser General Public License +along with OpenShot Library. If not, see http://www.gnu.org/licenses/. + +To release a closed-source product which uses libopenshot (i.e. video +editing and playback), commercial licenses are also available: contact +sales@openshot.org for more information. 
diff --git a/cmake/Modules/FindFFmpeg.cmake b/cmake/Modules/FindFFmpeg.cmake index 4af6cc93..34f0a7bd 100644 --- a/cmake/Modules/FindFFmpeg.cmake +++ b/cmake/Modules/FindFFmpeg.cmake @@ -1,151 +1,161 @@ -# - Try to find FFMPEG +# vim: ts=2 sw=2 +# - Try to find the required ffmpeg components(default: AVFORMAT, AVUTIL, AVCODEC) +# # Once done this will define -# -# FFMPEG_FOUND - system has FFMPEG -# FFMPEG_INCLUDE_DIR - the include directory -# FFMPEG_LIBRARY_DIR - the directory containing the libraries -# FFMPEG_LIBRARIES - Link these to use FFMPEG -# - -# FindAvformat -FIND_PATH( AVFORMAT_INCLUDE_DIR libavformat/avformat.h - PATHS /usr/include/ - /usr/include/ffmpeg/ - $ENV{FFMPEGDIR}/include/ - $ENV{FFMPEGDIR}/include/ffmpeg/ ) - -FIND_LIBRARY( AVFORMAT_LIBRARY avformat avformat-55 avformat-57 - PATHS /usr/lib/ - /usr/lib/ffmpeg/ - $ENV{FFMPEGDIR}/lib/ - $ENV{FFMPEGDIR}/lib/ffmpeg/ - $ENV{FFMPEGDIR}/bin/ ) - -#FindAvcodec -FIND_PATH( AVCODEC_INCLUDE_DIR libavcodec/avcodec.h - PATHS /usr/include/ - /usr/include/ffmpeg/ - $ENV{FFMPEGDIR}/include/ - $ENV{FFMPEGDIR}/include/ffmpeg/ ) - -FIND_LIBRARY( AVCODEC_LIBRARY avcodec avcodec-55 avcodec-57 - PATHS /usr/lib/ - /usr/lib/ffmpeg/ - $ENV{FFMPEGDIR}/lib/ - $ENV{FFMPEGDIR}/lib/ffmpeg/ - $ENV{FFMPEGDIR}/bin/ ) - -#FindAvutil -FIND_PATH( AVUTIL_INCLUDE_DIR libavutil/avutil.h - PATHS /usr/include/ - /usr/include/ffmpeg/ - $ENV{FFMPEGDIR}/include/ - $ENV{FFMPEGDIR}/include/ffmpeg/ ) - -FIND_LIBRARY( AVUTIL_LIBRARY avutil avutil-52 avutil-55 - PATHS /usr/lib/ - /usr/lib/ffmpeg/ - $ENV{FFMPEGDIR}/lib/ - $ENV{FFMPEGDIR}/lib/ffmpeg/ - $ENV{FFMPEGDIR}/bin/ ) - -#FindAvdevice -FIND_PATH( AVDEVICE_INCLUDE_DIR libavdevice/avdevice.h - PATHS /usr/include/ - /usr/include/ffmpeg/ - $ENV{FFMPEGDIR}/include/ - $ENV{FFMPEGDIR}/include/ffmpeg/ ) - -FIND_LIBRARY( AVDEVICE_LIBRARY avdevice avdevice-55 avdevice-56 - PATHS /usr/lib/ - /usr/lib/ffmpeg/ - $ENV{FFMPEGDIR}/lib/ - $ENV{FFMPEGDIR}/lib/ffmpeg/ - $ENV{FFMPEGDIR}/bin/ ) - 
-#FindSwscale -FIND_PATH( SWSCALE_INCLUDE_DIR libswscale/swscale.h - PATHS /usr/include/ - /usr/include/ffmpeg/ - $ENV{FFMPEGDIR}/include/ - $ENV{FFMPEGDIR}/include/ffmpeg/ ) - -FIND_LIBRARY( SWSCALE_LIBRARY swscale swscale-2 swscale-4 - PATHS /usr/lib/ - /usr/lib/ffmpeg/ - $ENV{FFMPEGDIR}/lib/ - $ENV{FFMPEGDIR}/lib/ffmpeg/ - $ENV{FFMPEGDIR}/bin/ ) - -#FindAvresample -FIND_PATH( AVRESAMPLE_INCLUDE_DIR libavresample/avresample.h - PATHS /usr/include/ - /usr/include/ffmpeg/ - $ENV{FFMPEGDIR}/include/ - $ENV{FFMPEGDIR}/include/ffmpeg/ ) - -FIND_LIBRARY( AVRESAMPLE_LIBRARY avresample avresample-2 avresample-3 - PATHS /usr/lib/ - /usr/lib/ffmpeg/ - $ENV{FFMPEGDIR}/lib/ - $ENV{FFMPEGDIR}/lib/ffmpeg/ - $ENV{FFMPEGDIR}/bin/ ) - -SET( FFMPEG_FOUND FALSE ) - -IF ( AVFORMAT_INCLUDE_DIR AND AVFORMAT_LIBRARY ) - SET ( AVFORMAT_FOUND TRUE ) -ENDIF ( AVFORMAT_INCLUDE_DIR AND AVFORMAT_LIBRARY ) - -IF ( AVCODEC_INCLUDE_DIR AND AVCODEC_LIBRARY ) - SET ( AVCODEC_FOUND TRUE) -ENDIF ( AVCODEC_INCLUDE_DIR AND AVCODEC_LIBRARY ) - -IF ( AVUTIL_INCLUDE_DIR AND AVUTIL_LIBRARY ) - SET ( AVUTIL_FOUND TRUE ) -ENDIF ( AVUTIL_INCLUDE_DIR AND AVUTIL_LIBRARY ) - -IF ( AVDEVICE_INCLUDE_DIR AND AVDEVICE_LIBRARY ) - SET ( AVDEVICE_FOUND TRUE ) -ENDIF ( AVDEVICE_INCLUDE_DIR AND AVDEVICE_LIBRARY ) - -IF ( SWSCALE_INCLUDE_DIR AND SWSCALE_LIBRARY ) - SET ( SWSCALE_FOUND TRUE ) -ENDIF ( SWSCALE_INCLUDE_DIR AND SWSCALE_LIBRARY ) - -IF ( AVRESAMPLE_INCLUDE_DIR AND AVRESAMPLE_LIBRARY ) - SET ( AVRESAMPLE_FOUND TRUE ) -ENDIF ( AVRESAMPLE_INCLUDE_DIR AND AVRESAMPLE_LIBRARY ) - -IF ( AVFORMAT_INCLUDE_DIR OR AVCODEC_INCLUDE_DIR OR AVUTIL_INCLUDE_DIR OR AVDEVICE_FOUND OR SWSCALE_FOUND OR AVRESAMPLE_FOUND ) - - SET ( FFMPEG_FOUND TRUE ) - - SET ( FFMPEG_INCLUDE_DIR - ${AVFORMAT_INCLUDE_DIR} - ${AVCODEC_INCLUDE_DIR} - ${AVUTIL_INCLUDE_DIR} - ${AVDEVICE_INCLUDE_DIR} - ${SWSCALE_INCLUDE_DIR} - ${AVRESAMPLE_INCLUDE_DIR} ) - - SET ( FFMPEG_LIBRARIES - ${AVFORMAT_LIBRARY} - ${AVCODEC_LIBRARY} - ${AVUTIL_LIBRARY} - 
${AVDEVICE_LIBRARY} - ${SWSCALE_LIBRARY} - ${AVRESAMPLE_LIBRARY} ) - -ENDIF ( AVFORMAT_INCLUDE_DIR OR AVCODEC_INCLUDE_DIR OR AVUTIL_INCLUDE_DIR OR AVDEVICE_FOUND OR SWSCALE_FOUND OR AVRESAMPLE_FOUND ) - -MARK_AS_ADVANCED( - FFMPEG_LIBRARY_DIR - FFMPEG_INCLUDE_DIR -) +# FFMPEG_FOUND - System has the all required components. +# FFMPEG_INCLUDE_DIRS - Include directory necessary for using the required components headers. +# FFMPEG_LIBRARIES - Link these to use the required ffmpeg components. +# FFMPEG_DEFINITIONS - Compiler switches required for using the required ffmpeg components. +# +# For each of the components it will additionally set. +# - AVCODEC +# - AVDEVICE +# - AVFORMAT +# - AVFILTER +# - AVUTIL +# - POSTPROC +# - SWSCALE +# - SWRESAMPLE +# - AVRESAMPLE +# the following variables will be defined +# _FOUND - System has +# _INCLUDE_DIRS - Include directory necessary for using the headers +# _LIBRARIES - Link these to use +# _DEFINITIONS - Compiler switches required for using +# _VERSION - The components version +# +# Copyright (c) 2006, Matthias Kretz, +# Copyright (c) 2008, Alexander Neundorf, +# Copyright (c) 2011, Michael Jansen, +# +# Redistribution and use is allowed according to the terms of the BSD license. +# For details see the accompanying COPYING-CMAKE-SCRIPTS file. include(FindPackageHandleStandardArgs) -# handle the QUIETLY and REQUIRED arguments and set FFMPEG_FOUND to TRUE -# if all listed variables are TRUE -find_package_handle_standard_args(FFMPEG DEFAULT_MSG - FFMPEG_LIBRARIES FFMPEG_INCLUDE_DIR) + +# The default components were taken from a survey over other FindFFMPEG.cmake files +if (NOT FFmpeg_FIND_COMPONENTS) + set(FFmpeg_FIND_COMPONENTS AVCODEC AVFORMAT AVUTIL) +endif () + +# +### Macro: set_component_found +# +# Marks the given component as found if both *_LIBRARIES AND *_INCLUDE_DIRS is present. 
+# +macro(set_component_found _component ) + if (${_component}_LIBRARIES AND ${_component}_INCLUDE_DIRS) + # message(STATUS " - ${_component} found.") + set(${_component}_FOUND TRUE) + else () + # message(STATUS " - ${_component} not found.") + endif () +endmacro() + +# +### Macro: find_component +# +# Checks for the given component by invoking pkgconfig and then looking up the libraries and +# include directories. +# +macro(find_component _component _pkgconfig _library _header) + + if (NOT WIN32) + # use pkg-config to get the directories and then use these values + # in the FIND_PATH() and FIND_LIBRARY() calls + find_package(PkgConfig) + if (PKG_CONFIG_FOUND) + pkg_check_modules(PC_${_component} ${_pkgconfig}) + endif () + endif (NOT WIN32) + + find_path(${_component}_INCLUDE_DIRS ${_header} + HINTS + /opt/ + /opt/include/ + ${PC_LIB${_component}_INCLUDEDIR} + ${PC_LIB${_component}_INCLUDE_DIRS} + $ENV{FFMPEGDIR}/include/ + $ENV{FFMPEGDIR}/include/ffmpeg/ + PATH_SUFFIXES + ffmpeg + ) + + find_library(${_component}_LIBRARIES NAMES ${_library} + HINTS + ${PC_LIB${_component}_LIBDIR} + ${PC_LIB${_component}_LIBRARY_DIRS} + $ENV{FFMPEGDIR}/lib/ + $ENV{FFMPEGDIR}/lib/ffmpeg/ + $ENV{FFMPEGDIR}/bin/ + ) + + set(${_component}_DEFINITIONS ${PC_${_component}_CFLAGS_OTHER} CACHE STRING "The ${_component} CFLAGS.") + set(${_component}_VERSION ${PC_${_component}_VERSION} CACHE STRING "The ${_component} version number.") + + set_component_found(${_component}) + + mark_as_advanced( + ${_component}_INCLUDE_DIRS + ${_component}_LIBRARIES + ${_component}_DEFINITIONS + ${_component}_VERSION) + +endmacro() + + +# Check for cached results. If there are skip the costly part. +if (NOT FFMPEG_LIBRARIES) + + # Check for all possible component. 
+ find_component(AVCODEC libavcodec avcodec libavcodec/avcodec.h) + find_component(AVFORMAT libavformat avformat libavformat/avformat.h) + find_component(AVDEVICE libavdevice avdevice libavdevice/avdevice.h) + find_component(AVUTIL libavutil avutil libavutil/avutil.h) + find_component(AVFILTER libavfilter avfilter libavfilter/avfilter.h) + find_component(SWSCALE libswscale swscale libswscale/swscale.h) + find_component(POSTPROC libpostproc postproc libpostproc/postprocess.h) + find_component(SWRESAMPLE libswresample swresample libswresample/swresample.h) + find_component(AVRESAMPLE libavresample avresample libavresample/avresample.h) + + # Check if the required components were found and add their stuff to the FFMPEG_* vars. + foreach (_component ${FFmpeg_FIND_COMPONENTS}) + if (${_component}_FOUND) + # message(STATUS "Required component ${_component} present.") + set(FFMPEG_LIBRARIES ${FFMPEG_LIBRARIES} ${${_component}_LIBRARIES}) + set(FFMPEG_DEFINITIONS ${FFMPEG_DEFINITIONS} ${${_component}_DEFINITIONS}) + list(APPEND FFMPEG_INCLUDE_DIRS ${${_component}_INCLUDE_DIRS}) + else () + # message(STATUS "Required component ${_component} missing.") + endif () + endforeach () + + # Build the include path with duplicates removed. + if (FFMPEG_INCLUDE_DIRS) + list(REMOVE_DUPLICATES FFMPEG_INCLUDE_DIRS) + endif () + + # cache the vars. + set(FFMPEG_INCLUDE_DIRS ${FFMPEG_INCLUDE_DIRS} CACHE STRING "The FFmpeg include directories." FORCE) + set(FFMPEG_LIBRARIES ${FFMPEG_LIBRARIES} CACHE STRING "The FFmpeg libraries." FORCE) + set(FFMPEG_DEFINITIONS ${FFMPEG_DEFINITIONS} CACHE STRING "The FFmpeg cflags." FORCE) + + mark_as_advanced(FFMPEG_INCLUDE_DIRS + FFMPEG_LIBRARIES + FFMPEG_DEFINITIONS) + +endif () + +# Now set the noncached _FOUND vars for the components. 
+foreach (_component AVCODEC AVDEVICE AVFORMAT AVUTIL POSTPROCESS SWSCALE SWRESAMPLE AVRESAMPLE) + set_component_found(${_component}) +endforeach () + +# Compile the list of required vars +set(_FFmpeg_REQUIRED_VARS FFMPEG_LIBRARIES FFMPEG_INCLUDE_DIRS) +foreach (_component ${FFmpeg_FIND_COMPONENTS}) + list(APPEND _FFmpeg_REQUIRED_VARS ${_component}_LIBRARIES ${_component}_INCLUDE_DIRS) +endforeach () + +# Give a nice error message if some of the required vars are missing. +find_package_handle_standard_args(FFmpeg DEFAULT_MSG ${_FFmpeg_REQUIRED_VARS}) \ No newline at end of file diff --git a/cmake/Modules/FindOpenShotAudio.cmake b/cmake/Modules/FindOpenShotAudio.cmake index 1de4529b..0aeb0e1f 100644 --- a/cmake/Modules/FindOpenShotAudio.cmake +++ b/cmake/Modules/FindOpenShotAudio.cmake @@ -7,31 +7,12 @@ message("$ENV{LIBOPENSHOT_AUDIO_DIR}") -# Find the base directory of juce includes -find_path(LIBOPENSHOT_AUDIO_BASE_DIR JuceHeader.h +# Find the libopenshot-audio header files +find_path(LIBOPENSHOT_AUDIO_INCLUDE_DIR JuceHeader.h PATHS $ENV{LIBOPENSHOT_AUDIO_DIR}/include/libopenshot-audio/ /usr/include/libopenshot-audio/ /usr/local/include/libopenshot-audio/ ) -# Get a list of all header file paths -FILE(GLOB_RECURSE JUCE_HEADER_FILES - ${LIBOPENSHOT_AUDIO_BASE_DIR}/*.h -) - -# Loop through each header file -FOREACH(HEADER_PATH ${JUCE_HEADER_FILES}) - # Get the directory of each header file - get_filename_component(HEADER_DIRECTORY ${HEADER_PATH} - PATH - ) - - # Append each directory into the HEADER_DIRECTORIES list - LIST(APPEND HEADER_DIRECTORIES ${HEADER_DIRECTORY}) -ENDFOREACH(HEADER_PATH) - -# Remove duplicates from the header directories list -LIST(REMOVE_DUPLICATES HEADER_DIRECTORIES) - # Find the libopenshot-audio.so (check env var first) find_library(LIBOPENSHOT_AUDIO_LIBRARY NAMES libopenshot-audio openshot-audio @@ -48,9 +29,7 @@ find_library(LIBOPENSHOT_AUDIO_LIBRARY set(LIBOPENSHOT_AUDIO_LIBRARIES ${LIBOPENSHOT_AUDIO_LIBRARY}) 
set(LIBOPENSHOT_AUDIO_LIBRARY ${LIBOPENSHOT_AUDIO_LIBRARIES}) -# Seems to work fine with just the base dir (rather than all the actual include folders) -set(LIBOPENSHOT_AUDIO_INCLUDE_DIR ${LIBOPENSHOT_AUDIO_BASE_DIR} ) -set(LIBOPENSHOT_AUDIO_INCLUDE_DIRS ${LIBOPENSHOT_AUDIO_BASE_DIR} ) +set(LIBOPENSHOT_AUDIO_INCLUDE_DIRS ${LIBOPENSHOT_AUDIO_INCLUDE_DIR} ) include(FindPackageHandleStandardArgs) # handle the QUIETLY and REQUIRED arguments and set LIBOPENSHOT_AUDIO_FOUND to TRUE diff --git a/cmake/Modules/FindRESVG.cmake b/cmake/Modules/FindRESVG.cmake new file mode 100644 index 00000000..b03a0667 --- /dev/null +++ b/cmake/Modules/FindRESVG.cmake @@ -0,0 +1,28 @@ +# - Try to find RESVG +# Once done this will define +# RESVG_FOUND - System has RESVG +# RESVG_INCLUDE_DIRS - The RESVG include directories +# RESVG_LIBRARIES - The libraries needed to use RESVG +find_path ( RESVG_INCLUDE_DIR ResvgQt.h + PATHS ${RESVGDIR}/include/resvg + $ENV{RESVGDIR}/include/resvg + $ENV{RESVGDIR}/include + /usr/include/resvg + /usr/include + /usr/local/include/resvg + /usr/local/include ) + +find_library ( RESVG_LIBRARY NAMES resvg + PATHS /usr/lib + /usr/local/lib + $ENV{RESVGDIR} + $ENV{RESVGDIR}/lib ) + +set ( RESVG_LIBRARIES ${RESVG_LIBRARY} ) +set ( RESVG_INCLUDE_DIRS ${RESVG_INCLUDE_DIR} ) + +include ( FindPackageHandleStandardArgs ) +# handle the QUIETLY and REQUIRED arguments and set RESVG_FOUND to TRUE +# if all listed variables are TRUE +find_package_handle_standard_args ( RESVG "Could NOT find RESVG, using Qt SVG parsing instead" RESVG_LIBRARY RESVG_INCLUDE_DIR ) +mark_as_advanced( RESVG_LIBRARY RESVG_INCLUDE_DIR ) diff --git a/cmake/Windows/README b/cmake/Windows/README deleted file mode 100644 index 44dd56d7..00000000 --- a/cmake/Windows/README +++ /dev/null @@ -1,85 +0,0 @@ -#################################################################### - Install Dependencies for Windows -#################################################################### - -Most Windows dependencies 
needed for libopenshot-audio, libopenshot, and openshot-qt -can be installed easily with MSYS2 and the pacman package manager. Follow these -directions to setup a Windows build environment for OpenShot: - -#################################################################### - -1) Install MSYS2 (http://www.msys2.org/) - -2) Run MSYS2 command prompt (for example: C:\msys64\msys2_shell.cmd) - -3) Append PATH (so MSYS2 can find executables and libraries): - $ PATH=$PATH:/c/msys64/mingw64/bin:/c/msys64/mingw64/lib (64-bit PATH) - or - $ PATH=$PATH:/c/msys32/mingw32/bin:/c/msys32/mingw32/lib (32-bit PATH) - -4) Update and upgrade all packages - $ pacman -Syu - -5a) Install the following packages: -*** for 64-BIT support *** - - $ pacman -S --needed base-devel mingw-w64-x86_64-toolchain - $ pacman -S mingw64/mingw-w64-x86_64-ffmpeg - $ pacman -S mingw64/mingw-w64-x86_64-python3-pyqt5 - $ pacman -S mingw64/mingw-w64-x86_64-swig - $ pacman -S mingw64/mingw-w64-x86_64-cmake - $ pacman -S mingw64/mingw-w64-x86_64-doxygen - $ pacman -S mingw64/mingw-w64-x86_64-python3-pip - $ pacman -S mingw32/mingw-w64-i686-zeromq - $ pacman -S mingw64/mingw-w64-x86_64-python3-pyzmq - $ pacman -S mingw64/mingw-w64-x86_64-python3-cx_Freeze - $ pacman -S git - - Install ImageMagick if needed (OPTIONAL and NOT NEEDED) - $ pacman -S mingw64/mingw-w64-x86_64-imagemagick - -5b) Install the following packages: -*** for 32-BIT support *** - - $ pacman -S --needed base-devel mingw32/mingw-w64-i686-toolchain - $ pacman -S mingw32/mingw-w64-i686-ffmpeg - $ pacman -S mingw32/mingw-w64-i686-python3-pyqt5 - $ pacman -S mingw32/mingw-w64-i686-swig - $ pacman -S mingw32/mingw-w64-i686-cmake - $ pacman -S mingw32/mingw-w64-i686-doxygen - $ pacman -S mingw32/mingw-w64-i686-python3-pip - $ pacman -S mingw32/mingw-w64-i686-zeromq - $ pacman -S mingw32/mingw-w64-i686-python3-pyzmq - $ pacman -S mingw32/mingw-w64-i686-python3-cx_Freeze - $ pacman -S git - - Install ImageMagick if needed (OPTIONAL and NOT NEEDED) - 
$ pacman -S mingw32/mingw-w32-x86_32-imagemagick - -6) Install Python PIP Dependencies - $ pip3 install httplib2 - $ pip3 install slacker - $ pip3 install tinys3 - $ pip3 install github3.py - $ pip3 install requests - -7) Download Unittest++ (https://github.com/unittest-cpp/unittest-cpp) into /c/home/USER/unittest-cpp-master/ - Configure Unittest++: - $ cmake -G "MSYS Makefiles" ../ -DCMAKE_MAKE_PROGRAM=mingw32-make -DCMAKE_INSTALL_PREFIX:PATH=/usr - Build Unittest++ - $ mingw32-make install - -8) ZMQ++ Header (This might not be needed anymore) - NOTE: Download and copy zmq.hpp into the /c/msys64/mingw64/include/ folder - - -#################################################################### - OPTIONAL: Installing ImageMagick on Windows -#################################################################### - -If you would rather install ImageMagick from source code yourself, follow these steps: - -Step 1) Copy [build-imagemagick.sh and urls.txt] into your local MSYS2 environment -Step 2) Run MSYS2 Shell -Step 3) Execute this command - $ ./build-imagemagick.sh \ No newline at end of file diff --git a/cmake/Windows/build-imagemagick.sh b/cmake/Windows/build-imagemagick.sh deleted file mode 100644 index b3814dd9..00000000 --- a/cmake/Windows/build-imagemagick.sh +++ /dev/null @@ -1,274 +0,0 @@ -#!/bin/bash -# xml2 build ok but failed test -# libfpx build error - -function ised() { - IN=$1 - shift - tmp=$RANDOM.$$ - <$IN sed "$@" >$tmp && cat $tmp > $IN - rm $tmp -} - -function ask() { - read -p "${1:-Are you sure?]} [Y/n] " response - case $response in - y|Y|"") - true;; - *) - false;; - esac -} - -function download() { - while IFS=\; read url md5 <&3; do - fileName=${url##*/} - - echo "Downloading ${fileName}..." - while true; do - if [[ ! -e $fileName ]]; then - wget ${url} -O ${fileName} - else - echo "File exists!" - fi - - localMd5=$(md5sum ${fileName} | cut -d\ -f1) - - if [[ ${localMd5} != ${md5} ]]; then - ask "Checksum failed. 
Do you want to download this file again? [Y/n] " - if [[ $? -ne 0 ]]; then - exit 1 - fi - rm ${fileName} - else - break - fi - done - done 3< urls.txt -} - -function extract() { - file=$1 - if [[ ! -e ${file} ]]; then - return - fi - - case $file in - *.tar.gz) - tar xzf $file - ;; - *.tar.xz|*.tar.lzma) - tar xJf $file - ;; - *.tar.bz2) - tar xjf $file - ;; - *) - "Don't know how to extract $file" - esac -} - -function isLibsInstalled() { - libs="$@" - notfound=false - for l in "${libs}"; do - ld -L/usr/local/lib -l"${l}" 2>/dev/null - if [[ $? -ne 0 ]]; then - notfound=true - fi - done - - ! ${notfound} -} - -function isDirExists() { - dir="$@" - found=false - for d in ${dir}; do - if [[ -d "${d}" ]]; then - found=true - break - fi - done - - ${found} -} - -function extractIfNeeded() { - file=$1 - isDirExists ${file%%-*}-* - if [[ $? -ne 0 ]]; then - echo "Extracting $file" - extract $file - fi -} - -function buildbzip2() { - if isLibsInstalled "bz2"; then - if ask "Found bzip2 installed. Do you want to reinstall it?"; then : - else - return 0 - fi - fi - - extractIfNeeded bzip2-*.tar.lzma - - cd bzip2-*/ - tar xzf bzip2-1.0.6.tar.gz - tar xzf cygming-autotools-buildfiles.tar.gz - cd bzip2-*/ - autoconf - mkdir ../build - cd ../build - ../bzip2-*/configure - make - make install - cd ../.. -} - -function buildzlib() { - if isLibsInstalled "z"; then - if ask "Found zlib installed. Do you want to reinstall it?"; then : - else - return 0 - fi - fi - - extractIfNeeded zlib-*.tar.xz - - cd zlib-*/ - INCLUDE_PATH=/usr/local/include LIBRARY_PATH=/usr/local/lib BINARY_PATH=/usr/local/bin make install -f win32/Makefile.gcc SHARED_MODE=1 - cd .. -} - -function buildlibxml2() { - if isLibsInstalled "xml2"; then - if ask "Found libxml2 installed. 
Do you want to reinstall it?"; then : - else - return 0 - fi - fi - extractIfNeeded libxml2-*.tar.gz - cd libxml2-*/win32/ - ised configure.js 's/dirSep = "\\\\";/dirSep = "\/";/' - cscript.exe configure.js compiler=mingw prefix=/usr/local - # ised ../dict.c '/typedef.*uint32_t;$/d' - ised Makefile.mingw 's/cmd.exe \/C "\?if not exist \(.*\) mkdir \1"\?/mkdir -p \1/' - ised Makefile.mingw 's/cmd.exe \/C "copy\(.*\)"/cp\1/' - ised Makefile.mingw '/cp/{y/\\/\//;}' - ised Makefile.mingw '/PREFIX/{y/\\/\//;}' - make -f Makefile.mingw - make -f Makefile.mingw install - cd ../../ -} - -function buildlibpng() { - if isLibsInstalled "png"; then - if ask "Found libpng installed. Do you want to reinstall it?"; then : - else - return 0 - fi - fi - - extractIfNeeded libpng-*.tar.xz - - cd libpng-*/ - make -f scripts/makefile.msys - make install -f scripts/makefile.msys - cd .. -} - -function buildjpegsrc() { - if isLibsInstalled "jpeg"; then - if ask "Found jpegsrc installed. Do you want to reinstall it?"; then : - else - return 0 - fi - fi - - extract jpegsrc*.tar.gz - - cd jpeg-*/ - ./configure - make - make install - cd .. -} - -function buildfreetype() { - if isLibsInstalled "freetype"; then - if ask "Found freetype installed. Do you want to reinstall it?"; then : - else - return 0 - fi - fi - extract freetype*.tar.bz2 - - INCLUDE_PATH=/usr/local/include - LIBRARY_PATH=/usr/local/lib - BINARY_PATH=/usr/local/bin - cd freetype-*/ - ./configure - make - make install - cd .. -} - -function buildlibwmf() { - if isLibsInstalled "wmf"; then - if ask "Found libwmf installed. Do you want to reinstall it?"; then : - else - return 0 - fi - fi - extract libwmf*.tar.gz - - cd libwmf-*/ - ./configure CFLAGS="-I/usr/local/include" LDFLAGS="-L/usr/local/lib" - make - make install - cd .. -} - -function buildlibwebp() { - if isLibsInstalled "webp"; then - if ask "Found libwebp installed. 
Do you want to reinstall it?"; then : - else - return 0 - fi - fi - extract libwebp*.tar.gz - - cd libwebp-*/ - ./configure CFLAGS="-I/usr/local/include" LDFLAGS="-L/usr/local/lib" - make - make install - cd .. -} - -function buildDelegate() { - delegates="bzip2 zlib libxml2 libpng jpegsrc freetype libwmf libwebp" - for d in ${delegates}; do - echo "**********************************************************" - echo "Building $d" - build${d} - done -} - -function build() { - extractIfNeeded ImageMagick-*.tar.xz - - local oldPwd=$(pwd -L) - cd ImageMagick-*/ - # patch configure - #sed -i 's/${GDI32_LIBS}x" !=/${GDI32_LIBS} ==/' configure - ised configure 's/${GDI32_LIBS}x" !=/${GDI32_LIBS} ==/' - ./configure --enable-shared --disable-static --enable-delegate-build --without-modules CFLAGS="-I/usr/local/include" LDFLAGS="-L/usr/local/lib" - make - make install - cd ${oldPwd} -} - -download -buildDelegate -build diff --git a/cmake/Windows/urls.txt b/cmake/Windows/urls.txt deleted file mode 100644 index 0a45d8af..00000000 --- a/cmake/Windows/urls.txt +++ /dev/null @@ -1,10 +0,0 @@ -ftp://ftp.imagemagick.org/pub/ImageMagick/releases/ImageMagick-6.8.8-10.tar.xz;ab9b397c1d4798a9f6ae6cc94aa292fe -ftp://ftp.imagemagick.org/pub/ImageMagick/delegates/libpng-1.6.20.tar.xz;3968acb7c66ef81a9dab867f35d0eb4b -ftp://ftp.imagemagick.org/pub/ImageMagick/delegates/libwebp-0.4.4.tar.gz;b737062cf688e502b940b460ddc3015f -ftp://ftp.imagemagick.org/pub/ImageMagick/delegates/libwmf-0.2.8.4.tar.gz;d1177739bf1ceb07f57421f0cee191e0 -ftp://ftp.imagemagick.org/pub/ImageMagick/delegates/libxml2-2.9.3.tar.gz;daece17e045f1c107610e137ab50c179 -ftp://ftp.imagemagick.org/pub/ImageMagick/delegates/zlib-1.2.8.tar.xz;28f1205d8dd2001f26fec1e8c2cebe37 -ftp://ftp.imagemagick.org/pub/ImageMagick/delegates/freetype-2.6.2.tar.bz2;86109d0c998787d81ac582bad9adf82e -http://ncu.dl.sourceforge.net/project/mingw/MinGW/Extension/bzip2/bzip2-1.0.6-4/bzip2-1.0.6-4-mingw32-src.tar.lzma;2a25de4331d1e6e1458d8632dff55fad 
-ftp://ftp.imagemagick.org/pub/ImageMagick/delegates/libfpx-1.3.1-4.tar.xz;65e2cf8dcf230ad0b90aead35553bbda -ftp://ftp.imagemagick.org/pub/ImageMagick/delegates/jpegsrc.v9a.tar.gz;3353992aecaee1805ef4109aadd433e7 diff --git a/doc/INSTALL-LINUX.md b/doc/INSTALL-LINUX.md new file mode 100644 index 00000000..6ed4f3f6 --- /dev/null +++ b/doc/INSTALL-LINUX.md @@ -0,0 +1,225 @@ +## Getting Started + +The best way to get started with libopenshot, is to learn about our build system, obtain all the source code, +install a development IDE and tools, and better understand our dependencies. So, please read through the +following sections, and follow the instructions. And keep in mind, that your computer is likely different +than the one used when writing these instructions. Your file paths and versions of applications might be +slightly different, so keep an eye out for subtle file path differences in the commands you type. + +## Build Tools + +CMake is the backbone of our build system. It is a cross-platform build system, which checks for +dependencies, locates header files and libraries, generates makefiles, and supports the cross-platform +compiling of libopenshot and libopenshot-audio. CMake uses an out-of-source build concept, where +all temporary build files, such as makefiles, object files, and even the final binaries, are created +outside of the source code folder, inside a /build/ sub-folder. This prevents the build process +from cluttering up the source code. These instructions have only been tested with the GNU compiler +(including MSYS2/MinGW for Windows). + +## Dependencies + +The following libraries are required to build libopenshot. Instructions on how to install these +dependencies vary for each operating system. Libraries and Executables have been labeled in the +list below to help distinguish between them. 
+ +* ### FFmpeg (libavformat, libavcodec, libavutil, libavdevice, libavresample, libswscale) + * http://www.ffmpeg.org/ `(Library)` + * This library is used to decode and encode video, audio, and image files. It is also used to obtain information about media files, such as frame rate, sample rate, aspect ratio, and other common attributes. + +* ### ImageMagick++ (libMagick++, libMagickWand, libMagickCore) + * http://www.imagemagick.org/script/magick++.php `(Library)` + * This library is **optional**, and used to decode and encode images. + +* ### OpenShot Audio Library (libopenshot-audio) + * https://github.com/OpenShot/libopenshot-audio/ `(Library)` + * This library is used to mix, resample, host plug-ins, and play audio. It is based on the JUCE project, which is an outstanding audio library used by many different applications + +* ### Qt 5 (libqt5) + * http://www.qt.io/qt5/ `(Library)` + * Qt5 is used to display video, store image data, composite images, apply image effects, and many other utility functions, such as file system manipulation, high resolution timers, etc... + +* ### CMake (cmake) + * http://www.cmake.org/ `(Executable)` + * This executable is used to automate the generation of Makefiles, check for dependencies, and is the backbone of libopenshot’s cross-platform build process. + +* ### SWIG (swig) + * http://www.swig.org/ `(Executable)` + * This executable is used to generate the Python and Ruby bindings for libopenshot. It is a simple and powerful wrapper for C++ libraries, and supports many languages. + +* ### Python 3 (libpython) + * http://www.python.org/ `(Executable and Library)` + * This library is used by swig to create the Python (version 3+) bindings for libopenshot. This is also the official language used by OpenShot Video Editor (a graphical interface to libopenshot). + +* ### Doxygen (doxygen) + * http://www.stack.nl/~dimitri/doxygen/ `(Executable)` + * This executable is used to auto-generate the documentation used by libopenshot. 
+ +* ### UnitTest++ (libunittest++) + * https://github.com/unittest-cpp/ `(Library)` + * This library is used to execute unit tests for libopenshot. It contains many macros used to keep our unit testing code very clean and simple. + +* ### ZeroMQ (libzmq) + * http://zeromq.org/ `(Library)` + * This library is used to communicate between libopenshot and other applications (publisher / subscriber). Primarily used to send debug data from libopenshot. + +* ### OpenMP (-fopenmp) + * http://openmp.org/wp/ `(Compiler Flag)` + * If your compiler supports this flag (GCC, Clang, and most other compilers), it provides libopenshot with easy methods of using parallel programming techniques to improve performance and take advantage of multi-core processors. + + +## CMake Flags (Optional) +There are many different build flags that can be passed to cmake to adjust how libopenshot is +compiled. Some of these flags might be required when compiling on certain OSes, just depending +on how your build environment is setup. To add a build flag, follow this general syntax: +`cmake -DMAGICKCORE_HDRI_ENABLE=1 -DENABLE_TESTS=1 ../` + +* MAGICKCORE_HDRI_ENABLE (default 0) +* MAGICKCORE_QUANTUM_DEPTH (default 0) +* OPENSHOT_IMAGEMAGICK_COMPATIBILITY (default 0) +* DISABLE_TESTS (default 0) +* CMAKE_PREFIX_PATH (`/location/to/missing/library/`) +* PYTHON_INCLUDE_DIR (`/location/to/python/include/`) +* PYTHON_LIBRARY (`/location/to/python/lib.a`) +* PYTHON_FRAMEWORKS (`/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/`) +* CMAKE_CXX_COMPILER (`/location/to/mingw/g++`) +* CMAKE_C_COMPILER (`/location/to/mingw/gcc`) + +## Obtaining Source Code + +The first step in installing libopenshot is to obtain the most recent source code. The source code is +available on [GitHub](https://github.com/OpenShot/libopenshot). Use the following command +to obtain the latest libopenshot source code. 
+ +``` +git clone https://github.com/OpenShot/libopenshot.git +git clone https://github.com/OpenShot/libopenshot-audio.git +``` + +## Folder Structure (libopenshot) + +The source code is divided up into the following folders. + +* ### build/ + * This folder needs to be manually created, and is used by cmake to store the temporary build files, such as makefiles, as well as the final binaries (library and test executables). + +* ### cmake/ + * This folder contains custom modules not included by default in cmake, used to find dependency libraries and headers and determine if these libraries are installed. + +* ### doc/ + * This folder contains documentation and related files, such as logos and images required by the doxygen auto-generated documentation. + +* ### include/ + * This folder contains all headers (*.h) used by libopenshot. + +* ### src/ + * This folder contains all source code (*.cpp) used by libopenshot. + +* ### tests/ + * This folder contains all unit test code. Each class has it’s own test file (*.cpp), and uses UnitTest++ macros to keep the test code simple and manageable. + +* ### thirdparty/ + * This folder contains code not written by the OpenShot team. For example, jsoncpp, an open-source JSON parser. + +## Install Dependencies + +In order to actually compile libopenshot, we need to install some dependencies on your system. The easiest +way to accomplish this is with our Daily PPA. A PPA is an unofficial Ubuntu repository, which has our +software packages available to download and install. 
+ +``` + sudo add-apt-repository ppa:openshot.developers/libopenshot-daily + sudo apt-get update + sudo apt-get install openshot-qt \ + cmake \ + libx11-dev \ + libasound2-dev \ + libavcodec-dev \ + libavdevice-dev \ + libavfilter-dev \ + libavformat-dev \ + libavresample-dev \ + libavutil-dev \ + libfdk-aac-dev \ + libfreetype6-dev \ + libjsoncpp-dev \ + libmagick++-dev \ + libopenshot-audio-dev \ + libswscale-dev \ + libunittest++-dev \ + libxcursor-dev \ + libxinerama-dev \ + libxrandr-dev \ + libzmq3-dev \ + pkg-config \ + python3-dev \ + qtbase5-dev \ + qtmultimedia5-dev \ + swig +``` + +## Linux Build Instructions (libopenshot-audio) +To compile libopenshot-audio, we need to go through a few additional steps to manually build and +install it. Launch a terminal and enter: + +``` +cd [libopenshot-audio repo folder] +mkdir build +cd build +cmake ../ +make +make install +./src/openshot-audio-test-sound (This should play a test sound) +``` + +## Linux Build Instructions (libopenshot) +Run the following commands to compile libopenshot: + +``` +cd [libopenshot repo directory] +mkdir -p build +cd build +cmake ../ +make +``` + +If you are missing any dependencies for libopenshot, you might receive error messages at this point. +Just install the missing packages (usually with a -dev suffix), and run the above commands again. +Repeat until no error messages are displayed, and the build process completes. Also, if you manually +install Qt 5, you might need to specify the location for cmake: + +``` +cmake -DCMAKE_PREFIX_PATH=/qt5_path/qt5/5.2.0/ ../ +``` + +To run all unit tests (and verify everything is working correctly), launch a terminal, and enter: + +``` +make test +``` + +To auto-generate documentation for libopenshot, launch a terminal, and enter: + +``` +make doc +``` + +This will use doxygen to generate a folder of HTML files, with all classes and methods documented. The +folder is located at **build/doc/html/**. 
Once libopenshot has been successfully built, we need to +install it (i.e. copy it to the correct folder, so other libraries can find it). + +``` +make install +``` + +This will copy the binary files to /usr/local/lib/, and the header files to /usr/local/include/openshot/... +This is where other projects will look for the libopenshot files when building. Python 3 bindings are +also installed at this point. let's verify the python bindings work: + +``` +python3 +>>> import openshot +``` + +If no errors are displayed, you have successfully compiled and installed libopenshot on your system. +Congratulations and be sure to read our wiki on [Becoming an OpenShot Developer](https://github.com/OpenShot/openshot-qt/wiki/Become-a-Developer)! +Welcome to the OpenShot developer community! We look forward to meeting you! diff --git a/doc/INSTALL-MAC.md b/doc/INSTALL-MAC.md new file mode 100644 index 00000000..ab7f79c3 --- /dev/null +++ b/doc/INSTALL-MAC.md @@ -0,0 +1,218 @@ +## Getting Started + +The best way to get started with libopenshot, is to learn about our build system, obtain all the source code, +install a development IDE and tools, and better understand our dependencies. So, please read through the +following sections, and follow the instructions. And keep in mind, that your computer is likely different +than the one used when writing these instructions. Your file paths and versions of applications might be +slightly different, so keep an eye out for subtle file path differences in the commands you type. + +## Build Tools + +CMake is the backbone of our build system. It is a cross-platform build system, which checks for +dependencies, locates header files and libraries, generates makefiles, and supports the cross-platform +compiling of libopenshot and libopenshot-audio. 
CMake uses an out-of-source build concept, where +all temporary build files, such as makefiles, object files, and even the final binaries, are created +outside of the source code folder, inside a /build/ sub-folder. This prevents the build process +from cluttering up the source code. These instructions have only been tested with the GNU compiler +(including MSYS2/MinGW for Windows). + +## Dependencies + +The following libraries are required to build libopenshot. Instructions on how to install these +dependencies vary for each operating system. Libraries and Executables have been labeled in the +list below to help distinguish between them. + +* ### FFmpeg (libavformat, libavcodec, libavutil, libavdevice, libavresample, libswscale) + * http://www.ffmpeg.org/ `(Library)` + * This library is used to decode and encode video, audio, and image files. It is also used to obtain information about media files, such as frame rate, sample rate, aspect ratio, and other common attributes. + +* ### ImageMagick++ (libMagick++, libMagickWand, libMagickCore) + * http://www.imagemagick.org/script/magick++.php `(Library)` + * This library is **optional**, and used to decode and encode images. + +* ### OpenShot Audio Library (libopenshot-audio) + * https://github.com/OpenShot/libopenshot-audio/ `(Library)` + * This library is used to mix, resample, host plug-ins, and play audio. It is based on the JUCE project, which is an outstanding audio library used by many different applications + +* ### Qt 5 (libqt5) + * http://www.qt.io/qt5/ `(Library)` + * Qt5 is used to display video, store image data, composite images, apply image effects, and many other utility functions, such as file system manipulation, high resolution timers, etc... + +* ### CMake (cmake) + * http://www.cmake.org/ `(Executable)` + * This executable is used to automate the generation of Makefiles, check for dependencies, and is the backbone of libopenshot’s cross-platform build process. 
+ +* ### SWIG (swig) + * http://www.swig.org/ `(Executable)` + * This executable is used to generate the Python and Ruby bindings for libopenshot. It is a simple and powerful wrapper for C++ libraries, and supports many languages. + +* ### Python 3 (libpython) + * http://www.python.org/ `(Executable and Library)` + * This library is used by swig to create the Python (version 3+) bindings for libopenshot. This is also the official language used by OpenShot Video Editor (a graphical interface to libopenshot). + +* ### Doxygen (doxygen) + * http://www.stack.nl/~dimitri/doxygen/ `(Executable)` + * This executable is used to auto-generate the documentation used by libopenshot. + +* ### UnitTest++ (libunittest++) + * https://github.com/unittest-cpp/ `(Library)` + * This library is used to execute unit tests for libopenshot. It contains many macros used to keep our unit testing code very clean and simple. + +* ### ZeroMQ (libzmq) + * http://zeromq.org/ `(Library)` + * This library is used to communicate between libopenshot and other applications (publisher / subscriber). Primarily used to send debug data from libopenshot. + +* ### OpenMP (-fopenmp) + * http://openmp.org/wp/ `(Compiler Flag)` + * If your compiler supports this flag (GCC, Clang, and most other compilers), it provides libopenshot with easy methods of using parallel programming techniques to improve performance and take advantage of multi-core processors. + +## CMake Flags (Optional) +There are many different build flags that can be passed to cmake to adjust how libopenshot is compiled. +Some of these flags might be required when compiling on certain OSes, just depending on how your build +environment is setup. 
To add a build flag, follow this general syntax: +`cmake -DMAGICKCORE_HDRI_ENABLE=1 -DENABLE_TESTS=1 ../` + +* MAGICKCORE_HDRI_ENABLE (default 0) +* MAGICKCORE_QUANTUM_DEPTH (default 0) +* OPENSHOT_IMAGEMAGICK_COMPATIBILITY (default 0) +* DISABLE_TESTS (default 0) +* CMAKE_PREFIX_PATH (`/location/to/missing/library/`) +* PYTHON_INCLUDE_DIR (`/location/to/python/include/`) +* PYTHON_LIBRARY (`/location/to/python/lib.a`) +* PYTHON_FRAMEWORKS (`/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/`) +* CMAKE_CXX_COMPILER (`/location/to/mingw/g++`) +* CMAKE_C_COMPILER (`/location/to/mingw/gcc`) + +## Obtaining Source Code + +The first step in installing libopenshot is to obtain the most recent source code. The source code +is available on [GitHub](https://github.com/OpenShot/libopenshot). Use the following command to +obtain the latest libopenshot source code. + +``` +git clone https://github.com/OpenShot/libopenshot.git +git clone https://github.com/OpenShot/libopenshot-audio.git +``` + +## Folder Structure (libopenshot) + +The source code is divided up into the following folders. + +* ### build/ + * This folder needs to be manually created, and is used by cmake to store the temporary build files, such as makefiles, as well as the final binaries (library and test executables). + +* ### cmake/ + * This folder contains custom modules not included by default in cmake, used to find dependency libraries and headers and determine if these libraries are installed. + +* ### doc/ + * This folder contains documentation and related files, such as logos and images required by the doxygen auto-generated documentation. + +* ### include/ + * This folder contains all headers (*.h) used by libopenshot. + +* ### src/ + * This folder contains all source code (*.cpp) used by libopenshot. + +* ### tests/ + * This folder contains all unit test code. Each class has it’s own test file (*.cpp), and uses UnitTest++ macros to keep the test code simple and manageable. 
+ +* ### thirdparty/ + * This folder contains code not written by the OpenShot team. For example, jsoncpp, an open-source JSON parser. + +## Install Dependencies + +In order to actually compile libopenshot and libopenshot-audio, we need to install some dependencies on +your system. Most packages needed by libopenshot can be installed easily with Homebrew. However, first +install Xcode with the following options ("UNIX Development", "System Tools", "Command Line Tools", or +"Command Line Support"). Be sure to refresh your list of Homebrew packages with the “brew update” command. + +**NOTE:** Homebrew seems to work much better for most users (compared to MacPorts), so I am going to +focus on brew for this guide. + +Install the following packages using the Homebrew package installer (http://brew.sh/). Pay close attention +to any warnings or errors during these brew installs. NOTE: You might have some conflicting libraries in +your /usr/local/ folders, so follow the directions from brew if these are detected. + +``` +brew install gcc48 --enable-all-languages +brew install ffmpeg +brew install librsvg +brew install swig +brew install doxygen +brew install unittest-cpp --cc=gcc-4.8. You must specify the c++ compiler with the --cc flag to be 4.7 or 4.8. +brew install qt5 +brew install cmake +brew install zeromq +``` + +## Mac Build Instructions (libopenshot-audio) +Since libopenshot-audio is not available in a Homebrew or MacPorts package, we need to go through a +few additional steps to manually build and install it. 
Launch a terminal and enter: + +``` +cd [libopenshot-audio repo folder] +mkdir build +cd build +cmake -d -G "Unix Makefiles" -DCMAKE_CXX_COMPILER=clang++ -DCMAKE_C_COMPILER=clang ../ (Clang must be used due to GNU-incompatible Objective-C code in some of the Apple frameworks) +make +make install +./src/openshot-audio-test-sound (This should play a test sound) +``` + +## Mac Build Instructions (libopenshot) +Run the following commands to build libopenshot: + +``` +$ cd [libopenshot repo folder] +$ mkdir build +$ cd build +$ cmake -G "Unix Makefiles" -DCMAKE_CXX_COMPILER=/usr/local/opt/gcc48/bin/g++-4.8 -DCMAKE_C_COMPILER=/usr/local/opt/gcc48/bin/gcc-4.8 -DCMAKE_PREFIX_PATH=/usr/local/Cellar/qt5/5.4.2/ -DPYTHON_INCLUDE_DIR=/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/Versions/3.3/include/python3.3m/ -DPYTHON_LIBRARY=/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/Versions/3.3/lib/libpython3.3.dylib -DPython_FRAMEWORKS=/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/ ../ -D"CMAKE_BUILD_TYPE:STRING=Debug" +``` + +The extra arguments on the cmake command make sure the compiler will be gcc 4.8 and that cmake +knows where to look for the Qt header files and Python library. Double check these file paths, +as yours will likely be different. + +``` +make +``` + +If you are missing any dependencies for libopenshot, you will receive error messages at this point. +Just install the missing dependencies, and run the above commands again. Repeat until no error +messages are displayed and the build process completes. + +Also, if you are having trouble building, please see the CMake Flags section above, as it might +provide a solution for finding a missing folder path, missing Python 3 library, etc... 
+ +To run all unit tests (and verify everything is working correctly), launch a terminal, and enter: + +``` +make test +``` + +To auto-generate the documentation for libopenshot, launch a terminal, and enter: + +``` +make doc +``` + +This will use doxygen to generate a folder of HTML files, with all classes and methods documented. +The folder is located at build/doc/html/. Once libopenshot has been successfully built, we need +to install it (i.e. copy it to the correct folder, so other libraries can find it). + +``` +make install +``` + +This should copy the binary files to /usr/local/lib/, and the header files to /usr/local/include/openshot/... +This is where other projects will look for the libopenshot files when building. Python 3 bindings are +also installed at this point. let's verify the python bindings work: + +``` +python3 (or python) +>>> import openshot +``` + +If no errors are displayed, you have successfully compiled and installed libopenshot on your +system. Congratulations and be sure to read our wiki on [Becoming an OpenShot Developer](https://github.com/OpenShot/openshot-qt/wiki/Become-a-Developer)! +Welcome to the OpenShot developer community! We look forward to meeting you! diff --git a/doc/INSTALL-WINDOWS.md b/doc/INSTALL-WINDOWS.md new file mode 100644 index 00000000..7f5b8f78 --- /dev/null +++ b/doc/INSTALL-WINDOWS.md @@ -0,0 +1,329 @@ +## Getting Started + +The best way to get started with libopenshot, is to learn about our build system, obtain all the +source code, install a development IDE and tools, and better understand our dependencies. So, +please read through the following sections, and follow the instructions. And keep in mind, +that your computer is likely different than the one used when writing these instructions. +Your file paths and versions of applications might be slightly different, so keep an eye out +for subtle file path differences in the commands you type. + +## Build Tools + +CMake is the backbone of our build system. 
It is a cross-platform build system, which +checks for dependencies, locates header files and libraries, generates makefiles, and +supports the cross-platform compiling of libopenshot and libopenshot-audio. CMake uses +an out-of-source build concept, where all temporary build files, such as makefiles, +object files, and even the final binaries, are created outside of the source code +folder, inside a /build/ sub-folder. This prevents the build process from cluttering +up the source code. These instructions have only been tested with the GNU compiler +(including MSYS2/MinGW for Windows). + +## Dependencies + +The following libraries are required to build libopenshot. Instructions on how to +install these dependencies vary for each operating system. Libraries and Executables +have been labeled in the list below to help distinguish between them. + +* ### FFmpeg (libavformat, libavcodec, libavutil, libavdevice, libavresample, libswscale) + * http://www.ffmpeg.org/ `(Library)` + * This library is used to decode and encode video, audio, and image files. It is also used to obtain information about media files, such as frame rate, sample rate, aspect ratio, and other common attributes. + +* ### ImageMagick++ (libMagick++, libMagickWand, libMagickCore) + * http://www.imagemagick.org/script/magick++.php `(Library)` + * This library is **optional**, and used to decode and encode images. + +* ### OpenShot Audio Library (libopenshot-audio) + * https://github.com/OpenShot/libopenshot-audio/ `(Library)` + * This library is used to mix, resample, host plug-ins, and play audio. It is based on the JUCE project, which is an outstanding audio library used by many different applications + +* ### Qt 5 (libqt5) + * http://www.qt.io/qt5/ `(Library)` + * Qt5 is used to display video, store image data, composite images, apply image effects, and many other utility functions, such as file system manipulation, high resolution timers, etc... 
+ +* ### CMake (cmake) + * http://www.cmake.org/ `(Executable)` + * This executable is used to automate the generation of Makefiles, check for dependencies, and is the backbone of libopenshot’s cross-platform build process. + +* ### SWIG (swig) + * http://www.swig.org/ `(Executable)` + * This executable is used to generate the Python and Ruby bindings for libopenshot. It is a simple and powerful wrapper for C++ libraries, and supports many languages. + +* ### Python 3 (libpython) + * http://www.python.org/ `(Executable and Library)` + * This library is used by swig to create the Python (version 3+) bindings for libopenshot. This is also the official language used by OpenShot Video Editor (a graphical interface to libopenshot). + +* ### Doxygen (doxygen) + * http://www.stack.nl/~dimitri/doxygen/ `(Executable)` + * This executable is used to auto-generate the documentation used by libopenshot. + +* ### UnitTest++ (libunittest++) + * https://github.com/unittest-cpp/ `(Library)` + * This library is used to execute unit tests for libopenshot. It contains many macros used to keep our unit testing code very clean and simple. + +* ### ZeroMQ (libzmq) + * http://zeromq.org/ `(Library)` + * This library is used to communicate between libopenshot and other applications (publisher / subscriber). Primarily used to send debug data from libopenshot. + +* ### OpenMP (-fopenmp) + * http://openmp.org/wp/ `(Compiler Flag)` + * If your compiler supports this flag (GCC, Clang, and most other compilers), it provides libopenshot with easy methods of using parallel programming techniques to improve performance and take advantage of multi-core processors. + +## CMake Flags (Optional) +There are many different build flags that can be passed to cmake to adjust how libopenshot +is compiled. Some of these flags might be required when compiling on certain OSes, just +depending on how your build environment is setup. 
To add a build flag, follow this general +syntax: `cmake -DMAGICKCORE_HDRI_ENABLE=1 -DENABLE_TESTS=1 ../` + +* MAGICKCORE_HDRI_ENABLE (default 0) +* MAGICKCORE_QUANTUM_DEPTH (default 0) +* OPENSHOT_IMAGEMAGICK_COMPATIBILITY (default 0) +* DISABLE_TESTS (default 0) +* CMAKE_PREFIX_PATH (`/location/to/missing/library/`) +* PYTHON_INCLUDE_DIR (`/location/to/python/include/`) +* PYTHON_LIBRARY (`/location/to/python/lib.a`) +* PYTHON_FRAMEWORKS (`/usr/local/Cellar/python3/3.3.2/Frameworks/Python.framework/`) +* CMAKE_CXX_COMPILER (`/location/to/mingw/g++`) +* CMAKE_C_COMPILER (`/location/to/mingw/gcc`) + +## Environment Variables + +Many environment variables will need to be set during this Windows installation guide. +The command line will need to be closed and re-launched after any changes to your environment +variables. Also, dependency libraries will not be found during linking or execution without +being found in the PATH environment variable. So, if you get errors related to missing +commands or libraries, double check the PATH variable. + +The following environment variables need to be added to your “System Variables”. Be sure to +check each folder path for accuracy, as your paths will likely be different than this list. 
+ +### Example Variables + +* DL_DIR (`C:\libdl`) +* DXSDK_DIR (`C:\Program Files\Microsoft DirectX SDK (June 2010)\`) +* FFMPEGDIR (`C:\ffmpeg-git-95f163b-win32-dev`) +* FREETYPE_DIR (`C:\Program Files\GnuWin32`) +* HOME (`C:\msys\1.0\home`) +* LIBOPENSHOT_AUDIO_DIR (`C:\Program Files\libopenshot-audio`) +* QTDIR (`C:\qt5`) +* SNDFILE_DIR (`C:\Program Files\libsndfile`) +* UNITTEST_DIR (`C:\UnitTest++`) +* ZMQDIR (`C:\msys2\usr\local\`) +* PATH (`The following paths are an example`) + * C:\Qt5\bin; C:\Qt5\MinGW\bin\; C:\msys\1.0\local\lib; C:\Program Files\CMake 2.8\bin; C:\UnitTest++\build; C:\libopenshot\build\src; C:\Program Files\doxygen\bin; C:\ffmpeg-git-95f163b-win32-dev\lib; C:\swigwin-2.0.4; C:\Python33; C:\Program Files\Project\lib; C:\msys2\usr\local\ + + + + + +## Obtaining Source Code + +The first step in installing libopenshot is to obtain the most recent source code. The source code +is available on [GitHub](https://github.com/OpenShot/libopenshot). Use the following command to +obtain the latest libopenshot source code. + +``` +git clone https://github.com/OpenShot/libopenshot.git +git clone https://github.com/OpenShot/libopenshot-audio.git +``` + +## Folder Structure (libopenshot) + +The source code is divided up into the following folders. + +* ### build/ + * This folder needs to be manually created, and is used by cmake to store the temporary + build files, such as makefiles, as well as the final binaries (library and test executables). + +* ### cmake/ + * This folder contains custom modules not included by default in cmake, used to find + dependency libraries and headers and determine if these libraries are installed. + +* ### doc/ + * This folder contains documentation and related files, such as logos and images + required by the doxygen auto-generated documentation. + +* ### include/ + * This folder contains all headers (*.h) used by libopenshot. + +* ### src/ + * This folder contains all source code (*.cpp) used by libopenshot. 
+ +* ### tests/ + * This folder contains all unit test code. Each class has it’s own test file (*.cpp), and + uses UnitTest++ macros to keep the test code simple and manageable. + +* ### thirdparty/ + * This folder contains code not written by the OpenShot team. For example, jsoncpp, an + open-source JSON parser. + +## Install MSYS2 Dependencies + +Most Windows dependencies needed for libopenshot-audio, libopenshot, and openshot-qt +can be installed easily with MSYS2 and the pacman package manager. Follow these +directions to setup a Windows build environment for OpenShot. + +1) Install MSYS2: http://www.msys2.org/ + +2) Run MSYS2 command prompt (for example: `C:\msys64\msys2_shell.cmd`) + +3) Append PATH (so MSYS2 can find executables and libraries): + +``` +PATH=$PATH:/c/msys64/mingw64/bin:/c/msys64/mingw64/lib (64-bit PATH) + or +PATH=$PATH:/c/msys32/mingw32/bin:/c/msys32/mingw32/lib (32-bit PATH) +``` + +4) Update and upgrade all packages + +``` +pacman -Syu +``` + +5a) Install the following packages (**64-Bit**) + +``` +pacman -S --needed base-devel mingw-w64-x86_64-toolchain +pacman -S mingw64/mingw-w64-x86_64-ffmpeg +pacman -S mingw64/mingw-w64-x86_64-python3-pyqt5 +pacman -S mingw64/mingw-w64-x86_64-swig +pacman -S mingw64/mingw-w64-x86_64-cmake +pacman -S mingw64/mingw-w64-x86_64-doxygen +pacman -S mingw64/mingw-w64-x86_64-python3-pip +pacman -S mingw32/mingw-w64-i686-zeromq +pacman -S mingw64/mingw-w64-x86_64-python3-pyzmq +pacman -S mingw64/mingw-w64-x86_64-python3-cx_Freeze +pacman -S git + +# Install ImageMagick if needed (OPTIONAL and NOT NEEDED) +pacman -S mingw64/mingw-w64-x86_64-imagemagick +``` + +5b) **Or** Install the following packages (**32-Bit**) + +``` +pacman -S --needed base-devel mingw32/mingw-w64-i686-toolchain +pacman -S mingw32/mingw-w64-i686-ffmpeg +pacman -S mingw32/mingw-w64-i686-python3-pyqt5 +pacman -S mingw32/mingw-w64-i686-swig +pacman -S mingw32/mingw-w64-i686-cmake +pacman -S mingw32/mingw-w64-i686-doxygen +pacman -S 
mingw32/mingw-w64-i686-python3-pip +pacman -S mingw32/mingw-w64-i686-zeromq +pacman -S mingw32/mingw-w64-i686-python3-pyzmq +pacman -S mingw32/mingw-w64-i686-python3-cx_Freeze +pacman -S git + +# Install ImageMagick if needed (OPTIONAL and NOT NEEDED) +pacman -S mingw32/mingw-w64-i686-imagemagick +``` + +6) Install Python PIP Dependencies + +``` +pip3 install httplib2 +pip3 install slacker +pip3 install tinys3 +pip3 install github3.py +pip3 install requests +``` + +7) Download UnitTest++ (https://github.com/unittest-cpp/unittest-cpp) into /MSYS2/[USER]/unittest-cpp-master/ + +``` +cmake -G "MSYS Makefiles" ../ -DCMAKE_MAKE_PROGRAM=mingw32-make -DCMAKE_INSTALL_PREFIX:PATH=/usr +mingw32-make install +``` + +8) ZMQ++ Header (This might not be needed anymore) + NOTE: Download and copy zmq.hpp into the /c/msys64/mingw64/include/ folder + +## Manual Dependencies + +* ### DLfcn + * https://github.com/dlfcn-win32/dlfcn-win32 + * Download and Extract the Win32 Static (.tar.bz2) archive to a local folder: C:\libdl\ + * Create an environment variable called DL_DIR and set the value to C:\libdl\. This environment variable will be used by CMake to find the binary and header file. + +* ### DirectX SDK / Windows SDK + * Windows 7: (DirectX SDK) http://www.microsoft.com/download/en/details.aspx?displaylang=en&id=6812 + * Windows 8: (Windows SDK) + * https://msdn.microsoft.com/en-us/windows/desktop/aa904949 + * Download and Install the SDK Setup program. This is needed for the JUCE library to play audio on Windows. +Create an environment variable called DXSDK_DIR and set the value to C:\Program Files\Microsoft DirectX SDK (June 2010)\ (your path might be different). This environment variable will be used by CMake to find the binaries and header files. + +* ### libSndFile + * http://www.mega-nerd.com/libsndfile/#Download + * Download and Install the Win32 Setup program. + * Create an environment variable called SNDFILE_DIR and set the value to C:\Program Files\libsndfile. 
This environment variable will be used by CMake to find the binary and header files. + +* ### libzmq + * http://zeromq.org/intro:get-the-software + * Download source code (zip) + * Follow their instructions, and build with mingw + * Create an environment variable called ZMQDIR and set the value to C:\libzmq\build\ (the location of the compiled version). This environment variable will be used by CMake to find the binary and header files. + +## Windows Build Instructions (libopenshot-audio) +In order to compile libopenshot-audio, launch a command prompt and enter the following commands. This does not require the MSYS2 prompt, but it should work in both the Windows command prompt and the MSYS2 prompt. + +``` +cd [libopenshot-audio repo folder] +mkdir build +cd build +cmake -G “MinGW Makefiles” ../ +mingw32-make +mingw32-make install +openshot-audio-test-sound (This should play a test sound) +``` + +## Windows Build Instructions (libopenshot) +Run the following commands to build libopenshot: + +``` +cd [libopenshot repo folder] +mkdir build +cd build +cmake -G "MinGW Makefiles" -DPYTHON_INCLUDE_DIR="C:/Python34/include/" -DPYTHON_LIBRARY="C:/Python34/libs/libpython34.a" ../ +mingw32-make +``` + +If you are missing any dependencies for libopenshot, you will receive error messages at this point. +Just install the missing dependencies, and run the above commands again. Repeat until no error +messages are displayed and the build process completes. + +Also, if you are having trouble building, please see the CMake Flags section above, as +it might provide a solution for finding a missing folder path, missing Python 3 library, etc... 
+ +To run all unit tests (and verify everything is working correctly), launch a terminal, and enter: + +``` +mingw32-make test +``` + +To auto-generate the documentation for libopenshot, launch a terminal, and enter: + +``` +mingw32-make doc +``` + +This will use doxygen to generate a folder of HTML files, with all classes and methods +documented. The folder is located at build/doc/html/. Once libopenshot has been successfully +built, we need to install it (i.e. copy it to the correct folder, so other libraries can find it). + +``` +mingw32-make install +``` + +This should copy the binary files to C:\Program Files\openshot\lib\, and the header +files to C:\Program Files\openshot\include\... This is where other projects will +look for the libopenshot files when building. Python 3 bindings are also installed +at this point. Let's verify the Python bindings work: + +``` +python3 +>>> import openshot +``` + +If no errors are displayed, you have successfully compiled and installed libopenshot on +your system. Congratulations and be sure to read our wiki on [Becoming an OpenShot Developer](https://github.com/OpenShot/openshot-qt/wiki/Become-a-Developer)! +Welcome to the OpenShot developer community! We look forward to meeting you! 
diff --git a/doc/InstallationGuide.pdf b/doc/InstallationGuide.pdf deleted file mode 100644 index 4cd294e5..00000000 Binary files a/doc/InstallationGuide.pdf and /dev/null differ diff --git a/include/AudioBufferSource.h b/include/AudioBufferSource.h index 57826f66..b1571c8d 100644 --- a/include/AudioBufferSource.h +++ b/include/AudioBufferSource.h @@ -37,7 +37,7 @@ #endif #include -#include "JuceLibraryCode/JuceHeader.h" +#include "JuceHeader.h" using namespace std; diff --git a/include/AudioReaderSource.h b/include/AudioReaderSource.h index 31b17d80..76c3dc7d 100644 --- a/include/AudioReaderSource.h +++ b/include/AudioReaderSource.h @@ -37,8 +37,8 @@ #endif #include -#include "JuceLibraryCode/JuceHeader.h" #include "ReaderBase.h" +#include "JuceHeader.h" using namespace std; diff --git a/include/AudioResampler.h b/include/AudioResampler.h index 412d49b1..b81bfc3e 100644 --- a/include/AudioResampler.h +++ b/include/AudioResampler.h @@ -38,9 +38,9 @@ #define _NDEBUG #endif -#include "JuceLibraryCode/JuceHeader.h" #include "AudioBufferSource.h" #include "Exceptions.h" +#include "JuceHeader.h" namespace openshot { diff --git a/include/ChunkReader.h b/include/ChunkReader.h index aa151093..b780602b 100644 --- a/include/ChunkReader.h +++ b/include/ChunkReader.h @@ -29,8 +29,6 @@ #define OPENSHOT_CHUNK_READER_H #include "ReaderBase.h" -#include "FFmpegReader.h" - #include #include #include @@ -107,7 +105,7 @@ namespace openshot string path; bool is_open; int64_t chunk_size; - FFmpegReader *local_reader; + ReaderBase *local_reader; ChunkLocation previous_location; ChunkVersion version; std::shared_ptr last_frame; diff --git a/include/Clip.h b/include/Clip.h index f30844b2..346629e4 100644 --- a/include/Clip.h +++ b/include/Clip.h @@ -36,7 +36,6 @@ #include #include #include -#include "JuceLibraryCode/JuceHeader.h" #include "AudioResampler.h" #include "ClipBase.h" #include "Color.h" @@ -44,18 +43,10 @@ #include "EffectBase.h" #include "Effects.h" #include "EffectInfo.h" 
-#include "FFmpegReader.h" #include "Fraction.h" -#include "FrameMapper.h" -#ifdef USE_IMAGEMAGICK - #include "ImageReader.h" - #include "TextReader.h" -#endif -#include "QtImageReader.h" -#include "ChunkReader.h" #include "KeyFrame.h" #include "ReaderBase.h" -#include "DummyReader.h" +#include "JuceHeader.h" using namespace std; using namespace openshot; @@ -136,7 +127,7 @@ namespace openshot { std::shared_ptr GetOrCreateFrame(int64_t number); /// Adjust the audio and image of a time mapped frame - std::shared_ptr get_time_mapped_frame(std::shared_ptr frame, int64_t frame_number); + void get_time_mapped_frame(std::shared_ptr frame, int64_t frame_number); /// Init default settings for a clip void init_settings(); diff --git a/include/ClipBase.h b/include/ClipBase.h index 06341640..3dae8a53 100644 --- a/include/ClipBase.h +++ b/include/ClipBase.h @@ -58,8 +58,6 @@ namespace openshot { float start; ///< The position in seconds to start playing (used to trim the beginning of a clip) float end; ///< The position in seconds to end playing (used to trim the ending of a clip) string previous_properties; ///< This string contains the previous JSON properties - int max_width; ///< The maximum image width needed by this clip (used for optimizations) - int max_height; ///< The maximium image height needed by this clip (used for optimizations) /// Generate JSON for a property Json::Value add_property_json(string name, float value, string type, string memo, Keyframe* keyframe, float min_value, float max_value, bool readonly, int64_t requested_frame); @@ -70,7 +68,7 @@ namespace openshot { public: /// Constructor for the base clip - ClipBase() { max_width = 0; max_height = 0; }; + ClipBase() { }; // Compare a clip using the Position() property bool operator< ( ClipBase& a) { return (Position() < a.Position()); } @@ -93,9 +91,6 @@ namespace openshot { void Start(float value) { start = value; } ///< Set start position (in seconds) of clip (trim start of video) void End(float 
value) { end = value; } ///< Set end position (in seconds) of clip (trim end of video) - /// Set Max Image Size (used for performance optimization) - void SetMaxSize(int width, int height) { max_width = width; max_height = height; }; - /// Get and Set JSON methods virtual string Json() = 0; ///< Generate JSON string of this object virtual void SetJson(string value) = 0; ///< Load JSON string into this object diff --git a/include/Coordinate.h b/include/Coordinate.h index 0ca6b7b8..bf561084 100644 --- a/include/Coordinate.h +++ b/include/Coordinate.h @@ -52,11 +52,6 @@ namespace openshot { * \endcode */ class Coordinate { - private: - bool increasing; ///< Is the Y value increasing or decreasing? - Fraction repeated; ///< Fraction of repeated Y values (for example, 1/3 would be the first Y value of 3 repeated values) - double delta; ///< This difference in Y value (from the previous unique Y value) - public: double X; ///< The X value of the coordinate (usually representing the frame #) double Y; ///< The Y value of the coordinate (usually representing the value of the property being animated) @@ -69,27 +64,6 @@ namespace openshot { /// @param y The Y coordinate (usually representing the value of the property being animated) Coordinate(double x, double y); - /// @brief Set the repeating Fraction (used internally on the timeline, to track changes to coordinates) - /// @param is_repeated The fraction representing how many times this coordinate Y value repeats (only used on the timeline) - void Repeat(Fraction is_repeated) { repeated=is_repeated; } - - /// Get the repeating Fraction (used internally on the timeline, to track changes to coordinates) - Fraction Repeat() { return repeated; } - - /// @brief Set the increasing flag (used internally on the timeline, to track changes to coordinates) - /// @param is_increasing Indicates if this coordinate Y value is increasing (when compared to the previous coordinate) - void IsIncreasing(bool is_increasing) { increasing = 
is_increasing; } - - /// Get the increasing flag (used internally on the timeline, to track changes to coordinates) - bool IsIncreasing() { return increasing; } - - /// @brief Set the delta / difference between previous coordinate value (used internally on the timeline, to track changes to coordinates) - /// @param new_delta Indicates how much this Y value differs from the previous Y value - void Delta(double new_delta) { delta=new_delta; } - - /// Get the delta / difference between previous coordinate value (used internally on the timeline, to track changes to coordinates) - float Delta() { return delta; } - /// Get and Set JSON methods string Json(); ///< Generate JSON string of this object Json::Value JsonValue(); ///< Generate Json::JsonValue for this object diff --git a/include/CrashHandler.h b/include/CrashHandler.h index e3a4bbe5..12c79a86 100644 --- a/include/CrashHandler.h +++ b/include/CrashHandler.h @@ -53,13 +53,15 @@ namespace openshot { class CrashHandler { private: /// Default constructor - CrashHandler(){}; // Don't allow user to create an instance of this singleton + CrashHandler(){return;}; // Don't allow user to create an instance of this singleton /// Default copy method - CrashHandler(CrashHandler const&){}; // Don't allow the user to copy this instance + //CrashHandler(CrashHandler const&){}; // Don't allow the user to copy this instance + CrashHandler(CrashHandler const&) = delete; // Don't allow the user to copy this instance /// Default assignment operator - CrashHandler & operator=(CrashHandler const&){}; // Don't allow the user to assign this instance + //CrashHandler & operator=(CrashHandler const&){}; // Don't allow the user to assign this instance + CrashHandler & operator=(CrashHandler const&) = delete; // Don't allow the user to assign this instance /// Private variable to keep track of singleton instance static CrashHandler *m_pInstance; diff --git a/include/DecklinkInput.h b/include/DecklinkInput.h index 9964461d..cfd5b6b1 100644 
--- a/include/DecklinkInput.h +++ b/include/DecklinkInput.h @@ -62,9 +62,9 @@ #include #include "DeckLinkAPI.h" -#include "../include/Frame.h" +#include "Frame.h" #include "CacheMemory.h" -#include "../include/OpenMPUtilities.h" +#include "OpenMPUtilities.h" /// Implementation of the Blackmagic Decklink API (used by the DecklinkReader) class DeckLinkInputDelegate : public IDeckLinkInputCallback diff --git a/include/DecklinkOutput.h b/include/DecklinkOutput.h index fb461438..ddb6e9bc 100644 --- a/include/DecklinkOutput.h +++ b/include/DecklinkOutput.h @@ -63,8 +63,8 @@ #include "DeckLinkAPI.h" #include "CacheMemory.h" -#include "../include/Frame.h" -#include "../include/OpenMPUtilities.h" +#include "Frame.h" +#include "OpenMPUtilities.h" enum OutputSignal { kOutputSignalPip = 0, diff --git a/include/EffectBase.h b/include/EffectBase.h index 209369a8..d38e3f45 100644 --- a/include/EffectBase.h +++ b/include/EffectBase.h @@ -32,8 +32,8 @@ #include #include #include "ClipBase.h" -#include "Frame.h" #include "Json.h" +#include "Frame.h" using namespace std; diff --git a/include/FFmpegReader.h b/include/FFmpegReader.h index 6072756a..eaa45943 100644 --- a/include/FFmpegReader.h +++ b/include/FFmpegReader.h @@ -42,8 +42,10 @@ #include #include #include "CacheMemory.h" +#include "Clip.h" #include "Exceptions.h" #include "OpenMPUtilities.h" +#include "Settings.h" using namespace std; diff --git a/include/FFmpegUtilities.h b/include/FFmpegUtilities.h index 578c6586..346da541 100644 --- a/include/FFmpegUtilities.h +++ b/include/FFmpegUtilities.h @@ -43,7 +43,15 @@ #include #include #include + // Change this to the first version swrescale works + #if (LIBAVFORMAT_VERSION_MAJOR >= 57) + #define USE_SW + #endif + #ifdef USE_SW + #include + #else #include + #endif #include #include #include @@ -106,7 +114,65 @@ #define PIX_FMT_YUV420P AV_PIX_FMT_YUV420P #endif - #if IS_FFMPEG_3_2 + #ifdef USE_SW + #define SWR_CONVERT(ctx, out, linesize, out_count, in, linesize2, in_count) \ + 
swr_convert(ctx, out, out_count, (const uint8_t **)in, in_count) + #define SWR_ALLOC() swr_alloc() + #define SWR_CLOSE(ctx) {} + #define SWR_FREE(ctx) swr_free(ctx) + #define SWR_INIT(ctx) swr_init(ctx) + #define SWRCONTEXT SwrContext + #else + #define SWR_CONVERT(ctx, out, linesize, out_count, in, linesize2, in_count) \ + avresample_convert(ctx, out, linesize, out_count, (uint8_t **)in, linesize2, in_count) + #define SWR_ALLOC() avresample_alloc_context() + #define SWR_CLOSE(ctx) avresample_close(ctx) + #define SWR_FREE(ctx) avresample_free(ctx) + #define SWR_INIT(ctx) avresample_open(ctx) + #define SWRCONTEXT AVAudioResampleContext + #endif + + + #if (LIBAVFORMAT_VERSION_MAJOR >= 58) + #define AV_REGISTER_ALL + #define AVCODEC_REGISTER_ALL + #define AV_FILENAME url + #define MY_INPUT_BUFFER_PADDING_SIZE AV_INPUT_BUFFER_PADDING_SIZE + #define AV_ALLOCATE_FRAME() av_frame_alloc() + #define AV_ALLOCATE_IMAGE(av_frame, pix_fmt, width, height) av_image_alloc(av_frame->data, av_frame->linesize, width, height, pix_fmt, 1) + #define AV_RESET_FRAME(av_frame) av_frame_unref(av_frame) + #define AV_FREE_FRAME(av_frame) av_frame_free(av_frame) + #define AV_FREE_PACKET(av_packet) av_packet_unref(av_packet) + #define AV_FREE_CONTEXT(av_context) avcodec_free_context(&av_context) + #define AV_GET_CODEC_TYPE(av_stream) av_stream->codecpar->codec_type + #define AV_FIND_DECODER_CODEC_ID(av_stream) av_stream->codecpar->codec_id + auto AV_GET_CODEC_CONTEXT = [](AVStream* av_stream, AVCodec* av_codec) { \ + AVCodecContext *context = avcodec_alloc_context3(av_codec); \ + avcodec_parameters_to_context(context, av_stream->codecpar); \ + return context; \ + }; + #define AV_GET_CODEC_PAR_CONTEXT(av_stream, av_codec) av_codec; + #define AV_GET_CODEC_FROM_STREAM(av_stream,codec_in) + #define AV_GET_CODEC_ATTRIBUTES(av_stream, av_context) av_stream->codecpar + #define AV_GET_CODEC_PIXEL_FORMAT(av_stream, av_context) (AVPixelFormat) av_stream->codecpar->format + #define 
AV_GET_SAMPLE_FORMAT(av_stream, av_context) av_stream->codecpar->format + #define AV_GET_IMAGE_SIZE(pix_fmt, width, height) av_image_get_buffer_size(pix_fmt, width, height, 1) + #define AV_COPY_PICTURE_DATA(av_frame, buffer, pix_fmt, width, height) av_image_fill_arrays(av_frame->data, av_frame->linesize, buffer, pix_fmt, width, height, 1) + #define AV_OUTPUT_CONTEXT(output_context, path) avformat_alloc_output_context2( output_context, NULL, NULL, path) + #define AV_OPTION_FIND(priv_data, name) av_opt_find(priv_data, name, NULL, 0, 0) + #define AV_OPTION_SET( av_stream, priv_data, name, value, avcodec) av_opt_set(priv_data, name, value, 0); avcodec_parameters_from_context(av_stream->codecpar, avcodec); + #define AV_FORMAT_NEW_STREAM(oc, st_codec, av_codec, av_st) av_st = avformat_new_stream(oc, NULL);\ + if (!av_st) \ + throw OutOfMemory("Could not allocate memory for the video stream.", path); \ + c = avcodec_alloc_context3(av_codec); \ + st_codec = c; \ + av_st->codecpar->codec_id = av_codec->id; + #define AV_COPY_PARAMS_FROM_CONTEXT(av_stream, av_codec) avcodec_parameters_from_context(av_stream->codecpar, av_codec); + #elif IS_FFMPEG_3_2 + #define AV_REGISTER_ALL av_register_all(); + #define AVCODEC_REGISTER_ALL avcodec_register_all(); + #define AV_FILENAME filename + #define MY_INPUT_BUFFER_PADDING_SIZE FF_INPUT_BUFFER_PADDING_SIZE #define AV_ALLOCATE_FRAME() av_frame_alloc() #define AV_ALLOCATE_IMAGE(av_frame, pix_fmt, width, height) av_image_alloc(av_frame->data, av_frame->linesize, width, height, pix_fmt, 1) #define AV_RESET_FRAME(av_frame) av_frame_unref(av_frame) @@ -138,6 +204,10 @@ av_st->codecpar->codec_id = av_codec->id; #define AV_COPY_PARAMS_FROM_CONTEXT(av_stream, av_codec) avcodec_parameters_from_context(av_stream->codecpar, av_codec); #elif LIBAVFORMAT_VERSION_MAJOR >= 55 + #define AV_REGISTER_ALL av_register_all(); + #define AVCODEC_REGISTER_ALL avcodec_register_all(); + #define AV_FILENAME filename + #define MY_INPUT_BUFFER_PADDING_SIZE 
FF_INPUT_BUFFER_PADDING_SIZE #define AV_ALLOCATE_FRAME() av_frame_alloc() #define AV_ALLOCATE_IMAGE(av_frame, pix_fmt, width, height) avpicture_alloc((AVPicture *) av_frame, pix_fmt, width, height) #define AV_RESET_FRAME(av_frame) av_frame_unref(av_frame) @@ -164,6 +234,10 @@ c = av_st->codec; #define AV_COPY_PARAMS_FROM_CONTEXT(av_stream, av_codec) #else + #define AV_REGISTER_ALL av_register_all(); + #define AVCODEC_REGISTER_ALL avcodec_register_all(); + #define AV_FILENAME filename + #define MY_INPUT_BUFFER_PADDING_SIZE FF_INPUT_BUFFER_PADDING_SIZE #define AV_ALLOCATE_FRAME() avcodec_alloc_frame() #define AV_ALLOCATE_IMAGE(av_frame, pix_fmt, width, height) avpicture_alloc((AVPicture *) av_frame, pix_fmt, width, height) #define AV_RESET_FRAME(av_frame) avcodec_get_frame_defaults(av_frame) diff --git a/include/FFmpegWriter.h b/include/FFmpegWriter.h index 8343002e..e219f72c 100644 --- a/include/FFmpegWriter.h +++ b/include/FFmpegWriter.h @@ -51,6 +51,7 @@ #include "Exceptions.h" #include "OpenMPUtilities.h" #include "ZmqLogger.h" +#include "Settings.h" using namespace std; @@ -174,8 +175,8 @@ namespace openshot int initial_audio_input_frame_size; int audio_input_position; int audio_encoder_buffer_size; - AVAudioResampleContext *avr; - AVAudioResampleContext *avr_planar; + SWRCONTEXT *avr; + SWRCONTEXT *avr_planar; /* Resample options */ int original_sample_rate; diff --git a/include/Frame.h b/include/Frame.h index a7ad509f..66d8ccfa 100644 --- a/include/Frame.h +++ b/include/Frame.h @@ -56,13 +56,13 @@ #ifdef USE_IMAGEMAGICK #include "Magick++.h" #endif -#include "JuceLibraryCode/JuceHeader.h" #include "ChannelLayouts.h" #include "AudioBufferSource.h" #include "AudioResampler.h" #include "Fraction.h" +#include "JuceHeader.h" - +#pragma SWIG nowarn=362 using namespace std; namespace openshot diff --git a/include/FrameMapper.h b/include/FrameMapper.h index e70fdbc5..216fe73f 100644 --- a/include/FrameMapper.h +++ b/include/FrameMapper.h @@ -34,11 +34,11 @@ #include 
#include #include "CacheMemory.h" -#include "../include/ReaderBase.h" -#include "../include/Frame.h" -#include "../include/Fraction.h" -#include "../include/Exceptions.h" -#include "../include/KeyFrame.h" +#include "ReaderBase.h" +#include "Frame.h" +#include "Fraction.h" +#include "Exceptions.h" +#include "KeyFrame.h" // Include FFmpeg headers and macros @@ -146,7 +146,7 @@ namespace openshot ReaderBase *reader; // The source video reader CacheMemory final_cache; // Cache of actual Frame objects bool is_dirty; // When this is true, the next call to GetFrame will re-init the mapping - AVAudioResampleContext *avr; // Audio resampling context object + SWRCONTEXT *avr; // Audio resampling context object // Internal methods used by init void AddField(int64_t frame); diff --git a/include/OpenMPUtilities.h b/include/OpenMPUtilities.h index 8a95a950..65047c31 100644 --- a/include/OpenMPUtilities.h +++ b/include/OpenMPUtilities.h @@ -29,8 +29,14 @@ #define OPENSHOT_OPENMP_UTILITIES_H #include +#include +#include + +// Calculate the # of OpenMP and FFmpeg Threads to allow. We are limiting both +// of these based on our own performance tests (more is not always better). 
+#define OPEN_MP_NUM_PROCESSORS (min(omp_get_num_procs(), 6)) +#define FF_NUM_PROCESSORS (min(omp_get_num_procs(), 12)) + - // Calculate the # of OpenMP Threads to allow - #define OPEN_MP_NUM_PROCESSORS omp_get_num_procs() #endif diff --git a/include/OpenShot.h b/include/OpenShot.h index e4b60f3e..207f4b42 100644 --- a/include/OpenShot.h +++ b/include/OpenShot.h @@ -134,5 +134,6 @@ #include "Profiles.h" #include "QtImageReader.h" #include "Timeline.h" +#include "Settings.h" #endif diff --git a/include/PlayerBase.h b/include/PlayerBase.h index 80cdf708..ecc222a8 100644 --- a/include/PlayerBase.h +++ b/include/PlayerBase.h @@ -29,7 +29,7 @@ #define OPENSHOT_PLAYER_BASE_H #include -#include "../include/ReaderBase.h" +#include "ReaderBase.h" using namespace std; diff --git a/include/Qt/AudioPlaybackThread.h b/include/Qt/AudioPlaybackThread.h index 9f534749..1d654756 100644 --- a/include/Qt/AudioPlaybackThread.h +++ b/include/Qt/AudioPlaybackThread.h @@ -29,9 +29,9 @@ #ifndef OPENSHOT_AUDIO_PLAYBACK_THREAD_H #define OPENSHOT_AUDIO_PLAYBACK_THREAD_H -#include "../../include/ReaderBase.h" -#include "../../include/RendererBase.h" -#include "../../include/AudioReaderSource.h" +#include "../ReaderBase.h" +#include "../RendererBase.h" +#include "../AudioReaderSource.h" namespace openshot { @@ -57,12 +57,15 @@ namespace openshot class AudioDeviceManagerSingleton { private: /// Default constructor (Don't allow user to create an instance of this singleton) - AudioDeviceManagerSingleton(){}; + AudioDeviceManagerSingleton(){ initialise_error=""; }; /// Private variable to keep track of singleton instance static AudioDeviceManagerSingleton * m_pInstance; public: + /// Error found during JUCE initialise method + string initialise_error; + /// Create or get an instance of this singleton (invoke the class with this method) static AudioDeviceManagerSingleton * Instance(int numChannels); @@ -78,52 +81,55 @@ namespace openshot */ class AudioPlaybackThread : Thread { - AudioSourcePlayer 
player; - AudioTransportSource transport; - MixerAudioSource mixer; - AudioReaderSource *source; - double sampleRate; - int numChannels; - WaitableEvent play; - WaitableEvent played; - int buffer_size; - bool is_playing; - SafeTimeSliceThread time_thread; - - /// Constructor - AudioPlaybackThread(); - /// Destructor - ~AudioPlaybackThread(); + AudioSourcePlayer player; + AudioTransportSource transport; + MixerAudioSource mixer; + AudioReaderSource *source; + double sampleRate; + int numChannels; + WaitableEvent play; + WaitableEvent played; + int buffer_size; + bool is_playing; + SafeTimeSliceThread time_thread; - /// Set the current thread's reader - void Reader(ReaderBase *reader); + /// Constructor + AudioPlaybackThread(); + /// Destructor + ~AudioPlaybackThread(); - /// Get the current frame object (which is filling the buffer) - std::shared_ptr getFrame(); + /// Set the current thread's reader + void Reader(ReaderBase *reader); - /// Get the current frame number being played - int64_t getCurrentFramePosition(); + /// Get the current frame object (which is filling the buffer) + std::shared_ptr getFrame(); - /// Play the audio - void Play(); + /// Get the current frame number being played + int64_t getCurrentFramePosition(); - /// Seek the audio thread - void Seek(int64_t new_position); + /// Play the audio + void Play(); - /// Stop the audio playback - void Stop(); + /// Seek the audio thread + void Seek(int64_t new_position); - /// Start thread - void run(); - - /// Set Speed (The speed and direction to playback a reader (1=normal, 2=fast, 3=faster, -1=rewind, etc...) - void setSpeed(int new_speed) { if (source) source->setSpeed(new_speed); } + /// Stop the audio playback + void Stop(); - /// Get Speed (The speed and direction to playback a reader (1=normal, 2=fast, 3=faster, -1=rewind, etc...) 
- int getSpeed() const { if (source) return source->getSpeed(); else return 1; } + /// Start thread + void run(); - friend class PlayerPrivate; - friend class QtPlayer; + /// Set Speed (The speed and direction to playback a reader (1=normal, 2=fast, 3=faster, -1=rewind, etc...) + void setSpeed(int new_speed) { if (source) source->setSpeed(new_speed); } + + /// Get Speed (The speed and direction to playback a reader (1=normal, 2=fast, 3=faster, -1=rewind, etc...) + int getSpeed() const { if (source) return source->getSpeed(); else return 1; } + + /// Get Audio Error (if any) + string getError() { return AudioDeviceManagerSingleton::Instance(numChannels)->initialise_error; } + + friend class PlayerPrivate; + friend class QtPlayer; }; } diff --git a/include/Qt/PlayerPrivate.h b/include/Qt/PlayerPrivate.h index 3311dea9..f626fb99 100644 --- a/include/Qt/PlayerPrivate.h +++ b/include/Qt/PlayerPrivate.h @@ -29,12 +29,12 @@ #ifndef OPENSHOT_PLAYER_PRIVATE_H #define OPENSHOT_PLAYER_PRIVATE_H -#include "../../include/ReaderBase.h" -#include "../../include/RendererBase.h" -#include "../../include/AudioReaderSource.h" -#include "../../include/Qt/AudioPlaybackThread.h" -#include "../../include/Qt/VideoPlaybackThread.h" -#include "../../include/Qt/VideoCacheThread.h" +#include "../ReaderBase.h" +#include "../RendererBase.h" +#include "../AudioReaderSource.h" +#include "../Qt/AudioPlaybackThread.h" +#include "../Qt/VideoPlaybackThread.h" +#include "../Qt/VideoCacheThread.h" namespace openshot { diff --git a/include/Qt/VideoCacheThread.h b/include/Qt/VideoCacheThread.h index 3f781f62..4afb7ee5 100644 --- a/include/Qt/VideoCacheThread.h +++ b/include/Qt/VideoCacheThread.h @@ -28,9 +28,9 @@ #ifndef OPENSHOT_VIDEO_CACHE_THREAD_H #define OPENSHOT_VIDEO_CACHE_THREAD_H -#include "../../include/OpenMPUtilities.h" -#include "../../include/ReaderBase.h" -#include "../../include/RendererBase.h" +#include "../OpenMPUtilities.h" +#include "../ReaderBase.h" +#include "../RendererBase.h" 
namespace openshot { diff --git a/include/Qt/VideoPlaybackThread.h b/include/Qt/VideoPlaybackThread.h index 03ffe6d2..90dc3681 100644 --- a/include/Qt/VideoPlaybackThread.h +++ b/include/Qt/VideoPlaybackThread.h @@ -29,8 +29,8 @@ #ifndef OPENSHOT_VIDEO_PLAYBACK_THREAD_H #define OPENSHOT_VIDEO_PLAYBACK_THREAD_H -#include "../../include/ReaderBase.h" -#include "../../include/RendererBase.h" +#include "../ReaderBase.h" +#include "../RendererBase.h" namespace openshot { diff --git a/include/QtImageReader.h b/include/QtImageReader.h index 772a879e..6b260f15 100644 --- a/include/QtImageReader.h +++ b/include/QtImageReader.h @@ -28,19 +28,14 @@ #ifndef OPENSHOT_QIMAGE_READER_H #define OPENSHOT_QIMAGE_READER_H -#include "ReaderBase.h" - #include #include #include #include #include #include -#include -#include -#include -#include "CacheMemory.h" #include "Exceptions.h" +#include "ReaderBase.h" using namespace std; @@ -110,9 +105,6 @@ namespace openshot Json::Value JsonValue(); ///< Generate Json::JsonValue for this object void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object - /// Set Max Image Size (used for performance optimization) - void SetMaxSize(int width, int height); - /// Open File - which is called by the constructor automatically void Open(); }; diff --git a/include/QtPlayer.h b/include/QtPlayer.h index 8774b886..a1a7ee0c 100644 --- a/include/QtPlayer.h +++ b/include/QtPlayer.h @@ -31,9 +31,9 @@ #include #include -#include "../include/PlayerBase.h" -#include "../include/Qt/PlayerPrivate.h" -#include "../include/RendererBase.h" +#include "PlayerBase.h" +#include "Qt/PlayerPrivate.h" +#include "RendererBase.h" using namespace std; @@ -59,6 +59,9 @@ namespace openshot /// Close audio device void CloseAudioDevice(); + /// Get Error (if any) + string GetError(); + /// Play the video void Play(); diff --git a/include/ReaderBase.h b/include/ReaderBase.h index 2b3ee917..b0a1b3db 100644 --- a/include/ReaderBase.h +++ b/include/ReaderBase.h @@ 
-35,6 +35,7 @@ #include #include "CacheMemory.h" #include "ChannelLayouts.h" +#include "ClipBase.h" #include "Fraction.h" #include "Frame.h" #include "Json.h" @@ -99,9 +100,7 @@ namespace openshot /// Section lock for multiple threads CriticalSection getFrameCriticalSection; CriticalSection processingCriticalSection; - - int max_width; ///< The maximum image width needed by this clip (used for optimizations) - int max_height; ///< The maximium image height needed by this clip (used for optimizations) + ClipBase* parent; public: @@ -111,6 +110,12 @@ namespace openshot /// Information about the current media file ReaderInfo info; + /// Parent clip object of this reader (which can be unparented and NULL) + ClipBase* GetClip(); + + /// Set parent clip object of this reader + void SetClip(ClipBase* clip); + /// Close the reader (and any resources it was consuming) virtual void Close() = 0; @@ -140,9 +145,6 @@ namespace openshot virtual Json::Value JsonValue() = 0; ///< Generate Json::JsonValue for this object virtual void SetJsonValue(Json::Value root) = 0; ///< Load Json::JsonValue into this object - /// Set Max Image Size (used for performance optimization) - void SetMaxSize(int width, int height) { max_width = width; max_height = height; }; - /// Open the reader (and start consuming resources, such as images or video files) virtual void Open() = 0; }; diff --git a/include/RendererBase.h b/include/RendererBase.h index 3f1c0b1c..c71d664a 100644 --- a/include/RendererBase.h +++ b/include/RendererBase.h @@ -28,7 +28,7 @@ #ifndef OPENSHOT_RENDERER_BASE_H #define OPENSHOT_RENDERER_BASE_H -#include "../include/Frame.h" +#include "Frame.h" #include // for realloc #include diff --git a/include/Settings.h b/include/Settings.h new file mode 100644 index 00000000..e46f12e0 --- /dev/null +++ b/include/Settings.h @@ -0,0 +1,103 @@ +/** + * @file + * @brief Header file for global Settings class + * @author Jonathan Thomas + * + * @section LICENSE + * + * Copyright (c) 2008-2014 
OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#ifndef OPENSHOT_SETTINGS_H +#define OPENSHOT_SETTINGS_H + + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "JuceHeader.h" + + +using namespace std; + +namespace openshot { + + /** + * @brief This class contains settings used by libopenshot (and can be safely toggled at any point) + * + * Settings class is used primarily to toggle scale settings between preview and rendering, and adjust + * other runtime related settings.
+ */ + class Settings { + private: + + /// Default constructor + Settings(){}; // Don't allow user to create an instance of this singleton + +#if __GNUC__ >=7 + /// Default copy method + Settings(Settings const&) = delete; // Don't allow the user to assign this instance + + /// Default assignment operator + Settings & operator=(Settings const&) = delete; // Don't allow the user to assign this instance +#else + /// Default copy method + Settings(Settings const&) {}; // Don't allow the user to assign this instance + + /// Default assignment operator + Settings & operator=(Settings const&); // Don't allow the user to assign this instance +#endif + + /// Private variable to keep track of singleton instance + static Settings * m_pInstance; + + public: + /// Use video card for faster video decoding (if supported) + bool HARDWARE_DECODE = false; + + /// Use video card for faster video encoding (if supported) + bool HARDWARE_ENCODE = false; + + /// Scale mode used in FFmpeg decoding and encoding (used as an optimization for faster previews) + bool HIGH_QUALITY_SCALING = false; + + /// Maximum width for image data (useful for optimizing for a smaller preview or render) + int MAX_WIDTH = 0; + + /// Maximum height for image data (useful for optimizing for a smaller preview or render) + int MAX_HEIGHT = 0; + + /// Wait for OpenMP task to finish before continuing (used to limit threads on slower systems) + bool WAIT_FOR_VIDEO_PROCESSING_TASK = false; + + /// Create or get an instance of this settings singleton (invoke the class with this method) + static Settings * Instance(); + }; + +} + +#endif diff --git a/include/Timeline.h b/include/Timeline.h index ed5c2ab3..312add2e 100644 --- a/include/Timeline.h +++ b/include/Timeline.h @@ -48,6 +48,7 @@ #include "KeyFrame.h" #include "OpenMPUtilities.h" #include "ReaderBase.h" +#include "Settings.h" using namespace std; using namespace openshot; @@ -265,6 +266,10 @@ namespace openshot { Json::Value JsonValue(); ///< Generate
Json::JsonValue for this object void SetJsonValue(Json::Value root); ///< Load Json::JsonValue into this object + /// Set Max Image Size (used for performance optimization). Convenience function for setting + /// Settings::Instance()->MAX_WIDTH and Settings::Instance()->MAX_HEIGHT. + void SetMaxSize(int width, int height); + /// @brief Apply a special formatted JSON object, which represents a change to the timeline (add, update, delete) /// This is primarily designed to keep the timeline (and its child objects... such as clips and effects) in sync /// with another application... such as OpenShot Video Editor (http://www.openshot.org). diff --git a/include/Version.h b/include/Version.h index 971d5cfe..6077cde5 100644 --- a/include/Version.h +++ b/include/Version.h @@ -36,8 +36,8 @@ #define OPENSHOT_VERSION_MAJOR 0; /// Major version number is incremented when huge features are added or improved. #define OPENSHOT_VERSION_MINOR 2; /// Minor version is incremented when smaller (but still very important) improvements are added. -#define OPENSHOT_VERSION_BUILD 0; /// Build number is incremented when minor bug fixes and less important improvements are added. -#define OPENSHOT_VERSION_SO 15; /// Shared object version number. This increments any time the API and ABI changes (so old apps will no longer link) +#define OPENSHOT_VERSION_BUILD 3; /// Build number is incremented when minor bug fixes and less important improvements are added. +#define OPENSHOT_VERSION_SO 17; /// Shared object version number. This increments any time the API and ABI changes (so old apps will no longer link) #define OPENSHOT_VERSION_MAJOR_MINOR STRINGIZE(OPENSHOT_VERSION_MAJOR) "." STRINGIZE(OPENSHOT_VERSION_MINOR); /// A string of the "Major.Minor" version #define OPENSHOT_VERSION_ALL STRINGIZE(OPENSHOT_VERSION_MAJOR) "." STRINGIZE(OPENSHOT_VERSION_MINOR) "." 
STRINGIZE(OPENSHOT_VERSION_BUILD); /// A string of the entire version "Major.Minor.Build" diff --git a/include/ZmqLogger.h b/include/ZmqLogger.h index c134f2cf..8ababac0 100644 --- a/include/ZmqLogger.h +++ b/include/ZmqLogger.h @@ -29,7 +29,6 @@ #define OPENSHOT_LOGGER_H -#include "JuceLibraryCode/JuceHeader.h" #include #include #include @@ -40,6 +39,7 @@ #include #include #include +#include "JuceHeader.h" using namespace std; @@ -47,11 +47,10 @@ using namespace std; namespace openshot { /** - * @brief This abstract class is the base class, used by all readers in libopenshot. + * @brief This class is used for logging and sending those logs over a ZeroMQ socket to a listener * - * Readers are types of classes that read video, audio, and image files, and - * return openshot::Frame objects. The only requirements for a 'reader', are to - * derive from this base class, implement the GetFrame method, and call the InitFileInfo() method. + * OpenShot desktop editor listens to this port, to receive libopenshot debug output. It both logs to + * a file and sends the stdout over a socket.
*/ class ZmqLogger { private: @@ -72,11 +71,19 @@ namespace openshot { /// Default constructor ZmqLogger(){}; // Don't allow user to create an instance of this singleton +#if __GNUC__ >=7 /// Default copy method - ZmqLogger(ZmqLogger const&){}; // Don't allow the user to copy this instance + ZmqLogger(ZmqLogger const&) = delete; // Don't allow the user to assign this instance /// Default assignment operator - ZmqLogger & operator=(ZmqLogger const&){}; // Don't allow the user to assign this instance + ZmqLogger & operator=(ZmqLogger const&) = delete; // Don't allow the user to assign this instance +#else + /// Default copy method + ZmqLogger(ZmqLogger const&) {}; // Don't allow the user to assign this instance + + /// Default assignment operator + ZmqLogger & operator=(ZmqLogger const&); // Don't allow the user to assign this instance +#endif /// Private variable to keep track of singleton instance static ZmqLogger * m_pInstance; diff --git a/include/effects/Mask.h b/include/effects/Mask.h index ad1a6aab..ef707f5f 100644 --- a/include/effects/Mask.h +++ b/include/effects/Mask.h @@ -65,6 +65,7 @@ namespace openshot private: ReaderBase *reader; std::shared_ptr original_mask; + bool needs_refresh; /// Init effect settings void init_effect_details(); diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 0c4ff990..71541432 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -37,12 +37,12 @@ IF (WIN32) ENDIF(WIN32) IF (APPLE) # If you still get errors compiling with GCC 4.8, mac headers need to be patched: http://hamelot.co.uk/programming/osx-gcc-dispatch_block_t-has-not-been-declared-invalid-typedef/ - SET_PROPERTY(GLOBAL PROPERTY JUCE_MAC "JUCE_MAC") - ADD_DEFINITIONS(-DNDEBUG) - SET(EXTENSION "mm") - - SET(JUCE_PLATFORM_SPECIFIC_DIR build/macosx/platform_specific_code) - SET(JUCE_PLATFORM_SPECIFIC_LIBRARIES "-framework Carbon -framework Cocoa -framework CoreFoundation -framework CoreAudio -framework CoreMidi -framework IOKit -framework AGL -framework 
AudioToolbox -framework QuartzCore -lobjc -framework Accelerate") + SET_PROPERTY(GLOBAL PROPERTY JUCE_MAC "JUCE_MAC") + ADD_DEFINITIONS(-DNDEBUG) + SET(EXTENSION "mm") + + SET(JUCE_PLATFORM_SPECIFIC_DIR build/macosx/platform_specific_code) + SET(JUCE_PLATFORM_SPECIFIC_LIBRARIES "-framework Carbon -framework Cocoa -framework CoreFoundation -framework CoreAudio -framework CoreMidi -framework IOKit -framework AGL -framework AudioToolbox -framework QuartzCore -lobjc -framework Accelerate") ENDIF(APPLE) ################ IMAGE MAGICK ################## @@ -74,20 +74,43 @@ IF (ImageMagick_FOUND) SET(CMAKE_SWIG_FLAGS "-DUSE_IMAGEMAGICK=1") ENDIF (ImageMagick_FOUND) - + ################### FFMPEG ##################### # Find FFmpeg libraries (used for video encoding / decoding) FIND_PACKAGE(FFmpeg REQUIRED) -# Include FFmpeg headers (needed for compile) -include_directories(${FFMPEG_INCLUDE_DIR}) +IF (AVCODEC_FOUND) + include_directories(${AVCODEC_INCLUDE_DIRS}) +ENDIF (AVCODEC_FOUND) +IF (AVDEVICE_FOUND) + include_directories(${AVDEVICE_INCLUDE_DIRS}) +ENDIF (AVDEVICE_FOUND) +IF (AVFORMAT_FOUND) + include_directories(${AVFORMAT_INCLUDE_DIRS}) +ENDIF (AVFORMAT_FOUND) +IF (AVFILTER_FOUND) + include_directories(${AVFILTER_INCLUDE_DIRS}) +ENDIF (AVFILTER_FOUND) +IF (AVUTIL_FOUND) + include_directories(${AVUTIL_INCLUDE_DIRS}) +ENDIF (AVUTIL_FOUND) +IF (POSTPROC_FOUND) + include_directories(${POSTPROC_INCLUDE_DIRS}) +ENDIF (POSTPROC_FOUND) +IF (SWSCALE_FOUND) + include_directories(${SWSCALE_INCLUDE_DIRS}) +ENDIF (SWSCALE_FOUND) +IF (SWRESAMPLE_FOUND) + include_directories(${SWRESAMPLE_INCLUDE_DIRS}) +ENDIF (SWRESAMPLE_FOUND) +IF (AVRESAMPLE_FOUND) + include_directories(${AVRESAMPLE_INCLUDE_DIRS}) +ENDIF (AVRESAMPLE_FOUND) ################# LIBOPENSHOT-AUDIO ################### # Find JUCE-based openshot Audio libraries FIND_PACKAGE(OpenShotAudio REQUIRED) -message('LIBOPENSHOT_AUDIO_INCLUDE_DIRS: ${LIBOPENSHOT_AUDIO_INCLUDE_DIRS}') - # Include Juce headers (needed for compile) 
include_directories(${LIBOPENSHOT_AUDIO_INCLUDE_DIRS}) @@ -112,11 +135,11 @@ add_definitions(${Qt5Gui_DEFINITIONS}) add_definitions(${Qt5Multimedia_DEFINITIONS}) add_definitions(${Qt5MultimediaWidgets_DEFINITIONS}) -SET(QT_LIBRARIES ${Qt5Widgets_LIBRARIES} - ${Qt5Core_LIBRARIES} - ${Qt5Gui_LIBRARIES} - ${Qt5Multimedia_LIBRARIES} - ${Qt5MultimediaWidgets_LIBRARIES}) +SET(QT_LIBRARIES ${Qt5Widgets_LIBRARIES} + ${Qt5Core_LIBRARIES} + ${Qt5Gui_LIBRARIES} + ${Qt5Multimedia_LIBRARIES} + ${Qt5MultimediaWidgets_LIBRARIES}) # Set compiler flags for Qt set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${Qt5Widgets_EXECUTABLE_COMPILE_FLAGS} ") @@ -133,15 +156,15 @@ qt5_wrap_cpp(MOC_FILES ${QT_HEADER_FILES}) # Find BlackMagic DeckLinkAPI libraries IF (ENABLE_BLACKMAGIC) FIND_PACKAGE(BlackMagic) - + IF (BLACKMAGIC_FOUND) # Include headers (needed for compile) include_directories(${BLACKMAGIC_INCLUDE_DIR}) - + # define a global var (used in the C++) add_definitions( -DUSE_BLACKMAGIC=1 ) SET(CMAKE_SWIG_FLAGS "-DUSE_BLACKMAGIC=1") - + ENDIF (BLACKMAGIC_FOUND) ENDIF (ENABLE_BLACKMAGIC) @@ -150,23 +173,36 @@ ENDIF (ENABLE_BLACKMAGIC) FIND_PACKAGE(OpenMP) if (OPENMP_FOUND) - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${OpenMP_CXX_FLAGS} ") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${OpenMP_CXX_FLAGS} ") endif(OPENMP_FOUND) ################### ZEROMQ ##################### # Find ZeroMQ library (used for socket communication & logging) FIND_PACKAGE(ZMQ REQUIRED) -# Include FFmpeg headers (needed for compile) +# Include ZeroMQ headers (needed for compile) include_directories(${ZMQ_INCLUDE_DIRS}) +################### RESVG ##################### +# Find resvg library (used for rendering svg files) +FIND_PACKAGE(RESVG) + +# Include resvg headers (optional SVG library) +if (RESVG_FOUND) + include_directories(${RESVG_INCLUDE_DIRS}) + + # define a global var (used in the C++) + add_definitions( -DUSE_RESVG=1 ) + SET(CMAKE_SWIG_FLAGS "-DUSE_RESVG=1") +endif(RESVG_FOUND) + ################### JSONCPP 
##################### # Include jsoncpp headers (needed for JSON parsing) if (USE_SYSTEM_JSONCPP) find_package(JsonCpp REQUIRED) include_directories(${JSONCPP_INCLUDE_DIRS}) else() - message("Using embedded JsonCpp") + message("-- Could NOT find JsonCpp library (Using embedded JsonCpp instead)") include_directories("../thirdparty/jsoncpp/include") endif(USE_SYSTEM_JSONCPP) @@ -182,8 +218,8 @@ FILE(GLOB QT_PLAYER_FILES "${CMAKE_CURRENT_SOURCE_DIR}/Qt/*.cpp") ############### SET LIBRARY SOURCE FILES ################# SET ( OPENSHOT_SOURCE_FILES - AudioBufferSource.cpp - AudioReaderSource.cpp + AudioBufferSource.cpp + AudioReaderSource.cpp AudioResampler.cpp CacheBase.cpp CacheDisk.cpp @@ -214,38 +250,39 @@ SET ( OPENSHOT_SOURCE_FILES Profiles.cpp QtImageReader.cpp QtPlayer.cpp + Settings.cpp Timeline.cpp - + # Qt Video Player ${QT_PLAYER_FILES} ${MOC_FILES}) - IF (NOT USE_SYSTEM_JSONCPP) - # Third Party JSON Parser - SET ( OPENSHOT_SOURCE_FILES ${OPENSHOT_SOURCE_FILES} - ../thirdparty/jsoncpp/src/lib_json/json_reader.cpp - ../thirdparty/jsoncpp/src/lib_json/json_value.cpp - ../thirdparty/jsoncpp/src/lib_json/json_writer.cpp) - ENDIF (NOT USE_SYSTEM_JSONCPP) +IF (NOT USE_SYSTEM_JSONCPP) + # Third Party JSON Parser + SET ( OPENSHOT_SOURCE_FILES ${OPENSHOT_SOURCE_FILES} + ../thirdparty/jsoncpp/src/lib_json/json_reader.cpp + ../thirdparty/jsoncpp/src/lib_json/json_value.cpp + ../thirdparty/jsoncpp/src/lib_json/json_writer.cpp) +ENDIF (NOT USE_SYSTEM_JSONCPP) + +# ImageMagic related files +IF (ImageMagick_FOUND) + SET ( OPENSHOT_SOURCE_FILES ${OPENSHOT_SOURCE_FILES} + ImageReader.cpp + ImageWriter.cpp + TextReader.cpp) +ENDIF (ImageMagick_FOUND) + +# BlackMagic related files +IF (BLACKMAGIC_FOUND) + SET ( OPENSHOT_SOURCE_FILES ${OPENSHOT_SOURCE_FILES} + DecklinkInput.cpp + DecklinkReader.cpp + DecklinkOutput.cpp + DecklinkWriter.cpp) +ENDIF (BLACKMAGIC_FOUND) - # ImageMagic related files - IF (ImageMagick_FOUND) - SET ( OPENSHOT_SOURCE_FILES ${OPENSHOT_SOURCE_FILES} - 
ImageReader.cpp - ImageWriter.cpp - TextReader.cpp) - ENDIF (ImageMagick_FOUND) - # BlackMagic related files - IF (BLACKMAGIC_FOUND) - SET ( OPENSHOT_SOURCE_FILES ${OPENSHOT_SOURCE_FILES} - DecklinkInput.cpp - DecklinkReader.cpp - DecklinkOutput.cpp - DecklinkWriter.cpp) - ENDIF (BLACKMAGIC_FOUND) - - # Get list of headers file(GLOB_RECURSE headers ${CMAKE_SOURCE_DIR}/include/*.h) @@ -254,44 +291,75 @@ SET(CMAKE_MACOSX_RPATH 0) ############### CREATE LIBRARY ################# # Create shared openshot library -add_library(openshot SHARED - ${OPENSHOT_SOURCE_FILES} - ${headers} ) +add_library(openshot SHARED + ${OPENSHOT_SOURCE_FILES} + ${headers} ) # Set SONAME and other library properties set_target_properties(openshot - PROPERTIES - VERSION ${PROJECT_VERSION} - SOVERSION ${SO_VERSION} - INSTALL_NAME_DIR "${CMAKE_INSTALL_PREFIX}/lib" - ) + PROPERTIES + VERSION ${PROJECT_VERSION} + SOVERSION ${SO_VERSION} + INSTALL_NAME_DIR "${CMAKE_INSTALL_PREFIX}/lib" + ) ############### LINK LIBRARY ################# SET ( REQUIRED_LIBRARIES - ${FFMPEG_LIBRARIES} ${LIBOPENSHOT_AUDIO_LIBRARIES} ${QT_LIBRARIES} ${PROFILER} ${JSONCPP_LIBRARY} ${ZMQ_LIBRARIES} ) - - IF (OPENMP_FOUND) - SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${OpenMP_CXX_FLAGS} ) - ENDIF (OPENMP_FOUND) - - IF (ImageMagick_FOUND) - SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${ImageMagick_LIBRARIES} ) - ENDIF (ImageMagick_FOUND) - IF (BLACKMAGIC_FOUND) - SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${BLACKMAGIC_LIBRARY_DIR} ) - ENDIF (BLACKMAGIC_FOUND) +IF (AVCODEC_FOUND) + SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${AVCODEC_LIBRARIES} ) +ENDIF (AVCODEC_FOUND) +IF (AVDEVICE_FOUND) + SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${AVDEVICE_LIBRARIES} ) +ENDIF (AVDEVICE_FOUND) +IF (AVFORMAT_FOUND) + SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${AVFORMAT_LIBRARIES} ) +ENDIF (AVFORMAT_FOUND) +IF (AVFILTER_FOUND) + SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${AVFILTER_LIBRARIES} ) +ENDIF 
(AVFILTER_FOUND) +IF (AVUTIL_FOUND) + SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${AVUTIL_LIBRARIES} ) +ENDIF (AVUTIL_FOUND) +IF (POSTPROC_FOUND) + SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${POSTPROC_LIBRARIES} ) +ENDIF (POSTPROC_FOUND) +IF (SWSCALE_FOUND) + SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${SWSCALE_LIBRARIES} ) +ENDIF (SWSCALE_FOUND) +IF (SWRESAMPLE_FOUND) + SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${SWRESAMPLE_LIBRARIES} ) +ENDIF (SWRESAMPLE_FOUND) +IF (AVRESAMPLE_FOUND) + SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${AVRESAMPLE_LIBRARIES} ) +ENDIF (AVRESAMPLE_FOUND) - IF (WIN32) - # Required for exception handling on Windows - SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} "imagehlp" "dbghelp" ) - ENDIF(WIN32) +IF (RESVG_FOUND) + SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${RESVG_LIBRARIES} ) +ENDIF(RESVG_FOUND) + +IF (OPENMP_FOUND) + SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${OpenMP_CXX_FLAGS} ) +ENDIF (OPENMP_FOUND) + +IF (ImageMagick_FOUND) + SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${ImageMagick_LIBRARIES} ) +ENDIF (ImageMagick_FOUND) + +IF (BLACKMAGIC_FOUND) + SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} ${BLACKMAGIC_LIBRARY_DIR} ) +ENDIF (BLACKMAGIC_FOUND) + +IF (WIN32) + # Required for exception handling on Windows + SET ( REQUIRED_LIBRARIES ${REQUIRED_LIBRARIES} "imagehlp" "dbghelp" ) +ENDIF(WIN32) # Link all referenced libraries target_link_libraries(openshot ${REQUIRED_LIBRARIES}) @@ -314,9 +382,9 @@ target_link_libraries(openshot-player openshot) ############### TEST BLACKMAGIC CAPTURE APP ################ IF (BLACKMAGIC_FOUND) # Create test executable - add_executable(openshot-blackmagic - examples/ExampleBlackmagic.cpp) - + add_executable(openshot-blackmagic + examples/ExampleBlackmagic.cpp) + # Link test executable to the new library target_link_libraries(openshot-blackmagic openshot) ENDIF (BLACKMAGIC_FOUND) @@ -330,13 +398,13 @@ set(LIB_INSTALL_DIR lib${LIB_SUFFIX}) # determine correct lib 
folder # Install primary library INSTALL( TARGETS openshot - ARCHIVE DESTINATION ${LIB_INSTALL_DIR} - LIBRARY DESTINATION ${LIB_INSTALL_DIR} - COMPONENT library ) - -INSTALL(FILES ${headers} - DESTINATION ${CMAKE_INSTALL_PREFIX}/include/libopenshot ) + ARCHIVE DESTINATION ${LIB_INSTALL_DIR} + LIBRARY DESTINATION ${LIB_INSTALL_DIR} + COMPONENT library ) +INSTALL(DIRECTORY ${CMAKE_SOURCE_DIR}/include/ + DESTINATION ${CMAKE_INSTALL_PREFIX}/include/libopenshot + FILES_MATCHING PATTERN "*.h") ############### CPACK PACKAGING ############## IF(MINGW) diff --git a/src/ChunkReader.cpp b/src/ChunkReader.cpp index 8308a0c9..fe552243 100644 --- a/src/ChunkReader.cpp +++ b/src/ChunkReader.cpp @@ -26,6 +26,7 @@ */ #include "../include/ChunkReader.h" +#include "../include/FFmpegReader.h" using namespace openshot; @@ -227,7 +228,6 @@ std::shared_ptr ChunkReader::GetFrame(int64_t requested_frame) cout << "Load READER: " << chunk_video_path << endl; // Load new FFmpegReader local_reader = new FFmpegReader(chunk_video_path); - local_reader->enable_seek = false; // disable seeking local_reader->Open(); // open reader } catch (InvalidFile) diff --git a/src/Clip.cpp b/src/Clip.cpp index 913fd71f..207494e3 100644 --- a/src/Clip.cpp +++ b/src/Clip.cpp @@ -26,6 +26,15 @@ */ #include "../include/Clip.h" +#include "../include/FFmpegReader.h" +#include "../include/FrameMapper.h" +#ifdef USE_IMAGEMAGICK + #include "../include/ImageReader.h" + #include "../include/TextReader.h" +#endif +#include "../include/QtImageReader.h" +#include "../include/ChunkReader.h" +#include "../include/DummyReader.h" using namespace openshot; @@ -212,6 +221,9 @@ void Clip::Reader(ReaderBase* new_reader) // set reader pointer reader = new_reader; + // set parent + reader->SetClip(this); + // Init rotation (if any) init_reader_rotation(); } @@ -328,13 +340,13 @@ std::shared_ptr Clip::GetFrame(int64_t requested_frame) frame->AddAudio(true, channel, 0, original_frame->GetAudioSamples(channel), 
original_frame->GetAudioSamplesCount(), 1.0); // Get time mapped frame number (used to increase speed, change direction, etc...) - std::shared_ptr new_frame = get_time_mapped_frame(frame, requested_frame); + get_time_mapped_frame(frame, requested_frame); // Apply effects to the frame (if any) - apply_effects(new_frame); + apply_effects(frame); // Return processed 'frame' - return new_frame; + return frame; } else // Throw error if reader not initialized @@ -377,7 +389,7 @@ void Clip::reverse_buffer(juce::AudioSampleBuffer* buffer) } // Adjust the audio and image of a time mapped frame -std::shared_ptr Clip::get_time_mapped_frame(std::shared_ptr frame, int64_t frame_number) +void Clip::get_time_mapped_frame(std::shared_ptr frame, int64_t frame_number) { // Check for valid reader if (!reader) @@ -388,7 +400,6 @@ std::shared_ptr Clip::get_time_mapped_frame(std::shared_ptr frame, if (time.Values.size() > 1) { const GenericScopedLock lock(getFrameCriticalSection); - std::shared_ptr new_frame; // create buffer and resampler juce::AudioSampleBuffer *samples = NULL; @@ -396,14 +407,7 @@ std::shared_ptr Clip::get_time_mapped_frame(std::shared_ptr frame, resampler = new AudioResampler(); // Get new frame number - int new_frame_number = adjust_frame_number_minimum(round(time.GetValue(frame_number))); - - // Create a new frame - int samples_in_frame = Frame::GetSamplesPerFrame(new_frame_number, reader->info.fps, reader->info.sample_rate, frame->GetAudioChannelsCount()); - new_frame = std::make_shared(new_frame_number, 1, 1, "#000000", samples_in_frame, frame->GetAudioChannelsCount()); - - // Copy the image from the new frame - new_frame->AddImage(std::shared_ptr(new QImage(*GetOrCreateFrame(new_frame_number)->GetImage()))); + int new_frame_number = frame->number; // Get delta (difference in previous Y value) int delta = int(round(time.GetDelta(frame_number))); @@ -451,7 +455,7 @@ std::shared_ptr Clip::get_time_mapped_frame(std::shared_ptr frame, start -= 1; for (int channel = 
0; channel < channels; channel++) // Add new (slower) samples, to the frame object - new_frame->AddAudio(true, channel, 0, resampled_buffer->getReadPointer(channel, start), + frame->AddAudio(true, channel, 0, resampled_buffer->getReadPointer(channel, start), number_of_samples, 1.0f); // Clean up @@ -559,7 +563,7 @@ std::shared_ptr Clip::get_time_mapped_frame(std::shared_ptr frame, // Add the newly resized audio samples to the current frame for (int channel = 0; channel < channels; channel++) // Add new (slower) samples, to the frame object - new_frame->AddAudio(true, channel, 0, buffer->getReadPointer(channel), number_of_samples, 1.0f); + frame->AddAudio(true, channel, 0, buffer->getReadPointer(channel), number_of_samples, 1.0f); // Clean up buffer = NULL; @@ -580,7 +584,7 @@ std::shared_ptr Clip::get_time_mapped_frame(std::shared_ptr frame, // Add reversed samples to the frame object for (int channel = 0; channel < channels; channel++) - new_frame->AddAudio(true, channel, 0, samples->getReadPointer(channel), number_of_samples, 1.0f); + frame->AddAudio(true, channel, 0, samples->getReadPointer(channel), number_of_samples, 1.0f); } @@ -588,13 +592,7 @@ std::shared_ptr Clip::get_time_mapped_frame(std::shared_ptr frame, delete samples; samples = NULL; } - - // Return new time mapped frame - return new_frame; - - } else - // Use original frame - return frame; + } } // Adjust frame number minimum value @@ -620,35 +618,6 @@ std::shared_ptr Clip::GetOrCreateFrame(int64_t number) // Debug output ZmqLogger::Instance()->AppendDebugMethod("Clip::GetOrCreateFrame (from reader)", "number", number, "samples_in_frame", samples_in_frame, "", -1, "", -1, "", -1, "", -1); - // Determine the max size of this clips source image (based on the timeline's size, the scaling mode, - // and the scaling keyframes). This is a performance improvement, to keep the images as small as possible, - // without losing quality. 
NOTE: We cannot go smaller than the timeline itself, or the add_layer timeline - // method will scale it back to timeline size before scaling it smaller again. This needs to be fixed in - // the future. - if (scale == SCALE_FIT || scale == SCALE_STRETCH) { - // Best fit or Stretch scaling (based on max timeline size * scaling keyframes) - float max_scale_x = scale_x.GetMaxPoint().co.Y; - float max_scale_y = scale_y.GetMaxPoint().co.Y; - reader->SetMaxSize(max(float(max_width), max_width * max_scale_x), max(float(max_height), max_height * max_scale_y)); - - } else if (scale == SCALE_CROP) { - // Cropping scale mode (based on max timeline size * cropped size * scaling keyframes) - float max_scale_x = scale_x.GetMaxPoint().co.Y; - float max_scale_y = scale_y.GetMaxPoint().co.Y; - QSize width_size(max_width * max_scale_x, round(max_width / (float(reader->info.width) / float(reader->info.height)))); - QSize height_size(round(max_height / (float(reader->info.height) / float(reader->info.width))), max_height * max_scale_y); - - // respect aspect ratio - if (width_size.width() >= max_width && width_size.height() >= max_height) - reader->SetMaxSize(max(max_width, width_size.width()), max(max_height, width_size.height())); - else - reader->SetMaxSize(max(max_width, height_size.width()), max(max_height, height_size.height())); - - } else { - // No scaling, use original image size (slower) - reader->SetMaxSize(0, 0); - } - // Attempt to get a frame (but this could fail if a reader has just been closed) new_frame = reader->GetFrame(number); @@ -671,6 +640,7 @@ std::shared_ptr Clip::GetOrCreateFrame(int64_t number) new_frame = std::make_shared(number, reader->info.width, reader->info.height, "#000000", samples_in_frame, reader->info.channels); new_frame->SampleRate(reader->info.sample_rate); new_frame->ChannelsLayout(reader->info.channel_layout); + new_frame->AddAudioSilence(samples_in_frame); return new_frame; } @@ -925,13 +895,14 @@ void Clip::SetJsonValue(Json::Value root) { 
if (!existing_effect["type"].isNull()) { // Create instance of effect - e = EffectInfo().CreateEffect(existing_effect["type"].asString()); + if (e = EffectInfo().CreateEffect(existing_effect["type"].asString())) { - // Load Json into Effect - e->SetJsonValue(existing_effect); + // Load Json into Effect + e->SetJsonValue(existing_effect); - // Add Effect to Timeline - AddEffect(e); + // Add Effect to Timeline + AddEffect(e); + } } } } @@ -994,9 +965,11 @@ void Clip::SetJsonValue(Json::Value root) { reader->SetJsonValue(root["reader"]); } - // mark as managed reader - if (reader) + // mark as managed reader and set parent + if (reader) { + reader->SetClip(this); manage_reader = true; + } // Re-Open reader (if needed) if (already_open) diff --git a/src/Coordinate.cpp b/src/Coordinate.cpp index 41ee420a..60ea90b2 100644 --- a/src/Coordinate.cpp +++ b/src/Coordinate.cpp @@ -32,12 +32,12 @@ using namespace openshot; // Default constructor for a coordinate, which defaults the X and Y to zero (0,0) Coordinate::Coordinate() : - X(0), Y(0), increasing(true), repeated(1,1), delta(0.0) { + X(0), Y(0) { } // Constructor which also allows the user to set the X and Y Coordinate::Coordinate(double x, double y) : - X(x), Y(y), increasing(true), repeated(1,1), delta(0.0) { + X(x), Y(y) { } @@ -96,15 +96,4 @@ void Coordinate::SetJsonValue(Json::Value root) { X = root["X"].asDouble(); if (!root["Y"].isNull()) Y = root["Y"].asDouble(); - if (!root["increasing"].isNull()) - increasing = root["increasing"].asBool(); - if (!root["repeated"].isNull() && root["repeated"].isObject()) - { - if (!root["repeated"]["num"].isNull()) - repeated.num = root["repeated"]["num"].asInt(); - if (!root["repeated"]["den"].isNull()) - repeated.den = root["repeated"]["den"].asInt(); - } - if (!root["delta"].isNull()) - delta = root["delta"].asDouble(); } diff --git a/src/EffectInfo.cpp b/src/EffectInfo.cpp index 23bc9d02..f9e4c409 100644 --- a/src/EffectInfo.cpp +++ b/src/EffectInfo.cpp @@ -82,6 +82,7 @@ 
EffectBase* EffectInfo::CreateEffect(string effect_type) { else if (effect_type == "Wave") return new Wave(); + return NULL; } // Generate Json::JsonValue for this object diff --git a/src/FFmpegReader.cpp b/src/FFmpegReader.cpp index 4d435f37..a8d1d746 100644 --- a/src/FFmpegReader.cpp +++ b/src/FFmpegReader.cpp @@ -37,11 +37,12 @@ FFmpegReader::FFmpegReader(string path) audio_pts_offset(99999), video_pts_offset(99999), path(path), is_video_seek(true), check_interlace(false), check_fps(false), enable_seek(true), is_open(false), seek_audio_frame_found(0), seek_video_frame_found(0), prev_samples(0), prev_pts(0), pts_total(0), pts_counter(0), is_duration_known(false), largest_frame_processed(0), - current_video_frame(0), has_missing_frames(false), num_packets_since_video_frame(0), num_checks_since_final(0), packet(NULL) { + current_video_frame(0), has_missing_frames(false), num_packets_since_video_frame(0), num_checks_since_final(0), + packet(NULL) { // Initialize FFMpeg, and register all formats and codecs - av_register_all(); - avcodec_register_all(); + AV_REGISTER_ALL + AVCODEC_REGISTER_ALL // Init cache working_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * info.fps.ToDouble() * 2, info.width, info.height, info.sample_rate, info.channels); @@ -58,11 +59,12 @@ FFmpegReader::FFmpegReader(string path, bool inspect_reader) audio_pts_offset(99999), video_pts_offset(99999), path(path), is_video_seek(true), check_interlace(false), check_fps(false), enable_seek(true), is_open(false), seek_audio_frame_found(0), seek_video_frame_found(0), prev_samples(0), prev_pts(0), pts_total(0), pts_counter(0), is_duration_known(false), largest_frame_processed(0), - current_video_frame(0), has_missing_frames(false), num_packets_since_video_frame(0), num_checks_since_final(0), packet(NULL) { + current_video_frame(0), has_missing_frames(false), num_packets_since_video_frame(0), num_checks_since_final(0), + packet(NULL) { // Initialize FFMpeg, and register all formats and codecs - 
av_register_all(); - avcodec_register_all(); + AV_REGISTER_ALL + AVCODEC_REGISTER_ALL // Init cache working_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * info.fps.ToDouble() * 2, info.width, info.height, info.sample_rate, info.channels); @@ -151,7 +153,7 @@ void FFmpegReader::Open() pCodecCtx = AV_GET_CODEC_CONTEXT(pStream, pCodec); // Set number of threads equal to number of processors (not to exceed 16) - pCodecCtx->thread_count = min(OPEN_MP_NUM_PROCESSORS, 16); + pCodecCtx->thread_count = min(FF_NUM_PROCESSORS, 16); if (pCodec == NULL) { throw InvalidCodec("A valid video codec could not be found for this file.", path); @@ -189,7 +191,7 @@ void FFmpegReader::Open() aCodecCtx = AV_GET_CODEC_CONTEXT(aStream, aCodec); // Set number of threads equal to number of processors (not to exceed 16) - aCodecCtx->thread_count = min(OPEN_MP_NUM_PROCESSORS, 16); + aCodecCtx->thread_count = min(FF_NUM_PROCESSORS, 16); if (aCodec == NULL) { throw InvalidCodec("A valid audio codec could not be found for this file.", path); @@ -328,6 +330,11 @@ void FFmpegReader::UpdateAudioInfo() info.height = 480; } + // Fix invalid video lengths for certain types of files (MP3 for example) + if (info.has_video && ((info.duration * info.fps.ToDouble()) - info.video_length > 60)) { + info.video_length = info.duration * info.fps.ToDouble(); + } + // Add audio metadata (if any found) AVDictionaryEntry *tag = NULL; while ((tag = av_dict_get(aStream->metadata, "", tag, AV_DICT_IGNORE_SUFFIX))) { @@ -339,19 +346,21 @@ void FFmpegReader::UpdateAudioInfo() void FFmpegReader::UpdateVideoInfo() { + if (check_fps) + // Already initialized all the video metadata, no reason to do it again + return; + // Set values of FileInfo struct info.has_video = true; info.file_size = pFormatCtx->pb ? 
avio_size(pFormatCtx->pb) : -1; info.height = AV_GET_CODEC_ATTRIBUTES(pStream, pCodecCtx)->height; info.width = AV_GET_CODEC_ATTRIBUTES(pStream, pCodecCtx)->width; info.vcodec = pCodecCtx->codec->name; - info.video_bit_rate = pFormatCtx->bit_rate; - if (!check_fps) - { - // set frames per second (fps) - info.fps.num = pStream->avg_frame_rate.num; - info.fps.den = pStream->avg_frame_rate.den; - } + info.video_bit_rate = (pFormatCtx->bit_rate / 8); + + // set frames per second (fps) + info.fps.num = pStream->avg_frame_rate.num; + info.fps.den = pStream->avg_frame_rate.den; if (pStream->sample_aspect_ratio.num != 0) { @@ -415,16 +424,10 @@ void FFmpegReader::UpdateVideoInfo() } // Override an invalid framerate - if (info.fps.ToFloat() > 240.0f || (info.fps.num == 0 || info.fps.den == 0)) - { - // Set a few important default video settings (so audio can be divided into frames) - info.fps.num = 24; - info.fps.den = 1; - info.video_timebase.num = 1; - info.video_timebase.den = 24; - - // Calculate number of frames - info.video_length = round(info.duration * info.fps.ToDouble()); + if (info.fps.ToFloat() > 240.0f || (info.fps.num <= 0 || info.fps.den <= 0) || info.video_length <= 0) { + // Calculate FPS, duration, video bit rate, and video length manually + // by scanning through all the video stream packets + CheckFPS(); } // Add video metadata (if any) @@ -469,8 +472,6 @@ std::shared_ptr FFmpegReader::GetFrame(int64_t requested_frame) #pragma omp critical (ReadStream) { // Check the cache a 2nd time (due to a potential previous lock) - if (has_missing_frames) - CheckMissingFrame(requested_frame); frame = final_cache.GetFrame(requested_frame); if (frame) { // Debug output @@ -606,6 +607,12 @@ std::shared_ptr FFmpegReader::ReadStream(int64_t requested_frame) // Process Video Packet ProcessVideoPacket(requested_frame); + + if (openshot::Settings::Instance()->WAIT_FOR_VIDEO_PROCESSING_TASK) { + // Wait on each OMP task to complete before moving on to the next one. 
This slows + // down processing considerably, but might be more stable on some systems. + #pragma omp taskwait + } } } @@ -616,16 +623,16 @@ std::shared_ptr FFmpegReader::ReadStream(int64_t requested_frame) num_packets_since_video_frame++; // Check the status of a seek (if any) - if (is_seeking) - #pragma omp critical (openshot_seek) - check_seek = CheckSeek(false); - else - check_seek = false; + if (is_seeking) + #pragma omp critical (openshot_seek) + check_seek = CheckSeek(false); + else + check_seek = false; - if (check_seek) { - // Jump to the next iteration of this loop - continue; - } + if (check_seek) { + // Jump to the next iteration of this loop + continue; + } // Update PTS / Frame Offset (if any) UpdatePTSOffset(false); @@ -638,17 +645,13 @@ std::shared_ptr FFmpegReader::ReadStream(int64_t requested_frame) } // Check if working frames are 'finished' - bool is_cache_found = false; if (!is_seeking) { - // Check for any missing frames - CheckMissingFrame(requested_frame); - // Check for final frames CheckWorkingFrames(false, requested_frame); } // Check if requested 'final' frame is available - is_cache_found = (final_cache.GetFrame(requested_frame) != NULL); + bool is_cache_found = (final_cache.GetFrame(requested_frame) != NULL); // Increment frames processed packets_processed++; @@ -660,6 +663,7 @@ std::shared_ptr FFmpegReader::ReadStream(int64_t requested_frame) } // end while } // end omp single + } // end omp parallel // Debug output @@ -877,9 +881,53 @@ void FFmpegReader::ProcessVideoPacket(int64_t requested_frame) if (pFrameRGB == NULL) throw OutOfBoundsFrame("Convert Image Broke!", current_frame, video_length); - // Determine if video needs to be scaled down (for performance reasons) - // Timelines pass their size to the clips, which pass their size to the readers (as max size) - // If a clip is being scaled larger, it will set max_width and max_height = 0 (which means don't down scale) + // Determine the max size of this source image (based on the 
timeline's size, the scaling mode, + // and the scaling keyframes). This is a performance improvement, to keep the images as small as possible, + // without losing quality. NOTE: We cannot go smaller than the timeline itself, or the add_layer timeline + // method will scale it back to timeline size before scaling it smaller again. This needs to be fixed in + // the future. + int max_width = Settings::Instance()->MAX_WIDTH; + if (max_width <= 0) + max_width = info.width; + int max_height = Settings::Instance()->MAX_HEIGHT; + if (max_height <= 0) + max_height = info.height; + + Clip* parent = (Clip*) GetClip(); + if (parent) { + if (parent->scale == SCALE_FIT || parent->scale == SCALE_STRETCH) { + // Best fit or Stretch scaling (based on max timeline size * scaling keyframes) + float max_scale_x = parent->scale_x.GetMaxPoint().co.Y; + float max_scale_y = parent->scale_y.GetMaxPoint().co.Y; + max_width = max(float(max_width), max_width * max_scale_x); + max_height = max(float(max_height), max_height * max_scale_y); + + } else if (parent->scale == SCALE_CROP) { + // Cropping scale mode (based on max timeline size * cropped size * scaling keyframes) + float max_scale_x = parent->scale_x.GetMaxPoint().co.Y; + float max_scale_y = parent->scale_y.GetMaxPoint().co.Y; + QSize width_size(max_width * max_scale_x, + round(max_width / (float(info.width) / float(info.height)))); + QSize height_size(round(max_height / (float(info.height) / float(info.width))), + max_height * max_scale_y); + // respect aspect ratio + if (width_size.width() >= max_width && width_size.height() >= max_height) { + max_width = max(max_width, width_size.width()); + max_height = max(max_height, width_size.height()); + } + else { + max_width = max(max_width, height_size.width()); + max_height = max(max_height, height_size.height()); + } + + } else { + // No scaling, use original image size (slower) + max_width = info.width; + max_height = info.height; + } + } + + // Determine if image needs to be scaled 
(for performance reasons) int original_height = height; if (max_width != 0 && max_height != 0 && max_width < width && max_height < height) { // Override width and height (but maintain aspect ratio) @@ -907,8 +955,12 @@ void FFmpegReader::ProcessVideoPacket(int64_t requested_frame) // Copy picture data from one AVFrame (or AVPicture) to another one. AV_COPY_PICTURE_DATA(pFrameRGB, buffer, PIX_FMT_RGBA, width, height); + int scale_mode = SWS_FAST_BILINEAR; + if (openshot::Settings::Instance()->HIGH_QUALITY_SCALING) { + scale_mode = SWS_LANCZOS; + } SwsContext *img_convert_ctx = sws_getContext(info.width, info.height, AV_GET_CODEC_PIXEL_FORMAT(pStream, pCodecCtx), width, - height, PIX_FMT_RGBA, SWS_BILINEAR, NULL, NULL, NULL); + height, PIX_FMT_RGBA, scale_mode, NULL, NULL, NULL); // Resize / Convert to RGB sws_scale(img_convert_ctx, my_frame->data, my_frame->linesize, 0, @@ -978,7 +1030,7 @@ void FFmpegReader::ProcessAudioPacket(int64_t requested_frame, int64_t target_fr int data_size = 0; // re-initialize buffer size (it gets changed in the avcodec_decode_audio2 method call) - int buf_size = AVCODEC_MAX_AUDIO_FRAME_SIZE + FF_INPUT_BUFFER_PADDING_SIZE; + int buf_size = AVCODEC_MAX_AUDIO_FRAME_SIZE + MY_INPUT_BUFFER_PADDING_SIZE; #pragma omp critical (ProcessAudioPacket) { #if IS_FFMPEG_3_2 @@ -1083,7 +1135,7 @@ void FFmpegReader::ProcessAudioPacket(int64_t requested_frame, int64_t target_fr // Allocate audio buffer - int16_t *audio_buf = new int16_t[AVCODEC_MAX_AUDIO_FRAME_SIZE + FF_INPUT_BUFFER_PADDING_SIZE]; + int16_t *audio_buf = new int16_t[AVCODEC_MAX_AUDIO_FRAME_SIZE + MY_INPUT_BUFFER_PADDING_SIZE]; ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::ProcessAudioPacket (ReSample)", "packet_samples", packet_samples, "info.channels", info.channels, "info.sample_rate", info.sample_rate, "aCodecCtx->sample_fmt", AV_GET_SAMPLE_FORMAT(aStream, aCodecCtx), "AV_SAMPLE_FMT_S16", AV_SAMPLE_FMT_S16, "", -1); @@ -1093,11 +1145,11 @@ void 
FFmpegReader::ProcessAudioPacket(int64_t requested_frame, int64_t target_fr audio_converted->nb_samples = audio_frame->nb_samples; av_samples_alloc(audio_converted->data, audio_converted->linesize, info.channels, audio_frame->nb_samples, AV_SAMPLE_FMT_S16, 0); - AVAudioResampleContext *avr = NULL; + SWRCONTEXT *avr = NULL; int nb_samples = 0; // setup resample context - avr = avresample_alloc_context(); + avr = SWR_ALLOC(); av_opt_set_int(avr, "in_channel_layout", AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout, 0); av_opt_set_int(avr, "out_channel_layout", AV_GET_CODEC_ATTRIBUTES(aStream, aCodecCtx)->channel_layout, 0); av_opt_set_int(avr, "in_sample_fmt", AV_GET_SAMPLE_FORMAT(aStream, aCodecCtx), 0); @@ -1106,10 +1158,10 @@ void FFmpegReader::ProcessAudioPacket(int64_t requested_frame, int64_t target_fr av_opt_set_int(avr, "out_sample_rate", info.sample_rate, 0); av_opt_set_int(avr, "in_channels", info.channels, 0); av_opt_set_int(avr, "out_channels", info.channels, 0); - int r = avresample_open(avr); + int r = SWR_INIT(avr); // Convert audio samples - nb_samples = avresample_convert(avr, // audio resample context + nb_samples = SWR_CONVERT(avr, // audio resample context audio_converted->data, // output data pointers audio_converted->linesize[0], // output plane size, in bytes. 
(0 if unknown) audio_converted->nb_samples, // maximum number of samples that the output buffer can hold @@ -1121,8 +1173,8 @@ void FFmpegReader::ProcessAudioPacket(int64_t requested_frame, int64_t target_fr memcpy(audio_buf, audio_converted->data[0], audio_converted->nb_samples * av_get_bytes_per_sample(AV_SAMPLE_FMT_S16) * info.channels); // Deallocate resample buffer - avresample_close(avr); - avresample_free(&avr); + SWR_CLOSE(avr); + SWR_FREE(&avr); avr = NULL; // Free AVFrames @@ -1348,7 +1400,7 @@ void FFmpegReader::Seek(int64_t requested_frame) { seek_target = ConvertFrameToVideoPTS(requested_frame - buffer_amount); if (av_seek_frame(pFormatCtx, info.video_stream_index, seek_target, AVSEEK_FLAG_BACKWARD) < 0) { - fprintf(stderr, "%s: error while seeking video stream\n", pFormatCtx->filename); + fprintf(stderr, "%s: error while seeking video stream\n", pFormatCtx->AV_FILENAME); } else { // VIDEO SEEK @@ -1362,7 +1414,7 @@ void FFmpegReader::Seek(int64_t requested_frame) { seek_target = ConvertFrameToAudioPTS(requested_frame - buffer_amount); if (av_seek_frame(pFormatCtx, info.audio_stream_index, seek_target, AVSEEK_FLAG_BACKWARD) < 0) { - fprintf(stderr, "%s: error while seeking audio stream\n", pFormatCtx->filename); + fprintf(stderr, "%s: error while seeking audio stream\n", pFormatCtx->AV_FILENAME); } else { // AUDIO SEEK @@ -1594,7 +1646,7 @@ AudioLocation FFmpegReader::GetAudioPTSLocation(int64_t pts) for (int64_t audio_frame = previous_packet_location.frame; audio_frame < location.frame; audio_frame++) { if (!missing_audio_frames.count(audio_frame)) { ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::GetAudioPTSLocation (tracking missing frame)", "missing_audio_frame", audio_frame, "previous_audio_frame", previous_packet_location.frame, "new location frame", location.frame, "", -1, "", -1, "", -1); - missing_audio_frames.insert(pair(previous_packet_location.frame - 1, audio_frame)); + missing_audio_frames.insert(pair(audio_frame, 
previous_packet_location.frame - 1)); } } } @@ -1669,13 +1721,25 @@ bool FFmpegReader::CheckMissingFrame(int64_t requested_frame) map::iterator itr; bool found_missing_frame = false; - // Check if requested frame is a missing frame - if (missing_video_frames.count(requested_frame) || missing_audio_frames.count(requested_frame)) { - int64_t missing_source_frame = -1; - if (missing_video_frames.count(requested_frame)) - missing_source_frame = missing_video_frames.find(requested_frame)->second; - else if (missing_audio_frames.count(requested_frame)) - missing_source_frame = missing_audio_frames.find(requested_frame)->second; + // Special MP3 Handling (ignore more than 1 video frame) + if (info.has_audio and info.has_video) { + AVCodecID aCodecId = AV_FIND_DECODER_CODEC_ID(aStream); + AVCodecID vCodecId = AV_FIND_DECODER_CODEC_ID(pStream); + // If MP3 with single video frame, handle this special case by copying the previously + // decoded image to the new frame. Otherwise, it will spend a huge amount of + // CPU time looking for missing images for all the audio-only frames. 
+ if (checked_count > 8 && !missing_video_frames.count(requested_frame) && + !processing_audio_frames.count(requested_frame) && processed_audio_frames.count(requested_frame) && + last_frame && last_video_frame->has_image_data && aCodecId == AV_CODEC_ID_MP3 && (vCodecId == AV_CODEC_ID_MJPEGB || vCodecId == AV_CODEC_ID_MJPEG)) { + missing_video_frames.insert(pair(requested_frame, last_video_frame->number)); + missing_video_frames_source.insert(pair(last_video_frame->number, requested_frame)); + missing_frames.Add(last_video_frame); + } + } + + // Check if requested video frame is a missing + if (missing_video_frames.count(requested_frame)) { + int64_t missing_source_frame = missing_video_frames.find(requested_frame)->second; // Increment missing source frame check count (or init to 1) if (checked_frames.count(missing_source_frame) == 0) @@ -1708,21 +1772,26 @@ bool FFmpegReader::CheckMissingFrame(int64_t requested_frame) std::shared_ptr parent_image = parent_frame->GetImage(); if (parent_image) { missing_frame->AddImage(std::shared_ptr(new QImage(*parent_image))); - processed_video_frames[missing_frame->number] = missing_frame->number; - processed_audio_frames[missing_frame->number] = missing_frame->number; - - // Move frame to final cache - final_cache.Add(missing_frame); - - // Remove frame from working cache - working_cache.Remove(missing_frame->number); - - // Update last_frame processed - last_frame = missing_frame->number; } } + } + // Check if requested audio frame is a missing + if (missing_audio_frames.count(requested_frame)) { + + // Create blank missing frame + std::shared_ptr missing_frame = CreateFrame(requested_frame); + + // Get Samples per frame (for this frame number) + int samples_per_frame = Frame::GetSamplesPerFrame(missing_frame->number, info.fps, info.sample_rate, info.channels); + + // Debug output + ZmqLogger::Instance()->AppendDebugMethod("FFmpegReader::CheckMissingFrame (Add Silence for Missing Audio Frame)", "requested_frame", 
requested_frame, "missing_frame->number", missing_frame->number, "samples_per_frame", samples_per_frame, "", -1, "", -1, "", -1); + + // Add this frame to the processed map (since it's already done) + missing_frame->AddAudioSilence(samples_per_frame); + processed_audio_frames[missing_frame->number] = missing_frame->number; } return found_missing_frame; @@ -1735,6 +1804,9 @@ void FFmpegReader::CheckWorkingFrames(bool end_of_stream, int64_t requested_fram bool checked_count_tripped = false; int max_checked_count = 80; + // Check if requested frame is 'missing' + CheckMissingFrame(requested_frame); + while (true) { // Get the front frame of working cache @@ -1857,16 +1929,14 @@ void FFmpegReader::CheckWorkingFrames(bool end_of_stream, int64_t requested_fram void FFmpegReader::CheckFPS() { check_fps = true; - AV_ALLOCATE_IMAGE(pFrame, AV_GET_CODEC_PIXEL_FORMAT(pStream, pCodecCtx), info.width, info.height); int first_second_counter = 0; int second_second_counter = 0; int third_second_counter = 0; int forth_second_counter = 0; int fifth_second_counter = 0; - - int iterations = 0; - int threshold = 500; + int frames_detected = 0; + int64_t pts = 0; // Loop through the stream while (true) @@ -1886,7 +1956,7 @@ void FFmpegReader::CheckFPS() UpdatePTSOffset(true); // Get PTS of this packet - int64_t pts = GetVideoPTS(); + pts = GetVideoPTS(); // Remove pFrame RemoveAVFrame(pFrame); @@ -1908,63 +1978,51 @@ void FFmpegReader::CheckFPS() forth_second_counter++; else if (video_seconds > 4.0 && video_seconds <= 5.0) fifth_second_counter++; - else - // Too far - break; + + // Increment counters + frames_detected++; } } - - // Increment counters - iterations++; - - // Give up (if threshold exceeded) - if (iterations > threshold) - break; } // Double check that all counters have greater than zero (or give up) - if (second_second_counter == 0 || third_second_counter == 0 || forth_second_counter == 0 || fifth_second_counter == 0) - { - // Seek to frame 1 - Seek(1); + if 
(second_second_counter != 0 && third_second_counter != 0 && forth_second_counter != 0 && fifth_second_counter != 0) { + // Calculate average FPS (average of first few seconds) + int sum_fps = second_second_counter + third_second_counter + forth_second_counter + fifth_second_counter; + int avg_fps = round(sum_fps / 4.0f); - // exit with no changes to FPS (not enough data to calculate) - return; + // Update FPS + info.fps = Fraction(avg_fps, 1); + + // Update Duration and Length + info.video_length = frames_detected; + info.duration = frames_detected / (sum_fps / 4.0f); + + // Update video bit rate + info.video_bit_rate = info.file_size / info.duration; + } else if (second_second_counter != 0 && third_second_counter != 0) { + // Calculate average FPS (only on second 2) + int sum_fps = second_second_counter; + + // Update FPS + info.fps = Fraction(sum_fps, 1); + + // Update Duration and Length + info.video_length = frames_detected; + info.duration = frames_detected / float(sum_fps); + + // Update video bit rate + info.video_bit_rate = info.file_size / info.duration; + } else { + // Too short to determine framerate, just default FPS + // Set a few important default video settings (so audio can be divided into frames) + info.fps.num = 30; + info.fps.den = 1; + + // Calculate number of frames + info.video_length = frames_detected; + info.duration = frames_detected / info.fps.ToFloat(); } - - int sum_fps = second_second_counter + third_second_counter + forth_second_counter + fifth_second_counter; - int avg_fps = round(sum_fps / 4.0f); - - // Sometimes the FPS is incorrectly detected by FFmpeg. If the 1st and 2nd seconds counters - // agree with each other, we are going to adjust the FPS of this reader instance. Otherwise, print - // a warning message. - - // Get diff from actual frame rate - double fps = info.fps.ToDouble(); - double diff = fps - double(avg_fps); - - // Is difference bigger than 1 frame? 
- if (diff <= -1 || diff >= 1) - { - // Compare to half the frame rate (the most common type of issue) - double half_fps = Fraction(info.fps.num / 2, info.fps.den).ToDouble(); - diff = half_fps - double(avg_fps); - - // Is difference bigger than 1 frame? - if (diff <= -1 || diff >= 1) - { - // Update FPS for this reader instance - info.fps = Fraction(avg_fps, 1); - } - else - { - // Update FPS for this reader instance (to 1/2 the original framerate) - info.fps = Fraction(info.fps.num / 2, info.fps.den); - } - } - - // Seek to frame 1 - Seek(1); } // Remove AVFrame from cache (and deallocate it's memory) @@ -1974,7 +2032,13 @@ void FFmpegReader::RemoveAVFrame(AVFrame* remove_frame) if (remove_frame) { // Free memory - av_freep(&remove_frame->data[0]); + #pragma omp critical (packet_cache) + { + av_freep(&remove_frame->data[0]); +#ifndef WIN32 + AV_FREE_FRAME(&remove_frame); +#endif + } } } diff --git a/src/FFmpegWriter.cpp b/src/FFmpegWriter.cpp index 4416040a..5d09341c 100644 --- a/src/FFmpegWriter.cpp +++ b/src/FFmpegWriter.cpp @@ -46,7 +46,7 @@ FFmpegWriter::FFmpegWriter(string path) : info.has_video = false; // Initialize FFMpeg, and register all formats and codecs - av_register_all(); + AV_REGISTER_ALL // auto detect format auto_detect_format(); @@ -55,16 +55,24 @@ FFmpegWriter::FFmpegWriter(string path) : // Open the writer void FFmpegWriter::Open() { - // Open the writer - is_open = true; + if (!is_open) { + // Open the writer + is_open = true; - // Prepare streams (if needed) - if (!prepare_streams) - PrepareStreams(); + // Prepare streams (if needed) + if (!prepare_streams) + PrepareStreams(); - // Write header (if needed) - if (!write_header) - WriteHeader(); + // Now that all the parameters are set, we can open the audio and video codecs and allocate the necessary encode buffers + if (info.has_video && video_st) + open_video(oc, video_st); + if (info.has_audio && audio_st) + open_audio(oc, audio_st); + + // Write header (if needed) + if (!write_header) + 
WriteHeader(); + } } // auto detect format (from path) @@ -146,7 +154,9 @@ void FFmpegWriter::SetVideoOptions(bool has_video, string codec, Fraction fps, i info.pixel_ratio.num = pixel_ratio.num; info.pixel_ratio.den = pixel_ratio.den; } - if (bit_rate >= 1000) + if (bit_rate >= 1000) // bit_rate is the bitrate in b/s + info.video_bit_rate = bit_rate; + if ((bit_rate >= 0) && (bit_rate < 64) ) // bit_rate is the bitrate in crf info.video_bit_rate = bit_rate; info.interlaced_frame = interlaced; @@ -237,7 +247,8 @@ void FFmpegWriter::SetOption(StreamType stream, string name, string value) // Was option found? if (option || (name == "g" || name == "qmin" || name == "qmax" || name == "max_b_frames" || name == "mb_decision" || - name == "level" || name == "profile" || name == "slices" || name == "rc_min_rate" || name == "rc_max_rate")) + name == "level" || name == "profile" || name == "slices" || name == "rc_min_rate" || name == "rc_max_rate" || + name == "crf")) { // Check for specific named options if (name == "g") @@ -284,6 +295,60 @@ void FFmpegWriter::SetOption(StreamType stream, string name, string value) // Buffer size convert >> c->rc_buffer_size; + else if (name == "crf") { + // encode quality and special settings like lossless + // This might be better in an extra methods as more options + // and way to set quality are possible + #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(55, 39, 101) + switch (c->codec_id) { + #if (LIBAVCODEC_VERSION_MAJOR >= 58) + case AV_CODEC_ID_AV1 : + c->bit_rate = 0; + av_opt_set_int(c->priv_data, "crf", min(stoi(value),63), 0); + break; + #endif + case AV_CODEC_ID_VP8 : + c->bit_rate = 10000000; + av_opt_set_int(c->priv_data, "crf", max(min(stoi(value),63),4), 0); // 4-63 + break; + case AV_CODEC_ID_VP9 : + c->bit_rate = 0; // Must be zero! 
+ av_opt_set_int(c->priv_data, "crf", min(stoi(value),63), 0); // 0-63 + if (stoi(value) == 0) { + av_opt_set(c->priv_data, "preset", "veryslow", 0); + av_opt_set_int(c->priv_data, "lossless", 1, 0); + } + break; + case AV_CODEC_ID_H264 : + av_opt_set_int(c->priv_data, "crf", min(stoi(value),51), 0); // 0-51 + if (stoi(value) == 0) { + av_opt_set(c->priv_data, "preset", "veryslow", 0); + } + break; + case AV_CODEC_ID_H265 : + av_opt_set_int(c->priv_data, "crf", min(stoi(value),51), 0); // 0-51 + if (stoi(value) == 0) { + av_opt_set(c->priv_data, "preset", "veryslow", 0); + av_opt_set_int(c->priv_data, "lossless", 1, 0); + } + break; + default: + // If this codec doesn't support crf calculate a bitrate + // TODO: find better formula + double mbs = 15000000.0; + if (info.video_bit_rate > 0) { + if (info.video_bit_rate > 42) { + mbs = 380.0; + } + else { + mbs *= pow(0.912,info.video_bit_rate); + } + } + c->bit_rate = (int)(mbs); + } + #endif + } + else // Set AVOption AV_OPTION_SET(st, c->priv_data, name.c_str(), value.c_str(), c); @@ -299,7 +364,7 @@ void FFmpegWriter::SetOption(StreamType stream, string name, string value) /// Determine if codec name is valid bool FFmpegWriter::IsValidCodec(string codec_name) { // Initialize FFMpeg, and register all formats and codecs - av_register_all(); + AV_REGISTER_ALL // Find the codec (if any) if (avcodec_find_encoder_by_name(codec_name.c_str()) == NULL) @@ -319,12 +384,6 @@ void FFmpegWriter::PrepareStreams() // Initialize the streams (i.e. 
add the streams) initialize_streams(); - // Now that all the parameters are set, we can open the audio and video codecs and allocate the necessary encode buffers - if (info.has_video && video_st) - open_video(oc, video_st); - if (info.has_audio && audio_st) - open_audio(oc, audio_st); - // Mark as 'prepared' prepare_streams = true; } @@ -342,7 +401,7 @@ void FFmpegWriter::WriteHeader() } // Force the output filename (which doesn't always happen for some reason) - snprintf(oc->filename, sizeof(oc->filename), "%s", path.c_str()); + snprintf(oc->AV_FILENAME, sizeof(oc->AV_FILENAME), "%s", path.c_str()); // Write the stream header, if any // TODO: add avoptions / parameters instead of NULL @@ -559,8 +618,10 @@ void FFmpegWriter::flush_encoders() { if (info.has_audio && audio_codec && AV_GET_CODEC_TYPE(audio_st) == AVMEDIA_TYPE_AUDIO && AV_GET_CODEC_ATTRIBUTES(audio_st, audio_codec)->frame_size <= 1) return; +#if (LIBAVFORMAT_VERSION_MAJOR < 58) if (info.has_video && video_codec && AV_GET_CODEC_TYPE(video_st) == AVMEDIA_TYPE_VIDEO && (oc->oformat->flags & AVFMT_RAWPICTURE) && AV_FIND_DECODER_CODEC_ID(video_st) == AV_CODEC_ID_RAWVIDEO) return; +#endif int error_code = 0; int stop_encoding = 1; @@ -590,6 +651,23 @@ void FFmpegWriter::flush_encoders() // Encode video packet (latest version of FFmpeg) error_code = avcodec_send_frame(video_codec, NULL); got_packet = 0; + while (error_code >= 0) { + error_code = avcodec_receive_packet(video_codec, &pkt); + if (error_code == AVERROR(EAGAIN)|| error_code == AVERROR_EOF) { + got_packet = 0; + // Write packet + avcodec_flush_buffers(video_codec); + break; + } + if (pkt.pts != AV_NOPTS_VALUE) + pkt.pts = av_rescale_q(pkt.pts, video_codec->time_base, video_st->time_base); + if (pkt.dts != AV_NOPTS_VALUE) + pkt.dts = av_rescale_q(pkt.dts, video_codec->time_base, video_st->time_base); + if (pkt.duration > 0) + pkt.duration = av_rescale_q(pkt.duration, video_codec->time_base, video_st->time_base); + pkt.stream_index = video_st->index; 
+ error_code = av_interleaved_write_frame(oc, &pkt); + } } #else @@ -734,14 +812,14 @@ void FFmpegWriter::close_audio(AVFormatContext *oc, AVStream *st) // Deallocate resample buffer if (avr) { - avresample_close(avr); - avresample_free(&avr); + SWR_CLOSE(avr); + SWR_FREE(&avr); avr = NULL; } if (avr_planar) { - avresample_close(avr_planar); - avresample_free(&avr_planar); + SWR_CLOSE(avr_planar); + SWR_FREE(&avr_planar); avr_planar = NULL; } } @@ -881,7 +959,11 @@ AVStream* FFmpegWriter::add_audio_stream() // some formats want stream headers to be separate if (oc->oformat->flags & AVFMT_GLOBALHEADER) +#if (LIBAVCODEC_VERSION_MAJOR >= 57) + c->flags |= AV_CODEC_FLAG_GLOBAL_HEADER; +#else c->flags |= CODEC_FLAG_GLOBAL_HEADER; +#endif AV_COPY_PARAMS_FROM_CONTEXT(st, c); ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::add_audio_stream", "c->codec_id", c->codec_id, "c->bit_rate", c->bit_rate, "c->channels", c->channels, "c->sample_fmt", c->sample_fmt, "c->channel_layout", c->channel_layout, "c->sample_rate", c->sample_rate); @@ -911,7 +993,19 @@ AVStream* FFmpegWriter::add_video_stream() #endif /* Init video encoder options */ - c->bit_rate = info.video_bit_rate; + if (info.video_bit_rate >= 1000) { + c->bit_rate = info.video_bit_rate; + if (info.video_bit_rate >= 1500000) { + c->qmin = 2; + c->qmax = 30; + } + // Here should be the setting for low fixed bitrate + // Defaults are used because mpeg2 otherwise had problems + } + else { + c->qmin = 0; + c->qmax = 63; + } //TODO: Implement variable bitrate feature (which actually works). This implementation throws //invalid bitrate errors and rc buffer underflow errors, etc... 
@@ -920,8 +1014,6 @@ AVStream* FFmpegWriter::add_video_stream() //c->rc_buffer_size = FFMAX(c->rc_max_rate, 15000000) * 112L / 15000000 * 16384; //if ( !c->rc_initial_buffer_occupancy ) // c->rc_initial_buffer_occupancy = c->rc_buffer_size * 3/4; - c->qmin = 2; - c->qmax = 30; /* resolution must be a multiple of two */ // TODO: require /2 height and width @@ -953,7 +1045,11 @@ AVStream* FFmpegWriter::add_video_stream() c->mb_decision = 2; // some formats want stream headers to be separate if (oc->oformat->flags & AVFMT_GLOBALHEADER) +#if (LIBAVCODEC_VERSION_MAJOR >= 57) + c->flags |= AV_CODEC_FLAG_GLOBAL_HEADER; +#else c->flags |= CODEC_FLAG_GLOBAL_HEADER; +#endif // Find all supported pixel formats for this codec const PixelFormat* supported_pixel_formats = codec->pix_fmts; @@ -970,10 +1066,12 @@ AVStream* FFmpegWriter::add_video_stream() // Raw video should use RGB24 c->pix_fmt = PIX_FMT_RGB24; +#if (LIBAVFORMAT_VERSION_MAJOR < 58) if (strcmp(fmt->name, "gif") != 0) // If not GIF format, skip the encoding process // Set raw picture flag (so we don't encode this video) oc->oformat->flags |= AVFMT_RAWPICTURE; +#endif } else { // Set the default codec c->pix_fmt = PIX_FMT_YUV420P; @@ -981,7 +1079,11 @@ AVStream* FFmpegWriter::add_video_stream() } AV_COPY_PARAMS_FROM_CONTEXT(st, c); +#if (LIBAVFORMAT_VERSION_MAJOR < 58) ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::add_video_stream (" + (string)fmt->name + " : " + (string)av_get_pix_fmt_name(c->pix_fmt) + ")", "c->codec_id", c->codec_id, "c->bit_rate", c->bit_rate, "c->pix_fmt", c->pix_fmt, "oc->oformat->flags", oc->oformat->flags, "AVFMT_RAWPICTURE", AVFMT_RAWPICTURE, "", -1); +#else + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::add_video_stream (" + (string)fmt->name + " : " + (string)av_get_pix_fmt_name(c->pix_fmt) + ")", "c->codec_id", c->codec_id, "c->bit_rate", c->bit_rate, "c->pix_fmt", c->pix_fmt, "oc->oformat->flags", oc->oformat->flags, "", -1, "", -1); +#endif return st; } @@ -993,7 
+1095,7 @@ void FFmpegWriter::open_audio(AVFormatContext *oc, AVStream *st) AV_GET_CODEC_FROM_STREAM(st, audio_codec) // Set number of threads equal to number of processors (not to exceed 16) - audio_codec->thread_count = min(OPEN_MP_NUM_PROCESSORS, 16); + audio_codec->thread_count = min(FF_NUM_PROCESSORS, 16); // Find the audio encoder codec = avcodec_find_encoder_by_name(info.acodec.c_str()); @@ -1056,7 +1158,7 @@ void FFmpegWriter::open_audio(AVFormatContext *oc, AVStream *st) av_dict_set(&st->metadata, iter->first.c_str(), iter->second.c_str(), 0); } - ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::open_audio", "audio_codec->thread_count", audio_codec->thread_count, "audio_input_frame_size", audio_input_frame_size, "buffer_size", AVCODEC_MAX_AUDIO_FRAME_SIZE + FF_INPUT_BUFFER_PADDING_SIZE, "", -1, "", -1, "", -1); + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::open_audio", "audio_codec->thread_count", audio_codec->thread_count, "audio_input_frame_size", audio_input_frame_size, "buffer_size", AVCODEC_MAX_AUDIO_FRAME_SIZE + MY_INPUT_BUFFER_PADDING_SIZE, "", -1, "", -1, "", -1); } @@ -1067,7 +1169,7 @@ void FFmpegWriter::open_video(AVFormatContext *oc, AVStream *st) AV_GET_CODEC_FROM_STREAM(st, video_codec) // Set number of threads equal to number of processors (not to exceed 16) - video_codec->thread_count = min(OPEN_MP_NUM_PROCESSORS, 16); + video_codec->thread_count = min(FF_NUM_PROCESSORS, 16); /* find the video encoder */ codec = avcodec_find_encoder_by_name(info.vcodec.c_str()); @@ -1222,7 +1324,7 @@ void FFmpegWriter::write_audio_packets(bool final) // setup resample context if (!avr) { - avr = avresample_alloc_context(); + avr = SWR_ALLOC(); av_opt_set_int(avr, "in_channel_layout", channel_layout_in_frame, 0); av_opt_set_int(avr, "out_channel_layout", info.channel_layout, 0); av_opt_set_int(avr, "in_sample_fmt", AV_SAMPLE_FMT_S16, 0); @@ -1231,12 +1333,12 @@ void FFmpegWriter::write_audio_packets(bool final) av_opt_set_int(avr, 
"out_sample_rate", info.sample_rate, 0); av_opt_set_int(avr, "in_channels", channels_in_frame, 0); av_opt_set_int(avr, "out_channels", info.channels, 0); - avresample_open(avr); + SWR_INIT(avr); } int nb_samples = 0; // Convert audio samples - nb_samples = avresample_convert(avr, // audio resample context + nb_samples = SWR_CONVERT(avr, // audio resample context audio_converted->data, // output data pointers audio_converted->linesize[0], // output plane size, in bytes. (0 if unknown) audio_converted->nb_samples, // maximum number of samples that the output buffer can hold @@ -1297,7 +1399,7 @@ void FFmpegWriter::write_audio_packets(bool final) // setup resample context if (!avr_planar) { - avr_planar = avresample_alloc_context(); + avr_planar = SWR_ALLOC(); av_opt_set_int(avr_planar, "in_channel_layout", info.channel_layout, 0); av_opt_set_int(avr_planar, "out_channel_layout", info.channel_layout, 0); av_opt_set_int(avr_planar, "in_sample_fmt", output_sample_fmt, 0); @@ -1306,7 +1408,7 @@ void FFmpegWriter::write_audio_packets(bool final) av_opt_set_int(avr_planar, "out_sample_rate", info.sample_rate, 0); av_opt_set_int(avr_planar, "in_channels", info.channels, 0); av_opt_set_int(avr_planar, "out_channels", info.channels, 0); - avresample_open(avr_planar); + SWR_INIT(avr_planar); } // Create input frame (and allocate arrays) @@ -1329,7 +1431,7 @@ void FFmpegWriter::write_audio_packets(bool final) av_samples_alloc(frame_final->data, frame_final->linesize, info.channels, frame_final->nb_samples, audio_codec->sample_fmt, 0); // Convert audio samples - int nb_samples = avresample_convert(avr_planar, // audio resample context + int nb_samples = SWR_CONVERT(avr_planar, // audio resample context frame_final->data, // output data pointers frame_final->linesize[0], // output plane size, in bytes. 
(0 if unknown) frame_final->nb_samples, // maximum number of samples that the output buffer can hold @@ -1350,7 +1452,7 @@ void FFmpegWriter::write_audio_packets(bool final) } else { // Create a new array - final_samples = new int16_t[audio_input_position * (av_get_bytes_per_sample(audio_codec->sample_fmt) / av_get_bytes_per_sample(AV_SAMPLE_FMT_S16))]; + final_samples = (int16_t*)av_malloc(sizeof(int16_t) * audio_input_position * (av_get_bytes_per_sample(audio_codec->sample_fmt) / av_get_bytes_per_sample(AV_SAMPLE_FMT_S16))); // Copy audio into buffer for frame memcpy(final_samples, samples, audio_input_position * av_get_bytes_per_sample(audio_codec->sample_fmt)); @@ -1560,6 +1662,9 @@ void FFmpegWriter::process_video_packet(std::shared_ptr frame) // write video frame bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* frame_final) { +#if (LIBAVFORMAT_VERSION_MAJOR >= 58) + ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet", "frame->number", frame->number, "oc->oformat->flags", oc->oformat->flags, "", -1, "", -1, "", -1, "", -1); +#else ZmqLogger::Instance()->AppendDebugMethod("FFmpegWriter::write_video_packet", "frame->number", frame->number, "oc->oformat->flags & AVFMT_RAWPICTURE", oc->oformat->flags & AVFMT_RAWPICTURE, "", -1, "", -1, "", -1, "", -1); if (oc->oformat->flags & AVFMT_RAWPICTURE) { @@ -1587,7 +1692,9 @@ bool FFmpegWriter::write_video_packet(std::shared_ptr frame, AVFrame* fra // Deallocate packet AV_FREE_PACKET(&pkt); - } else { + } else +#endif + { AVPacket pkt; av_init_packet(&pkt); @@ -1710,11 +1817,16 @@ void FFmpegWriter::OutputStreamInfo() // Init a collection of software rescalers (thread safe) void FFmpegWriter::InitScalers(int source_width, int source_height) { + int scale_mode = SWS_FAST_BILINEAR; + if (openshot::Settings::Instance()->HIGH_QUALITY_SCALING) { + scale_mode = SWS_LANCZOS; + } + // Init software rescalers vector (many of them, one for each thread) for (int x = 0; x < 
num_of_rescalers; x++) { // Init the software scaler from FFMpeg - img_convert_ctx = sws_getContext(source_width, source_height, PIX_FMT_RGBA, info.width, info.height, AV_GET_CODEC_PIXEL_FORMAT(video_st, video_st->codec), SWS_BILINEAR, NULL, NULL, NULL); + img_convert_ctx = sws_getContext(source_width, source_height, PIX_FMT_RGBA, info.width, info.height, AV_GET_CODEC_PIXEL_FORMAT(video_st, video_st->codec), scale_mode, NULL, NULL, NULL); // Add rescaler to vector image_rescalers.push_back(img_convert_ctx); diff --git a/src/Frame.cpp b/src/Frame.cpp index 16a0d267..24b653a9 100644 --- a/src/Frame.cpp +++ b/src/Frame.cpp @@ -512,6 +512,8 @@ int Frame::GetSamplesPerFrame(int64_t number, Fraction fps, int sample_rate, int // Subtract the previous frame's total samples with this frame's total samples. Not all sample rates can // be evenly divided into frames, so each frame can have have different # of samples. int samples_per_frame = round(total_samples - previous_samples); + if (samples_per_frame < 0) + samples_per_frame = 0; return samples_per_frame; } @@ -951,11 +953,15 @@ void Frame::Play() return; AudioDeviceManager deviceManager; - deviceManager.initialise (0, /* number of input channels */ + String error = deviceManager.initialise (0, /* number of input channels */ 2, /* number of output channels */ 0, /* no XML settings.. 
*/ true /* select default device on failure */); - //deviceManager.playTestSound(); + + // Output error (if any) + if (error.isNotEmpty()) { + cout << "Error on initialise(): " << error.toStdString() << endl; + } AudioSourcePlayer audioSourcePlayer; deviceManager.addAudioCallback (&audioSourcePlayer); diff --git a/src/FrameMapper.cpp b/src/FrameMapper.cpp index f49cbc4d..73b7bb22 100644 --- a/src/FrameMapper.cpp +++ b/src/FrameMapper.cpp @@ -54,9 +54,6 @@ FrameMapper::FrameMapper(ReaderBase *reader, Fraction target, PulldownType targe // Adjust cache size based on size of frame and audio final_cache.SetMaxBytesFromInfo(OPEN_MP_NUM_PROCESSORS * 2, info.width, info.height, info.sample_rate, info.channels); - - // init mapping between original and target frames - Init(); } // Destructor @@ -205,22 +202,23 @@ void FrameMapper::Init() } } else { - // Map the remaining framerates using a simple Keyframe curve - // Calculate the difference (to be used as a multiplier) + // Map the remaining framerates using a linear algorithm double rate_diff = target.ToDouble() / original.ToDouble(); int64_t new_length = reader->info.video_length * rate_diff; - // Build curve for framerate mapping - Keyframe rate_curve; - rate_curve.AddPoint(1, 1, LINEAR); - rate_curve.AddPoint(new_length, reader->info.video_length, LINEAR); + // Calculate the value difference + double value_increment = (reader->info.video_length + 1) / (double) (new_length); // Loop through curve, and build list of frames + double original_frame_num = 1.0f; for (int64_t frame_num = 1; frame_num <= new_length; frame_num++) { // Add 2 fields per frame - AddField(rate_curve.GetInt(frame_num)); - AddField(rate_curve.GetInt(frame_num)); + AddField(round(original_frame_num)); + AddField(round(original_frame_num)); + + // Increment original frame number + original_frame_num += value_increment; } } @@ -310,6 +308,11 @@ void FrameMapper::Init() MappedFrame FrameMapper::GetMappedFrame(int64_t TargetFrameNumber) { + // Check if 
mappings are dirty (and need to be recalculated) + if (is_dirty) + // Recalculate mappings + Init(); + // Ignore mapping on single image readers if (info.has_video and !info.has_audio and info.has_single_image) { // Return the same number @@ -352,9 +355,6 @@ std::shared_ptr FrameMapper::GetOrCreateFrame(int64_t number) // Debug output ZmqLogger::Instance()->AppendDebugMethod("FrameMapper::GetOrCreateFrame (from reader)", "number", number, "samples_in_frame", samples_in_frame, "", -1, "", -1, "", -1, "", -1); - // Set max image size (used for performance optimization) - reader->SetMaxSize(max_width, max_height); - // Attempt to get a frame (but this could fail if a reader has just been closed) new_frame = reader->GetFrame(number); @@ -376,6 +376,7 @@ std::shared_ptr FrameMapper::GetOrCreateFrame(int64_t number) new_frame = std::make_shared(number, info.width, info.height, "#000000", samples_in_frame, reader->info.channels); new_frame->SampleRate(reader->info.sample_rate); new_frame->ChannelsLayout(info.channel_layout); + new_frame->AddAudioSilence(samples_in_frame); return new_frame; } @@ -650,8 +651,8 @@ void FrameMapper::Close() // Deallocate resample buffer if (avr) { - avresample_close(avr); - avresample_free(&avr); + SWR_CLOSE(avr); + SWR_FREE(&avr); avr = NULL; } } @@ -741,18 +742,20 @@ void FrameMapper::ChangeMapping(Fraction target_fps, PulldownType target_pulldow // Deallocate resample buffer if (avr) { - avresample_close(avr); - avresample_free(&avr); + SWR_CLOSE(avr); + SWR_FREE(&avr); avr = NULL; } - - // Re-init mapping - Init(); } // Resample audio and map channels (if needed) void FrameMapper::ResampleMappedAudio(std::shared_ptr frame, int64_t original_frame_number) { + // Check if mappings are dirty (and need to be recalculated) + if (is_dirty) + // Recalculate mappings + Init(); + // Init audio buffers / variables int total_frame_samples = 0; int channels_in_frame = frame->GetAudioChannelsCount(); @@ -817,7 +820,7 @@ void 
FrameMapper::ResampleMappedAudio(std::shared_ptr frame, int64_t orig // setup resample context if (!avr) { - avr = avresample_alloc_context(); + avr = SWR_ALLOC(); av_opt_set_int(avr, "in_channel_layout", channel_layout_in_frame, 0); av_opt_set_int(avr, "out_channel_layout", info.channel_layout, 0); av_opt_set_int(avr, "in_sample_fmt", AV_SAMPLE_FMT_S16, 0); @@ -826,11 +829,11 @@ void FrameMapper::ResampleMappedAudio(std::shared_ptr frame, int64_t orig av_opt_set_int(avr, "out_sample_rate", info.sample_rate, 0); av_opt_set_int(avr, "in_channels", channels_in_frame, 0); av_opt_set_int(avr, "out_channels", info.channels, 0); - avresample_open(avr); + SWR_INIT(avr); } // Convert audio samples - nb_samples = avresample_convert(avr, // audio resample context + nb_samples = SWR_CONVERT(avr, // audio resample context audio_converted->data, // output data pointers audio_converted->linesize[0], // output plane size, in bytes. (0 if unknown) audio_converted->nb_samples, // maximum number of samples that the output buffer can hold diff --git a/src/KeyFrame.cpp b/src/KeyFrame.cpp index 05a8a769..025484a3 100644 --- a/src/KeyFrame.cpp +++ b/src/KeyFrame.cpp @@ -296,17 +296,48 @@ bool Keyframe::IsIncreasing(int index) Process(); // Is index a valid point? 
- if (index >= 0 && index < Values.size()) - // Return value - return long(round(Values[index].IsIncreasing())); - else if (index < 0 && Values.size() > 0) - // Return the minimum value - return long(round(Values[0].IsIncreasing())); - else if (index >= Values.size() && Values.size() > 0) - // return the maximum value - return long(round(Values[Values.size() - 1].IsIncreasing())); + if (index >= 1 && (index + 1) < Values.size()) { + int64_t current_value = GetLong(index); + int64_t previous_value = 0; + int64_t next_value = 0; + int64_t previous_repeats = 0; + int64_t next_repeats = 0; + + // Loop backwards and look for the next unique value + for (vector::iterator backwards_it = Values.begin() + index; backwards_it != Values.begin(); backwards_it--) { + previous_value = long(round((*backwards_it).Y)); + if (previous_value == current_value) { + // Found same value + previous_repeats++; + } else { + // Found non repeating value, no more repeats found + break; + } + } + + // Loop forwards and look for the next unique value + for (vector::iterator forwards_it = Values.begin() + (index + 1); forwards_it != Values.end(); forwards_it++) { + next_value = long(round((*forwards_it).Y)); + if (next_value == current_value) { + // Found same value + next_repeats++; + } else { + // Found non repeating value, no more repeats found + break; + } + } + + if (current_value < next_value) { + // Increasing + return true; + } + else if (current_value >= next_value) { + // Decreasing + return false; + } + } else - // return the default direction of most curves (i.e. increasing is true) + // return default true (since most curves increase) return true; } @@ -385,6 +416,7 @@ void Keyframe::SetJsonValue(Json::Value root) { } // Get the fraction that represents how many times this value is repeated in the curve +// This is depreciated and will be removed soon. 
Fraction Keyframe::GetRepeatFraction(int64_t index) { // Check if it needs to be processed @@ -392,17 +424,42 @@ Fraction Keyframe::GetRepeatFraction(int64_t index) Process(); // Is index a valid point? - if (index >= 0 && index < Values.size()) - // Return value - return Values[index].Repeat(); - else if (index < 0 && Values.size() > 0) - // Return the minimum value - return Values[0].Repeat(); - else if (index >= Values.size() && Values.size() > 0) - // return the maximum value - return Values[Values.size() - 1].Repeat(); + if (index >= 1 && (index + 1) < Values.size()) { + int64_t current_value = GetLong(index); + int64_t previous_value = 0; + int64_t next_value = 0; + int64_t previous_repeats = 0; + int64_t next_repeats = 0; + + // Loop backwards and look for the next unique value + for (vector::iterator backwards_it = Values.begin() + index; backwards_it != Values.begin(); backwards_it--) { + previous_value = long(round((*backwards_it).Y)); + if (previous_value == current_value) { + // Found same value + previous_repeats++; + } else { + // Found non repeating value, no more repeats found + break; + } + } + + // Loop forwards and look for the next unique value + for (vector::iterator forwards_it = Values.begin() + (index + 1); forwards_it != Values.end(); forwards_it++) { + next_value = long(round((*forwards_it).Y)); + if (next_value == current_value) { + // Found same value + next_repeats++; + } else { + // Found non repeating value, no more repeats found + break; + } + } + + int64_t total_repeats = previous_repeats + next_repeats; + return Fraction(previous_repeats, total_repeats); + } else - // return a blank coordinate (0,0) + // return a blank coordinate return Fraction(1,1); } @@ -414,17 +471,48 @@ double Keyframe::GetDelta(int64_t index) Process(); // Is index a valid point? 
- if (index >= 0 && index < Values.size()) - // Return value - return Values[index].Delta(); - else if (index < 0 && Values.size() > 0) - // Return the minimum value - return Values[0].Delta(); - else if (index >= Values.size() && Values.size() > 0) - // return the maximum value - return Values[Values.size() - 1].Delta(); + if (index >= 1 && (index + 1) < Values.size()) { + int64_t current_value = GetLong(index); + int64_t previous_value = 0; + int64_t next_value = 0; + int64_t previous_repeats = 0; + int64_t next_repeats = 0; + + // Loop backwards and look for the next unique value + for (vector::iterator backwards_it = Values.begin() + index; backwards_it != Values.begin(); backwards_it--) { + previous_value = long(round((*backwards_it).Y)); + if (previous_value == current_value) { + // Found same value + previous_repeats++; + } else { + // Found non repeating value, no more repeats found + break; + } + } + + // Loop forwards and look for the next unique value + for (vector::iterator forwards_it = Values.begin() + (index + 1); forwards_it != Values.end(); forwards_it++) { + next_value = long(round((*forwards_it).Y)); + if (next_value == current_value) { + // Found same value + next_repeats++; + } else { + // Found non repeating value, no more repeats found + break; + } + } + + // Check for matching previous value (special case for 1st element) + if (current_value == previous_value) + previous_value = 0; + + if (previous_repeats == 1) + return current_value - previous_value; + else + return 0.0; + } else - // return a blank coordinate (0,0) + // return a blank coordinate return 0.0; } @@ -529,7 +617,7 @@ void Keyframe::PrintValues() { for (vector::iterator it = Values.begin() + 1; it != Values.end(); it++) { Coordinate c = *it; - cout << long(round(c.X)) << "\t" << c.Y << "\t" << c.IsIncreasing() << "\t" << c.Repeat().num << "\t" << c.Repeat().den << "\t" << c.Delta() << endl; + cout << long(round(c.X)) << "\t" << c.Y << "\t" << IsIncreasing(c.X) << "\t" << 
GetRepeatFraction(c.X).num << "\t" << GetRepeatFraction(c.X).den << "\t" << GetDelta(c.X) << endl; } } @@ -567,69 +655,6 @@ void Keyframe::Process() { // process segment p1,p2 ProcessSegment(x, p1, p2); } - - // Loop through each Value, and set the direction of the coordinate. This is used - // when time mapping, to determine what direction the audio waveforms play. - bool increasing = true; - int repeat_count = 1; - int64_t last_value = 0; - for (vector::iterator it = Values.begin() + 1; it != Values.end(); it++) { - int current_value = long(round((*it).Y)); - int64_t next_value = long(round((*it).Y)); - int64_t prev_value = long(round((*it).Y)); - if (it + 1 != Values.end()) - next_value = long(round((*(it + 1)).Y)); - if (it - 1 >= Values.begin()) - prev_value = long(round((*(it - 1)).Y)); - - // Loop forward and look for the next unique value (to determine direction) - for (vector::iterator direction_it = it + 1; direction_it != Values.end(); direction_it++) { - int64_t next = long(round((*direction_it).Y)); - - // Detect direction - if (current_value < next) - { - increasing = true; - break; - } - else if (current_value > next) - { - increasing = false; - break; - } - } - - // Set direction - (*it).IsIncreasing(increasing); - - // Detect repeated Y value - if (current_value == last_value) - // repeated, so increment count - repeat_count++; - else - // reset repeat counter - repeat_count = 1; - - // Detect how many 'more' times it's repeated - int additional_repeats = 0; - for (vector::iterator repeat_it = it + 1; repeat_it != Values.end(); repeat_it++) { - int64_t next = long(round((*repeat_it).Y)); - if (next == current_value) - // repeated, so increment count - additional_repeats++; - else - break; // stop looping - } - - // Set repeat fraction - (*it).Repeat(Fraction(repeat_count, repeat_count + additional_repeats)); - - // Set delta (i.e. 
different from previous unique Y value) - (*it).Delta(current_value - last_value); - - // track the last value - last_value = current_value; - } } // reset flag diff --git a/src/Qt/AudioPlaybackThread.cpp b/src/Qt/AudioPlaybackThread.cpp index fac2e3fc..c64bd688 100644 --- a/src/Qt/AudioPlaybackThread.cpp +++ b/src/Qt/AudioPlaybackThread.cpp @@ -42,11 +42,18 @@ namespace openshot m_pInstance = new AudioDeviceManagerSingleton; // Initialize audio device only 1 time - m_pInstance->audioDeviceManager.initialise ( + String error = m_pInstance->audioDeviceManager.initialise ( 0, /* number of input channels */ numChannels, /* number of output channels */ 0, /* no XML settings.. */ true /* select default device on failure */); + + // Persist any errors detected + if (error.isNotEmpty()) { + m_pInstance->initialise_error = error.toStdString(); + } else { + m_pInstance->initialise_error = ""; + } } return m_pInstance; diff --git a/src/Qt/PlayerPrivate.cpp b/src/Qt/PlayerPrivate.cpp index 63d20e99..1839f1eb 100644 --- a/src/Qt/PlayerPrivate.cpp +++ b/src/Qt/PlayerPrivate.cpp @@ -149,7 +149,7 @@ namespace openshot else { // Update cache on which frame was retrieved - videoCache->current_display_frame = video_position; + videoCache->setCurrentFramePosition(video_position); // return frame from reader return reader->GetFrame(video_position); diff --git a/src/Qt/VideoCacheThread.cpp b/src/Qt/VideoCacheThread.cpp index ed224de5..208fcaab 100644 --- a/src/Qt/VideoCacheThread.cpp +++ b/src/Qt/VideoCacheThread.cpp @@ -102,6 +102,12 @@ namespace openshot // Ignore out of bounds frame exceptions } + // Is cache position behind current display frame? 
+ if (position < current_display_frame) { + // Jump ahead + position = current_display_frame; + } + // Increment frame number position++; } diff --git a/src/QtImageReader.cpp b/src/QtImageReader.cpp index 764ef6ed..c500d221 100644 --- a/src/QtImageReader.cpp +++ b/src/QtImageReader.cpp @@ -26,6 +26,18 @@ */ #include "../include/QtImageReader.h" +#include "../include/Settings.h" +#include "../include/Clip.h" +#include "../include/CacheMemory.h" +#include +#include +#include + +#if USE_RESVG == 1 + // If defined and found in CMake, utilize the libresvg for parsing + // SVG files and rasterizing them to QImages. + #include "ResvgQt.h" +#endif using namespace openshot; @@ -51,17 +63,46 @@ void QtImageReader::Open() // Open reader if not already open if (!is_open) { - // Attempt to open file + bool success = true; image = std::shared_ptr(new QImage()); - bool success = image->load(QString::fromStdString(path)); + +#if USE_RESVG == 1 + // If defined and found in CMake, utilize the libresvg for parsing + // SVG files and rasterizing them to QImages. 
+ // Only use resvg for files ending in '.svg' or '.svgz' + if (path.find(".svg") != std::string::npos || + path.find(".svgz") != std::string::npos) { + + ResvgRenderer renderer(QString::fromStdString(path)); + if (!renderer.isValid()) { + success = false; + } else { + + image = std::shared_ptr(new QImage(renderer.defaultSize(), QImage::Format_RGBA8888)); + image->fill(Qt::transparent); + + QPainter p(image.get()); + renderer.render(&p); + p.end(); + } + + } else { + // Attempt to open file (old method) + success = image->load(QString::fromStdString(path)); + if (success) + image = std::shared_ptr(new QImage(image->convertToFormat(QImage::Format_RGBA8888))); + } +#else + // Attempt to open file using Qt's build in image processing capabilities + success = image->load(QString::fromStdString(path)); + if (success) + image = std::shared_ptr(new QImage(image->convertToFormat(QImage::Format_RGBA8888))); +#endif if (!success) // raise exception throw InvalidFile("File could not be opened.", path); - // Set pixel format - image = std::shared_ptr(new QImage(image->convertToFormat(QImage::Format_RGBA8888))); - // Update image properties info.has_audio = false; info.has_video = true; @@ -111,21 +152,6 @@ void QtImageReader::Close() } } -void QtImageReader::SetMaxSize(int width, int height) -{ - // Determine if we need to scale the image (for performance reasons) - // The timeline passes its size to the clips, which pass their size to the readers, and eventually here - // A max_width/max_height = 0 means do not scale (probably because we are scaling the image larger than 100%) - - // Remove cache that is no longer valid (if needed) - if (cached_image && (cached_image->width() != width && cached_image->height() != height)) - // Expire this cache - cached_image.reset(); - - max_width = width; - max_height = height; -} - // Get an openshot::Frame object for a specific frame number of this reader. 
std::shared_ptr QtImageReader::GetFrame(int64_t requested_frame) { @@ -133,39 +159,96 @@ std::shared_ptr QtImageReader::GetFrame(int64_t requested_frame) if (!is_open) throw ReaderClosed("The Image is closed. Call Open() before calling this method.", path); - if (max_width != 0 && max_height != 0 && max_width < info.width && max_height < info.height) - { - // Scale image smaller (or use a previous scaled image) - if (!cached_image) { - // Create a scoped lock, allowing only a single thread to run the following code at one time - const GenericScopedLock lock(getFrameCriticalSection); + // Create a scoped lock, allowing only a single thread to run the following code at one time + const GenericScopedLock lock(getFrameCriticalSection); + // Determine the max size of this source image (based on the timeline's size, the scaling mode, + // and the scaling keyframes). This is a performance improvement, to keep the images as small as possible, + // without losing quality. NOTE: We cannot go smaller than the timeline itself, or the add_layer timeline + // method will scale it back to timeline size before scaling it smaller again. This needs to be fixed in + // the future. 
+ int max_width = Settings::Instance()->MAX_WIDTH; + if (max_width <= 0) + max_width = info.width; + int max_height = Settings::Instance()->MAX_HEIGHT; + if (max_height <= 0) + max_height = info.height; + + Clip* parent = (Clip*) GetClip(); + if (parent) { + if (parent->scale == SCALE_FIT || parent->scale == SCALE_STRETCH) { + // Best fit or Stretch scaling (based on max timeline size * scaling keyframes) + float max_scale_x = parent->scale_x.GetMaxPoint().co.Y; + float max_scale_y = parent->scale_y.GetMaxPoint().co.Y; + max_width = max(float(max_width), max_width * max_scale_x); + max_height = max(float(max_height), max_height * max_scale_y); + + } else if (parent->scale == SCALE_CROP) { + // Cropping scale mode (based on max timeline size * cropped size * scaling keyframes) + float max_scale_x = parent->scale_x.GetMaxPoint().co.Y; + float max_scale_y = parent->scale_y.GetMaxPoint().co.Y; + QSize width_size(max_width * max_scale_x, + round(max_width / (float(info.width) / float(info.height)))); + QSize height_size(round(max_height / (float(info.height) / float(info.width))), + max_height * max_scale_y); + // respect aspect ratio + if (width_size.width() >= max_width && width_size.height() >= max_height) { + max_width = max(max_width, width_size.width()); + max_height = max(max_height, width_size.height()); + } + else { + max_width = max(max_width, height_size.width()); + max_height = max(max_height, height_size.height()); + } + + } else { + // No scaling, use original image size (slower) + max_width = info.width; + max_height = info.height; + } + } + + // Scale image smaller (or use a previous scaled image) + if (!cached_image || (cached_image && cached_image->width() != max_width || cached_image->height() != max_height)) { + +#if USE_RESVG == 1 + // If defined and found in CMake, utilize the libresvg for parsing + // SVG files and rasterizing them to QImages. 
+ // Only use resvg for files ending in '.svg' or '.svgz' + if (path.find(".svg") != std::string::npos || + path.find(".svgz") != std::string::npos) { + ResvgRenderer renderer(QString::fromStdString(path)); + if (renderer.isValid()) { + + cached_image = std::shared_ptr(new QImage(QSize(max_width, max_height), QImage::Format_RGBA8888)); + cached_image->fill(Qt::transparent); + + QPainter p(cached_image.get()); + renderer.render(&p); + p.end(); + } + } else { // We need to resize the original image to a smaller image (for performance reasons) // Only do this once, to prevent tons of unneeded scaling operations cached_image = std::shared_ptr(new QImage(image->scaled(max_width, max_height, Qt::KeepAspectRatio, Qt::SmoothTransformation))); cached_image = std::shared_ptr(new QImage(cached_image->convertToFormat(QImage::Format_RGBA8888))); } - - // Create or get frame object - std::shared_ptr image_frame(new Frame(requested_frame, cached_image->width(), cached_image->height(), "#000000", Frame::GetSamplesPerFrame(requested_frame, info.fps, info.sample_rate, info.channels), info.channels)); - - // Add Image data to frame - image_frame->AddImage(cached_image); - - // return frame object - return image_frame; - - } else { - // Use original image (higher quality but slower) - // Create or get frame object - std::shared_ptr image_frame(new Frame(requested_frame, info.width, info.height, "#000000", Frame::GetSamplesPerFrame(requested_frame, info.fps, info.sample_rate, info.channels), info.channels)); - - // Add Image data to frame - image_frame->AddImage(image); - - // return frame object - return image_frame; +#else + // We need to resize the original image to a smaller image (for performance reasons) + // Only do this once, to prevent tons of unneeded scaling operations + cached_image = std::shared_ptr(new QImage(image->scaled(max_width, max_height, Qt::KeepAspectRatio, Qt::SmoothTransformation))); + cached_image = std::shared_ptr(new 
QImage(cached_image->convertToFormat(QImage::Format_RGBA8888))); +#endif } + + // Create or get frame object + std::shared_ptr image_frame(new Frame(requested_frame, cached_image->width(), cached_image->height(), "#000000", Frame::GetSamplesPerFrame(requested_frame, info.fps, info.sample_rate, info.channels), info.channels)); + + // Add Image data to frame + image_frame->AddImage(cached_image); + + // return frame object + return image_frame; } // Generate JSON string of this object diff --git a/src/QtPlayer.cpp b/src/QtPlayer.cpp index 028a9b70..4f53c7ca 100644 --- a/src/QtPlayer.cpp +++ b/src/QtPlayer.cpp @@ -59,12 +59,21 @@ void QtPlayer::CloseAudioDevice() AudioDeviceManagerSingleton::Instance(0)->CloseAudioDevice(); } +// Return any error string during initialization +string QtPlayer::GetError() { + if (reader && threads_started) { + // Get error from audio thread (if any) + return p->audioPlayback->getError(); + } else { + return ""; + } +} + void QtPlayer::SetSource(const std::string &source) { FFmpegReader *ffreader = new FFmpegReader(source); ffreader->DisplayInfo(); - //reader = new FrameMapper(ffreader, ffreader->info.fps, PULLDOWN_NONE, ffreader->info.sample_rate, ffreader->info.channels, ffreader->info.channel_layout); reader = new Timeline(ffreader->info.width, ffreader->info.height, ffreader->info.fps, ffreader->info.sample_rate, ffreader->info.channels, ffreader->info.channel_layout); Clip *c = new Clip(source); @@ -72,9 +81,6 @@ void QtPlayer::SetSource(const std::string &source) tm->AddClip(c); tm->Open(); -// ZmqLogger::Instance()->Path("/home/jonathan/.openshot_qt/libopenshot.log"); -// ZmqLogger::Instance()->Enable(true); - // Set the reader Reader(reader); } diff --git a/src/ReaderBase.cpp b/src/ReaderBase.cpp index 5de6fdff..f2607cfd 100644 --- a/src/ReaderBase.cpp +++ b/src/ReaderBase.cpp @@ -58,8 +58,9 @@ ReaderBase::ReaderBase() info.channel_layout = LAYOUT_MONO; info.audio_stream_index = -1; info.audio_timebase = Fraction(); - max_width = 
0; - max_height = 0; + + // Init parent clip + parent = NULL; } // Display file information @@ -246,3 +247,13 @@ void ReaderBase::SetJsonValue(Json::Value root) { } } } + +/// Parent clip object of this reader (which can be unparented and NULL) +ClipBase* ReaderBase::GetClip() { + return parent; +} + +/// Set parent clip object of this reader +void ReaderBase::SetClip(ClipBase* clip) { + parent = clip; +} diff --git a/src/Settings.cpp b/src/Settings.cpp new file mode 100644 index 00000000..b13f0f5a --- /dev/null +++ b/src/Settings.cpp @@ -0,0 +1,52 @@ +/** + * @file + * @brief Source file for global Settings class + * @author Jonathan Thomas + * + * @section LICENSE + * + * Copyright (c) 2008-2014 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . 
+ */ + +#include "../include/Settings.h" + +using namespace std; +using namespace openshot; + + +// Global reference to logger +Settings *Settings::m_pInstance = NULL; + +// Create or Get an instance of the logger singleton +Settings *Settings::Instance() +{ + if (!m_pInstance) { + // Create the actual instance of logger only once + m_pInstance = new Settings; + m_pInstance->HARDWARE_DECODE = false; + m_pInstance->HARDWARE_ENCODE = false; + m_pInstance->HIGH_QUALITY_SCALING = false; + m_pInstance->MAX_WIDTH = 0; + m_pInstance->MAX_HEIGHT = 0; + m_pInstance->WAIT_FOR_VIDEO_PROCESSING_TASK = false; + } + + return m_pInstance; +} diff --git a/src/Timeline.cpp b/src/Timeline.cpp index d042aeeb..384273bd 100644 --- a/src/Timeline.cpp +++ b/src/Timeline.cpp @@ -60,7 +60,7 @@ Timeline::Timeline(int width, int height, Fraction fps, int sample_rate, int cha info.video_length = info.fps.ToFloat() * info.duration; // Init max image size - SetMaxSize(info.width, info.height); + SetMaxSize(info.width, info.height); // Init cache final_cache = new CacheMemory(); @@ -213,9 +213,6 @@ std::shared_ptr Timeline::GetOrCreateFrame(Clip* clip, int64_t number) // Debug output ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetOrCreateFrame (from reader)", "number", number, "samples_in_frame", samples_in_frame, "", -1, "", -1, "", -1, "", -1); - // Set max image size (used for performance optimization) - clip->SetMaxSize(info.width, info.height); - // Attempt to get a frame (but this could fail if a reader has just been closed) #pragma omp critical (T_GetOtCreateFrame) new_frame = std::shared_ptr(clip->GetFrame(number)); @@ -235,7 +232,7 @@ std::shared_ptr Timeline::GetOrCreateFrame(Clip* clip, int64_t number) ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetOrCreateFrame (create blank)", "number", number, "samples_in_frame", samples_in_frame, "", -1, "", -1, "", -1, "", -1); // Create blank frame - new_frame = std::make_shared(number, max_width, max_height, "#000000", 
samples_in_frame, info.channels); + new_frame = std::make_shared(number, Settings::Instance()->MAX_WIDTH, Settings::Instance()->MAX_HEIGHT, "#000000", samples_in_frame, info.channels); #pragma omp critical (T_GetOtCreateFrame) { new_frame->SampleRate(info.sample_rate); @@ -274,13 +271,14 @@ void Timeline::add_layer(std::shared_ptr new_frame, Clip* source_clip, in // Generate Waveform Dynamically (the size of the timeline) std::shared_ptr source_image; #pragma omp critical (T_addLayer) - source_image = source_frame->GetWaveform(max_width, max_height, red, green, blue, alpha); + source_image = source_frame->GetWaveform(Settings::Instance()->MAX_WIDTH, Settings::Instance()->MAX_HEIGHT, red, green, blue, alpha); source_frame->AddImage(std::shared_ptr(source_image)); } /* Apply effects to the source frame (if any). If multiple clips are overlapping, only process the * effects on the top clip. */ if (is_top_clip && source_frame) + #pragma omp critical (T_addLayer) source_frame = apply_effects(source_frame, timeline_frame_number, source_clip->Layer()); // Declare an image to hold the source frame's image @@ -386,35 +384,48 @@ void Timeline::add_layer(std::shared_ptr new_frame, Clip* source_clip, in QSize source_size = source_image->size(); switch (source_clip->scale) { - case (SCALE_FIT): - // keep aspect ratio - source_size.scale(max_width, max_height, Qt::KeepAspectRatio); + case (SCALE_FIT): { + // keep aspect ratio + source_size.scale(Settings::Instance()->MAX_WIDTH, Settings::Instance()->MAX_HEIGHT, Qt::KeepAspectRatio); - // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Scale: SCALE_FIT)", "source_frame->number", source_frame->number, "source_width", source_size.width(), "source_height", source_size.height(), "", -1, "", -1, "", -1); - break; + // Debug output + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Scale: SCALE_FIT)", "source_frame->number", source_frame->number, "source_width", source_size.width(), 
"source_height", source_size.height(), "", -1, "", -1, "", -1); + break; + } + case (SCALE_STRETCH): { + // ignore aspect ratio + source_size.scale(Settings::Instance()->MAX_WIDTH, Settings::Instance()->MAX_HEIGHT, Qt::IgnoreAspectRatio); - case (SCALE_STRETCH): - // ignore aspect ratio - source_size.scale(max_width, max_height, Qt::IgnoreAspectRatio); + // Debug output + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Scale: SCALE_STRETCH)", "source_frame->number", source_frame->number, "source_width", source_size.width(), "source_height", source_size.height(), "", -1, "", -1, "", -1); + break; + } + case (SCALE_CROP): { + QSize width_size(Settings::Instance()->MAX_WIDTH, round(Settings::Instance()->MAX_WIDTH / (float(source_size.width()) / float(source_size.height())))); + QSize height_size(round(Settings::Instance()->MAX_HEIGHT / (float(source_size.height()) / float(source_size.width()))), Settings::Instance()->MAX_HEIGHT); - // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Scale: SCALE_STRETCH)", "source_frame->number", source_frame->number, "source_width", source_size.width(), "source_height", source_size.height(), "", -1, "", -1, "", -1); - break; + // respect aspect ratio + if (width_size.width() >= Settings::Instance()->MAX_WIDTH && width_size.height() >= Settings::Instance()->MAX_HEIGHT) + source_size.scale(width_size.width(), width_size.height(), Qt::KeepAspectRatio); + else + source_size.scale(height_size.width(), height_size.height(), Qt::KeepAspectRatio); - case (SCALE_CROP): - QSize width_size(max_width, round(max_width / (float(source_size.width()) / float(source_size.height())))); - QSize height_size(round(max_height / (float(source_size.height()) / float(source_size.width()))), max_height); + // Debug output + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Scale: SCALE_CROP)", "source_frame->number", source_frame->number, "source_width", source_size.width(), "source_height", 
source_size.height(), "", -1, "", -1, "", -1); + break; + } + case (SCALE_NONE): { + // Calculate ratio of source size to project size + // Even with no scaling, previews need to be adjusted correctly + // (otherwise NONE scaling draws the frame image outside of the preview) + float source_width_ratio = source_size.width() / float(info.width); + float source_height_ratio = source_size.height() / float(info.height); + source_size.scale(Settings::Instance()->MAX_WIDTH * source_width_ratio, Settings::Instance()->MAX_HEIGHT * source_height_ratio, Qt::KeepAspectRatio); - // respect aspect ratio - if (width_size.width() >= max_width && width_size.height() >= max_height) - source_size.scale(width_size.width(), width_size.height(), Qt::KeepAspectRatio); - else - source_size.scale(height_size.width(), height_size.height(), Qt::KeepAspectRatio); - - // Debug output - ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Scale: SCALE_CROP)", "source_frame->number", source_frame->number, "source_width", source_size.width(), "source_height", source_size.height(), "", -1, "", -1, "", -1); - break; + // Debug output + ZmqLogger::Instance()->AppendDebugMethod("Timeline::add_layer (Scale: SCALE_NONE)", "source_frame->number", source_frame->number, "source_width", source_size.width(), "source_height", source_size.height(), "", -1, "", -1, "", -1); + break; + } } /* GRAVITY LOCATION - Initialize X & Y to the correct values (before applying location curves) */ @@ -430,32 +441,32 @@ void Timeline::add_layer(std::shared_ptr new_frame, Clip* source_clip, in switch (source_clip->gravity) { case (GRAVITY_TOP): - x = (max_width - scaled_source_width) / 2.0; // center + x = (Settings::Instance()->MAX_WIDTH - scaled_source_width) / 2.0; // center break; case (GRAVITY_TOP_RIGHT): - x = max_width - scaled_source_width; // right + x = Settings::Instance()->MAX_WIDTH - scaled_source_width; // right break; case (GRAVITY_LEFT): - y = (max_height - scaled_source_height) / 2.0; // center + y 
= (Settings::Instance()->MAX_HEIGHT - scaled_source_height) / 2.0; // center break; case (GRAVITY_CENTER): - x = (max_width - scaled_source_width) / 2.0; // center - y = (max_height - scaled_source_height) / 2.0; // center + x = (Settings::Instance()->MAX_WIDTH - scaled_source_width) / 2.0; // center + y = (Settings::Instance()->MAX_HEIGHT - scaled_source_height) / 2.0; // center break; case (GRAVITY_RIGHT): - x = max_width - scaled_source_width; // right - y = (max_height - scaled_source_height) / 2.0; // center + x = Settings::Instance()->MAX_WIDTH - scaled_source_width; // right + y = (Settings::Instance()->MAX_HEIGHT - scaled_source_height) / 2.0; // center break; case (GRAVITY_BOTTOM_LEFT): - y = (max_height - scaled_source_height); // bottom + y = (Settings::Instance()->MAX_HEIGHT - scaled_source_height); // bottom break; case (GRAVITY_BOTTOM): - x = (max_width - scaled_source_width) / 2.0; // center - y = (max_height - scaled_source_height); // bottom + x = (Settings::Instance()->MAX_WIDTH - scaled_source_width) / 2.0; // center + y = (Settings::Instance()->MAX_HEIGHT - scaled_source_height); // bottom break; case (GRAVITY_BOTTOM_RIGHT): - x = max_width - scaled_source_width; // right - y = (max_height - scaled_source_height); // bottom + x = Settings::Instance()->MAX_WIDTH - scaled_source_width; // right + y = (Settings::Instance()->MAX_HEIGHT - scaled_source_height); // bottom break; } @@ -464,8 +475,8 @@ void Timeline::add_layer(std::shared_ptr new_frame, Clip* source_clip, in /* LOCATION, ROTATION, AND SCALE */ float r = source_clip->rotation.GetValue(clip_frame_number); // rotate in degrees - x += (max_width * source_clip->location_x.GetValue(clip_frame_number)); // move in percentage of final width - y += (max_height * source_clip->location_y.GetValue(clip_frame_number)); // move in percentage of final height + x += (Settings::Instance()->MAX_WIDTH * source_clip->location_x.GetValue(clip_frame_number)); // move in percentage of final width + y += 
(Settings::Instance()->MAX_HEIGHT * source_clip->location_y.GetValue(clip_frame_number)); // move in percentage of final height float shear_x = source_clip->shear_x.GetValue(clip_frame_number); float shear_y = source_clip->shear_y.GetValue(clip_frame_number); @@ -576,8 +587,13 @@ void Timeline::update_open_clips(Clip *clip, bool does_clip_intersect) // Add clip to 'opened' list, because it's missing open_clips[clip] = clip; - // Open the clip - clip->Open(); + try { + // Open the clip + clip->Open(); + + } catch (const InvalidFile & e) { + // ... + } } // Debug output @@ -717,7 +733,7 @@ std::shared_ptr Timeline::GetFrame(int64_t requested_frame) #pragma omp parallel { // Loop through all requested frames - #pragma omp for ordered firstprivate(nearby_clips, requested_frame, minimum_frames) + #pragma omp for ordered firstprivate(nearby_clips, requested_frame, minimum_frames) schedule(static,1) for (int64_t frame_number = requested_frame; frame_number < requested_frame + minimum_frames; frame_number++) { // Debug output @@ -727,7 +743,7 @@ std::shared_ptr Timeline::GetFrame(int64_t requested_frame) int samples_in_frame = Frame::GetSamplesPerFrame(frame_number, info.fps, info.sample_rate, info.channels); // Create blank frame (which will become the requested frame) - std::shared_ptr new_frame(std::make_shared(frame_number, max_width, max_height, "#000000", samples_in_frame, info.channels)); + std::shared_ptr new_frame(std::make_shared(frame_number, Settings::Instance()->MAX_WIDTH, Settings::Instance()->MAX_HEIGHT, "#000000", samples_in_frame, info.channels)); #pragma omp critical (T_GetFrame) { new_frame->AddAudioSilence(samples_in_frame); @@ -741,7 +757,7 @@ std::shared_ptr Timeline::GetFrame(int64_t requested_frame) // Add Background Color to 1st layer (if animated or not black) if ((color.red.Points.size() > 1 || color.green.Points.size() > 1 || color.blue.Points.size() > 1) || (color.red.GetValue(frame_number) != 0.0 || color.green.GetValue(frame_number) != 0.0 || 
color.blue.GetValue(frame_number) != 0.0)) - new_frame->AddColor(max_width, max_height, color.GetColorHex(frame_number)); + new_frame->AddColor(Settings::Instance()->MAX_WIDTH, Settings::Instance()->MAX_HEIGHT, color.GetColorHex(frame_number)); // Debug output ZmqLogger::Instance()->AppendDebugMethod("Timeline::GetFrame (Loop through clips)", "frame_number", frame_number, "clips.size()", clips.size(), "nearby_clips.size()", nearby_clips.size(), "", -1, "", -1, "", -1); @@ -1000,13 +1016,14 @@ void Timeline::SetJsonValue(Json::Value root) { if (!existing_effect["type"].isNull()) { // Create instance of effect - e = EffectInfo().CreateEffect(existing_effect["type"].asString()); + if (e = EffectInfo().CreateEffect(existing_effect["type"].asString())) { - // Load Json into Effect - e->SetJsonValue(existing_effect); + // Load Json into Effect + e->SetJsonValue(existing_effect); - // Add Effect to Timeline - AddEffect(e); + // Add Effect to Timeline + AddEffect(e); + } } } } @@ -1175,17 +1192,6 @@ void Timeline::apply_json_to_clips(Json::Value change) { // Apply framemapper (or update existing framemapper) apply_mapper_to_clip(existing_clip); - - // Clear any cached image sizes (since size might have changed) - existing_clip->SetMaxSize(0, 0); // force clearing of cached image size - if (existing_clip->Reader()) { - existing_clip->Reader()->SetMaxSize(0, 0); - if (existing_clip->Reader()->Name() == "FrameMapper") { - FrameMapper *nested_reader = (FrameMapper *) existing_clip->Reader(); - if (nested_reader->Reader()) - nested_reader->Reader()->SetMaxSize(0, 0); - } - } } } else if (change_type == "delete") { @@ -1270,13 +1276,14 @@ void Timeline::apply_json_to_effects(Json::Value change, EffectBase* existing_ef EffectBase *e = NULL; // Init the matching effect object - e = EffectInfo().CreateEffect(effect_type); + if (e = EffectInfo().CreateEffect(effect_type)) { - // Load Json into Effect - e->SetJsonValue(change["value"]); + // Load Json into Effect + 
e->SetJsonValue(change["value"]); - // Add Effect to Timeline - AddEffect(e); + // Add Effect to Timeline + AddEffect(e); + } } else if (change_type == "update") { @@ -1431,3 +1438,11 @@ void Timeline::ClearAllCache() { } } + +// Set Max Image Size (used for performance optimization). Convenience function for setting +// Settings::Instance()->MAX_WIDTH and Settings::Instance()->MAX_HEIGHT. +void Timeline::SetMaxSize(int width, int height) { + // Init max image size (choose the smallest one) + Settings::Instance()->MAX_WIDTH = min(width, info.width); + Settings::Instance()->MAX_HEIGHT = min(height, info.height); +} \ No newline at end of file diff --git a/src/bindings/python/openshot.i b/src/bindings/python/openshot.i index f338f18a..de1f020c 100644 --- a/src/bindings/python/openshot.i +++ b/src/bindings/python/openshot.i @@ -84,6 +84,7 @@ #include "../../../include/QtPlayer.h" #include "../../../include/KeyFrame.h" #include "../../../include/RendererBase.h" +#include "../../../include/Settings.h" #include "../../../include/Timeline.h" #include "../../../include/ZmqLogger.h" @@ -150,6 +151,7 @@ %include "../../../include/QtPlayer.h" %include "../../../include/KeyFrame.h" %include "../../../include/RendererBase.h" +%include "../../../include/Settings.h" %include "../../../include/Timeline.h" %include "../../../include/ZmqLogger.h" diff --git a/src/bindings/ruby/openshot.i b/src/bindings/ruby/openshot.i index c2f6fdf9..b9a35d41 100644 --- a/src/bindings/ruby/openshot.i +++ b/src/bindings/ruby/openshot.i @@ -88,6 +88,7 @@ namespace std { #include "../../../include/QtPlayer.h" #include "../../../include/KeyFrame.h" #include "../../../include/RendererBase.h" +#include "../../../include/Settings.h" #include "../../../include/Timeline.h" #include "../../../include/ZmqLogger.h" @@ -143,6 +144,7 @@ namespace std { %include "../../../include/QtPlayer.h" %include "../../../include/KeyFrame.h" %include "../../../include/RendererBase.h" +%include "../../../include/Settings.h" 
%include "../../../include/Timeline.h" %include "../../../include/ZmqLogger.h" diff --git a/src/effects/Mask.cpp b/src/effects/Mask.cpp index 58f00d20..f8f34ac6 100644 --- a/src/effects/Mask.cpp +++ b/src/effects/Mask.cpp @@ -30,14 +30,14 @@ using namespace openshot; /// Blank constructor, useful when using Json to load the effect properties -Mask::Mask() : reader(NULL), replace_image(false) { +Mask::Mask() : reader(NULL), replace_image(false), needs_refresh(true) { // Init effect properties init_effect_details(); } // Default constructor Mask::Mask(ReaderBase *mask_reader, Keyframe mask_brightness, Keyframe mask_contrast) : - reader(mask_reader), brightness(mask_brightness), contrast(mask_contrast), replace_image(false) + reader(mask_reader), brightness(mask_brightness), contrast(mask_contrast), replace_image(false), needs_refresh(true) { // Init effect properties init_effect_details(); @@ -77,7 +77,7 @@ std::shared_ptr Mask::GetFrame(std::shared_ptr frame, int64_t fram // Get mask image (if missing or different size than frame image) #pragma omp critical (open_mask_reader) { - if (!original_mask || !reader->info.has_single_image || + if (!original_mask || !reader->info.has_single_image || needs_refresh || (original_mask && original_mask->size() != frame_image->size())) { // Only get mask if needed @@ -91,6 +91,9 @@ std::shared_ptr Mask::GetFrame(std::shared_ptr frame, int64_t fram } } + // Refresh no longer needed + needs_refresh = false; + // Get pixel arrays unsigned char *pixels = (unsigned char *) frame_image->bits(); unsigned char *mask_pixels = (unsigned char *) original_mask->bits(); @@ -206,47 +209,51 @@ void Mask::SetJsonValue(Json::Value root) { contrast.SetJsonValue(root["contrast"]); if (!root["reader"].isNull()) // does Json contain a reader? { - - if (!root["reader"]["type"].isNull()) // does the reader Json contain a 'type'? 
+ #pragma omp critical (open_mask_reader) { - // Close previous reader (if any) - if (reader) + // This reader has changed, so refresh cached assets + needs_refresh = true; + + if (!root["reader"]["type"].isNull()) // does the reader Json contain a 'type'? { - // Close and delete existing reader (if any) - reader->Close(); - delete reader; - reader = NULL; - } + // Close previous reader (if any) + if (reader) { + // Close and delete existing reader (if any) + reader->Close(); + delete reader; + reader = NULL; + } - // Create new reader (and load properties) - string type = root["reader"]["type"].asString(); + // Create new reader (and load properties) + string type = root["reader"]["type"].asString(); - if (type == "FFmpegReader") { + if (type == "FFmpegReader") { - // Create new reader - reader = new FFmpegReader(root["reader"]["path"].asString()); - reader->SetJsonValue(root["reader"]); + // Create new reader + reader = new FFmpegReader(root["reader"]["path"].asString()); + reader->SetJsonValue(root["reader"]); -#ifdef USE_IMAGEMAGICK - } else if (type == "ImageReader") { + #ifdef USE_IMAGEMAGICK + } else if (type == "ImageReader") { - // Create new reader - reader = new ImageReader(root["reader"]["path"].asString()); - reader->SetJsonValue(root["reader"]); -#endif + // Create new reader + reader = new ImageReader(root["reader"]["path"].asString()); + reader->SetJsonValue(root["reader"]); + #endif - } else if (type == "QtImageReader") { + } else if (type == "QtImageReader") { - // Create new reader - reader = new QtImageReader(root["reader"]["path"].asString()); - reader->SetJsonValue(root["reader"]); + // Create new reader + reader = new QtImageReader(root["reader"]["path"].asString()); + reader->SetJsonValue(root["reader"]); - } else if (type == "ChunkReader") { + } else if (type == "ChunkReader") { - // Create new reader - reader = new ChunkReader(root["reader"]["path"].asString(), (ChunkVersion) root["reader"]["chunk_version"].asInt()); - 
reader->SetJsonValue(root["reader"]); + // Create new reader + reader = new ChunkReader(root["reader"]["path"].asString(), (ChunkVersion) root["reader"]["chunk_version"].asInt()); + reader->SetJsonValue(root["reader"]); + } } } @@ -275,6 +282,11 @@ string Mask::PropertiesJSON(int64_t requested_frame) { root["brightness"] = add_property_json("Brightness", brightness.GetValue(requested_frame), "float", "", &brightness, -1.0, 1.0, false, requested_frame); root["contrast"] = add_property_json("Contrast", contrast.GetValue(requested_frame), "float", "", &contrast, 0, 20, false, requested_frame); + if (reader) + root["reader"] = add_property_json("Source", 0.0, "reader", reader->Json(), NULL, 0, 1, false, requested_frame); + else + root["reader"] = add_property_json("Source", 0.0, "reader", "{}", NULL, 0, 1, false, requested_frame); + // Return formatted string return root.toStyledString(); } diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 2c455503..f2ae9377 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -79,7 +79,33 @@ ENDIF (ImageMagick_FOUND) FIND_PACKAGE(FFmpeg REQUIRED) # Include FFmpeg headers (needed for compile) -include_directories(${FFMPEG_INCLUDE_DIR}) +IF (AVCODEC_FOUND) + include_directories(${AVCODEC_INCLUDE_DIRS}) +ENDIF (AVCODEC_FOUND) +IF (AVDEVICE_FOUND) + include_directories(${AVDEVICE_INCLUDE_DIRS}) +ENDIF (AVDEVICE_FOUND) +IF (AVFORMAT_FOUND) + include_directories(${AVFORMAT_INCLUDE_DIRS}) +ENDIF (AVFORMAT_FOUND) +IF (AVFILTER_FOUND) + include_directories(${AVFILTER_INCLUDE_DIRS}) +ENDIF (AVFILTER_FOUND) +IF (AVUTIL_FOUND) + include_directories(${AVUTIL_INCLUDE_DIRS}) +ENDIF (AVUTIL_FOUND) +IF (POSTPROC_FOUND) + include_directories(${POSTPROC_INCLUDE_DIRS}) +ENDIF (POSTPROC_FOUND) +IF (SWSCALE_FOUND) + include_directories(${SWSCALE_INCLUDE_DIRS}) +ENDIF (SWSCALE_FOUND) +IF (SWRESAMPLE_FOUND) + include_directories(${SWRESAMPLE_INCLUDE_DIRS}) +ENDIF (SWRESAMPLE_FOUND) +IF (AVRESAMPLE_FOUND) + 
include_directories(${AVRESAMPLE_INCLUDE_DIRS}) +ENDIF (AVRESAMPLE_FOUND) ################# LIBOPENSHOT-AUDIO ################### # Find JUCE-based openshot Audio libraries @@ -150,12 +176,26 @@ endif(OPENMP_FOUND) # Find ZeroMQ library (used for socket communication & logging) FIND_PACKAGE(ZMQ REQUIRED) -# Include FFmpeg headers (needed for compile) +# Include ZeroMQ headers (needed for compile) include_directories(${ZMQ_INCLUDE_DIRS}) +################### RESVG ##################### +# Find resvg library (used for rendering svg files) +FIND_PACKAGE(RESVG) + +# Include resvg headers (optional SVG library) +if (RESVG_FOUND) + include_directories(${RESVG_INCLUDE_DIRS}) +endif(RESVG_FOUND) + ################### JSONCPP ##################### # Include jsoncpp headers (needed for JSON parsing) -include_directories("../thirdparty/jsoncpp/include") +if (USE_SYSTEM_JSONCPP) + find_package(JsonCpp REQUIRED) + include_directories(${JSONCPP_INCLUDE_DIRS}) +else() + include_directories("../thirdparty/jsoncpp/include") +endif(USE_SYSTEM_JSONCPP) IF (NOT DISABLE_TESTS) ############### SET TEST SOURCE FILES ################# @@ -172,6 +212,7 @@ IF (NOT DISABLE_TESTS) FrameMapper_Tests.cpp KeyFrame_Tests.cpp Point_Tests.cpp + Settings_Tests.cpp Timeline_Tests.cpp ) ################ TESTER EXECUTABLE ################# diff --git a/tests/FrameMapper_Tests.cpp b/tests/FrameMapper_Tests.cpp index 2f61179d..053df31f 100644 --- a/tests/FrameMapper_Tests.cpp +++ b/tests/FrameMapper_Tests.cpp @@ -199,9 +199,9 @@ TEST(FrameMapper_resample_audio_48000_to_41000) // Check details CHECK_EQUAL(1, map.GetFrame(1)->GetAudioChannelsCount()); - CHECK_EQUAL(882, map.GetFrame(1)->GetAudioSamplesCount()); - CHECK_EQUAL(882, map.GetFrame(2)->GetAudioSamplesCount()); - CHECK_EQUAL(882, map.GetFrame(50)->GetAudioSamplesCount()); + CHECK_CLOSE(882, map.GetFrame(1)->GetAudioSamplesCount(), 10.0); + CHECK_CLOSE(882, map.GetFrame(2)->GetAudioSamplesCount(), 10.0); + CHECK_CLOSE(882, 
map.GetFrame(50)->GetAudioSamplesCount(), 10.0); // Close mapper map.Close(); diff --git a/tests/KeyFrame_Tests.cpp b/tests/KeyFrame_Tests.cpp index fe96ea5a..cbd1a0e0 100644 --- a/tests/KeyFrame_Tests.cpp +++ b/tests/KeyFrame_Tests.cpp @@ -377,4 +377,20 @@ TEST(Keyframe_Remove_Duplicate_Point) // Spot check values from the curve CHECK_EQUAL(kf.GetLength(), 1); CHECK_CLOSE(kf.GetPoint(0).co.Y, 2.0, 0.01); +} + +TEST(Keyframe_Large_Number_Values) +{ + // Large value + int64_t large_value = 30 * 60 * 90; + + // Create a keyframe curve with 2 points + Keyframe kf; + kf.AddPoint(1, 1.0); + kf.AddPoint(large_value, 100.0); // 90 minutes long + + // Spot check values from the curve + CHECK_EQUAL(kf.GetLength(), large_value + 1); + CHECK_CLOSE(kf.GetPoint(0).co.Y, 1.0, 0.01); + CHECK_CLOSE(kf.GetPoint(1).co.Y, 100.0, 0.01); } \ No newline at end of file diff --git a/tests/ReaderBase_Tests.cpp b/tests/ReaderBase_Tests.cpp index 9d435304..70ca90d5 100644 --- a/tests/ReaderBase_Tests.cpp +++ b/tests/ReaderBase_Tests.cpp @@ -44,9 +44,9 @@ TEST(ReaderBase_Derived_Class) std::shared_ptr GetFrame(int64_t number) { std::shared_ptr f(new Frame()); return f; } void Close() { }; void Open() { }; - string Json() { }; + string Json() { return NULL; }; void SetJson(string value) { }; - Json::Value JsonValue() { }; + Json::Value JsonValue() { return (int) NULL; }; void SetJsonValue(Json::Value root) { }; bool IsOpen() { return true; }; string Name() { return "TestReader"; }; diff --git a/tests/Settings_Tests.cpp b/tests/Settings_Tests.cpp new file mode 100644 index 00000000..86790653 --- /dev/null +++ b/tests/Settings_Tests.cpp @@ -0,0 +1,63 @@ +/** + * @file + * @brief Unit tests for openshot::Color + * @author Jonathan Thomas + * + * @section LICENSE + * + * Copyright (c) 2008-2014 OpenShot Studios, LLC + * . This file is part of + * OpenShot Library (libopenshot), an open-source project dedicated to + * delivering high quality video editing and animation solutions to the + * world. 
For more information visit . + * + * OpenShot Library (libopenshot) is free software: you can redistribute it + * and/or modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation, either version 3 of the + * License, or (at your option) any later version. + * + * OpenShot Library (libopenshot) is distributed in the hope that it will be + * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with OpenShot Library. If not, see . + */ + +#include "UnitTest++.h" +#include "../include/OpenShot.h" + +using namespace std; +using namespace openshot; + +TEST(Settings_Default_Constructor) +{ + // Create an empty color + Settings *s = Settings::Instance(); + + CHECK_EQUAL(false, s->HARDWARE_DECODE); + CHECK_EQUAL(false, s->HARDWARE_ENCODE); + CHECK_EQUAL(false, s->HIGH_QUALITY_SCALING); + CHECK_EQUAL(false, s->WAIT_FOR_VIDEO_PROCESSING_TASK); +} + +TEST(Settings_Change_Settings) +{ + // Create an empty color + Settings *s = Settings::Instance(); + s->HARDWARE_DECODE = true; + s->HARDWARE_ENCODE = true; + s->HIGH_QUALITY_SCALING = true; + s->WAIT_FOR_VIDEO_PROCESSING_TASK = true; + + CHECK_EQUAL(true, s->HARDWARE_DECODE); + CHECK_EQUAL(true, s->HARDWARE_ENCODE); + CHECK_EQUAL(true, s->HIGH_QUALITY_SCALING); + CHECK_EQUAL(true, s->WAIT_FOR_VIDEO_PROCESSING_TASK); + + CHECK_EQUAL(true, s->HARDWARE_DECODE); + CHECK_EQUAL(true, s->HARDWARE_ENCODE); + CHECK_EQUAL(true, Settings::Instance()->HIGH_QUALITY_SCALING); + CHECK_EQUAL(true, Settings::Instance()->WAIT_FOR_VIDEO_PROCESSING_TASK); +} \ No newline at end of file