Merge branch 'OpenShot:develop' into develop

This commit is contained in:
Vũ Quang Hải
2024-12-13 10:33:55 +07:00
committed by GitHub
11 changed files with 57 additions and 30 deletions

View File

@@ -52,7 +52,7 @@ mac-builder:
- unzip artifacts.zip
- export LIBOPENSHOT_AUDIO_DIR=$CI_PROJECT_DIR/build/install-x64
- mkdir -p build; cd build;
-- cmake -DCMAKE_EXE_LINKER_FLAGS="-stdlib=libc++" -DCMAKE_SHARED_LINKER_FLAGS="-stdlib=libc++" -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON -D"CMAKE_INSTALL_PREFIX:PATH=$CI_PROJECT_DIR/build/install-x64" -DCMAKE_CXX_COMPILER=clang++ -DCMAKE_C_COMPILER=clang -D"CMAKE_BUILD_TYPE:STRING=Release" -D"CMAKE_OSX_SYSROOT=/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.11.sdk" -D"CMAKE_OSX_DEPLOYMENT_TARGET=10.9" -DCMAKE_PREFIX_PATH=/usr/local/qt5.15.X/qt5.15/5.15.0/clang_64/ -D"CMAKE_INSTALL_RPATH_USE_LINK_PATH=1" -D"ENABLE_RUBY=0" ../
+- cmake -DCMAKE_EXE_LINKER_FLAGS="-stdlib=libc++" -DCMAKE_SHARED_LINKER_FLAGS="-stdlib=libc++" -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON -D"CMAKE_INSTALL_PREFIX:PATH=$CI_PROJECT_DIR/build/install-x64" -DCMAKE_CXX_COMPILER=clang++ -DCMAKE_C_COMPILER=clang -D"CMAKE_BUILD_TYPE:STRING=Release" -D"CMAKE_OSX_SYSROOT=/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.14.sdk" -D"CMAKE_OSX_DEPLOYMENT_TARGET=10.12" -DCMAKE_PREFIX_PATH=/usr/local/qt5.15.X/qt5.15/5.15.0/clang_64/ -D"CMAKE_INSTALL_RPATH_USE_LINK_PATH=1" -D"ENABLE_RUBY=0" ../
- make -j 9
- make install
- PROJECT_VERSION=$(grep -E '^set\(PROJECT_VERSION_FULL "(.*)' ../CMakeLists.txt | awk '{print $2}' | tr -d '")')
@@ -122,7 +122,7 @@ trigger-pipeline:
stage: trigger-openshot-qt
script:
- "curl -X POST -F token=$OPENSHOT_QT_PIPELINE_TOKEN -F ref=$CI_COMMIT_REF_NAME http://gitlab.openshot.org/api/v4/projects/3/trigger/pipeline"
-when: always
+when: on_success
dependencies: []
except:
- tags

View File

@@ -97,7 +97,7 @@ if ((${CMAKE_CXX_COMPILER_ID} STREQUAL "GNU") AND
endif()
#### Set C++ standard level
-set(CMAKE_CXX_STANDARD 14)
+set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_EXTENSIONS OFF)

View File

@@ -14,7 +14,6 @@
#include "Exceptions.h"
#include "Frame.h"
-using namespace std;
using namespace openshot;
@@ -43,7 +42,6 @@ void AudioReaderSource::getNextAudioBlock(const juce::AudioSourceChannelInfo& in
}
while (remaining_samples > 0) {
try {
// Get current frame object
if (reader) {

View File

@@ -25,6 +25,8 @@
#include <thread> // for std::this_thread::sleep_for
#include <chrono> // for std::chrono::milliseconds
#include <sstream>
+#include <condition_variable>
+#include <mutex>
using namespace juce;
@@ -57,9 +59,9 @@ namespace openshot
std::stringstream constructor_title;
constructor_title << "AudioDeviceManagerSingleton::Instance (default audio device type: " <<
-Settings::Instance()->PLAYBACK_AUDIO_DEVICE_TYPE << ", default audio device name: " <<
-Settings::Instance()->PLAYBACK_AUDIO_DEVICE_NAME << ")";
-ZmqLogger::Instance()->AppendDebugMethod(constructor_title.str(), "channels", channels);
+Settings::Instance()->PLAYBACK_AUDIO_DEVICE_TYPE << ", default audio device name: " <<
+Settings::Instance()->PLAYBACK_AUDIO_DEVICE_NAME << ")";
+ZmqLogger::Instance()->AppendDebugMethod(constructor_title.str(), "channels", channels, "buffer", Settings::Instance()->PLAYBACK_AUDIO_BUFFER_SIZE);
// Get preferred audio device type and name (if any - these can be blank)
openshot::AudioDeviceInfo requested_device = {Settings::Instance()->PLAYBACK_AUDIO_DEVICE_TYPE,
@@ -81,10 +83,17 @@ namespace openshot
// Populate all possible device types and device names (starting with the user's requested settings)
std::vector<openshot::AudioDeviceInfo> devices{ { requested_device } };
for (const auto t : mgr->getAvailableDeviceTypes()) {
+std::stringstream type_debug;
+type_debug << "AudioDeviceManagerSingleton::Instance (iterate audio device type: " << t->getTypeName() << ")";
+ZmqLogger::Instance()->AppendDebugMethod(type_debug.str(), "rate", rate, "channels", channels);
t->scanForDevices();
for (const auto n : t->getDeviceNames()) {
AudioDeviceInfo device = { t->getTypeName(), n.trim() };
devices.push_back(device);
+std::stringstream device_debug;
+device_debug << "AudioDeviceManagerSingleton::Instance (iterate audio device name: " << device.name << ", type: " << t->getTypeName() << ")";
+ZmqLogger::Instance()->AppendDebugMethod(device_debug.str(), "rate", rate, "channels", channels);
}
}
@@ -104,6 +113,7 @@ namespace openshot
AudioDeviceManager::AudioDeviceSetup deviceSetup = AudioDeviceManager::AudioDeviceSetup();
deviceSetup.inputChannels = 0;
deviceSetup.outputChannels = channels;
+deviceSetup.bufferSize = Settings::Instance()->PLAYBACK_AUDIO_BUFFER_SIZE;
// Loop through common sample rates, starting with the user's requested rate
// Not all sample rates are supported by audio devices, for example, many VMs
@@ -234,16 +244,21 @@ namespace openshot
}
}
-// Play the audio
+// Override Play and Stop to notify of state changes
void AudioPlaybackThread::Play() {
// Start playing
is_playing = true;
+NotifyTransportStateChanged();
}
// Stop the audio
void AudioPlaybackThread::Stop() {
// Stop playing
is_playing = false;
+NotifyTransportStateChanged();
}
+void AudioPlaybackThread::NotifyTransportStateChanged()
+{
+std::lock_guard<std::mutex> lock(transportMutex);
+transportCondition.notify_all();
+}
// Start audio thread
@@ -260,7 +275,7 @@ namespace openshot
audioInstance->audioDeviceManager.addAudioCallback(&player);
// Create TimeSliceThread for audio buffering
-time_thread.startThread();
+time_thread.startThread(Priority::high);
// Connect source to transport
transport.setSource(
@@ -279,8 +294,13 @@ namespace openshot
// Start the transport
transport.start();
-while (!threadShouldExit() && transport.isPlaying() && is_playing)
-std::this_thread::sleep_for(std::chrono::milliseconds(2));
+while (!threadShouldExit() && transport.isPlaying() && is_playing) {
+// Wait until transport state changes or thread should exit
+std::unique_lock<std::mutex> lock(transportMutex);
+transportCondition.wait_for(lock, std::chrono::milliseconds(10), [this]() {
+return threadShouldExit() || !transport.isPlaying() || !is_playing;
+});
+}
// Stop audio and shutdown transport
Stop();

View File

@@ -86,12 +86,17 @@ public:
bool is_playing;
juce::TimeSliceThread time_thread;
openshot::VideoCacheThread *videoCache; /// The cache thread (for pre-roll checking)
+std::mutex transportMutex;
+std::condition_variable transportCondition;
/// Constructor
AudioPlaybackThread(openshot::VideoCacheThread* cache);
/// Destructor
~AudioPlaybackThread();
+/// Notify all
+void NotifyTransportStateChanged();
/// Set the current thread's reader
void Reader(openshot::ReaderBase *reader);

View File

@@ -49,10 +49,10 @@ namespace openshot
// Start the threads
if (reader->info.has_audio)
-audioPlayback->startThread(8);
+audioPlayback->startThread(Priority::high);
if (reader->info.has_video) {
-videoCache->startThread(2);
-videoPlayback->startThread(4);
+videoCache->startThread(Priority::high);
+videoPlayback->startThread(Priority::high);
}
using std::chrono::duration_cast;
@@ -179,7 +179,7 @@ namespace openshot
if (video_position < 0) return false;
stopPlayback();
-startThread(1);
+startThread(Priority::high);
return true;
}

View File

@@ -103,6 +103,9 @@ namespace openshot {
/// The device type for the playback audio devices
std::string PLAYBACK_AUDIO_DEVICE_TYPE = "";
+/// Size of playback buffer before audio playback starts
+int PLAYBACK_AUDIO_BUFFER_SIZE = 512;
/// The current install path of OpenShot (needs to be set when using Timeline(path), since certain
/// paths depend on the location of OpenShot transitions and files)
std::string PATH_OPENSHOT_INSTALL = "";

View File

@@ -482,7 +482,7 @@ double Timeline::GetMinTime() {
int64_t Timeline::GetMinFrame() {
double fps = info.fps.ToDouble();
auto min_time = GetMinTime();
-return std::round(min_time * fps);
+return std::round(min_time * fps) + 1;
}
// Apply a FrameMapper to a clip which matches the settings of this timeline

View File

@@ -289,7 +289,7 @@ namespace openshot {
/// Look up the position/start time of the first timeline element
double GetMinTime();
-/// Look up the start frame number of the first element on the timeline
+/// Look up the start frame number of the first element on the timeline (first frame is 1)
int64_t GetMinFrame();
/// Close the timeline reader (and any resources it was consuming)

View File

@@ -411,7 +411,8 @@ void ObjectDetection::SetJsonValue(const Json::Value root) {
QString qClassFilter = QString::fromStdString(root["class_filter"].asString());
// Split the QString by commas and automatically trim each resulting string
-QStringList classList = qClassFilter.split(',', QString::SkipEmptyParts);
+QStringList classList = qClassFilter.split(',');
+classList.removeAll(""); // Skip empty parts
display_classes.clear();
// Iterate over the QStringList and add each trimmed, non-empty string

View File

@@ -676,7 +676,7 @@ TEST_CASE( "GetMinFrame and GetMinTime", "[libopenshot][timeline]" )
t.AddClip(&clip1);
CHECK(t.GetMinTime() == Approx(50.0).margin(0.001));
-CHECK(t.GetMinFrame() == 50 * 30);
+CHECK(t.GetMinFrame() == (50 * 30) + 1);
Clip clip2(path1.str());
clip2.Id("C2");
@@ -686,24 +686,24 @@ TEST_CASE( "GetMinFrame and GetMinTime", "[libopenshot][timeline]" )
t.AddClip(&clip2);
CHECK(t.GetMinTime() == Approx(0.0).margin(0.001));
-CHECK(t.GetMinFrame() == 0);
+CHECK(t.GetMinFrame() == 1);
clip1.Position(80); // Move clip1 to start at 80 seconds
clip2.Position(100); // Move clip2 to start at 100 seconds
CHECK(t.GetMinTime() == Approx(80.0).margin(0.001));
-CHECK(t.GetMinFrame() == 80 * 30);
+CHECK(t.GetMinFrame() == (80 * 30) + 1);
clip2.Position(20); // Adjust clip2 to start at 20 seconds
CHECK(t.GetMinTime() == Approx(20.0).margin(0.001));
-CHECK(t.GetMinFrame() == 20 * 30);
+CHECK(t.GetMinFrame() == (20 * 30) + 1);
clip2.End(35); // Adjust clip2 to end at 35 seconds
CHECK(t.GetMinTime() == Approx(20.0).margin(0.001));
-CHECK(t.GetMinFrame() == 20 * 30);
+CHECK(t.GetMinFrame() == (20 * 30) + 1);
t.RemoveClip(&clip1);
CHECK(t.GetMinTime() == Approx(20.0).margin(0.001));
-CHECK(t.GetMinFrame() == 20 * 30);
+CHECK(t.GetMinFrame() == (20 * 30) + 1);
// Update Clip's basic properties with JSON Diff
std::stringstream json_change1;
@@ -711,7 +711,7 @@ TEST_CASE( "GetMinFrame and GetMinTime", "[libopenshot][timeline]" )
t.ApplyJsonDiff(json_change1.str());
CHECK(t.GetMinTime() == Approx(5.0).margin(0.001));
-CHECK(t.GetMinFrame() == 5 * 30);
+CHECK(t.GetMinFrame() == (5 * 30) + 1);
// Insert NEW Clip with JSON Diff
std::stringstream json_change2;
@@ -719,7 +719,7 @@ TEST_CASE( "GetMinFrame and GetMinTime", "[libopenshot][timeline]" )
t.ApplyJsonDiff(json_change2.str());
CHECK(t.GetMinTime() == Approx(5.0).margin(0.001));
-CHECK(t.GetMinFrame() == 5 * 30);
+CHECK(t.GetMinFrame() == (5 * 30) + 1);
}
TEST_CASE( "Multi-threaded Timeline GetFrame", "[libopenshot][timeline]" )