Remove (broken) BlackMagic DeckLink support (#760)

This commit is contained in:
Frank Dana
2021-10-20 06:49:08 -04:00
committed by GitHub
parent dae8ca5e4e
commit 2d2a062bdc
16 changed files with 18 additions and 1784 deletions

View File

@@ -83,13 +83,6 @@
%}
#ifdef USE_BLACKMAGIC
%{
#include "DecklinkReader.h"
#include "DecklinkWriter.h"
%}
#endif
#ifdef USE_IMAGEMAGICK
%{
#include "ImageReader.h"
@@ -245,10 +238,6 @@
%include "Clip.h"
%include "Coordinate.h"
%include "Color.h"
#ifdef USE_BLACKMAGIC
%include "DecklinkReader.h"
%include "DecklinkWriter.h"
#endif
%include "DummyReader.h"
%include "EffectBase.h"
%include "Effects.h"

View File

@@ -107,13 +107,6 @@
#endif
%}
#ifdef USE_BLACKMAGIC
%{
#include "DecklinkReader.h"
#include "DecklinkWriter.h"
%}
#endif
#ifdef USE_IMAGEMAGICK
%{
#include "ImageReader.h"
@@ -135,10 +128,6 @@
%include "Clip.h"
%include "Coordinate.h"
%include "Color.h"
#ifdef USE_BLACKMAGIC
%include "DecklinkReader.h"
%include "DecklinkWriter.h"
#endif
%include "DummyReader.h"
%include "EffectBase.h"
%include "Effects.h"

View File

@@ -1,72 +0,0 @@
# © OpenShot Studios, LLC
#
# SPDX-License-Identifier: LGPL-3.0-or-later
# - Try to find the BlackMagic DeckLinkAPI
# Once done this will define
#
# BLACKMAGIC_FOUND - system has BlackMagic DeckLinkAPI installed
# BLACKMAGIC_INCLUDE_DIR - the include directory containing DeckLinkAPIDispatch.cpp
# BLACKMAGIC_LIBRARY_DIR - the directory containing libDeckLinkAPI.so
#
# A user-defined environment variable is required to find the BlackMagic SDK
if(NOT EXISTS "$ENV{BLACKMAGIC_DIR}")
message("-- Note: BLACKMAGIC_DIR environment variable is not defined")
endif(NOT EXISTS "$ENV{BLACKMAGIC_DIR}")
IF (WIN32)
# WINDOWS
FIND_PATH( BLACKMAGIC_INCLUDE_DIR DeckLinkAPI.h
PATHS $ENV{BLACKMAGIC_DIR}/Win/include/
"/home/jonathan/Blackmagic DeckLink SDK 10.3.1/Win/include/" )
FIND_LIBRARY( BLACKMAGIC_LIBRARY_DIR DeckLinkAPI
PATHS /usr/lib/
/usr/local/lib/
$ENV{BLACKMAGIC_DIR}/lib/ )
ELSE (WIN32)
IF (UNIX)
IF (APPLE)
# APPLE
FIND_PATH( BLACKMAGIC_INCLUDE_DIR DeckLinkAPI.h
PATHS $ENV{BLACKMAGIC_DIR}/Mac/include/
"/home/jonathan/Blackmagic DeckLink SDK 10.3.1/Mac/include/" )
FIND_LIBRARY( BLACKMAGIC_LIBRARY_DIR DeckLinkAPI
PATHS /usr/lib/
/usr/local/lib/
$ENV{BLACKMAGIC_DIR}/lib/ )
ELSE (APPLE)
# LINUX
FIND_PATH( BLACKMAGIC_INCLUDE_DIR DeckLinkAPI.h
PATHS $ENV{BLACKMAGIC_DIR}/Linux/include/
"/home/jonathan/Blackmagic DeckLink SDK 10.3.1/Linux/include/" )
FIND_LIBRARY( BLACKMAGIC_LIBRARY_DIR DeckLinkAPI
PATHS /usr/lib/
/usr/local/lib/
$ENV{BLACKMAGIC_DIR}/lib/ )
ENDIF(APPLE)
ENDIF(UNIX)
ENDIF(WIN32)
SET( BLACKMAGIC_FOUND FALSE )
IF ( BLACKMAGIC_INCLUDE_DIR AND BLACKMAGIC_LIBRARY_DIR )
SET ( BLACKMAGIC_FOUND TRUE )
ENDIF ( BLACKMAGIC_INCLUDE_DIR AND BLACKMAGIC_LIBRARY_DIR )
MARK_AS_ADVANCED(
BLACKMAGIC_INCLUDE_DIR
BLACKMAGIC_LIBRARY_DIR
)
include(FindPackageHandleStandardArgs)
# handle the QUIETLY and REQUIRED arguments and set BLACKMAGIC_FOUND to TRUE
# if all listed variables are TRUE
find_package_handle_standard_args(BLACKMAGIC DEFAULT_MSG
BLACKMAGIC_LIBRARY_DIR BLACKMAGIC_INCLUDE_DIR)

View File

@@ -33,27 +33,21 @@ target_link_libraries(openshot-html-example openshot Qt5::Gui)
# Create test executable
add_executable(openshot-player qt-demo/main.cpp)
set_target_properties(openshot-player PROPERTIES AUTOMOC ON)
set_target_properties(openshot-player
PROPERTIES
AUTOMOC ON
WIN32_EXECUTABLE ON
)
# Link test executable to the new library
target_link_libraries(openshot-player openshot)
############### TEST BLACKMAGIC CAPTURE APP ################
if (BLACKMAGIC_FOUND)
# Create test executable
add_executable(openshot-blackmagic
examples/ExampleBlackmagic.cpp)
# Link test executable to the new library
target_link_libraries(openshot-blackmagic openshot)
endif()
############### OPENCV EXAMPLE ################
#if (DEFINED CACHE{HAVE_OPENCV})
# # Create test executable
# add_executable(openshot-example-opencv
# Example_opencv.cpp)
#
#
# target_compile_definitions(openshot-example-opencv PRIVATE
# -DTEST_MEDIA_PATH="${TEST_MEDIA_PATH}" )
# # Link test executable to the new library

View File

@@ -1,213 +0,0 @@
/**
* @file
* @brief Source file for Main_Blackmagic class (live greenscreen example app)
* @author Jonathan Thomas <jonathan@openshot.org>
*
* @ref License
*/
// Copyright (c) 2008-2019 OpenShot Studios, LLC
//
// SPDX-License-Identifier: LGPL-3.0-or-later
#include <fstream>
#include <iostream>
#include <map>
#include <queue>
#include <memory>
#include "../../include/OpenShot.h"
#include <omp.h>
#include <time.h>
using namespace openshot;
int main(int argc, char *argv[])
{
// Init datetime
time_t rawtime;
struct tm * timeinfo;
/* TIMELINE ---------------- */
Timeline t(1920, 1080, Fraction(30,1), 48000, 2, LAYOUT_STEREO);
// Create background video
ImageReader b1("/home/jonathan/Pictures/moon.jpg");
ImageReader b2("/home/jonathan/Pictures/trees.jpg");
ImageReader b3("/home/jonathan/Pictures/clouds.jpg");
ImageReader b4("/home/jonathan/Pictures/minecraft.png");
ImageReader b5("/home/jonathan/Pictures/colorpgg03.jpg");
Clip c1(&b1);
// Background counter
int background_frame = 0;
int background_id = 1;
DecklinkReader dr(1, 11, 0, 2, 16);
Clip c2(&dr);
Clip c3(new ImageReader("/home/jonathan/Pictures/watermark.png"));
// mask
Clip c4(new ImageReader("/home/jonathan/Pictures/mask_small.png"));
// CLIP 1 (background image)
c1.Position(0.0);
c1.scale = SCALE_NONE;
c1.Layer(0);
t.AddClip(&c1);
// CLIP 2 (decklink live stream)
c2.Position(0.0);
c2.scale = SCALE_NONE;
c2.Layer(1);
t.AddClip(&c2);
// CLIP 3 (foreground image 1)
c3.Position(0.0);
c3.gravity = GRAVITY_TOP;
//c3.gravity = GRAVITY_BOTTOM;
c3.scale = SCALE_NONE;
c3.Layer(2);
t.AddClip(&c3);
// CLIP 4 (foreground image 2)
c4.Position(0.0);
c4.gravity = GRAVITY_TOP;
c4.scale = SCALE_NONE;
c4.Layer(3);
//t.AddClip(&c4);
// Decklink writer
DecklinkWriter w(0, 11, 3, 2, 16);
w.Open();
// Loop through reader
int x = 0;
while (true)
{
std::shared_ptr<Frame> f = t.GetFrame(x);
if (f)
{
if (x != 0 && x % 30 == 0)
{
cout << "30 frames... (" << abs(dr.GetCurrentFrameNumber() - x) << " diff)" << endl;
if (x != 0 && x % 60 == 0)
{
time ( &rawtime );
timeinfo = localtime ( &rawtime );
stringstream timestamp;
timestamp << asctime (timeinfo);
stringstream filename;
filename << "/home/jonathan/Pictures/screenshots/detailed/" << timestamp.str() << ".jpeg";
f->Save(filename.str(), 1.0);
stringstream filename_small;
filename_small << "/home/jonathan/Pictures/screenshots/thumbs/" << timestamp.str() << ".jpeg";
f->Save(filename_small.str(), 0.15);
}
}
// Send current frame to BlackMagic
w.WriteFrame(f);
// Increment background frame #
background_frame++;
// Change background
if (background_frame == 300)
{
background_frame = 0;
switch (background_id)
{
case 1:
c1.Reader(&b2);
background_id = 2;
break;
case 2:
c1.Reader(&b3);
background_id = 3;
break;
case 3:
c1.Reader(&b4);
background_id = 4;
break;
case 4:
c1.Reader(&b5);
background_id = 5;
break;
case 5:
c1.Reader(&b1);
background_id = 1;
break;
}
}
//usleep(500 * 1);
// Go to next frame on timeline
if (abs(dr.GetCurrentFrameNumber() - x) > 40 || x == 90)
{
// Got behind... skip ahead some
x = dr.GetCurrentFrameNumber();
cout << "JUMPING AHEAD to " << x << ", background moved to " << (float(x) / 30.0f) << endl;
}
else
// Go to the next frame
x++;
}
}
// Sleep
sleep(4);
// Image Reader
// ImageReader r1("/home/jonathan/Pictures/Screenshot from 2013-02-10 15:06:38.png");
// r1.Open();
// std::shared_ptr<Frame> f1 = r1.GetFrame(1);
// r1.Close();
// f1->TransparentColors("#8fa09a", 20.0);
// f1->Display();
// return 0;
// ImageReader r2("/home/jonathan/Pictures/trees.jpg");
// r2.Open();
// std::shared_ptr<Frame> f2 = r2.GetFrame(1);
// r2.Close();
// DecklinkReader dr(1, 11, 0, 2, 16);
// dr.Open();
//
// DecklinkWriter w(0, 11, 3, 2, 16);
// w.Open();
//
// // Loop through reader
// int x = 0;
// while (true)
// {
// if (x % 30 == 0)
// cout << "30 frames..." << endl;
//
// std::shared_ptr<Frame> f = dr.GetFrame(0);
// if (f)
// {
// //f->Display();
// w.WriteFrame(f);
// usleep(1000 * 1);
//
// x++;
// }
// }
//
// // Sleep
// sleep(4);
//
// // Close writer
// w.Close();
return 0;
}

View File

@@ -421,30 +421,6 @@ if (TARGET cppzmq)
target_link_libraries(openshot PUBLIC cppzmq)
endif()
################# BLACKMAGIC DECKLINK ###################
# Find BlackMagic DeckLinkAPI libraries
if (ENABLE_BLACKMAGIC)
find_package(BlackMagic)
if (BLACKMAGIC_FOUND)
# BlackMagic related files
target_sources(openshot PRIVATE
DecklinkInput.cpp
DecklinkReader.cpp
DecklinkOutput.cpp
DecklinkWriter.cpp)
# Include Blackmagic headers (needed for compile)
target_include_directories(openshot PRIVATE ${BLACKMAGIC_INCLUDE_DIR})
# Link libopenshot with BlackMagic libs
target_link_libraries(openshot PUBLIC ${BLACKMAGIC_LIBRARY_DIR})
# define a preprocessor macro (used in the C++)
target_compile_definitions(openshot PUBLIC USE_BLACKMAGIC=1)
endif()
endif()
################## OPENCV ###################
if(ENABLE_OPENCV)
find_package(OpenCV 4)

View File

@@ -1,251 +0,0 @@
/**
* @file
* @brief Source file for DecklinkInput class
* @author Jonathan Thomas <jonathan@openshot.org>, Blackmagic Design
*
* @ref License
*/
// Copyright (c) 2008-2019 OpenShot Studios, LLC
// Copyright (c) 2009 Blackmagic Design
//
// SPDX-License-Identifier: LGPL-3.0-or-later
// SPDX-License-Identifier: MIT
#include "DecklinkInput.h"
using namespace std;
DeckLinkInputDelegate::DeckLinkInputDelegate(pthread_cond_t* m_sleepCond, IDeckLinkOutput* m_deckLinkOutput, IDeckLinkVideoConversion* m_deckLinkConverter)
: m_refCount(0), g_timecodeFormat(0), frameCount(0), final_frameCount(0)
{
sleepCond = m_sleepCond;
deckLinkOutput = m_deckLinkOutput;
deckLinkConverter = m_deckLinkConverter;
// Set cache size (20 1080p frames)
final_frames.SetMaxBytes(60 * 1920 * 1080 * 4 + (44100 * 2 * 4));
pthread_mutex_init(&m_mutex, NULL);
}
DeckLinkInputDelegate::~DeckLinkInputDelegate()
{
pthread_mutex_destroy(&m_mutex);
}
ULONG DeckLinkInputDelegate::AddRef(void)
{
pthread_mutex_lock(&m_mutex);
m_refCount++;
pthread_mutex_unlock(&m_mutex);
return (ULONG)m_refCount;
}
ULONG DeckLinkInputDelegate::Release(void)
{
pthread_mutex_lock(&m_mutex);
m_refCount--;
pthread_mutex_unlock(&m_mutex);
if (m_refCount == 0)
{
delete this;
return 0;
}
return (ULONG)m_refCount;
}
unsigned long DeckLinkInputDelegate::GetCurrentFrameNumber()
{
if (final_frameCount > 0)
return final_frameCount - 1;
else
return 0;
}
std::shared_ptr<openshot::Frame> DeckLinkInputDelegate::GetFrame(int64_t requested_frame)
{
std::shared_ptr<openshot::Frame> f;
// Is this frame for the future?
while (requested_frame > GetCurrentFrameNumber())
{
usleep(500 * 1);
}
#pragma omp critical (blackmagic_input_queue)
{
if (final_frames.Exists(requested_frame))
{
// Get the frame and remove it from the cache
f = final_frames.GetFrame(requested_frame);
final_frames.Remove(requested_frame);
}
else
{
cout << "Can't find " << requested_frame << ", GetCurrentFrameNumber(): " << GetCurrentFrameNumber() << endl;
final_frames.Display();
}
}
return f;
}
HRESULT DeckLinkInputDelegate::VideoInputFrameArrived(IDeckLinkVideoInputFrame* videoFrame, IDeckLinkAudioInputPacket* audioFrame)
{
// Handle Video Frame
if(videoFrame)
{
if (videoFrame->GetFlags() & bmdFrameHasNoInputSource)
{
fprintf(stderr, "Frame received (#%lu) - No input signal detected\n", frameCount);
}
else
{
const char *timecodeString = NULL;
if (g_timecodeFormat != 0)
{
IDeckLinkTimecode *timecode;
if (videoFrame->GetTimecode(g_timecodeFormat, &timecode) == S_OK)
{
timecode->GetString(&timecodeString);
}
}
// fprintf(stderr, "Frame received (#%lu) [%s] - Size: %li bytes\n",
// frameCount,
// timecodeString != NULL ? timecodeString : "No timecode",
// videoFrame->GetRowBytes() * videoFrame->GetHeight());
if (timecodeString)
free((void*)timecodeString);
// Create a new copy of the YUV frame object
IDeckLinkMutableVideoFrame *m_yuvFrame = NULL;
int width = videoFrame->GetWidth();
int height = videoFrame->GetHeight();
HRESULT res = deckLinkOutput->CreateVideoFrame(
width,
height,
videoFrame->GetRowBytes(),
bmdFormat8BitYUV,
bmdFrameFlagDefault,
&m_yuvFrame);
// Copy pixel and audio to copied frame
void *frameBytesSource;
void *frameBytesDest;
videoFrame->GetBytes(&frameBytesSource);
m_yuvFrame->GetBytes(&frameBytesDest);
memcpy(frameBytesDest, frameBytesSource, videoFrame->GetRowBytes() * height);
// Add raw YUV frame to queue
raw_video_frames.push_back(m_yuvFrame);
// Process frames once we have a few (to take advantage of multiple threads)
int number_to_process = raw_video_frames.size();
if (number_to_process >= OPEN_MP_NUM_PROCESSORS)
{
//omp_set_num_threads(1);
omp_set_nested(true);
#pragma omp parallel
{
#pragma omp single
{
// Temp frame counters (to keep the frames in order)
//frameCount = 0;
// Loop through each queued image frame
while (!raw_video_frames.empty())
{
// Get front frame (from the queue)
IDeckLinkMutableVideoFrame* frame = raw_video_frames.front();
raw_video_frames.pop_front();
// declare local variables (for OpenMP)
IDeckLinkOutput *copy_deckLinkOutput(deckLinkOutput);
IDeckLinkVideoConversion *copy_deckLinkConverter(deckLinkConverter);
unsigned long copy_frameCount(frameCount);
#pragma omp task firstprivate(copy_deckLinkOutput, copy_deckLinkConverter, frame, copy_frameCount)
{
// *********** CONVERT YUV source frame to RGB ************
void *frameBytes;
void *audioFrameBytes;
// Create a new RGB frame object
IDeckLinkMutableVideoFrame *m_rgbFrame = NULL;
int width = videoFrame->GetWidth();
int height = videoFrame->GetHeight();
HRESULT res = copy_deckLinkOutput->CreateVideoFrame(
width,
height,
width * 4,
bmdFormat8BitARGB,
bmdFrameFlagDefault,
&m_rgbFrame);
if(res != S_OK)
cout << "BMDOutputDelegate::StartRunning: Error creating RGB frame, res:" << res << endl;
// Create a RGB version of this YUV video frame
copy_deckLinkConverter->ConvertFrame(frame, m_rgbFrame);
// Get RGB Byte array
m_rgbFrame->GetBytes(&frameBytes);
// *********** CREATE OPENSHOT FRAME **********
auto f = std::make_shared<openshot::Frame>(
copy_frameCount, width, height, "#000000", 2048, 2);
// Add Image data to openshot frame
// TODO: Fix Decklink support with QImage Upgrade
//f->AddImage(width, height, "ARGB", Magick::CharPixel, (uint8_t*)frameBytes);
#pragma omp critical (blackmagic_input_queue)
{
// Add processed frame to cache (to be recalled in order after the thread pool is done)
final_frames.Add(f);
}
// Release RGB data
if (m_rgbFrame)
m_rgbFrame->Release();
// Release RGB data
if (frame)
frame->Release();
} // end task
// Increment frame count
frameCount++;
} // end while
} // omp single
} // omp parallel
// Update final frameCount (since they are done processing now)
final_frameCount += number_to_process;
} // if size > num processors
} // has video source
} // if videoFrame
return S_OK;
}
HRESULT DeckLinkInputDelegate::VideoInputFormatChanged(BMDVideoInputFormatChangedEvents events, IDeckLinkDisplayMode *mode, BMDDetectedVideoInputFormatFlags)
{
return S_OK;
}

View File

@@ -1,66 +0,0 @@
/**
* @file
* @brief Header file for DecklinkInput class
* @author Jonathan Thomas <jonathan@openshot.org>, Blackmagic Design
*
* @ref License
*/
// Copyright (c) 2008-2019 OpenShot Studios, LLC
// Copyright (c) 2009 Blackmagic Design
//
// SPDX-License-Identifier: LGPL-3.0-or-later
// SPDX-License-Identifier: MIT
#ifndef OPENSHOT_DECKLINK_INPUT_H
#define OPENSHOT_DECKLINK_INPUT_H
#include <iostream>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <pthread.h>
#include <unistd.h>
#include <fcntl.h>
#include "DeckLinkAPI.h"
#include "Frame.h"
#include "CacheMemory.h"
#include "OpenMPUtilities.h"
/// Implementation of the Blackmagic Decklink API (used by the DecklinkReader)
class DeckLinkInputDelegate : public IDeckLinkInputCallback
{
public:
pthread_cond_t* sleepCond;
BMDTimecodeFormat g_timecodeFormat;
unsigned long frameCount;
unsigned long final_frameCount;
// Queue of raw video frames
std::deque<IDeckLinkMutableVideoFrame*> raw_video_frames;
openshot::CacheMemory final_frames;
// Convert between YUV and RGB
IDeckLinkOutput *deckLinkOutput;
IDeckLinkVideoConversion *deckLinkConverter;
DeckLinkInputDelegate(pthread_cond_t* m_sleepCond, IDeckLinkOutput* deckLinkOutput, IDeckLinkVideoConversion* deckLinkConverter);
~DeckLinkInputDelegate();
virtual HRESULT STDMETHODCALLTYPE QueryInterface(REFIID iid, LPVOID *ppv) { return E_NOINTERFACE; }
virtual ULONG STDMETHODCALLTYPE AddRef(void);
virtual ULONG STDMETHODCALLTYPE Release(void);
virtual HRESULT STDMETHODCALLTYPE VideoInputFormatChanged(BMDVideoInputFormatChangedEvents, IDeckLinkDisplayMode*, BMDDetectedVideoInputFormatFlags);
virtual HRESULT STDMETHODCALLTYPE VideoInputFrameArrived(IDeckLinkVideoInputFrame*, IDeckLinkAudioInputPacket*);
// Extra methods
std::shared_ptr<openshot::Frame> GetFrame(int64_t requested_frame);
unsigned long GetCurrentFrameNumber();
private:
ULONG m_refCount;
pthread_mutex_t m_mutex;
};
#endif

View File

@@ -1,280 +0,0 @@
/**
* @file
* @brief Source file for DecklinkOutput class
* @author Jonathan Thomas <jonathan@openshot.org>, Blackmagic Design
*
* @ref License
*/
// Copyright (c) 2008-2019 OpenShot Studios, LLC
// Copyright (c) 2009 Blackmagic Design
//
// SPDX-License-Identifier: LGPL-3.0-or-later
// SPDX-License-Identifier: MIT
#include "DecklinkOutput.h"
using namespace std;
DeckLinkOutputDelegate::DeckLinkOutputDelegate(IDeckLinkDisplayMode *displayMode, IDeckLinkOutput* m_deckLinkOutput)
: m_refCount(0), displayMode(displayMode), width(0), height(0)
{
// reference to output device
deckLinkOutput = m_deckLinkOutput;
// init some variables
m_totalFramesScheduled = 0;
m_audioChannelCount = 2;
m_audioSampleRate = bmdAudioSampleRate48kHz;
m_audioSampleDepth = 16;
m_outputSignal = kOutputSignalDrop;
m_currentFrame = NULL;
// Get framerate
displayMode->GetFrameRate(&frameRateDuration, &frameRateScale);
m_framesPerSecond = (unsigned long)((frameRateScale + (frameRateDuration-1)) / frameRateDuration);
// Allocate audio array
m_audioBufferSampleLength = (unsigned long)((m_framesPerSecond * m_audioSampleRate * frameRateDuration) / frameRateScale);
m_audioBuffer = valloc(m_audioBufferSampleLength * m_audioChannelCount * (m_audioSampleDepth / 8));
// Zero the buffer (interpreted as audio silence)
memset(m_audioBuffer, 0x0, (m_audioBufferSampleLength * m_audioChannelCount * m_audioSampleDepth/8));
audioSamplesPerFrame = (unsigned long)((m_audioSampleRate * frameRateDuration) / frameRateScale);
pthread_mutex_init(&m_mutex, NULL);
}
DeckLinkOutputDelegate::~DeckLinkOutputDelegate()
{
cout << "DESTRUCTOR!!!" << endl;
pthread_mutex_destroy(&m_mutex);
}
/************************* DeckLink API Delegate Methods *****************************/
HRESULT DeckLinkOutputDelegate::ScheduledFrameCompleted (IDeckLinkVideoFrame* completedFrame, BMDOutputFrameCompletionResult result)
{
//cout << "Scheduled Successfully!" << endl;
// When a video frame has been released by the API, schedule another video frame to be output
ScheduleNextFrame(false);
return S_OK;
}
HRESULT DeckLinkOutputDelegate::ScheduledPlaybackHasStopped ()
{
//cout << "PLAYBACK HAS STOPPED!!!" << endl;
return S_OK;
}
HRESULT DeckLinkOutputDelegate::RenderAudioSamples (bool preroll)
{
// // Provide further audio samples to the DeckLink API until our preferred buffer waterlevel is reached
// const unsigned long kAudioWaterlevel = 48000;
// unsigned int bufferedSamples;
//
// // Try to maintain the number of audio samples buffered in the API at a specified waterlevel
// if ((deckLinkOutput->GetBufferedAudioSampleFrameCount(&bufferedSamples) == S_OK) && (bufferedSamples < kAudioWaterlevel))
// {
// unsigned int samplesToEndOfBuffer;
// unsigned int samplesToWrite;
// unsigned int samplesWritten;
//
// samplesToEndOfBuffer = (m_audioBufferSampleLength - m_audioBufferOffset);
// samplesToWrite = (kAudioWaterlevel - bufferedSamples);
// if (samplesToWrite > samplesToEndOfBuffer)
// samplesToWrite = samplesToEndOfBuffer;
//
// if (deckLinkOutput->ScheduleAudioSamples((void*)((unsigned long)m_audioBuffer + (m_audioBufferOffset * m_audioChannelCount * m_audioSampleDepth/8)), samplesToWrite, 0, 0, &samplesWritten) == S_OK)
// {
// m_audioBufferOffset = ((m_audioBufferOffset + samplesWritten) % m_audioBufferSampleLength);
// }
// }
//
//
// if (preroll)
// {
// // Start audio and video output
// deckLinkOutput->StartScheduledPlayback(0, 100, 1.0);
// }
return S_OK;
}
// Schedule the next frame
void DeckLinkOutputDelegate::ScheduleNextFrame(bool prerolling)
{
// Get oldest frame (if any)
if (final_frames.size() > 0)
{
#pragma omp critical (blackmagic_output_queue)
{
// Get the next frame off the queue
uint8_t* castBytes = final_frames.front();
final_frames.pop_front(); // remove this frame from the queue
// Release the current frame (if any)
if (m_currentFrame)
{
m_currentFrame->Release();
m_currentFrame = NULL;
}
// Create a new one
while (deckLinkOutput->CreateVideoFrame(
width,
height,
width * 4,
bmdFormat8BitARGB,
bmdFrameFlagDefault,
&m_currentFrame) != S_OK)
{
cout << "failed to create video frame" << endl;
usleep(1000 * 1);
}
// Copy pixel data to frame
void *frameBytesDest;
m_currentFrame->GetBytes(&frameBytesDest);
memcpy(frameBytesDest, castBytes, width * height * 4);
// Delete temp array
delete[] castBytes;
castBytes = NULL;
} // critical
}
//else
// cout << "Queue: empty on writer..." << endl;
// Schedule a frame to be displayed
if (m_currentFrame && deckLinkOutput->ScheduleVideoFrame(m_currentFrame, (m_totalFramesScheduled * frameRateDuration), frameRateDuration, frameRateScale) != S_OK)
cout << "ScheduleVideoFrame FAILED!!! " << m_totalFramesScheduled << endl;
// Update the timestamp (regardless of previous frame's success)
m_totalFramesScheduled += 1;
}
void DeckLinkOutputDelegate::WriteFrame(std::shared_ptr<openshot::Frame> frame)
{
#pragma omp critical (blackmagic_output_queue)
// Add raw OpenShot frame object
raw_video_frames.push_back(frame);
// Process frames once we have a few (to take advantage of multiple threads)
if (raw_video_frames.size() >= OPEN_MP_NUM_PROCESSORS)
{
//omp_set_num_threads(1);
omp_set_nested(true);
#pragma omp parallel
{
#pragma omp single
{
// Temp frame counters (to keep the frames in order)
frameCount = 0;
// Loop through each queued image frame
while (!raw_video_frames.empty())
{
// Get front frame (from the queue)
std::shared_ptr<openshot::Frame> frame = raw_video_frames.front();
raw_video_frames.pop_front();
// copy of frame count
unsigned long copy_frameCount(frameCount);
#pragma omp task firstprivate(frame, copy_frameCount)
{
// *********** CONVERT YUV source frame to RGB ************
void *frameBytes;
void *audioFrameBytes;
width = frame->GetWidth();
height = frame->GetHeight();
// Get RGB Byte array
int numBytes = frame->GetHeight() * frame->GetWidth() * 4;
uint8_t *castBytes = new uint8_t[numBytes];
// TODO: Fix Decklink support with QImage Upgrade
// Get a list of pixels in our frame's image. Each pixel is represented by
// a PixelPacket struct, which has 4 properties: .red, .blue, .green, .alpha
// const Magick::PixelPacket *pixel_packets = frame->GetPixels();
//
// // loop through ImageMagic pixel structs, and put the colors in a regular array, and move the
// // colors around to match the Decklink order (ARGB).
// for (int packet = 0, row = 0; row < numBytes; packet++, row+=4)
// {
// // Update buffer (which is already linked to the AVFrame: pFrameRGB)
// // Each color needs to be scaled to 8 bit (using the ImageMagick built-in ScaleQuantumToChar function)
// castBytes[row] = MagickCore::ScaleQuantumToChar((Magick::Quantum) 0); // alpha
// castBytes[row+1] = MagickCore::ScaleQuantumToChar((Magick::Quantum) pixel_packets[packet].red);
// castBytes[row+2] = MagickCore::ScaleQuantumToChar((Magick::Quantum) pixel_packets[packet].green);
// castBytes[row+3] = MagickCore::ScaleQuantumToChar((Magick::Quantum) pixel_packets[packet].blue);
// }
#pragma omp critical (blackmagic_output_queue)
{
//if (20 == frame->number)
// frame->Display();
// Add processed frame to cache (to be recalled in order after the thread pool is done)
temp_cache[copy_frameCount] = castBytes;
}
} // end task
// Increment frame count
frameCount++;
} // end while
} // omp single
} // omp parallel
// Add frames to final queue (in order)
#pragma omp critical (blackmagic_output_queue)
for (int z = 0; z < frameCount; z++)
{
// Add to final queue
final_frames.push_back(temp_cache[z]);
}
// Clear temp cache
temp_cache.clear();
//cout << "final_frames.size(): " << final_frames.size() << ", raw_video_frames.size(): " << raw_video_frames.size() << endl;
if (final_frames.size() >= m_framesPerSecond && m_totalFramesScheduled == 0)
{
cout << "Prerolling!" << endl;
for (int x = 0; x < final_frames.size(); x++)
ScheduleNextFrame(true);
cout << "Starting scheduled playback!" << endl;
// Start playback when enough frames have been processed
deckLinkOutput->StartScheduledPlayback(0, 100, 1.0);
}
else
{
// Be sure we don't have too many extra frames
#pragma omp critical (blackmagic_output_queue)
while (final_frames.size() > (m_framesPerSecond + 15))
{
//cout << "Too many, so remove some..." << endl;
// Remove oldest frame
delete[] final_frames.front();
final_frames.pop_front();
}
}
} // if
}

View File

@@ -1,98 +0,0 @@
/**
* @file
* @brief Header file for DecklinkOutput class
* @author Jonathan Thomas <jonathan@openshot.org>, Blackmagic Design
*
* @ref License
*/
// Copyright (c) 2008-2019 OpenShot Studios, LLC
// Copyright (c) 2009 Blackmagic Design
//
// SPDX-License-Identifier: LGPL-3.0-or-later
// SPDX-License-Identifier: MIT
#ifndef OPENSHOT_DECKLINK_OUTPUT_H
#define OPENSHOT_DECKLINK_OUTPUT_H
#include <iostream>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <pthread.h>
#include <unistd.h>
#include <fcntl.h>
#include "DeckLinkAPI.h"
#include "CacheMemory.h"
#include "Frame.h"
#include "OpenMPUtilities.h"
enum OutputSignal {
kOutputSignalPip = 0,
kOutputSignalDrop = 1
};
/// Implementation of the Blackmagic Decklink API (used by the DecklinkWriter)
class DeckLinkOutputDelegate : public IDeckLinkVideoOutputCallback, public IDeckLinkAudioOutputCallback
{
protected:
unsigned long m_totalFramesScheduled;
OutputSignal m_outputSignal;
void* m_audioBuffer;
unsigned long m_audioBufferSampleLength;
unsigned long m_audioBufferOffset;
unsigned long m_audioChannelCount;
BMDAudioSampleRate m_audioSampleRate;
unsigned long m_audioSampleDepth;
unsigned long audioSamplesPerFrame;
unsigned long m_framesPerSecond;
int height;
int width;
unsigned long frameCount;
//map<int, IDeckLinkMutableVideoFrame* > temp_cache;
std::map<int, uint8_t * > temp_cache;
BMDTimeValue frameRateDuration, frameRateScale;
// Queue of raw video frames
//deque<IDeckLinkMutableVideoFrame*> final_frames;
std::deque<uint8_t * > final_frames;
std::deque<std::shared_ptr<openshot::Frame> > raw_video_frames;
// Convert between YUV and RGB
IDeckLinkOutput *deckLinkOutput;
IDeckLinkDisplayMode *displayMode;
// Current frame being displayed
IDeckLinkMutableVideoFrame *m_currentFrame;
public:
DeckLinkOutputDelegate(IDeckLinkDisplayMode *displayMode, IDeckLinkOutput* deckLinkOutput);
~DeckLinkOutputDelegate();
// *** DeckLink API implementation of IDeckLinkVideoOutputCallback IDeckLinkAudioOutputCallback *** //
// IUnknown needs only a dummy implementation
virtual HRESULT STDMETHODCALLTYPE QueryInterface (REFIID iid, LPVOID *ppv) {return E_NOINTERFACE;}
virtual ULONG STDMETHODCALLTYPE AddRef () {return 1;}
virtual ULONG STDMETHODCALLTYPE Release () {return 1;}
virtual HRESULT STDMETHODCALLTYPE ScheduledFrameCompleted (IDeckLinkVideoFrame* completedFrame, BMDOutputFrameCompletionResult result);
virtual HRESULT STDMETHODCALLTYPE ScheduledPlaybackHasStopped ();
virtual HRESULT STDMETHODCALLTYPE RenderAudioSamples (bool preroll);
/// Schedule the next frame
void ScheduleNextFrame(bool prerolling);
/// Custom method to write new frames
void WriteFrame(std::shared_ptr<openshot::Frame> frame);
private:
ULONG m_refCount;
pthread_mutex_t m_mutex;
};
#endif

View File

@@ -1,278 +0,0 @@
/**
* @file
* @brief Source file for DecklinkReader class
* @author Jonathan Thomas <jonathan@openshot.org>
*
* @ref License
*/
// Copyright (c) 2008-2019 OpenShot Studios, LLC
//
// SPDX-License-Identifier: LGPL-3.0-or-later
#include "DecklinkReader.h"
#include "Exceptions.h"
using namespace openshot;
DecklinkReader::DecklinkReader(int device, int video_mode, int pixel_format, int channels, int sample_depth)
: device(device), is_open(false), g_videoModeIndex(video_mode), g_audioChannels(channels), g_audioSampleDepth(sample_depth)
{
// Init decklink variables
inputFlags = 0;
selectedDisplayMode = bmdModeNTSC;
pixelFormat = bmdFormat8BitYUV;
displayModeCount = 0;
exitStatus = 1;
foundDisplayMode = false;
pthread_mutex_init(&sleepMutex, NULL);
pthread_cond_init(&sleepCond, NULL);
switch(pixel_format)
{
case 0: pixelFormat = bmdFormat8BitYUV; break;
case 1: pixelFormat = bmdFormat10BitYUV; break;
case 2: pixelFormat = bmdFormat10BitRGB; break;
default:
throw DecklinkError("Pixel format is not valid (must be 0,1,2).");
}
// Attempt to open blackmagic card
deckLinkIterator = CreateDeckLinkIteratorInstance();
if (!deckLinkIterator)
throw DecklinkError("This application requires the DeckLink drivers installed.");
/* Connect to a DeckLink instance */
for (int device_count = 0; device_count <= device; device_count++)
{
// Check for requested device
result = deckLinkIterator->Next(&deckLink);
if (result != S_OK)
throw DecklinkError("No DeckLink PCI cards found.");
if (device_count == device)
break;
}
if (deckLink->QueryInterface(IID_IDeckLinkInput, (void**)&deckLinkInput) != S_OK)
throw DecklinkError("DeckLink QueryInterface Failed.");
// Obtain an IDeckLinkDisplayModeIterator to enumerate the display modes supported on output
result = deckLinkInput->GetDisplayModeIterator(&displayModeIterator);
if (result != S_OK)
throw DecklinkError("Could not obtain the video output display mode iterator.");
// Init deckLinkOutput (needed for color conversion)
if (deckLink->QueryInterface(IID_IDeckLinkOutput, (void**)&m_deckLinkOutput) != S_OK)
throw DecklinkError("Failed to create a deckLinkOutput(), used to convert YUV to RGB.");
// Init the YUV to RGB conversion
if(!(m_deckLinkConverter = CreateVideoConversionInstance()))
throw DecklinkError("Failed to create a VideoConversionInstance(), used to convert YUV to RGB.");
// Create Delegate & Pass in pointers to the output and converters
delegate = new DeckLinkInputDelegate(&sleepCond, m_deckLinkOutput, m_deckLinkConverter);
deckLinkInput->SetCallback(delegate);
if (g_videoModeIndex < 0)
throw DecklinkError("No video mode specified.");
// Loop through all available display modes, until a match is found (if any)
while (displayModeIterator->Next(&displayMode) == S_OK)
{
if (g_videoModeIndex == displayModeCount)
{
BMDDisplayModeSupport result;
foundDisplayMode = true;
displayMode->GetName(&displayModeName);
selectedDisplayMode = displayMode->GetDisplayMode();
deckLinkInput->DoesSupportVideoMode(selectedDisplayMode, pixelFormat, bmdVideoInputFlagDefault, &result, NULL);
// Get framerate
displayMode->GetFrameRate(&frameRateDuration, &frameRateScale);
if (result == bmdDisplayModeNotSupported)
throw DecklinkError("The display mode does not support the selected pixel format.");
if (inputFlags & bmdVideoInputDualStream3D)
{
if (!(displayMode->GetFlags() & bmdDisplayModeSupports3D))
throw DecklinkError("The display mode does not support 3D.");
}
break;
}
displayModeCount++;
displayMode->Release();
}
if (!foundDisplayMode)
throw DecklinkError("Invalid video mode. No matching ones found.");
// Check for video input
result = deckLinkInput->EnableVideoInput(selectedDisplayMode, pixelFormat, inputFlags);
if(result != S_OK)
throw DecklinkError("Failed to enable video input. Is another application using the card?");
// Check for audio input
result = deckLinkInput->EnableAudioInput(bmdAudioSampleRate48kHz, g_audioSampleDepth, g_audioChannels);
if(result != S_OK)
throw DecklinkError("Failed to enable audio input. Is another application using the card?");
}
// Destructor: release every DeckLink COM interface this reader acquired.
DecklinkReader::~DecklinkReader()
{
    // Each pointer is Release()d and cleared so a double-destroy is harmless.
    if (displayModeIterator != NULL)
    {
        displayModeIterator->Release();
        displayModeIterator = NULL;
    }

    if (deckLinkInput != NULL)
    {
        deckLinkInput->Release();
        deckLinkInput = NULL;
    }

    if (deckLink != NULL)
    {
        deckLink->Release();
        deckLink = NULL;
    }

    // NOTE(review): unlike the members above, deckLinkIterator is not reset to
    // NULL after Release() — harmless in a destructor, but inconsistent.
    if (deckLinkIterator != NULL)
        deckLinkIterator->Release();
}
// Open the capture device: start the streams and populate the reader's info struct.
void DecklinkReader::Open()
{
    // Open reader if not already open (idempotent)
    if (!is_open)
    {
        // Start the video and audio capture streams on the card
        result = deckLinkInput->StartStreams();
        if(result != S_OK)
            throw DecklinkError("Failed to start the video and audio streams.");

        // Update image properties from the display mode matched in the constructor
        info.has_audio = false;
        info.has_video = true;
        info.vcodec = displayModeName;
        info.width = displayMode->GetWidth();
        info.height = displayMode->GetHeight();
        // 4 bytes per pixel (RGBA) for a single uncompressed frame
        info.file_size = info.width * info.height * sizeof(char) * 4;
        info.pixel_ratio.num = 1;
        info.pixel_ratio.den = 1;
        info.duration = 60 * 60 * 24; // 24 hour duration... since we're capturing a live stream
        // DeckLink reports the rate as duration/scale; fps is scale/duration,
        // and the video timebase is its inverse.
        info.fps.num = frameRateScale;
        info.fps.den = frameRateDuration;
        info.video_timebase.num = frameRateDuration;
        info.video_timebase.den = frameRateScale;
        info.video_length = round(info.duration * info.fps.ToDouble());

        // Calculate the DAR (display aspect ratio) from frame size and pixel ratio
        Fraction size(info.width * info.pixel_ratio.num, info.height * info.pixel_ratio.den);

        // Reduce size fraction to lowest terms
        size.Reduce();

        // Set the ratio based on the reduced fraction
        info.display_ratio.num = size.num;
        info.display_ratio.den = size.den;

        // Mark as "open"
        is_open = true;
    }
}
// Stop the capture streams on the device (no-op when already closed)
void DecklinkReader::Close()
{
    // Nothing to do unless a capture is currently running
    if (!is_open)
        return;

    // Halt both the video and audio streams on the card
    result = deckLinkInput->StopStreams();
    if (result != S_OK)
        throw DecklinkError("Failed to stop the video and audio streams.");

    // Mark as "closed"
    is_open = false;
}
// Return the frame number the capture delegate is currently on.
unsigned long DecklinkReader::GetCurrentFrameNumber()
{
    return delegate->GetCurrentFrameNumber();
}
// Get an openshot::Frame object for the next available LIVE frame
std::shared_ptr<Frame> DecklinkReader::GetFrame(int64_t requested_frame)
{
    // The delegate collects frames on another thread; hand back the oldest one
    // it is holding. The requested frame number is effectively ignored.
    return delegate->GetFrame(requested_frame);
}
// Generate JSON string of this object
std::string DecklinkReader::Json() const {
// Return formatted string
return JsonValue().toStyledString();
}
// Generate Json::Value for this object
Json::Value DecklinkReader::JsonValue() const {

    // Start from the parent class's properties, then tag our concrete type
    Json::Value value = ReaderBase::JsonValue();
    value["type"] = "DecklinkReader";

    // return JsonValue
    return value;
}
// Load JSON string into this object
void DecklinkReader::SetJson(const std::string value) {
// Parse JSON string into JSON objects
try
{
const Json::Value root = openshot::stringToJson(value);
// Set all values that match
SetJsonValue(root);
}
catch (const std::exception& e)
{
// Error parsing JSON (or missing keys)
throw InvalidJSON("JSON is invalid (missing keys or invalid data types)");
}
}
// Load Json::Value into this object
void DecklinkReader::SetJsonValue(const Json::Value root) {

    // Let the base class absorb all shared reader properties first
    ReaderBase::SetJsonValue(root);

    // A closed reader needs no restart
    if (!is_open)
        return;

    // Re-open the device so the new settings take effect
    Close();
    Open();
}

View File

@@ -1,113 +0,0 @@
/**
* @file
* @brief Header file for DecklinkReader class
* @author Jonathan Thomas <jonathan@openshot.org>
*
* @ref License
*/
// Copyright (c) 2008-2019 OpenShot Studios, LLC
//
// SPDX-License-Identifier: LGPL-3.0-or-later
#ifndef OPENSHOT_DECKLINK_READER_H
#define OPENSHOT_DECKLINK_READER_H
#include "ReaderBase.h"
#include <cmath>
#include <ctime>
#include <fcntl.h>
#include <iostream>
#include <omp.h>
#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <memory>
#include <unistd.h>
#include "CacheMemory.h"
#include "Frame.h"
#include "DecklinkInput.h"
namespace openshot
{
/**
 * @brief This class uses the Blackmagic Decklink libraries, to open video streams on Blackmagic devices.
 *
 * This requires special hardware manufactured by <a href="http://www.blackmagicdesign.com/products">Blackmagic Designs</a>.
 * Once the device is acquired and connected, this reader returns openshot::Frame objects containing the image and audio data.
 */
class DecklinkReader : public ReaderBase
{
private:
    bool is_open;                                   ///< True while the capture streams are running
    IDeckLink *deckLink;                            ///< The selected DeckLink card
    IDeckLinkInput *deckLinkInput;                  ///< Capture interface of the card
    IDeckLinkDisplayModeIterator *displayModeIterator; ///< Enumerates the card's display modes
    IDeckLinkOutput *m_deckLinkOutput;              ///< Output interface, used to convert YUV to RGB
    IDeckLinkVideoConversion *m_deckLinkConverter;  ///< Video conversion instance, used to convert YUV to RGB
    pthread_mutex_t sleepMutex;                     ///< Mutex paired with sleepCond (presumably guards delegate wake-ups — confirm)
    pthread_cond_t sleepCond;                       ///< Condition variable handed to the input delegate
    IDeckLinkIterator *deckLinkIterator;            ///< Enumerates installed DeckLink cards
    DeckLinkInputDelegate *delegate;                ///< Callback that collects frames on its own thread
    IDeckLinkDisplayMode *displayMode;              ///< The display mode matched during construction
    BMDVideoInputFlags inputFlags;                  ///< Flags passed to EnableVideoInput (e.g. 3D dual-stream)
    BMDDisplayMode selectedDisplayMode;             ///< BMD id of the matched display mode
    BMDPixelFormat pixelFormat;                     ///< Requested capture pixel format
    int displayModeCount;                           ///< Counter used while searching for the requested mode
    int exitStatus;                                 ///< NOTE(review): not used in the visible code paths
    int ch;                                         ///< NOTE(review): not used in the visible code paths
    bool foundDisplayMode;                          ///< True once the requested mode index was matched
    HRESULT result;                                 ///< Result of the most recent DeckLink API call
    int g_videoModeIndex;                           ///< Requested display-mode index (must be >= 0)
    int g_audioChannels;                            ///< Audio channel count passed to EnableAudioInput
    int g_audioSampleDepth;                         ///< Audio sample depth passed to EnableAudioInput
    int g_maxFrames;                                ///< NOTE(review): not used in the visible code paths
    int device;                                     ///< Index of the DeckLink card to open
    BMDTimeValue frameRateDuration, frameRateScale; ///< Frame rate of the matched mode (fps = scale/duration)
    const char *displayModeName;                    ///< Human-readable name of the matched mode

public:

    /// Constructor for DecklinkReader. This automatically opens the device and loads
    /// the first second of video, or it throws one of the following exceptions.
    DecklinkReader(int device, int video_mode, int pixel_format, int channels, int sample_depth);
    ~DecklinkReader(); /// Destructor

    /// Close the device and video stream
    void Close();

    /// Get the cache object used by this reader (always returns NULL for this reader)
    CacheMemory* GetCache() { return NULL; };

    /// Get an openshot::Frame object for a specific frame number of this reader. Frame number
    /// is ignored, since it always gets the latest LIVE frame.
    ///
    /// @returns The requested frame (containing the image)
    /// @param requested_frame The frame number that is requested.
    std::shared_ptr<Frame> GetFrame(int64_t requested_frame);

    /// Return the frame number the capture delegate is currently on
    unsigned long GetCurrentFrameNumber();

    /// Determine if reader is open or closed
    bool IsOpen() { return is_open; };

    /// Return the type name of the class
    std::string Name() { return "DecklinkReader"; };

    // Get and Set JSON methods
    std::string Json() const override; ///< Generate JSON string of this object
    void SetJson(const std::string value); ///< Load JSON string into this object
    Json::Value JsonValue() const; ///< Generate Json::Value for this object
    void SetJsonValue(const Json::Value root); ///< Load Json::Value into this object

    /// Open device and video stream - which is called by the constructor automatically
    void Open();
};
}
#endif

View File

@@ -1,236 +0,0 @@
/**
* @file
* @brief Source file for DecklinkWriter class
* @author Jonathan Thomas <jonathan@openshot.org>
*
* @ref License
*/
// Copyright (c) 2008-2019 OpenShot Studios, LLC
//
// SPDX-License-Identifier: LGPL-3.0-or-later
#include "DecklinkWriter.h"
using namespace openshot;
// Constructor: record the requested device/mode settings and prepare sync primitives.
// Throws DecklinkError for an unknown pixel format code.
DecklinkWriter::DecklinkWriter(int device, int video_mode, int pixel_format, int channels, int sample_depth)
    : device(device), is_open(false), g_videoModeIndex(video_mode), g_audioChannels(channels), g_audioSampleDepth(sample_depth)
{
    // Default DeckLink state until Open() negotiates a real display mode
    selectedDisplayMode = bmdModeNTSC;
    pixelFormat = bmdFormat8BitYUV;
    inputFlags = 0;
    foundDisplayMode = false;
    displayModeCount = 0;
    exitStatus = 1;

    // Synchronization primitives shared with the output delegate
    pthread_mutex_init(&sleepMutex, NULL);
    pthread_cond_init(&sleepCond, NULL);

    // Map the caller's integer choice onto a BMD pixel format constant
    if (pixel_format == 0)
        pixelFormat = bmdFormat8BitYUV;
    else if (pixel_format == 1)
        pixelFormat = bmdFormat10BitYUV;
    else if (pixel_format == 2)
        pixelFormat = bmdFormat10BitRGB;
    else if (pixel_format == 3)
        pixelFormat = bmdFormat8BitARGB;
    else
        throw DecklinkError("Pixel format is not valid (must be 0,1,2,3).");
}
// Open decklink writer: locate the requested card, match the requested display
// mode, install the output delegate, and enable video output.
void DecklinkWriter::Open()
{
    // Open writer if not already open (idempotent)
    if (!is_open)
    {
        // Attempt to open blackmagic card
        deckLinkIterator = CreateDeckLinkIteratorInstance();

        if (!deckLinkIterator)
            throw DecklinkError("This application requires the DeckLink drivers installed.");

        /* Connect to a DeckLink instance: advance the iterator until the
           requested device index is reached */
        for (int device_count = 0; device_count <= device; device_count++)
        {
            // Check for requested device
            result = deckLinkIterator->Next(&deckLink);
            if (result != S_OK)
                throw DecklinkError("No DeckLink PCI cards found.");

            if (device_count == device)
                break;
        }

        if (deckLink->QueryInterface(IID_IDeckLinkOutput, (void**)&deckLinkOutput) != S_OK)
            throw DecklinkError("DeckLink QueryInterface Failed.");

        // Obtain an IDeckLinkDisplayModeIterator to enumerate the display modes supported on output
        result = deckLinkOutput->GetDisplayModeIterator(&displayModeIterator);
        if (result != S_OK)
            throw DecklinkError("Could not obtain the video output display mode iterator.");

        if (g_videoModeIndex < 0)
            throw DecklinkError("No video mode specified.");

        // Loop through all available display modes, until a match is found (if any)
        const char *displayModeName;
        BMDTimeValue frameRateDuration, frameRateScale;

        while (displayModeIterator->Next(&displayMode) == S_OK)
        {
            if (g_videoModeIndex == displayModeCount)
            {
                // NOTE(review): this local shadows the member `result` and is
                // only written by the commented-out DoesSupportVideoMode call.
                BMDDisplayModeSupport result;

                foundDisplayMode = true;
                displayMode->GetName(&displayModeName);
                selectedDisplayMode = displayMode->GetDisplayMode();
                //deckLinkOutput->DoesSupportVideoMode(selectedDisplayMode, pixelFormat, bmdVideoOutputFlagDefault, &result, NULL);

                // Get framerate
                displayMode->GetFrameRate(&frameRateDuration, &frameRateScale);

                //if (result == bmdDisplayModeNotSupported)
                //{
                //	cout << "The display mode does not support the selected pixel format." << endl;
                //	throw DecklinkError("The display mode does not support the selected pixel format.");
                //}

                break;
            }
            displayModeCount++;
        }

        if (!foundDisplayMode)
            throw DecklinkError("Invalid video mode. No matching ones found.");

        // Calculate FPS (rounded up)
        // NOTE(review): m_framesPerSecond is never read after this point; it
        // was presumably used by the (now commented-out) preroll code below.
        unsigned long m_framesPerSecond = (unsigned long)((frameRateScale + (frameRateDuration-1)) / frameRateDuration);

        // Create Delegate & Pass in pointers to the output and converters
        delegate = new DeckLinkOutputDelegate(displayMode, deckLinkOutput);

        // Provide this class as a delegate to the audio and video output interfaces
        deckLinkOutput->SetScheduledFrameCompletionCallback(delegate);
        //deckLinkOutput->SetAudioCallback(delegate);

        // Check for video input
        if (deckLinkOutput->EnableVideoOutput(displayMode->GetDisplayMode(), bmdVideoOutputFlagDefault) != S_OK)
            throw DecklinkError("Failed to enable video output. Is another application using the card?");

        // Check for audio input
        //if (deckLinkOutput->EnableAudioOutput(bmdAudioSampleRate48kHz, g_audioSampleDepth, g_audioChannels, bmdAudioOutputStreamContinuous) != S_OK)
        //	throw DecklinkError("Failed to enable audio output. Is another application using the card?");

        // Begin video preroll by scheduling a second of frames in hardware
        //auto f = std::make_shared<Frame>(1, displayMode->GetWidth(), displayMode->GetHeight(), "Blue");
        //f->AddColor(displayMode->GetWidth(), displayMode->GetHeight(), "Blue");

        // Preroll 1 second of video
        //for (unsigned i = 0; i < 16; i++)
        //{
        //	// Write 30 blank frames (for preroll)
        //	delegate->WriteFrame(f);
        //	delegate->ScheduleNextFrame(true);
        //}
        //deckLinkOutput->StartScheduledPlayback(0, 100, 1.0);

        //if (deckLinkOutput->BeginAudioPreroll() != S_OK)
        //	throw DecklinkError("Failed to begin audio preroll.");

        // Update image properties from the matched display mode
        info.has_audio = true;
        info.has_video = true;
        info.vcodec = displayModeName;
        info.width = displayMode->GetWidth();
        info.height = displayMode->GetHeight();
        // 4 bytes per pixel (RGBA) for a single uncompressed frame
        info.file_size = info.width * info.height * sizeof(char) * 4;
        info.pixel_ratio.num = 1;
        info.pixel_ratio.den = 1;
        info.duration = 60 * 60 * 24; // 24 hour duration... since we're capturing a live stream
        // fps is scale/duration; the video timebase is its inverse
        info.fps.num = frameRateScale;
        info.fps.den = frameRateDuration;
        info.video_timebase.num = frameRateDuration;
        info.video_timebase.den = frameRateScale;
        info.video_length = round(info.duration * info.fps.ToDouble());

        // Calculate the DAR (display aspect ratio)
        Fraction size(info.width * info.pixel_ratio.num, info.height * info.pixel_ratio.den);

        // Reduce size fraction
        size.Reduce();

        // Set the ratio based on the reduced fraction
        info.display_ratio.num = size.num;
        info.display_ratio.den = size.den;

        // Mark as "open"
        is_open = true;
    }
}
// Close device and video stream: stop playback, disable outputs, and release
// all DeckLink COM interfaces acquired by Open().
void DecklinkWriter::Close()
{
    // Close all objects, if writer is 'open'
    if (is_open)
    {
        // Stop the audio and video output streams immediately
        deckLinkOutput->StopScheduledPlayback(0, NULL, 0);
        deckLinkOutput->DisableAudioOutput();
        deckLinkOutput->DisableVideoOutput();

        // Release DisplayMode
        // NOTE(review): released without a NULL guard, unlike every other
        // interface below — confirm displayMode is always set when is_open.
        displayMode->Release();

        if (displayModeIterator != NULL)
        {
            displayModeIterator->Release();
            displayModeIterator = NULL;
        }

        if (deckLinkOutput != NULL)
        {
            deckLinkOutput->Release();
            deckLinkOutput = NULL;
        }

        if (deckLink != NULL)
        {
            deckLink->Release();
            deckLink = NULL;
        }

        if (deckLinkIterator != NULL)
            deckLinkIterator->Release();

        // Mark as "closed"
        is_open = false;
    }
}
// This method is required for all derived classes of WriterBase. Write a Frame to the device.
void DecklinkWriter::WriteFrame(std::shared_ptr<Frame> frame)
{
    // Only a successfully-opened writer may schedule frames
    if (is_open)
    {
        // Hand the frame to the scheduling delegate for output
        delegate->WriteFrame(frame);
        return;
    }

    throw WriterClosed("The DecklinkWriter is closed. Call Open() before calling this method.");
}
// This method is required for all derived classes of WriterBase. Write a block of frames from a reader.
// NOTE(review): despite its name, `length` is treated as the last frame number
// (inclusive), not a count — confirm against other WriterBase implementations.
void DecklinkWriter::WriteFrame(ReaderBase* reader, int start, int length)
{
    int number = start;
    while (number <= length)
    {
        // Fetch the next frame and send it through the single-frame path
        WriteFrame(reader->GetFrame(number));
        ++number;
    }
}

View File

@@ -1,97 +0,0 @@
/**
* @file
* @brief Header file for DecklinkWriter class
* @author Jonathan Thomas <jonathan@openshot.org>
*
* @ref License
*/
// Copyright (c) 2008-2019 OpenShot Studios, LLC
//
// SPDX-License-Identifier: LGPL-3.0-or-later
#ifndef OPENSHOT_DECKLINK_WRITER_H
#define OPENSHOT_DECKLINK_WRITER_H
#include "WriterBase.h"
#include <cmath>
#include <ctime>
#include <fcntl.h>
#include <iostream>
#include <omp.h>
#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <memory>
#include <unistd.h>
#include "CacheMemory.h"
#include "Frame.h"
#include "DecklinkOutput.h"
namespace openshot
{
/**
 * @brief This class uses the Blackmagic Decklink libraries, to send video streams to Blackmagic devices.
 *
 * This requires special hardware manufactured by <a href="http://www.blackmagicdesign.com/products">Blackmagic Designs</a>.
 * Once the device is acquired and connected, this writer accepts openshot::Frame objects containing the image and audio data.
 */
class DecklinkWriter : public WriterBase
{
private:
    bool is_open;                                   ///< True after Open() succeeds, until Close()
    IDeckLink *deckLink;                            ///< The selected DeckLink card
    IDeckLinkDisplayModeIterator *displayModeIterator; ///< Enumerates the card's output display modes
    IDeckLinkOutput *deckLinkOutput;                ///< Output (playback) interface of the card
    IDeckLinkVideoConversion *m_deckLinkConverter;  ///< NOTE(review): not used in the visible code paths
    pthread_mutex_t sleepMutex;                     ///< Mutex paired with sleepCond (initialized in the constructor)
    pthread_cond_t sleepCond;                       ///< Condition variable (initialized in the constructor)
    IDeckLinkIterator *deckLinkIterator;            ///< Enumerates installed DeckLink cards
    DeckLinkOutputDelegate *delegate;               ///< Scheduled-frame-completion callback that writes frames
    IDeckLinkDisplayMode *displayMode;              ///< The display mode matched in Open()
    BMDVideoInputFlags inputFlags;                  ///< Input flags (zeroed in the constructor)
    BMDDisplayMode selectedDisplayMode;             ///< BMD id of the matched display mode
    BMDPixelFormat pixelFormat;                     ///< Requested output pixel format
    int displayModeCount;                           ///< Counter used while searching for the requested mode
    int exitStatus;                                 ///< NOTE(review): not used in the visible code paths
    int ch;                                         ///< NOTE(review): not used in the visible code paths
    bool foundDisplayMode;                          ///< True once the requested mode index was matched
    HRESULT result;                                 ///< Result of the most recent DeckLink API call
    int g_videoModeIndex;                           ///< Requested display-mode index (must be >= 0)
    int g_audioChannels;                            ///< Audio channel count (audio output currently disabled)
    int g_audioSampleDepth;                         ///< Audio sample depth (audio output currently disabled)
    int g_maxFrames;                                ///< NOTE(review): not used in the visible code paths
    int device;                                     ///< Index of the DeckLink card to open

public:

    /// Constructor for DecklinkWriter. This automatically opens the device or it
    /// throws one of the following exceptions.
    DecklinkWriter(int device, int video_mode, int pixel_format, int channels, int sample_depth);

    /// Close the device and video stream
    void Close();

    /// This method is required for all derived classes of WriterBase. Write a Frame to the video file.
    void WriteFrame(std::shared_ptr<Frame> frame);

    /// This method is required for all derived classes of WriterBase. Write a block of frames from a reader.
    void WriteFrame(ReaderBase* reader, int start, int length);

    /// Open device and video stream - which is called by the constructor automatically
    void Open();

    /// Determine if writer is open or closed
    bool IsOpen() { return is_open; };
};
}
#endif

View File

@@ -113,10 +113,6 @@
#include "Clip.h"
#include "ClipBase.h"
#include "Coordinate.h"
#ifdef USE_BLACKMAGIC
#include "DecklinkReader.h"
#include "DecklinkWriter.h"
#endif
#include "DummyReader.h"
#include "EffectBase.h"
#include "Effects.h"

View File

@@ -16,17 +16,6 @@ endif()
# Test media path, used by unit tests for input data
file(TO_NATIVE_PATH "${PROJECT_SOURCE_DIR}/examples/" TEST_MEDIA_PATH)
################# BLACKMAGIC DECKLINK ###################
if(ENABLE_BLACKMAGIC)
# Find BlackMagic DeckLinkAPI libraries
find_package(BlackMagic)
if(BLACKMAGIC_FOUND)
# Include Blackmagic headers (needed for compile)
include_directories(${BLACKMAGIC_INCLUDE_DIR})
endif()
endif()
###
### TEST SOURCE FILES
###
@@ -82,11 +71,15 @@ add_library(catch-main OBJECT catch_main.cpp)
target_link_libraries(catch-main PUBLIC Catch2::Catch2)
foreach(tname ${OPENSHOT_TESTS})
add_executable(openshot-${tname}-test ${tname}.cpp $<TARGET_OBJECTS:catch-main>)
add_executable(openshot-${tname}-test
${tname}.cpp
$<TARGET_OBJECTS:catch-main>
)
target_compile_definitions(openshot-${tname}-test PRIVATE
TEST_MEDIA_PATH="${TEST_MEDIA_PATH}"
)
target_link_libraries(openshot-${tname}-test Catch2::Catch2 openshot)
# Automatically configure CTest targets from Catch2 test cases
catch_discover_tests(
openshot-${tname}-test
@@ -97,13 +90,14 @@ foreach(tname ${OPENSHOT_TESTS})
list(APPEND CATCH2_TEST_TARGETS openshot-${tname}-test)
list(APPEND CATCH2_TEST_NAMES ${tname})
endforeach()
# Add an additional special-case test, for an envvar-dependent setting
add_test(NAME [=["Settings:Debug logging (enabled)"]=]
COMMAND
openshot-Settings-test "[environment]"
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
)
set_tests_properties([=["Settings:Debug logging (enabled)"]=]
catch_discover_tests(
openshot-Settings-test
TEST_LIST Settings_EXTRA_TESTS
TEST_PREFIX Settings:
TEST_SUFFIX "(enabled)"
TEST_WORKING_DIR "${_test_dir}"
PROPERTIES
LABELS Settings
ENVIRONMENT "LIBOPENSHOT_DEBUG=1"