Added a simple chroma-key method to the Frame object, and added multi-threaded optimizations to the Blackmagic capture code.

This commit is contained in:
Jonathan Thomas
2013-01-26 23:02:21 -06:00
parent 6c1d0a9568
commit fbf169a7fa
3 changed files with 120 additions and 18 deletions

View File

@@ -164,6 +164,9 @@ namespace openshot
/// Set Sample Rate, used for playback (Play() method)
void SetSampleRate(int sample_rate);
/// Make colors in a specific range transparent
void TransparentColors(string color, double fuzz);
/// Play audio samples for this frame
void Play();
};

View File

@@ -25,15 +25,20 @@
** -LICENSE-END-
*/
#include <iostream>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <pthread.h>
#include <unistd.h>
#include <fcntl.h>
#include <omp.h>
#include "DeckLinkAPI.h"
#include "../include/Capture.h"
#include "../include/Frame.h"
using namespace std;
pthread_mutex_t sleepMutex;
pthread_cond_t sleepCond;
@@ -54,6 +59,10 @@ static int g_maxFrames = -1;
static unsigned long frameCount = 0;
// Convert between YUV and RGB
IDeckLinkOutput *m_deckLinkOutput;
IDeckLinkVideoConversion *m_deckLinkConverter;
DeckLinkCaptureDelegate::DeckLinkCaptureDelegate() : m_refCount(0)
{
pthread_mutex_init(&m_mutex, NULL);
@@ -92,8 +101,6 @@ HRESULT DeckLinkCaptureDelegate::VideoInputFrameArrived(IDeckLinkVideoInputFrame
{
IDeckLinkVideoFrame* rightEyeFrame = NULL;
IDeckLinkVideoFrame3DExtensions* threeDExtensions = NULL;
void* frameBytes;
void* audioFrameBytes;
// Handle Video Frame
if(videoFrame)
@@ -136,20 +143,84 @@ HRESULT DeckLinkCaptureDelegate::VideoInputFrameArrived(IDeckLinkVideoInputFrame
if (videoOutputFile != -1)
{
videoFrame->GetBytes(&frameBytes);
write(videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());
if (rightEyeFrame)
{
rightEyeFrame->GetBytes(&frameBytes);
write(videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());
}
//omp_set_num_threads(1);
omp_set_nested(true);
#pragma omp parallel
{
#pragma omp single
{
#pragma omp task firstprivate(m_deckLinkOutput, m_deckLinkConverter, videoFrame, frameCount)
{
cout << "Start processing frame " << frameCount << endl;
// *********** CONVERT YUV source frame to RGB ************
void *frameBytes;
void *audioFrameBytes;
// Create a new RGB frame object
IDeckLinkMutableVideoFrame *m_rgbFrame = NULL;
int width = videoFrame->GetWidth();
int height = videoFrame->GetHeight();
HRESULT res = m_deckLinkOutput->CreateVideoFrame(
width,
height,
width * 4,
bmdFormat8BitARGB,
bmdFrameFlagDefault,
&m_rgbFrame);
if(res != S_OK)
cout << "BMDOutputDelegate::StartRunning: Error creating RGB frame, res:" << res << endl;
// Create a RGB version of this YUV video frame
m_deckLinkConverter->ConvertFrame(videoFrame, m_rgbFrame);
// Get RGB Byte array
m_rgbFrame->GetBytes(&frameBytes);
// *********** CREATE OPENSHOT FRAME **********
tr1::shared_ptr<openshot::Frame> f(new openshot::Frame(frameCount, width, height, "#000000", 2048, 2));
// Add Image data to openshot frame
f->AddImage(width, height, "ARGB", Magick::CharPixel, (uint8_t*)frameBytes);
// Remove background color
f->TransparentColors("#546466ff", 15.0);
// Display Image DEBUG
if (frameCount == 40)
#pragma omp critical (image_magick)
f->Display();
// Release RGB data
if (m_rgbFrame)
m_rgbFrame->Release();
cout << "End processing frame " << frameCount << endl;
}
}
}
// ORIGINAL EXAMPLE - write raw video and audio files
//videoFrame->GetBytes(&frameBytes);
//write(videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());
//
//if (rightEyeFrame)
//{
// rightEyeFrame->GetBytes(&frameBytes);
// write(videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());
//}
}
}
if (rightEyeFrame)
rightEyeFrame->Release();
// Increment frame count
frameCount++;
if (g_maxFrames > 0 && frameCount >= g_maxFrames)
@@ -159,14 +230,14 @@ HRESULT DeckLinkCaptureDelegate::VideoInputFrameArrived(IDeckLinkVideoInputFrame
}
// Handle Audio Frame
if (audioFrame)
{
if (audioOutputFile != -1)
{
audioFrame->GetBytes(&audioFrameBytes);
write(audioOutputFile, audioFrameBytes, audioFrame->GetSampleFrameCount() * g_audioChannels * (g_audioSampleDepth / 8));
}
}
//if (audioFrame)
//{
// if (audioOutputFile != -1)
// {
// audioFrame->GetBytes(&audioFrameBytes);
// write(audioOutputFile, audioFrameBytes, audioFrame->GetSampleFrameCount() * g_audioChannels * (g_audioSampleDepth / 8));
// }
//}
return S_OK;
}
@@ -278,6 +349,21 @@ int main(int argc, char *argv[])
goto bail;
}
// Init deckLinkOutput (needed for color conversion)
if (deckLink->QueryInterface(IID_IDeckLinkOutput, (void**)&m_deckLinkOutput) != S_OK)
{
cout << "Failed to create a deckLinkOutput(), used to convert YUV to RGB." << endl;
m_deckLinkOutput = NULL;
}
// Init the YUV to RGB conversion
if(!(m_deckLinkConverter = CreateVideoConversionInstance()))
{
cout << "Failed to create a VideoConversionInstance(), used to convert YUV to RGB." << endl;
m_deckLinkConverter = NULL;
}
// Parse command line options
while ((ch = getopt(argc, argv, "?h3c:s:f:a:m:n:p:t:")) != -1)
{

View File

@@ -396,6 +396,19 @@ int Frame::GetWidth()
return image->columns();
}
// Chroma key: make every pixel within a fuzz tolerance of the given color transparent.
// 'color' is any ImageMagick color spec (e.g. "#546466ff"); 'fuzz' is a percentage
// (0-100) controlling how far a pixel may deviate from 'color' and still be keyed out.
void Frame::TransparentColors(string color, double fuzz)
{
	// Convert the percentage into ImageMagick's quantum scale.
	// NOTE(review): the 65535 ceiling assumes a Q16 ImageMagick build -- TODO confirm quantum depth.
	const double quantum_fuzz = fuzz * 65535 / 100.0;

	// Temporarily widen the color-matching tolerance, knock out all pixels
	// that match the target color within that tolerance, then restore the
	// default exact-match tolerance so later operations are unaffected.
	image->colorFuzz(quantum_fuzz);
	image->transparent(Magick::Color(color));
	image->colorFuzz(0);

	// Earlier experiments with channel math were left here by the author:
	//image->quantumOperator(Magick::OpacityChannel, Magick::UndefinedEvaluateOperator, "K0*b K1*g + K2");
	//image->fx("g-2*b+1");
	//K0*b K1*g + K2
}
// Save the frame image to the specified path. The image format is determined from the extension (i.e. image.PNG, image.JPEG)
void Frame::Save(string path, float scale)
{