Bug 1185388 - Limit the number of cores used by WMF in ClearKey CDM - r=cpearce

Edwin Flores 2015-09-01 17:31:48 +12:00
parent 0506145bc1
commit a2ff24c645
5 changed files with 22 additions and 3 deletions


@@ -57,7 +57,7 @@ VideoDecoder::InitDecode(const GMPVideoCodec& aCodecSettings,
mCallback = aCallback;
assert(mCallback);
mDecoder = new WMFH264Decoder();
HRESULT hr = mDecoder->Init();
HRESULT hr = mDecoder->Init(aCoreCount);
if (FAILED(hr)) {
CK_LOGD("VideoDecoder::InitDecode failed to init WMFH264Decoder");
mCallback->Error(GMPGenericErr);
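
Note: aCoreCount is the logical-core count the GMP host hands to InitDecode(); this hunk simply forwards it to the WMF wrapper. For context, a sketch of the caller's signature, reconstructed from the GMP video decoder API rather than taken from this diff, so the exact parameter list is an assumption:

// Assumed shape of the GMP entry point that supplies aCoreCount (illustrative only).
void VideoDecoder::InitDecode(const GMPVideoCodec& aCodecSettings,
                              const uint8_t* aCodecSpecific,
                              uint32_t aCodecSpecificLength,
                              GMPVideoDecoderCallback* aCallback,
                              int32_t aCoreCount);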


@@ -16,6 +16,7 @@
#include "WMFH264Decoder.h"
#include <algorithm>
#include <codecapi.h>
namespace wmf {
@@ -31,7 +32,7 @@ WMFH264Decoder::~WMFH264Decoder()
}
HRESULT
WMFH264Decoder::Init()
WMFH264Decoder::Init(int32_t aCoreCount)
{
HRESULT hr;
@@ -47,6 +48,13 @@ WMFH264Decoder::Init()
}
ENSURE(SUCCEEDED(hr), hr);
CComPtr<IMFAttributes> attr;
hr = mDecoder->GetAttributes(&attr);
ENSURE(SUCCEEDED(hr), hr);
hr = attr->SetUINT32(CODECAPI_AVDecNumWorkerThreads,
GetNumThreads(aCoreCount));
ENSURE(SUCCEEDED(hr), hr);
hr = SetDecoderInputType();
ENSURE(SUCCEEDED(hr), hr);
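
The limit is applied through the decoder MFT's attribute store: Init() now fetches the transform's IMFAttributes and sets CODECAPI_AVDecNumWorkerThreads before negotiating media types. A self-contained sketch of the same pattern, assuming an already-created IMFTransform (the helper name is illustrative, not part of this patch):

#include <atlbase.h>
#include <mfapi.h>
#include <mftransform.h>
#include <codecapi.h>

// Illustrative helper: cap a decoder MFT's worker threads (mirrors the hunk above).
static HRESULT
SetWorkerThreadLimit(IMFTransform* aDecoder, int32_t aThreads)
{
  CComPtr<IMFAttributes> attr;
  HRESULT hr = aDecoder->GetAttributes(&attr);
  if (FAILED(hr)) {
    return hr;
  }
  // The patch passes GetNumThreads(aCoreCount) as the value here.
  return attr->SetUINT32(CODECAPI_AVDecNumWorkerThreads,
                         static_cast<UINT32>(aThreads));
}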


@@ -26,7 +26,7 @@ public:
WMFH264Decoder();
~WMFH264Decoder();
HRESULT Init();
HRESULT Init(int32_t aCoreCount);
HRESULT Input(const uint8_t* aData,
uint32_t aDataSize,


@@ -18,6 +18,7 @@
#include "ClearKeyUtils.h"
#include <versionhelpers.h>
#include <algorithm>
#include <stdio.h>
#define INITGUID
@@ -253,4 +254,10 @@ CreateMFT(const CLSID& clsid,
return S_OK;
}
int32_t
GetNumThreads(int32_t aCoreCount)
{
return aCoreCount > 4 ? -1 : (std::max)(aCoreCount - 1, 1);
}
} // namespace
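
So WMF gets at most aCoreCount - 1 worker threads on machines with four or fewer logical cores (never fewer than one), and -1 on anything larger, which lets the decoder pick its own thread count. A few illustrative expectations for the mapping (a hypothetical check, not part of the patch):

#include <cassert>

// Hypothetical sanity check of GetNumThreads (illustrative only).
void TestGetNumThreads()
{
  assert(wmf::GetNumThreads(1) == 1);   // never drops below one worker thread
  assert(wmf::GetNumThreads(2) == 1);   // leave a core free for the rest of the CDM
  assert(wmf::GetNumThreads(4) == 3);
  assert(wmf::GetNumThreads(8) == -1);  // above four cores, defer to WMF's default
}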


@@ -259,6 +259,10 @@ enum CodecType {
// the given windows version we're running on.
const char* WMFDecoderDllNameFor(CodecType aCodec);
// Returns the maximum number of threads we want WMF to use for decoding
// given the number of logical processors available.
int32_t GetNumThreads(int32_t aCoreCount);
} // namespace wmf
#endif // __WMFUtils_h__
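
Taken together, the new flow is: the GMP host reports the core count to InitDecode(), which forwards it to WMFH264Decoder::Init(), which maps it through GetNumThreads() and applies it via CODECAPI_AVDecNumWorkerThreads. A condensed, non-literal sketch of that flow (decoder creation and error handling elided):

// Condensed sketch of the flow added by this patch (not literal patch code).
HRESULT
WMFH264Decoder::Init(int32_t aCoreCount)
{
  // ... create and configure the H.264 decoder MFT as before ...
  CComPtr<IMFAttributes> attr;
  HRESULT hr = mDecoder->GetAttributes(&attr);
  ENSURE(SUCCEEDED(hr), hr);
  hr = attr->SetUINT32(CODECAPI_AVDecNumWorkerThreads,
                       GetNumThreads(aCoreCount)); // -1 above four cores
  ENSURE(SUCCEEDED(hr), hr);
  return SetDecoderInputType();
}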