mirror of
https://gitlab.winehq.org/wine/wine-gecko.git
synced 2024-09-13 09:24:08 -07:00
Bug 1193547 - Fallback to software decoding explicitly if the GPU doesn't support decoding the current resolution in hardware. r=cpearce,jya
This commit is contained in:
parent
ca36359af3
commit
22c48f6fe8
@ -62,14 +62,120 @@ public:
|
||||
ImageContainer* aContainer,
|
||||
Image** aOutImage) override;
|
||||
|
||||
virtual bool SupportsConfig(IMFMediaType* aType) override;
|
||||
|
||||
private:
|
||||
nsRefPtr<IDirect3D9Ex> mD3D9;
|
||||
nsRefPtr<IDirect3DDevice9Ex> mDevice;
|
||||
nsRefPtr<IDirect3DDeviceManager9> mDeviceManager;
|
||||
RefPtr<D3D9RecycleAllocator> mTextureClientAllocator;
|
||||
nsRefPtr<IDirectXVideoDecoderService> mDecoderService;
|
||||
UINT32 mResetToken;
|
||||
};
|
||||
|
||||
// Populates *pFormat with the DXVA2 extended-format fields (interlacing,
// chroma siting, nominal range, transfer matrix, lighting, primaries and
// transfer function) read from the given Media Foundation media type.
// Attributes missing from pType fall back to their *_Unknown defaults.
void GetDXVA2ExtendedFormatFromMFMediaType(IMFMediaType *pType,
                                           DXVA2_ExtendedFormat *pFormat)
{
  // Get the interlace mode.
  MFVideoInterlaceMode interlace = static_cast<MFVideoInterlaceMode>(
    MFGetAttributeUINT32(pType, MF_MT_INTERLACE_MODE, MFVideoInterlace_Unknown));

  if (interlace == MFVideoInterlace_MixedInterlaceOrProgressive) {
    // Mixed content has no direct DXVA2 sample-format equivalent; treat it
    // as field-interleaved, even field first.
    pFormat->SampleFormat = DXVA2_SampleFieldInterleavedEvenFirst;
  } else {
    // The remaining MFVideoInterlaceMode values map directly onto the
    // DXVA2 sample-format values.
    pFormat->SampleFormat = static_cast<UINT>(interlace);
  }

  pFormat->VideoChromaSubsampling =
    MFGetAttributeUINT32(pType, MF_MT_VIDEO_CHROMA_SITING, MFVideoChromaSubsampling_Unknown);
  pFormat->NominalRange =
    MFGetAttributeUINT32(pType, MF_MT_VIDEO_NOMINAL_RANGE, MFNominalRange_Unknown);
  pFormat->VideoTransferMatrix =
    MFGetAttributeUINT32(pType, MF_MT_YUV_MATRIX, MFVideoTransferMatrix_Unknown);
  pFormat->VideoLighting =
    MFGetAttributeUINT32(pType, MF_MT_VIDEO_LIGHTING, MFVideoLighting_Unknown);
  pFormat->VideoPrimaries =
    MFGetAttributeUINT32(pType, MF_MT_VIDEO_PRIMARIES, MFVideoPrimaries_Unknown);
  pFormat->VideoTransferFunction =
    MFGetAttributeUINT32(pType, MF_MT_TRANSFER_FUNCTION, MFVideoTransFunc_Unknown);
}
|
||||
|
||||
// Translates a Media Foundation media type into a DXVA2_VideoDesc so the
// description can be handed to the DXVA2 decoder service.
// Returns S_OK on success, or the failing HRESULT if a required attribute
// (subtype, frame size, frame rate) is missing from pType.
HRESULT ConvertMFTypeToDXVAType(IMFMediaType *pType, DXVA2_VideoDesc *pDesc)
{
  ZeroMemory(pDesc, sizeof(*pDesc));

  // The D3D format is the first DWORD of the subtype GUID.
  GUID subtype = GUID_NULL;
  HRESULT hr = pType->GetGUID(MF_MT_SUBTYPE, &subtype);
  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
  pDesc->Format = static_cast<D3DFORMAT>(subtype.Data1);

  // Frame size.
  UINT32 width = 0;
  UINT32 height = 0;
  hr = MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &width, &height);
  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
  pDesc->SampleWidth = width;
  pDesc->SampleHeight = height;

  // Frame rate.
  UINT32 fpsNumerator = 0;
  UINT32 fpsDenominator = 0;
  hr = MFGetAttributeRatio(pType, MF_MT_FRAME_RATE, &fpsNumerator, &fpsDenominator);
  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
  pDesc->InputSampleFreq.Numerator = fpsNumerator;
  pDesc->InputSampleFreq.Denominator = fpsDenominator;

  GetDXVA2ExtendedFormatFromMFMediaType(pType, &pDesc->SampleFormat);
  pDesc->OutputFrameFreq = pDesc->InputSampleFreq;
  // Interlaced content produces two output frames per input sample.
  if ((pDesc->SampleFormat.SampleFormat == DXVA2_SampleFieldInterleavedEvenFirst) ||
      (pDesc->SampleFormat.SampleFormat == DXVA2_SampleFieldInterleavedOddFirst)) {
    pDesc->OutputFrameFreq.Numerator *= 2;
  }

  return S_OK;
}
|
||||
|
||||
// Decoder device GUID for DXVA2_ModeH264_E (a.k.a. DXVA2_ModeH264_VLD_NoFGT):
// H.264 variable-length decoding without film-grain technology.
static const GUID DXVA2_ModeH264_E = {
  0x1b81be68, 0xa0c7, 0x11d3, { 0xb9, 0x84, 0x00, 0xc0, 0x4f, 0x2e, 0x73, 0xc5 }
};
|
||||
|
||||
// This tests if a DXVA video decoder can be created for the given media type/resolution.
|
||||
// It uses the same decoder device (DXVA2_ModeH264_E - DXVA2_ModeH264_VLD_NoFGT) as the H264
|
||||
// decoder MFT provided by windows (CLSID_CMSH264DecoderMFT) uses, so we can use it to determine
|
||||
// if the MFT will use software fallback or not.
|
||||
bool
|
||||
D3D9DXVA2Manager::SupportsConfig(IMFMediaType* aType)
|
||||
{
|
||||
DXVA2_VideoDesc desc;
|
||||
HRESULT hr = ConvertMFTypeToDXVAType(aType, &desc);
|
||||
NS_ENSURE_TRUE(SUCCEEDED(hr), false);
|
||||
|
||||
UINT configCount;
|
||||
DXVA2_ConfigPictureDecode* configs = nullptr;
|
||||
hr = mDecoderService->GetDecoderConfigurations(DXVA2_ModeH264_E, &desc, nullptr, &configCount, &configs);
|
||||
NS_ENSURE_TRUE(SUCCEEDED(hr), false);
|
||||
|
||||
nsRefPtr<IDirect3DSurface9> surface;
|
||||
hr = mDecoderService->CreateSurface(desc.SampleWidth, desc.SampleHeight, 0, (D3DFORMAT)MAKEFOURCC('N', 'V', '1', '2'),
|
||||
D3DPOOL_DEFAULT, 0, DXVA2_VideoDecoderRenderTarget,
|
||||
surface.StartAssignment(), NULL);
|
||||
if (!SUCCEEDED(hr)) {
|
||||
CoTaskMemFree(configs);
|
||||
return false;
|
||||
}
|
||||
|
||||
for (UINT i = 0; i < configCount; i++) {
|
||||
nsRefPtr<IDirectXVideoDecoder> decoder;
|
||||
IDirect3DSurface9* surfaces = surface;
|
||||
hr = mDecoderService->CreateVideoDecoder(DXVA2_ModeH264_E, &desc, &configs[i], &surfaces, 1, decoder.StartAssignment());
|
||||
if (SUCCEEDED(hr) && decoder) {
|
||||
CoTaskMemFree(configs);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
CoTaskMemFree(configs);
|
||||
return false;
|
||||
}
|
||||
|
||||
D3D9DXVA2Manager::D3D9DXVA2Manager()
|
||||
: mResetToken(0)
|
||||
{
|
||||
@ -180,6 +286,35 @@ D3D9DXVA2Manager::Init(nsACString& aFailureReason)
|
||||
return hr;
|
||||
}
|
||||
|
||||
HANDLE deviceHandle;
|
||||
nsRefPtr<IDirectXVideoDecoderService> decoderService;
|
||||
hr = deviceManager->OpenDeviceHandle(&deviceHandle);
|
||||
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
|
||||
|
||||
hr = deviceManager->GetVideoService(deviceHandle, IID_PPV_ARGS(decoderService.StartAssignment()));
|
||||
deviceManager->CloseDeviceHandle(deviceHandle);
|
||||
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
|
||||
|
||||
UINT deviceCount;
|
||||
GUID* decoderDevices = nullptr;
|
||||
hr = decoderService->GetDecoderDeviceGuids(&deviceCount, &decoderDevices);
|
||||
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
|
||||
|
||||
bool found = false;
|
||||
for (UINT i = 0; i < deviceCount; i++) {
|
||||
if (decoderDevices[i] == DXVA2_ModeH264_E) {
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
CoTaskMemFree(decoderDevices);
|
||||
|
||||
if (!found) {
|
||||
return E_FAIL;
|
||||
}
|
||||
|
||||
mDecoderService = decoderService;
|
||||
|
||||
mResetToken = resetToken;
|
||||
mD3D9 = d3d9Ex;
|
||||
mDevice = device;
|
||||
|
@ -44,6 +44,8 @@ public:
|
||||
|
||||
virtual ~DXVA2Manager();
|
||||
|
||||
virtual bool SupportsConfig(IMFMediaType* aType) { return true; }
|
||||
|
||||
protected:
|
||||
Mutex mLock;
|
||||
DXVA2Manager();
|
||||
|
@ -59,6 +59,11 @@ public:
|
||||
int64_t aTimestampUsecs);
|
||||
HRESULT Input(IMFSample* aSample);
|
||||
|
||||
HRESULT CreateInputSample(const uint8_t* aData,
|
||||
uint32_t aDataSize,
|
||||
int64_t aTimestampUsecs,
|
||||
RefPtr<IMFSample>* aOutSample);
|
||||
|
||||
// Retrieves output from the MFT. Call this once Input() returns
|
||||
// MF_E_NOTACCEPTING. Some MFTs with hardware acceleration (the H.264
|
||||
// decoder MFT in particular) can't handle it if clients hold onto
|
||||
@ -80,14 +85,10 @@ public:
|
||||
// Sends a message to the MFT.
|
||||
HRESULT SendMFTMessage(MFT_MESSAGE_TYPE aMsg, ULONG_PTR aData);
|
||||
|
||||
private:
|
||||
|
||||
HRESULT SetDecoderOutputType(ConfigureOutputCallback aCallback, void* aData);
|
||||
private:
|
||||
|
||||
HRESULT CreateInputSample(const uint8_t* aData,
|
||||
uint32_t aDataSize,
|
||||
int64_t aTimestampUsecs,
|
||||
RefPtr<IMFSample>* aOutSample);
|
||||
|
||||
HRESULT CreateOutputSample(RefPtr<IMFSample>* aOutSample);
|
||||
|
||||
|
@ -102,7 +102,7 @@ WMFDecoderModule::CreateVideoDecoder(const VideoInfo& aConfig,
|
||||
new WMFVideoMFTManager(aConfig,
|
||||
aLayersBackend,
|
||||
aImageContainer,
|
||||
sDXVAEnabled && ShouldUseDXVA(aConfig)));
|
||||
sDXVAEnabled));
|
||||
|
||||
nsRefPtr<MFTDecoder> mft = manager->Init();
|
||||
|
||||
@ -133,33 +133,12 @@ WMFDecoderModule::CreateAudioDecoder(const AudioInfo& aConfig,
|
||||
return decoder.forget();
|
||||
}
|
||||
|
||||
bool
|
||||
WMFDecoderModule::ShouldUseDXVA(const VideoInfo& aConfig) const
|
||||
{
|
||||
static bool isAMD = false;
|
||||
static bool initialized = false;
|
||||
if (!initialized) {
|
||||
nsCOMPtr<nsIGfxInfo> gfxInfo = services::GetGfxInfo();
|
||||
nsAutoString vendor;
|
||||
gfxInfo->GetAdapterVendorID(vendor);
|
||||
isAMD = vendor.Equals(widget::GfxDriverInfo::GetDeviceVendor(widget::VendorAMD), nsCaseInsensitiveStringComparator()) ||
|
||||
vendor.Equals(widget::GfxDriverInfo::GetDeviceVendor(widget::VendorATI), nsCaseInsensitiveStringComparator());
|
||||
initialized = true;
|
||||
}
|
||||
if (!isAMD) {
|
||||
return true;
|
||||
}
|
||||
// Don't use DXVA for 4k videos or above, since it seems to perform poorly.
|
||||
return aConfig.mDisplay.width <= 1920 && aConfig.mDisplay.height <= 1200;
|
||||
}
|
||||
|
||||
bool
|
||||
WMFDecoderModule::SupportsSharedDecoders(const VideoInfo& aConfig) const
|
||||
{
|
||||
// If DXVA is enabled, but we're not going to use it for this specific config, then
|
||||
// we can't use the shared decoder.
|
||||
return !AgnosticMimeType(aConfig.mMimeType) &&
|
||||
(!sDXVAEnabled || ShouldUseDXVA(aConfig));
|
||||
// NOTE(review): two return statements appear back-to-back here — this looks
// like a diff artifact showing the old revision (above, gated on DXVA use)
// and the new revision (below, MIME-type check only). The second return is
// unreachable as written; confirm which revision is intended before building.
return !AgnosticMimeType(aConfig.mMimeType);
|
||||
}
|
||||
|
||||
bool
|
||||
|
@ -52,7 +52,6 @@ public:
|
||||
// Called from any thread, must call init first
|
||||
static int GetNumDecoderThreads();
|
||||
private:
|
||||
bool ShouldUseDXVA(const VideoInfo& aConfig) const;
|
||||
bool mWMFInitialized;
|
||||
};
|
||||
|
||||
|
@ -214,15 +214,16 @@ WMFVideoMFTManager::InitInternal(bool aForceD3D9)
|
||||
RefPtr<IMFAttributes> attr(decoder->GetAttributes());
|
||||
UINT32 aware = 0;
|
||||
if (attr) {
|
||||
attr->GetUINT32(MF_SA_D3D_AWARE, &aware);
|
||||
attr->SetUINT32(CODECAPI_AVDecNumWorkerThreads,
|
||||
WMFDecoderModule::GetNumDecoderThreads());
|
||||
hr = attr->SetUINT32(CODECAPI_AVLowLatencyMode, TRUE);
|
||||
if (SUCCEEDED(hr)) {
|
||||
LOG("Enabling Low Latency Mode");
|
||||
} else {
|
||||
LOG("Couldn't enable Low Latency Mode");
|
||||
}
|
||||
attr->GetUINT32(MF_SA_D3D_AWARE, &aware);
|
||||
attr->SetUINT32(CODECAPI_AVDecNumWorkerThreads,
|
||||
WMFDecoderModule::GetNumDecoderThreads());
|
||||
hr = attr->SetUINT32(CODECAPI_AVLowLatencyMode, TRUE);
|
||||
if (SUCCEEDED(hr)) {
|
||||
LOG("Enabling Low Latency Mode");
|
||||
}
|
||||
else {
|
||||
LOG("Couldn't enable Low Latency Mode");
|
||||
}
|
||||
}
|
||||
|
||||
if (useDxva) {
|
||||
@ -236,42 +237,19 @@ WMFVideoMFTManager::InitInternal(bool aForceD3D9)
|
||||
if (SUCCEEDED(hr)) {
|
||||
mUseHwAccel = true;
|
||||
} else {
|
||||
mDXVA2Manager = nullptr;
|
||||
mDXVAFailureReason = nsPrintfCString("MFT_MESSAGE_SET_D3D_MANAGER failed with code %X", hr);
|
||||
}
|
||||
} else {
|
||||
}
|
||||
else {
|
||||
mDXVAFailureReason.AssignLiteral("Decoder returned false for MF_SA_D3D_AWARE");
|
||||
}
|
||||
}
|
||||
|
||||
// Setup the input/output media types.
|
||||
RefPtr<IMFMediaType> inputType;
|
||||
hr = wmf::MFCreateMediaType(byRef(inputType));
|
||||
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
|
||||
|
||||
hr = inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
|
||||
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
|
||||
|
||||
hr = inputType->SetGUID(MF_MT_SUBTYPE, GetMediaSubtypeGUID());
|
||||
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
|
||||
|
||||
hr = inputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_MixedInterlaceOrProgressive);
|
||||
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
|
||||
|
||||
RefPtr<IMFMediaType> outputType;
|
||||
hr = wmf::MFCreateMediaType(byRef(outputType));
|
||||
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
|
||||
|
||||
hr = outputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
|
||||
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
|
||||
|
||||
GUID outputSubType = mUseHwAccel ? MFVideoFormat_NV12 : MFVideoFormat_YV12;
|
||||
hr = outputType->SetGUID(MF_MT_SUBTYPE, outputSubType);
|
||||
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
|
||||
|
||||
hr = decoder->SetMediaTypes(inputType, outputType);
|
||||
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
|
||||
|
||||
mDecoder = decoder;
|
||||
hr = SetDecoderMediaTypes();
|
||||
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
|
||||
|
||||
LOG("Video Decoder initialized, Using DXVA: %s", (mUseHwAccel ? "Yes" : "No"));
|
||||
|
||||
// Just in case ConfigureVideoFrameGeometry() does not set these
|
||||
@ -284,6 +262,37 @@ WMFVideoMFTManager::InitInternal(bool aForceD3D9)
|
||||
return decoder.forget();
|
||||
}
|
||||
|
||||
// Builds the MFT input (compressed video) and output (uncompressed video)
// media types and applies both to mDecoder in one call.
// Returns the first failing HRESULT, or the result of SetMediaTypes.
HRESULT
WMFVideoMFTManager::SetDecoderMediaTypes()
{
  // Input side: video major type, codec-specific subtype, and mixed
  // interlace/progressive handling.
  RefPtr<IMFMediaType> inputType;
  HRESULT hr = wmf::MFCreateMediaType(byRef(inputType));
  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
  hr = inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
  hr = inputType->SetGUID(MF_MT_SUBTYPE, GetMediaSubtypeGUID());
  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
  hr = inputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_MixedInterlaceOrProgressive);
  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);

  // Output side: NV12 when decoding in hardware (DXVA), YV12 in software.
  RefPtr<IMFMediaType> outputType;
  hr = wmf::MFCreateMediaType(byRef(outputType));
  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
  hr = outputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
  const GUID outputSubType = mUseHwAccel ? MFVideoFormat_NV12 : MFVideoFormat_YV12;
  hr = outputType->SetGUID(MF_MT_SUBTYPE, outputSubType);
  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);

  return mDecoder->SetMediaTypes(inputType, outputType);
}
|
||||
|
||||
HRESULT
|
||||
WMFVideoMFTManager::Input(MediaRawData* aSample)
|
||||
{
|
||||
@ -291,10 +300,55 @@ WMFVideoMFTManager::Input(MediaRawData* aSample)
|
||||
// This can happen during shutdown.
|
||||
return E_FAIL;
|
||||
}
|
||||
|
||||
HRESULT hr = mDecoder->CreateInputSample(aSample->Data(),
|
||||
uint32_t(aSample->Size()),
|
||||
aSample->mTime,
|
||||
&mLastInput);
|
||||
NS_ENSURE_TRUE(SUCCEEDED(hr) && mLastInput != nullptr, hr);
|
||||
|
||||
// Forward sample data to the decoder.
|
||||
return mDecoder->Input(aSample->Data(),
|
||||
uint32_t(aSample->Size()),
|
||||
aSample->mTime);
|
||||
return mDecoder->Input(mLastInput);
|
||||
}
|
||||
|
||||
// The MFTransform we use for decoding h264 video will silently fall
|
||||
// back to software decoding (even if we've negotiated DXVA) if the GPU
|
||||
// doesn't support decoding the given resolution. It will then upload
|
||||
// the software decoded frames into d3d textures to preserve behaviour.
|
||||
//
|
||||
// Unfortunately this seems to cause corruption (see bug 1193547) and is
|
||||
// slow because the upload is done into a non-shareable texture and requires
|
||||
// us to copy it.
|
||||
//
|
||||
// This code tests if the given resolution can be supported directly on the GPU,
|
||||
// and makes sure we only ask the MFT for DXVA if it can be supported properly.
|
||||
bool
|
||||
WMFVideoMFTManager::MaybeToggleDXVA(IMFMediaType* aType)
|
||||
{
|
||||
// SupportsConfig only checks for valid h264 decoders currently.
|
||||
if (!mDXVA2Manager || mStreamType != H264) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (mDXVA2Manager->SupportsConfig(aType)) {
|
||||
if (!mUseHwAccel) {
|
||||
// DXVA disabled, but supported for this resolution
|
||||
ULONG_PTR manager = ULONG_PTR(mDXVA2Manager->GetDXVADeviceManager());
|
||||
HRESULT hr = mDecoder->SendMFTMessage(MFT_MESSAGE_SET_D3D_MANAGER, manager);
|
||||
if (SUCCEEDED(hr)) {
|
||||
mUseHwAccel = true;
|
||||
return true;
|
||||
}
|
||||
}
|
||||
} else if (mUseHwAccel) {
|
||||
// DXVA enabled, and not supported for this resolution
|
||||
HRESULT hr = mDecoder->SendMFTMessage(MFT_MESSAGE_SET_D3D_MANAGER, 0);
|
||||
MOZ_ASSERT(SUCCEEDED(hr), "Attempting to fall back to software failed?");
|
||||
mUseHwAccel = false;
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
HRESULT
|
||||
@ -304,6 +358,20 @@ WMFVideoMFTManager::ConfigureVideoFrameGeometry()
|
||||
HRESULT hr = mDecoder->GetOutputMediaType(mediaType);
|
||||
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
|
||||
|
||||
// If we enabled/disabled DXVA in response to a resolution
|
||||
// change then we need to renegotiate our media types,
|
||||
// and resubmit our previous frame (since the MFT appears
|
||||
// to lose it otherwise).
|
||||
if (MaybeToggleDXVA(mediaType)) {
|
||||
hr = SetDecoderMediaTypes();
|
||||
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
|
||||
|
||||
HRESULT hr = mDecoder->GetOutputMediaType(mediaType);
|
||||
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
|
||||
|
||||
mDecoder->Input(mLastInput);
|
||||
}
|
||||
|
||||
// Verify that the video subtype is what we expect it to be.
|
||||
// When using hardware acceleration/DXVA2 the video format should
|
||||
// be NV12, which is DXVA2's preferred format. For software decoding
|
||||
|
@ -56,6 +56,10 @@ private:
|
||||
int64_t aStreamOffset,
|
||||
VideoData** aOutVideoData);
|
||||
|
||||
HRESULT SetDecoderMediaTypes();
|
||||
|
||||
bool MaybeToggleDXVA(IMFMediaType* aType);
|
||||
|
||||
// Video frame geometry.
|
||||
VideoInfo mVideoInfo;
|
||||
uint32_t mVideoStride;
|
||||
@ -67,6 +71,8 @@ private:
|
||||
RefPtr<layers::ImageContainer> mImageContainer;
|
||||
nsAutoPtr<DXVA2Manager> mDXVA2Manager;
|
||||
|
||||
RefPtr<IMFSample> mLastInput;
|
||||
|
||||
const bool mDXVAEnabled;
|
||||
const layers::LayersBackend mLayersBackend;
|
||||
bool mUseHwAccel;
|
||||
|
Loading…
Reference in New Issue
Block a user