Bug 1207245 - part 3 - switch all uses of mozilla::RefPtr<T> to nsRefPtr<T>; r=ehsan

This commit was generated using the following script, executed at the
top level of a typical source code checkout.

 # Don't modify select files in mfbt/ because it's not worth trying to
 # tease out the dependencies currently.
 #
 # Don't modify anything in media/gmp-clearkey/0.1/ because those files
 # use their own RefPtr, defined in their own RefCounted.h.
find . -name '*.cpp' -o -name '*.h' -o -name '*.mm' -o -name '*.idl'| \
    grep -v 'mfbt/RefPtr.h' | \
    grep -v 'mfbt/nsRefPtr.h' | \
    grep -v 'mfbt/RefCounted.h' | \
    grep -v 'media/gmp-clearkey/0.1/' | \
    xargs perl -p -i -e '
 s/mozilla::RefPtr/nsRefPtr/g; # handle declarations in headers
 s/\bRefPtr</nsRefPtr</g; # handle local variables in functions
 s#mozilla/RefPtr.h#mozilla/nsRefPtr.h#; # handle #includes
 s#mfbt/RefPtr.h#mfbt/nsRefPtr.h#;       # handle strange #includes
'

 # |using mozilla::RefPtr;| is OK; |using nsRefPtr;| is invalid syntax.
find . -name '*.cpp' -o -name '*.mm' | xargs sed -i -e '/using nsRefPtr/d'

 # RefPtr.h used |byRef| for dealing with COM-style outparams.
 # nsRefPtr.h uses |getter_AddRefs|.
 # Fixup that mismatch.
find . -name '*.cpp' -o -name '*.h'| \
    xargs perl -p -i -e 's/byRef/getter_AddRefs/g'
This commit is contained in:
Nathan Froyd 2015-10-18 00:40:10 -04:00
parent f8dbdcbaa7
commit 5254890206
678 changed files with 3235 additions and 3240 deletions

View File

@@ -8,7 +8,7 @@
#include "imgIContainer.h"
#include "imgIRequest.h"
#include "mozilla/gfx/2D.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "nsIDOMElement.h"
#include "nsIDOMHTMLImageElement.h"
#include "nsIImageLoadingContent.h"
@@ -1075,7 +1075,7 @@ WriteBitmap(nsIFile* aFile, imgIContainer* aImage)
{
nsresult rv;
RefPtr<SourceSurface> surface =
nsRefPtr<SourceSurface> surface =
aImage->GetFrame(imgIContainer::FRAME_FIRST,
imgIContainer::FLAG_SYNC_DECODE);
NS_ENSURE_TRUE(surface, NS_ERROR_FAILURE);
@@ -1087,7 +1087,7 @@ WriteBitmap(nsIFile* aFile, imgIContainer* aImage)
MOZ_ASSERT(surface->GetFormat() == SurfaceFormat::B8G8R8A8 ||
surface->GetFormat() == SurfaceFormat::B8G8R8X8);
RefPtr<DataSourceSurface> dataSurface = surface->GetDataSurface();
nsRefPtr<DataSourceSurface> dataSurface = surface->GetDataSurface();
NS_ENSURE_TRUE(dataSurface, NS_ERROR_FAILURE);
int32_t width = dataSurface->GetSize().width;

View File

@@ -13,7 +13,7 @@
#include "mozilla/HashFunctions.h"
#include "mozilla/Maybe.h"
#include "mozilla/RefCounted.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "mozilla/TimeStamp.h"
#include "mozilla/UniquePtr.h"

View File

@@ -5,7 +5,7 @@
#include "LocalCertService.h"
#include "mozilla/ModuleUtils.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "cert.h"
#include "CryptoTask.h"
#include "nsIPK11Token.h"
@@ -419,7 +419,7 @@ LocalCertService::GetOrCreateCert(const nsACString& aNickname,
return NS_OK;
}
RefPtr<LocalCertGetTask> task(new LocalCertGetTask(aNickname, aCallback));
nsRefPtr<LocalCertGetTask> task(new LocalCertGetTask(aNickname, aCallback));
return task->Dispatch("LocalCertGet");
}
@@ -441,7 +441,7 @@ LocalCertService::RemoveCert(const nsACString& aNickname,
return NS_OK;
}
RefPtr<LocalCertRemoveTask> task(
nsRefPtr<LocalCertRemoveTask> task(
new LocalCertRemoveTask(aNickname, aCallback));
return task->Dispatch("LocalCertRm");
}

View File

@@ -9,7 +9,7 @@
#include "mozilla/gfx/2D.h"
#include "mozilla/gfx/DataSurfaceHelpers.h"
#include "mozilla/layers/AsyncCanvasRenderer.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "mozilla/SyncRunnable.h"
#include "mozilla/unused.h"
#include "gfxUtils.h"
@@ -53,8 +53,8 @@ public:
}
private:
RefPtr<layers::Image> mImage;
RefPtr<gfx::DataSourceSurface> mDataSourceSurface;
nsRefPtr<layers::Image> mImage;
nsRefPtr<gfx::DataSourceSurface> mDataSourceSurface;
};
// This function returns a DataSourceSurface in B8G8R8A8 format.
@@ -402,8 +402,8 @@ ImageEncoder::ExtractDataInternal(const nsAString& aType,
imgIEncoder::INPUT_FORMAT_HOSTARGB,
aOptions);
} else {
RefPtr<gfx::DataSourceSurface> dataSurface;
RefPtr<layers::Image> image(aImage);
nsRefPtr<gfx::DataSourceSurface> dataSurface;
nsRefPtr<layers::Image> image(aImage);
dataSurface = GetBRGADataSourceSurfaceSync(image.forget());
DataSourceSurface::MappedSurface map;
@@ -428,7 +428,7 @@ ImageEncoder::ExtractDataInternal(const nsAString& aType,
// note that if we didn't have a current context, the spec says we're
// supposed to just return transparent black pixels of the canvas
// dimensions.
RefPtr<DataSourceSurface> emptyCanvas =
nsRefPtr<DataSourceSurface> emptyCanvas =
Factory::CreateDataSourceSurfaceWithStride(IntSize(aSize.width, aSize.height),
SurfaceFormat::B8G8R8A8,
4 * aSize.width, true);

View File

@@ -7497,11 +7497,11 @@ nsContentUtils::TransferableToIPCTransferable(nsITransferable* aTransferable,
// Images to be placed on the clipboard are imgIContainers.
nsCOMPtr<imgIContainer> image(do_QueryInterface(data));
if (image) {
RefPtr<mozilla::gfx::SourceSurface> surface =
nsRefPtr<mozilla::gfx::SourceSurface> surface =
image->GetFrame(imgIContainer::FRAME_CURRENT,
imgIContainer::FLAG_SYNC_DECODE);
if (surface) {
mozilla::RefPtr<mozilla::gfx::DataSourceSurface> dataSurface =
nsRefPtr<mozilla::gfx::DataSourceSurface> dataSurface =
surface->GetDataSurface();
size_t length;
int32_t stride;

View File

@@ -1425,8 +1425,8 @@ nsDOMWindowUtils::CompareCanvases(nsIDOMHTMLCanvasElement *aCanvas1,
retVal == nullptr)
return NS_ERROR_FAILURE;
RefPtr<DataSourceSurface> img1 = CanvasToDataSourceSurface(aCanvas1);
RefPtr<DataSourceSurface> img2 = CanvasToDataSourceSurface(aCanvas2);
nsRefPtr<DataSourceSurface> img1 = CanvasToDataSourceSurface(aCanvas1);
nsRefPtr<DataSourceSurface> img2 = CanvasToDataSourceSurface(aCanvas2);
DataSourceSurface::ScopedMap map1(img1, DataSourceSurface::READ);
DataSourceSurface::ScopedMap map2(img2, DataSourceSurface::READ);
@@ -2268,7 +2268,7 @@ nsDOMWindowUtils::AdvanceTimeAndRefresh(int64_t aMilliseconds)
nsRefreshDriver* driver = presContext->RefreshDriver();
driver->AdvanceTimeAndRefresh(aMilliseconds);
RefPtr<LayerTransactionChild> transaction = GetLayerTransaction();
nsRefPtr<LayerTransactionChild> transaction = GetLayerTransaction();
if (transaction && transaction->IPCOpen()) {
transaction->SendSetTestSampleTime(driver->MostRecentRefresh());
}
@@ -2283,7 +2283,7 @@ nsDOMWindowUtils::RestoreNormalRefresh()
// Kick the compositor out of test mode before the refresh driver, so that
// the refresh driver doesn't send an update that gets ignored by the
// compositor.
RefPtr<LayerTransactionChild> transaction = GetLayerTransaction();
nsRefPtr<LayerTransactionChild> transaction = GetLayerTransaction();
if (transaction && transaction->IPCOpen()) {
transaction->SendLeaveTestMode();
}

View File

@@ -17,7 +17,7 @@
#include "mozilla/dom/ipc/BlobParent.h"
#include "mozilla/dom/File.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "mozilla/Services.h"
#include "mozilla/StaticPtr.h"
#include "nsAutoPtr.h"

View File

@@ -16,7 +16,7 @@
#include "mozilla/dom/bluetooth/BluetoothTypes.h"
#include "mozilla/dom/ipc/BlobParent.h"
#include "mozilla/dom/File.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "mozilla/Services.h"
#include "mozilla/StaticPtr.h"
#include "nsAutoPtr.h"

View File

@@ -15,7 +15,7 @@
#include "mozilla/dom/BluetoothPbapParametersBinding.h"
#include "mozilla/dom/File.h"
#include "mozilla/dom/ipc/BlobParent.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "mozilla/Services.h"
#include "mozilla/StaticPtr.h"
#include "nsAutoPtr.h"

View File

@@ -12,7 +12,7 @@
#include "BluetoothUtils.h"
#include "mozilla/ipc/UnixSocketWatcher.h"
#include "mozilla/FileUtils.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "nsISupportsImpl.h" // for MOZ_COUNT_CTOR, MOZ_COUNT_DTOR
#include "nsXULAppAPI.h"

View File

@@ -16,7 +16,7 @@
#include "mozilla/dom/bluetooth/BluetoothTypes.h"
#include "mozilla/dom/File.h"
#include "mozilla/dom/ipc/BlobParent.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "mozilla/Services.h"
#include "mozilla/StaticPtr.h"
#include "nsAutoPtr.h"

View File

@@ -8,7 +8,7 @@
#include <fcntl.h>
#include "BluetoothSocketObserver.h"
#include "BluetoothUnixSocketConnector.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "nsISupportsImpl.h" // for MOZ_COUNT_CTOR, MOZ_COUNT_DTOR
#include "nsXULAppAPI.h"

View File

@@ -27,7 +27,7 @@
#include "nsDebug.h"
#include "mozilla/layers/TextureClient.h"
#include "CameraPreferences.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#if defined(MOZ_WIDGET_GONK) && ANDROID_VERSION >= 21
#include "GonkBufferQueueProducer.h"
#endif
@@ -71,7 +71,7 @@ GonkCameraHardware::OnNewFrame()
if (mClosing) {
return;
}
RefPtr<TextureClient> buffer = mNativeWindow->getCurrentBuffer();
nsRefPtr<TextureClient> buffer = mNativeWindow->getCurrentBuffer();
if (!buffer) {
DOM_CAMERA_LOGE("received null frame");
return;

View File

@@ -29,7 +29,7 @@
#include <media/stagefright/foundation/AMessage.h>
#endif
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "GonkCameraHwMgr.h"
namespace android {

View File

@@ -7,7 +7,7 @@
#include "mozilla/Attributes.h"
#include "nsTArray.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "mozilla/dom/CanvasRenderingContext2DBinding.h"
#include "mozilla/dom/CanvasRenderingContext2D.h"
#include "mozilla/gfx/2D.h"
@@ -70,7 +70,7 @@ protected:
nsRefPtr<CanvasRenderingContext2D> mContext;
nsTArray<mozilla::gfx::GradientStop> mRawStops;
mozilla::RefPtr<mozilla::gfx::GradientStops> mStops;
nsRefPtr<mozilla::gfx::GradientStops> mStops;
Type mType;
virtual ~CanvasGradient() {}
};

View File

@@ -52,7 +52,7 @@ struct ImageCacheEntryData {
nsRefPtr<HTMLCanvasElement> mCanvas;
// Value
nsCOMPtr<imgIRequest> mRequest;
RefPtr<SourceSurface> mSourceSurface;
nsRefPtr<SourceSurface> mSourceSurface;
IntSize mSize;
nsExpirationState mState;
};
@@ -110,7 +110,7 @@ public:
enum { ALLOW_MEMMOVE = true };
nsCOMPtr<imgIRequest> mRequest;
RefPtr<SourceSurface> mSourceSurface;
nsRefPtr<SourceSurface> mSourceSurface;
};
static bool sPrefsInitialized = false;

View File

@@ -6,7 +6,7 @@
#define CanvasPath_h
#include "mozilla/Attributes.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "nsWrapperCache.h"
#include "mozilla/gfx/2D.h"
#include "mozilla/dom/BindingDeclarations.h"
@@ -75,8 +75,8 @@ private:
nsCOMPtr<nsISupports> mParent;
static gfx::Float ToFloat(double aValue) { return gfx::Float(aValue); }
mutable RefPtr<gfx::Path> mPath;
mutable RefPtr<gfx::PathBuilder> mPathBuilder;
mutable nsRefPtr<gfx::Path> mPath;
mutable nsRefPtr<gfx::PathBuilder> mPathBuilder;
void EnsurePathBuilder() const;
};

View File

@@ -8,7 +8,7 @@
#include "mozilla/Attributes.h"
#include "mozilla/dom/CanvasRenderingContext2DBinding.h"
#include "mozilla/dom/CanvasRenderingContext2D.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "nsISupports.h"
#include "nsWrapperCache.h"
@@ -66,7 +66,7 @@ public:
void SetTransform(SVGMatrix& matrix);
nsRefPtr<CanvasRenderingContext2D> mContext;
RefPtr<gfx::SourceSurface> mSurface;
nsRefPtr<gfx::SourceSurface> mSurface;
nsCOMPtr<nsIPrincipal> mPrincipal;
mozilla::gfx::Matrix mTransform;
const bool mForceWriteOnly;

View File

@@ -350,7 +350,7 @@ public:
return nullptr;
}
RefPtr<DrawTarget> dt =
nsRefPtr<DrawTarget> dt =
mFinalTarget->CreateSimilarDrawTarget(aRect.Size(), SurfaceFormat::B8G8R8A8);
if (!dt) {
aRect.SetEmpty();
@@ -376,11 +376,11 @@ public:
return;
}
RefPtr<SourceSurface> snapshot = mTarget->Snapshot();
nsRefPtr<SourceSurface> snapshot = mTarget->Snapshot();
RefPtr<SourceSurface> fillPaint =
nsRefPtr<SourceSurface> fillPaint =
DoSourcePaint(mFillPaintRect, CanvasRenderingContext2D::Style::FILL);
RefPtr<SourceSurface> strokePaint =
nsRefPtr<SourceSurface> strokePaint =
DoSourcePaint(mStrokePaintRect, CanvasRenderingContext2D::Style::STROKE);
AutoRestoreTransform autoRestoreTransform(mFinalTarget);
@@ -403,8 +403,8 @@ public:
}
private:
RefPtr<DrawTarget> mTarget;
RefPtr<DrawTarget> mFinalTarget;
nsRefPtr<DrawTarget> mTarget;
nsRefPtr<DrawTarget> mFinalTarget;
CanvasRenderingContext2D *mCtx;
gfx::IntRect mSourceGraphicRect;
gfx::IntRect mFillPaintRect;
@@ -471,7 +471,7 @@ public:
return;
}
RefPtr<SourceSurface> snapshot = mTarget->Snapshot();
nsRefPtr<SourceSurface> snapshot = mTarget->Snapshot();
mFinalTarget->DrawSurfaceWithShadow(snapshot, mTempRect.TopLeft(),
Color::FromABGR(mCtx->CurrentState().shadowColor),
@@ -490,8 +490,8 @@ public:
}
private:
RefPtr<DrawTarget> mTarget;
RefPtr<DrawTarget> mFinalTarget;
nsRefPtr<DrawTarget> mTarget;
nsRefPtr<DrawTarget> mFinalTarget;
CanvasRenderingContext2D *mCtx;
Float mSigma;
gfx::IntRect mTempRect;
@@ -645,7 +645,7 @@ private:
return gfx::Rect(extents.GetBounds());
}
RefPtr<DrawTarget> mTarget;
nsRefPtr<DrawTarget> mTarget;
UniquePtr<AdjustedTargetForShadow> mShadowTarget;
UniquePtr<AdjustedTargetForFilter> mFilterTarget;
};
@@ -1205,7 +1205,7 @@ bool CanvasRenderingContext2D::SwitchRenderingMode(RenderingMode aRenderingMode)
}
#endif
RefPtr<SourceSurface> snapshot;
nsRefPtr<SourceSurface> snapshot;
Matrix transform;
if (mTarget) {
@@ -1644,12 +1644,12 @@ CanvasRenderingContext2D::GetImageBuffer(uint8_t** aImageBuffer,
*aFormat = 0;
EnsureTarget();
RefPtr<SourceSurface> snapshot = mTarget->Snapshot();
nsRefPtr<SourceSurface> snapshot = mTarget->Snapshot();
if (!snapshot) {
return;
}
RefPtr<DataSourceSurface> data = snapshot->GetDataSurface();
nsRefPtr<DataSourceSurface> data = snapshot->GetDataSurface();
if (!data || data->GetSize() != IntSize(mWidth, mHeight)) {
return;
}
@@ -2057,7 +2057,7 @@ CanvasRenderingContext2D::CreatePattern(const CanvasImageSource& source,
nsICanvasRenderingContextInternal *srcCanvas = canvas->GetContextAtIndex(0);
if (srcCanvas) {
// This might not be an Azure canvas!
RefPtr<SourceSurface> srcSurf = srcCanvas->GetSurfaceSnapshot();
nsRefPtr<SourceSurface> srcSurf = srcCanvas->GetSurfaceSnapshot();
nsRefPtr<CanvasPattern> pat =
new CanvasPattern(this, srcSurf, repeatMode, htmlElement->NodePrincipal(), canvas->IsWriteOnly(), false);
@@ -2078,7 +2078,7 @@ CanvasRenderingContext2D::CreatePattern(const CanvasImageSource& source,
// Special case for ImageBitmap
ImageBitmap& imgBitmap = source.GetAsImageBitmap();
EnsureTarget();
RefPtr<SourceSurface> srcSurf = imgBitmap.PrepareForDrawTarget(mTarget);
nsRefPtr<SourceSurface> srcSurf = imgBitmap.PrepareForDrawTarget(mTarget);
// An ImageBitmap never taints others so we set principalForSecurityCheck to
// nullptr and set CORSUsed to true for passing the security check in
@@ -2700,7 +2700,7 @@ void CanvasRenderingContext2D::Fill(const CanvasPath& path, const CanvasWindingR
{
EnsureTarget();
RefPtr<gfx::Path> gfxpath = path.GetPath(winding, mTarget);
nsRefPtr<gfx::Path> gfxpath = path.GetPath(winding, mTarget);
if (!gfxpath) {
return;
@@ -2753,7 +2753,7 @@ CanvasRenderingContext2D::Stroke(const CanvasPath& path)
{
EnsureTarget();
RefPtr<gfx::Path> gfxpath = path.GetPath(CanvasWindingRule::Nonzero, mTarget);
nsRefPtr<gfx::Path> gfxpath = path.GetPath(CanvasWindingRule::Nonzero, mTarget);
if (!gfxpath) {
return;
@@ -2872,7 +2872,7 @@ CanvasRenderingContext2D::Clip(const CanvasPath& path, const CanvasWindingRule&
{
EnsureTarget();
RefPtr<gfx::Path> gfxpath = path.GetPath(winding, mTarget);
nsRefPtr<gfx::Path> gfxpath = path.GetPath(winding, mTarget);
if (!gfxpath) {
return;
@@ -3058,7 +3058,7 @@ CanvasRenderingContext2D::EnsureUserSpacePath(const CanvasWindingRule& winding)
}
if (mDSPathBuilder) {
RefPtr<Path> dsPath;
nsRefPtr<Path> dsPath;
dsPath = mDSPathBuilder->Finish();
mDSPathBuilder = nullptr;
@@ -3314,7 +3314,7 @@ CanvasRenderingContext2D::MeasureText(const nsAString& rawText,
void
CanvasRenderingContext2D::AddHitRegion(const HitRegionOptions& options, ErrorResult& error)
{
RefPtr<gfx::Path> path;
nsRefPtr<gfx::Path> path;
if (options.mPath) {
EnsureTarget();
path = options.mPath->GetPath(CanvasWindingRule::Nonzero, mTarget);
@@ -3361,7 +3361,7 @@ CanvasRenderingContext2D::AddHitRegion(const HitRegionOptions& options, ErrorRes
RegionInfo info;
info.mId = options.mId;
info.mElement = options.mControl;
RefPtr<PathBuilder> pathBuilder = path->TransformedCopyToBuilder(mTarget->GetTransform());
nsRefPtr<PathBuilder> pathBuilder = path->TransformedCopyToBuilder(mTarget->GetTransform());
info.mPath = pathBuilder->Finish();
mHitRegionsOptions.InsertElementAt(0, info);
@@ -3530,7 +3530,7 @@ struct MOZ_STACK_CLASS CanvasBidiProcessor : public nsBidiPresUtils::BidiProcess
const gfxTextRun::CompressedGlyph *glyphs = mTextRun->GetCharacterGlyphs();
RefPtr<ScaledFont> scaledFont =
nsRefPtr<ScaledFont> scaledFont =
gfxPlatform::GetPlatform()->GetScaledFontForFont(mCtx->mTarget, font);
if (!scaledFont) {
@@ -3565,7 +3565,7 @@ struct MOZ_STACK_CLASS CanvasBidiProcessor : public nsBidiPresUtils::BidiProcess
mCtx->mTarget->SetTransform(mat);
}
RefPtr<GlyphRenderingOptions> renderingOptions = font->GetGlyphRenderingOptions();
nsRefPtr<GlyphRenderingOptions> renderingOptions = font->GetGlyphRenderingOptions();
GlyphBuffer buffer;
@@ -3678,7 +3678,7 @@ struct MOZ_STACK_CLASS CanvasBidiProcessor : public nsBidiPresUtils::BidiProcess
const DrawOptions drawOpts(state.globalAlpha, mCtx->UsedOperation());
for (unsigned i = glyphBuf.size(); i > 0; --i) {
RefPtr<Path> path = scaledFont->GetPathForGlyphs(buffer, mCtx->mTarget);
nsRefPtr<Path> path = scaledFont->GetPathForGlyphs(buffer, mCtx->mTarget);
target->Stroke(path, patForStyle, strokeOpts, drawOpts);
buffer.mGlyphs++;
}
@@ -4189,7 +4189,7 @@ bool CanvasRenderingContext2D::IsPointInPath(const CanvasPath& mPath, double x,
}
EnsureTarget();
RefPtr<gfx::Path> tempPath = mPath.GetPath(mWinding, mTarget);
nsRefPtr<gfx::Path> tempPath = mPath.GetPath(mWinding, mTarget);
return tempPath->ContainsPoint(Point(x, y), mTarget->GetTransform());
}
@@ -4229,7 +4229,7 @@ bool CanvasRenderingContext2D::IsPointInStroke(const CanvasPath& mPath, double x
}
EnsureTarget();
RefPtr<gfx::Path> tempPath = mPath.GetPath(CanvasWindingRule::Nonzero, mTarget);
nsRefPtr<gfx::Path> tempPath = mPath.GetPath(CanvasWindingRule::Nonzero, mTarget);
const ContextState &state = CurrentState();
@@ -4254,15 +4254,15 @@ ExtractSubrect(SourceSurface* aSurface, gfx::Rect* aSourceRect, DrawTarget* aTar
roundedOutSourceRect.RoundOut();
gfx::IntRect roundedOutSourceRectInt;
if (!roundedOutSourceRect.ToIntRect(&roundedOutSourceRectInt)) {
RefPtr<SourceSurface> surface(aSurface);
nsRefPtr<SourceSurface> surface(aSurface);
return surface.forget();
}
RefPtr<DrawTarget> subrectDT =
nsRefPtr<DrawTarget> subrectDT =
aTargetDT->CreateSimilarDrawTarget(roundedOutSourceRectInt.Size(), SurfaceFormat::B8G8R8A8);
if (!subrectDT) {
RefPtr<SourceSurface> surface(aSurface);
nsRefPtr<SourceSurface> surface(aSurface);
return surface.forget();
}
@@ -4376,7 +4376,7 @@ CanvasRenderingContext2D::DrawImage(const CanvasImageSource& image,
NormalizeRect(dx, dy, dw, dh);
}
RefPtr<SourceSurface> srcSurf;
nsRefPtr<SourceSurface> srcSurf;
gfx::IntSize imgSize;
Element* element = nullptr;
@@ -4857,7 +4857,7 @@ CanvasRenderingContext2D::DrawWindow(nsGlobalWindow& window, double x,
}
nsRefPtr<gfxContext> thebes;
RefPtr<DrawTarget> drawDT;
nsRefPtr<DrawTarget> drawDT;
// Rendering directly is faster and can be done if mTarget supports Azure
// and does not need alpha blending.
if (gfxPlatform::GetPlatform()->SupportsAzureContentForDrawTarget(mTarget) &&
@@ -4882,15 +4882,15 @@ CanvasRenderingContext2D::DrawWindow(nsGlobalWindow& window, double x,
nsCOMPtr<nsIPresShell> shell = presContext->PresShell();
unused << shell->RenderDocument(r, renderDocFlags, backgroundColor, thebes);
if (drawDT) {
RefPtr<SourceSurface> snapshot = drawDT->Snapshot();
RefPtr<DataSourceSurface> data = snapshot->GetDataSurface();
nsRefPtr<SourceSurface> snapshot = drawDT->Snapshot();
nsRefPtr<DataSourceSurface> data = snapshot->GetDataSurface();
DataSourceSurface::MappedSurface rawData;
if (NS_WARN_IF(!data->Map(DataSourceSurface::READ, &rawData))) {
error.Throw(NS_ERROR_FAILURE);
return;
}
RefPtr<SourceSurface> source =
nsRefPtr<SourceSurface> source =
mTarget->CreateSourceSurfaceFromData(rawData.mData,
data->GetSize(),
rawData.mStride,
@@ -5037,7 +5037,7 @@ CanvasRenderingContext2D::DrawWidgetAsOnScreen(nsGlobalWindow& aWindow,
error.Throw(NS_ERROR_FAILURE);
return;
}
RefPtr<SourceSurface> snapshot = widget->SnapshotWidgetOnScreen();
nsRefPtr<SourceSurface> snapshot = widget->SnapshotWidgetOnScreen();
if (!snapshot) {
error.Throw(NS_ERROR_FAILURE);
return;
@@ -5179,10 +5179,10 @@ CanvasRenderingContext2D::GetImageDataArray(JSContext* aCx,
IntRect srcRect(0, 0, mWidth, mHeight);
IntRect destRect(aX, aY, aWidth, aHeight);
IntRect srcReadRect = srcRect.Intersect(destRect);
RefPtr<DataSourceSurface> readback;
nsRefPtr<DataSourceSurface> readback;
DataSourceSurface::MappedSurface rawData;
if (!srcReadRect.IsEmpty()) {
RefPtr<SourceSurface> snapshot = mTarget->Snapshot();
nsRefPtr<SourceSurface> snapshot = mTarget->Snapshot();
if (snapshot) {
readback = snapshot->GetDataSurface();
}
@@ -5274,7 +5274,7 @@ CanvasRenderingContext2D::EnsureErrorTarget()
return;
}
RefPtr<DrawTarget> errorTarget = gfxPlatform::GetPlatform()->CreateOffscreenCanvasDrawTarget(IntSize(1, 1), SurfaceFormat::B8G8R8A8);
nsRefPtr<DrawTarget> errorTarget = gfxPlatform::GetPlatform()->CreateOffscreenCanvasDrawTarget(IntSize(1, 1), SurfaceFormat::B8G8R8A8);
MOZ_ASSERT(errorTarget, "Failed to allocate the error target!");
sErrorTarget = errorTarget;
@@ -5444,7 +5444,7 @@ CanvasRenderingContext2D::PutImageData_explicit(int32_t x, int32_t y, uint32_t w
return NS_ERROR_FAILURE;
}
RefPtr<SourceSurface> sourceSurface =
nsRefPtr<SourceSurface> sourceSurface =
mTarget->CreateSourceSurfaceFromData(imgsurf->Data(), IntSize(copyWidth, copyHeight), imgsurf->Stride(), SurfaceFormat::B8G8R8A8);
// In certain scenarios, requesting larger than 8k image fails. Bug 803568
@@ -5721,7 +5721,7 @@ CanvasPath::Constructor(const GlobalObject& aGlobal, ErrorResult& aRv)
already_AddRefed<CanvasPath>
CanvasPath::Constructor(const GlobalObject& aGlobal, CanvasPath& aCanvasPath, ErrorResult& aRv)
{
RefPtr<gfx::Path> tempPath = aCanvasPath.GetPath(CanvasWindingRule::Nonzero,
nsRefPtr<gfx::Path> tempPath = aCanvasPath.GetPath(CanvasWindingRule::Nonzero,
gfxPlatform::GetPlatform()->ScreenReferenceDrawTarget());
nsRefPtr<CanvasPath> path = new CanvasPath(aGlobal.GetAsSupports(), tempPath->CopyToBuilder());
@@ -5731,7 +5731,7 @@ CanvasPath::Constructor(const GlobalObject& aGlobal, CanvasPath& aCanvasPath, Er
already_AddRefed<CanvasPath>
CanvasPath::Constructor(const GlobalObject& aGlobal, const nsAString& aPathString, ErrorResult& aRv)
{
RefPtr<gfx::Path> tempPath = SVGContentUtils::GetPath(aPathString);
nsRefPtr<gfx::Path> tempPath = SVGContentUtils::GetPath(aPathString);
if (!tempPath) {
return Constructor(aGlobal, aRv);
}
@@ -5895,7 +5895,7 @@ CanvasPath::BezierTo(const gfx::Point& aCP1,
void
CanvasPath::AddPath(CanvasPath& aCanvasPath, const Optional<NonNull<SVGMatrix>>& aMatrix)
{
RefPtr<gfx::Path> tempPath = aCanvasPath.GetPath(CanvasWindingRule::Nonzero,
nsRefPtr<gfx::Path> tempPath = aCanvasPath.GetPath(CanvasWindingRule::Nonzero,
gfxPlatform::GetPlatform()->ScreenReferenceDrawTarget());
if (aMatrix.WasPassed()) {
@@ -5903,7 +5903,7 @@ CanvasPath::AddPath(CanvasPath& aCanvasPath, const Optional<NonNull<SVGMatrix>>&
Matrix transform(m.A(), m.B(), m.C(), m.D(), m.E(), m.F());
if (!transform.IsIdentity()) {
RefPtr<PathBuilder> tempBuilder = tempPath->TransformedCopyToBuilder(transform, FillRule::FILL_WINDING);
nsRefPtr<PathBuilder> tempBuilder = tempPath->TransformedCopyToBuilder(transform, FillRule::FILL_WINDING);
tempPath = tempBuilder->Finish();
}
}
@@ -5923,7 +5923,7 @@ CanvasPath::GetPath(const CanvasWindingRule& winding, const DrawTarget* aTarget)
if (mPath &&
(mPath->GetBackendType() == aTarget->GetBackendType()) &&
(mPath->GetFillRule() == fillRule)) {
RefPtr<gfx::Path> path(mPath);
nsRefPtr<gfx::Path> path(mPath);
return path.forget();
}
@@ -5932,7 +5932,7 @@ CanvasPath::GetPath(const CanvasWindingRule& winding, const DrawTarget* aTarget)
MOZ_ASSERT(mPathBuilder);
mPath = mPathBuilder->Finish();
if (!mPath) {
RefPtr<gfx::Path> path(mPath);
nsRefPtr<gfx::Path> path(mPath);
return path.forget();
}
@@ -5941,15 +5941,15 @@ CanvasPath::GetPath(const CanvasWindingRule& winding, const DrawTarget* aTarget)
// retarget our backend if we're used with a different backend
if (mPath->GetBackendType() != aTarget->GetBackendType()) {
RefPtr<PathBuilder> tmpPathBuilder = aTarget->CreatePathBuilder(fillRule);
nsRefPtr<PathBuilder> tmpPathBuilder = aTarget->CreatePathBuilder(fillRule);
mPath->StreamToSink(tmpPathBuilder);
mPath = tmpPathBuilder->Finish();
} else if (mPath->GetFillRule() != fillRule) {
RefPtr<PathBuilder> tmpPathBuilder = mPath->CopyToBuilder(fillRule);
nsRefPtr<PathBuilder> tmpPathBuilder = mPath->CopyToBuilder(fillRule);
mPath = tmpPathBuilder->Finish();
}
RefPtr<gfx::Path> path(mPath);
nsRefPtr<gfx::Path> path(mPath);
return path.forget();
}

View File

@@ -9,7 +9,7 @@
#include <vector>
#include "nsIDOMCanvasRenderingContext2D.h"
#include "nsICanvasRenderingContextInternal.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "nsColor.h"
#include "mozilla/dom/HTMLCanvasElement.h"
#include "mozilla/dom/HTMLVideoElement.h"
@@ -729,9 +729,9 @@ protected:
// This is created lazily so it is necessary to call EnsureTarget before
// accessing it. In the event of an error it will be equal to
// sErrorTarget.
mozilla::RefPtr<mozilla::gfx::DrawTarget> mTarget;
nsRefPtr<mozilla::gfx::DrawTarget> mTarget;
mozilla::RefPtr<mozilla::layers::PersistentBufferProvider> mBufferProvider;
nsRefPtr<mozilla::layers::PersistentBufferProvider> mBufferProvider;
uint32_t SkiaGLTex() const;
@@ -788,9 +788,9 @@ protected:
*
* mPath is always in user-space.
*/
mozilla::RefPtr<mozilla::gfx::Path> mPath;
mozilla::RefPtr<mozilla::gfx::PathBuilder> mDSPathBuilder;
mozilla::RefPtr<mozilla::gfx::PathBuilder> mPathBuilder;
nsRefPtr<mozilla::gfx::Path> mPath;
nsRefPtr<mozilla::gfx::PathBuilder> mDSPathBuilder;
nsRefPtr<mozilla::gfx::PathBuilder> mPathBuilder;
bool mPathTransformWillUpdate;
mozilla::gfx::Matrix mPathToDS;
@@ -809,7 +809,7 @@ protected:
// fallback element for a11y
nsRefPtr<Element> mElement;
// Path of the hit region in the 2d context coordinate space (not user space)
RefPtr<gfx::Path> mPath;
nsRefPtr<gfx::Path> mPath;
};
nsTArray<RegionInfo> mHitRegionsOptions;
@@ -984,7 +984,7 @@ protected:
return std::min(SIGMA_MAX, shadowBlur / 2.0f);
}
nsTArray<mozilla::RefPtr<mozilla::gfx::Path> > clipsPushed;
nsTArray<nsRefPtr<mozilla::gfx::Path> > clipsPushed;
nsRefPtr<gfxFontGroup> fontGroup;
nsCOMPtr<nsIAtom> fontLanguage;
@@ -1018,7 +1018,7 @@ protected:
nsTArray<nsStyleFilter> filterChain;
nsRefPtr<nsSVGFilterChainObserver> filterChainObserver;
mozilla::gfx::FilterDescription filter;
nsTArray<mozilla::RefPtr<mozilla::gfx::SourceSurface>> filterAdditionalImages;
nsTArray<nsRefPtr<mozilla::gfx::SourceSurface>> filterAdditionalImages;
bool imageSmoothingEnabled;
bool fontExplicitLanguage;

View File

@@ -9,7 +9,7 @@
#include "gfx2DGlue.h"
#include "gfxPattern.h"
#include "mozilla/gfx/2D.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "nsPIDOMWindow.h"
#include "nsIDOMWindow.h"
#include "nsIDocShell.h"
@@ -74,7 +74,7 @@ DocumentRendererChild::RenderDocument(nsIDOMWindow *window,
// Draw directly into the output array.
data.SetLength(renderSize.width * renderSize.height * 4);
RefPtr<DrawTarget> dt =
nsRefPtr<DrawTarget> dt =
Factory::CreateDrawTargetForData(BackendType::CAIRO,
reinterpret_cast<uint8_t*>(data.BeginWriting()),
IntSize(renderSize.width, renderSize.height),

View File

@@ -7,7 +7,7 @@
#include "gfx2DGlue.h"
#include "mozilla/gfx/2D.h"
#include "mozilla/gfx/PathHelpers.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "nsICanvasRenderingContextInternal.h"
using namespace mozilla;
@@ -36,7 +36,7 @@ void DocumentRendererParent::DrawToCanvas(const nsIntSize& aSize,
DrawTarget* drawTarget = mCanvasContext->GetDrawTarget();
Rect rect(0, 0, aSize.width, aSize.height);
MaybeSnapToDevicePixels(rect, *drawTarget, true);
RefPtr<DataSourceSurface> dataSurface =
nsRefPtr<DataSourceSurface> dataSurface =
Factory::CreateWrappingDataSourceSurface(reinterpret_cast<uint8_t*>(const_cast<nsCString&>(aData).BeginWriting()),
aSize.width * 4,
IntSize(aSize.width, aSize.height),

View File

@@ -105,7 +105,7 @@ CropAndCopyDataSourceSurface(DataSourceSurface* aSurface, const IntRect& aCropRe
const uint32_t dstStride = dstSize.width * bytesPerPixel;
// Create a new SourceSurface.
RefPtr<DataSourceSurface> dstDataSurface =
nsRefPtr<DataSourceSurface> dstDataSurface =
Factory::CreateDataSourceSurfaceWithStride(dstSize, format, dstStride, true);
if (NS_WARN_IF(!dstDataSurface)) {
@@ -181,7 +181,7 @@ CreateSurfaceFromRawData(const gfx::IntSize& aSize,
MOZ_ASSERT(aBuffer);
// Wrap the source buffer into a SourceSurface.
RefPtr<DataSourceSurface> dataSurface =
nsRefPtr<DataSourceSurface> dataSurface =
Factory::CreateWrappingDataSourceSurface(aBuffer, aStride, aSize, aFormat);
if (NS_WARN_IF(!dataSurface)) {
@@ -194,7 +194,7 @@ CreateSurfaceFromRawData(const gfx::IntSize& aSize,
const IntRect cropRect = aCropRect.valueOr(IntRect(0, 0, aSize.width, aSize.height));
// Copy the source buffer in the _cropRect_ area into a new SourceSurface.
RefPtr<DataSourceSurface> result = CropAndCopyDataSourceSurface(dataSurface, cropRect);
nsRefPtr<DataSourceSurface> result = CropAndCopyDataSourceSurface(dataSurface, cropRect);
if (NS_WARN_IF(!result)) {
aRv.Throw(NS_ERROR_NOT_AVAILABLE);
@@ -216,7 +216,7 @@ CreateImageFromRawData(const gfx::IntSize& aSize,
MOZ_ASSERT(NS_IsMainThread());
// Copy and crop the source buffer into a SourceSurface.
RefPtr<SourceSurface> rgbaSurface =
nsRefPtr<SourceSurface> rgbaSurface =
CreateSurfaceFromRawData(aSize, aStride, aFormat,
aBuffer, aBufferLength,
aCropRect, aRv);
@@ -226,8 +226,8 @@ CreateImageFromRawData(const gfx::IntSize& aSize,
}
// Convert RGBA to BGRA
RefPtr<DataSourceSurface> rgbaDataSurface = rgbaSurface->GetDataSurface();
RefPtr<DataSourceSurface> bgraDataSurface =
nsRefPtr<DataSourceSurface> rgbaDataSurface = rgbaSurface->GetDataSurface();
nsRefPtr<DataSourceSurface> bgraDataSurface =
Factory::CreateDataSourceSurfaceWithStride(rgbaDataSurface->GetSize(),
SurfaceFormat::B8G8R8A8,
rgbaDataSurface->Stride());
@@ -368,7 +368,7 @@ GetSurfaceFromElement(nsIGlobalObject* aGlobal, HTMLElementType& aElement, Error
return nullptr;
}
RefPtr<SourceSurface> surface(res.mSourceSurface);
nsRefPtr<SourceSurface> surface(res.mSourceSurface);
return surface.forget();
}
@@ -435,7 +435,7 @@ ImageBitmap::PrepareForDrawTarget(gfx::DrawTarget* aTarget)
return nullptr;
}
RefPtr<DrawTarget> target = aTarget;
nsRefPtr<DrawTarget> target = aTarget;
IntRect surfRect(0, 0, mSurface->GetSize().width, mSurface->GetSize().height);
// Check if we still need to crop our surface
@@ -446,7 +446,7 @@ ImageBitmap::PrepareForDrawTarget(gfx::DrawTarget* aTarget)
// the crop lies entirely outside the surface area, nothing to draw
if (surfPortion.IsEmpty()) {
mSurface = nullptr;
RefPtr<gfx::SourceSurface> surface(mSurface);
nsRefPtr<gfx::SourceSurface> surface(mSurface);
return surface.forget();
}
@@ -461,7 +461,7 @@ ImageBitmap::PrepareForDrawTarget(gfx::DrawTarget* aTarget)
if (!target) {
mSurface = nullptr;
RefPtr<gfx::SourceSurface> surface(mSurface);
nsRefPtr<gfx::SourceSurface> surface(mSurface);
return surface.forget();
}
@@ -475,10 +475,10 @@ ImageBitmap::PrepareForDrawTarget(gfx::DrawTarget* aTarget)
// if the mPictureRect is not starts from the upper-left point.
if (target->GetBackendType() == BackendType::DIRECT2D1_1 &&
mSurface->GetType() != SurfaceType::D2D1_1_IMAGE) {
RefPtr<DataSourceSurface> dataSurface = mSurface->GetDataSurface();
nsRefPtr<DataSourceSurface> dataSurface = mSurface->GetDataSurface();
if (NS_WARN_IF(!dataSurface)) {
mSurface = nullptr;
RefPtr<gfx::SourceSurface> surface(mSurface);
nsRefPtr<gfx::SourceSurface> surface(mSurface);
return surface.forget();
}
@ -497,7 +497,7 @@ ImageBitmap::PrepareForDrawTarget(gfx::DrawTarget* aTarget)
// This call should be a no-op for already-optimized surfaces
mSurface = target->OptimizeSourceSurface(mSurface);
RefPtr<gfx::SourceSurface> surface(mSurface);
nsRefPtr<gfx::SourceSurface> surface(mSurface);
return surface.forget();
}
@ -519,7 +519,7 @@ ImageBitmap::CreateInternal(nsIGlobalObject* aGlobal, HTMLImageElement& aImageEl
// Get the SourceSurface out from the image element and then do security
// checking.
RefPtr<SourceSurface> surface = GetSurfaceFromElement(aGlobal, aImageEl, aRv);
nsRefPtr<SourceSurface> surface = GetSurfaceFromElement(aGlobal, aImageEl, aRv);
if (NS_WARN_IF(aRv.Failed())) {
return nullptr;
@ -596,14 +596,14 @@ ImageBitmap::CreateInternal(nsIGlobalObject* aGlobal, HTMLCanvasElement& aCanvas
return nullptr;
}
RefPtr<SourceSurface> surface = GetSurfaceFromElement(aGlobal, aCanvasEl, aRv);
nsRefPtr<SourceSurface> surface = GetSurfaceFromElement(aGlobal, aCanvasEl, aRv);
if (NS_WARN_IF(aRv.Failed())) {
return nullptr;
}
// Crop the source surface if needed.
RefPtr<SourceSurface> croppedSurface;
nsRefPtr<SourceSurface> croppedSurface;
IntRect cropRect = aCropRect.valueOr(IntRect());
// If the HTMLCanvasElement's rendering context is WebGL, then the snapshot
@ -616,7 +616,7 @@ ImageBitmap::CreateInternal(nsIGlobalObject* aGlobal, HTMLCanvasElement& aCanvas
MOZ_ASSERT(surface->GetType() == SurfaceType::DATA,
"The snapshot SourceSurface from WebGL rendering contest is not \
DataSourceSurface.");
RefPtr<DataSourceSurface> dataSurface = surface->GetDataSurface();
nsRefPtr<DataSourceSurface> dataSurface = surface->GetDataSurface();
croppedSurface = CropAndCopyDataSourceSurface(dataSurface, cropRect);
cropRect.MoveTo(0, 0);
}
@ -713,7 +713,7 @@ ImageBitmap::CreateInternal(nsIGlobalObject* aGlobal, CanvasRenderingContext2D&
return nullptr;
}
RefPtr<SourceSurface> surface = aCanvasCtx.GetSurfaceSnapshot();
nsRefPtr<SourceSurface> surface = aCanvasCtx.GetSurfaceSnapshot();
if (NS_WARN_IF(!surface)) {
aRv.Throw(NS_ERROR_NOT_AVAILABLE);
@ -863,7 +863,7 @@ DecodeBlob(Blob& aBlob, ErrorResult& aRv)
// Get the surface out.
uint32_t frameFlags = imgIContainer::FLAG_SYNC_DECODE | imgIContainer::FLAG_WANT_DATA_SURFACE;
uint32_t whichFrame = imgIContainer::FRAME_FIRST;
RefPtr<SourceSurface> surface = imgContainer->GetFrame(whichFrame, frameFlags);
nsRefPtr<SourceSurface> surface = imgContainer->GetFrame(whichFrame, frameFlags);
if (NS_WARN_IF(!surface)) {
aRv.Throw(NS_ERROR_NOT_AVAILABLE);
@ -877,14 +877,14 @@ static already_AddRefed<layers::Image>
DecodeAndCropBlob(Blob& aBlob, Maybe<IntRect>& aCropRect, ErrorResult& aRv)
{
// Decode the blob into a SourceSurface.
RefPtr<SourceSurface> surface = DecodeBlob(aBlob, aRv);
nsRefPtr<SourceSurface> surface = DecodeBlob(aBlob, aRv);
if (NS_WARN_IF(aRv.Failed())) {
return nullptr;
}
// Crop the source surface if needed.
RefPtr<SourceSurface> croppedSurface = surface;
nsRefPtr<SourceSurface> croppedSurface = surface;
if (aCropRect.isSome()) {
// The blob is just decoded into a RasterImage and not optimized yet, so the
@ -899,7 +899,7 @@ DecodeAndCropBlob(Blob& aBlob, Maybe<IntRect>& aCropRect, ErrorResult& aRv)
// TODO: Bug1189632 is going to refactor this create-from-blob part to
// decode the blob off the main thread. Re-check if we should do
// cropping at this moment again there.
RefPtr<DataSourceSurface> dataSurface = surface->GetDataSurface();
nsRefPtr<DataSourceSurface> dataSurface = surface->GetDataSurface();
croppedSurface = CropAndCopyDataSourceSurface(dataSurface, aCropRect.ref());
aCropRect->MoveTo(0, 0);
}
@ -964,7 +964,7 @@ protected:
nsRefPtr<Promise> mPromise;
nsCOMPtr<nsIGlobalObject> mGlobalObject;
RefPtr<mozilla::dom::Blob> mBlob;
nsRefPtr<mozilla::dom::Blob> mBlob;
Maybe<IntRect> mCropRect;
};

View File

@ -159,7 +159,7 @@ protected:
* buffer.
*/
nsRefPtr<layers::Image> mData;
RefPtr<gfx::SourceSurface> mSurface;
nsRefPtr<gfx::SourceSurface> mSurface;
/*
* The mPictureRect is the size of the source image in default, however, if

View File

@ -9,7 +9,7 @@
#include "mozilla/DOMEventTargetHelper.h"
#include "mozilla/layers/LayersTypes.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "CanvasRenderingContextHelper.h"
#include "nsCycleCollectionParticipant.h"
@ -38,7 +38,7 @@ struct OffscreenCanvasCloneData final
bool aNeutered);
~OffscreenCanvasCloneData();
RefPtr<layers::AsyncCanvasRenderer> mRenderer;
nsRefPtr<layers::AsyncCanvasRenderer> mRenderer;
uint32_t mWidth;
uint32_t mHeight;
layers::LayersBackend mCompositorBackendType;
@ -170,7 +170,7 @@ private:
layers::LayersBackend mCompositorBackendType;
layers::CanvasClient* mCanvasClient;
RefPtr<layers::AsyncCanvasRenderer> mCanvasRenderer;
nsRefPtr<layers::AsyncCanvasRenderer> mCanvasRenderer;
};
} // namespace dom

View File

@ -561,7 +561,7 @@ static already_AddRefed<gl::GLContext>
CreateGLWithEGL(const gl::SurfaceCaps& caps, gl::CreateContextFlags flags,
WebGLContext* webgl)
{
RefPtr<GLContext> gl;
nsRefPtr<GLContext> gl;
#ifndef XP_MACOSX // Mac doesn't have GLContextProviderEGL.
gfx::IntSize dummySize(16, 16);
gl = gl::GLContextProviderEGL::CreateOffscreen(dummySize, caps,
@ -581,7 +581,7 @@ static already_AddRefed<GLContext>
CreateGLWithANGLE(const gl::SurfaceCaps& caps, gl::CreateContextFlags flags,
WebGLContext* webgl)
{
RefPtr<GLContext> gl;
nsRefPtr<GLContext> gl;
#ifdef XP_WIN
gfx::IntSize dummySize(16, 16);
@ -613,7 +613,7 @@ CreateGLWithDefault(const gl::SurfaceCaps& caps, gl::CreateContextFlags flags,
}
gfx::IntSize dummySize(16, 16);
RefPtr<GLContext> gl = gl::GLContextProvider::CreateOffscreen(dummySize, caps, flags);
nsRefPtr<GLContext> gl = gl::GLContextProvider::CreateOffscreen(dummySize, caps, flags);
if (!gl) {
webgl->GenerateWarning("Error during native OpenGL init.");
return nullptr;
@ -1055,14 +1055,14 @@ WebGLContext::GetImageBuffer(uint8_t** out_imageBuffer, int32_t* out_format)
// Use GetSurfaceSnapshot() to make sure that appropriate y-flip gets applied
bool premult;
RefPtr<SourceSurface> snapshot =
nsRefPtr<SourceSurface> snapshot =
GetSurfaceSnapshot(mOptions.premultipliedAlpha ? nullptr : &premult);
if (!snapshot)
return;
MOZ_ASSERT(mOptions.premultipliedAlpha || !premult, "We must get unpremult when we ask for it!");
RefPtr<DataSourceSurface> dataSurface = snapshot->GetDataSurface();
nsRefPtr<DataSourceSurface> dataSurface = snapshot->GetDataSurface();
return gfxUtils::GetImageBuffer(dataSurface, mOptions.premultipliedAlpha,
out_imageBuffer, out_format);
@ -1079,14 +1079,14 @@ WebGLContext::GetInputStream(const char* mimeType,
// Use GetSurfaceSnapshot() to make sure that appropriate y-flip gets applied
bool premult;
RefPtr<SourceSurface> snapshot =
nsRefPtr<SourceSurface> snapshot =
GetSurfaceSnapshot(mOptions.premultipliedAlpha ? nullptr : &premult);
if (!snapshot)
return NS_ERROR_FAILURE;
MOZ_ASSERT(mOptions.premultipliedAlpha || !premult, "We must get unpremult when we ask for it!");
RefPtr<DataSourceSurface> dataSurface = snapshot->GetDataSurface();
nsRefPtr<DataSourceSurface> dataSurface = snapshot->GetDataSurface();
return gfxUtils::GetInputStream(dataSurface, mOptions.premultipliedAlpha, mimeType,
encoderOptions, out_stream);
}
@ -1752,7 +1752,7 @@ WebGLContext::GetSurfaceSnapshot(bool* out_premultAlpha)
bool hasAlpha = mOptions.alpha;
SurfaceFormat surfFormat = hasAlpha ? SurfaceFormat::B8G8R8A8
: SurfaceFormat::B8G8R8X8;
RefPtr<DataSourceSurface> surf;
nsRefPtr<DataSourceSurface> surf;
surf = Factory::CreateDataSourceSurfaceWithStride(IntSize(mWidth, mHeight),
surfFormat,
mWidth * 4);
@ -1780,7 +1780,7 @@ WebGLContext::GetSurfaceSnapshot(bool* out_premultAlpha)
}
}
RefPtr<DrawTarget> dt =
nsRefPtr<DrawTarget> dt =
Factory::CreateDrawTarget(BackendType::CAIRO,
IntSize(mWidth, mHeight),
SurfaceFormat::B8G8R8A8);

View File

@ -1290,7 +1290,7 @@ protected:
nsresult
SurfaceFromElementResultToImageSurface(nsLayoutUtils::SurfaceFromElementResult& res,
RefPtr<gfx::DataSourceSurface>& imageOut,
nsRefPtr<gfx::DataSourceSurface>& imageOut,
WebGLTexelFormat* format);
// Returns false if `object` is null or not valid.
@ -1360,7 +1360,7 @@ protected:
void ResolveTexturesForDraw() const;
WebGLRefPtr<WebGLProgram> mCurrentProgram;
RefPtr<const webgl::LinkedProgramInfo> mActiveProgramLinkInfo;
nsRefPtr<const webgl::LinkedProgramInfo> mActiveProgramLinkInfo;
GLenum LastColorAttachment() const {
return LOCAL_GL_COLOR_ATTACHMENT0 + mGLMaxColorAttachments - 1;
@ -1453,7 +1453,7 @@ protected:
GLsizei mViewportHeight;
bool mAlreadyWarnedAboutViewportLargerThanDest;
RefPtr<WebGLContextLossHandler> mContextLossHandler;
nsRefPtr<WebGLContextLossHandler> mContextLossHandler;
bool mAllowContextRestore;
bool mLastLossWasSimulated;
ContextStatus mContextStatus;

View File

@ -1885,14 +1885,14 @@ WebGLContext::StencilOpSeparate(GLenum face, GLenum sfail, GLenum dpfail, GLenum
nsresult
WebGLContext::SurfaceFromElementResultToImageSurface(nsLayoutUtils::SurfaceFromElementResult& res,
RefPtr<DataSourceSurface>& imageOut,
nsRefPtr<DataSourceSurface>& imageOut,
WebGLTexelFormat* format)
{
*format = WebGLTexelFormat::None;
if (!res.mSourceSurface)
return NS_OK;
RefPtr<DataSourceSurface> data = res.mSourceSurface->GetDataSurface();
nsRefPtr<DataSourceSurface> data = res.mSourceSurface->GetDataSurface();
if (!data) {
// SurfaceFromElement lied!
return NS_OK;

View File

@ -71,10 +71,10 @@ ParseName(const nsCString& name, nsCString* const out_baseName,
static void
AddActiveInfo(WebGLContext* webgl, GLint elemCount, GLenum elemType, bool isArray,
const nsACString& baseUserName, const nsACString& baseMappedName,
std::vector<RefPtr<WebGLActiveInfo>>* activeInfoList,
std::vector<nsRefPtr<WebGLActiveInfo>>* activeInfoList,
std::map<nsCString, const WebGLActiveInfo*>* infoLocMap)
{
RefPtr<WebGLActiveInfo> info = new WebGLActiveInfo(webgl, elemCount, elemType,
nsRefPtr<WebGLActiveInfo> info = new WebGLActiveInfo(webgl, elemCount, elemType,
isArray, baseUserName,
baseMappedName);
activeInfoList->push_back(info);
@ -85,9 +85,9 @@ AddActiveInfo(WebGLContext* webgl, GLint elemCount, GLenum elemType, bool isArra
static void
AddActiveBlockInfo(const nsACString& baseUserName,
const nsACString& baseMappedName,
std::vector<RefPtr<webgl::UniformBlockInfo>>* activeInfoList)
std::vector<nsRefPtr<webgl::UniformBlockInfo>>* activeInfoList)
{
RefPtr<webgl::UniformBlockInfo> info = new webgl::UniformBlockInfo(baseUserName, baseMappedName);
nsRefPtr<webgl::UniformBlockInfo> info = new webgl::UniformBlockInfo(baseUserName, baseMappedName);
activeInfoList->push_back(info);
}
@ -97,7 +97,7 @@ AddActiveBlockInfo(const nsACString& baseUserName,
static already_AddRefed<const webgl::LinkedProgramInfo>
QueryProgramInfo(WebGLProgram* prog, gl::GLContext* gl)
{
RefPtr<webgl::LinkedProgramInfo> info(new webgl::LinkedProgramInfo(prog));
nsRefPtr<webgl::LinkedProgramInfo> info(new webgl::LinkedProgramInfo(prog));
GLuint maxAttribLenWithNull = 0;
gl->fGetProgramiv(prog->mGLName, LOCAL_GL_ACTIVE_ATTRIBUTE_MAX_LENGTH,
@ -437,7 +437,7 @@ WebGLProgram::GetActiveAttrib(GLuint index) const
return nullptr;
}
RefPtr<WebGLActiveInfo> ret = activeList[index];
nsRefPtr<WebGLActiveInfo> ret = activeList[index];
return ret.forget();
}
@ -458,7 +458,7 @@ WebGLProgram::GetActiveUniform(GLuint index) const
return nullptr;
}
RefPtr<WebGLActiveInfo> ret = activeList[index];
nsRefPtr<WebGLActiveInfo> ret = activeList[index];
return ret.forget();
}
@ -598,7 +598,7 @@ WebGLProgram::GetUniformBlockIndex(const nsAString& userName_wide) const
if (!ParseName(userName, &baseUserName, &isArray, &arrayIndex))
return LOCAL_GL_INVALID_INDEX;
RefPtr<const webgl::UniformBlockInfo> info;
nsRefPtr<const webgl::UniformBlockInfo> info;
if (!LinkInfo()->FindUniformBlock(baseUserName, &info)) {
return LOCAL_GL_INVALID_INDEX;
}
@ -1025,7 +1025,7 @@ WebGLProgram::GetTransformFeedbackVarying(GLuint index)
LinkInfo()->FindAttrib(varyingUserName, (const WebGLActiveInfo**) &info);
MOZ_ASSERT(info);
RefPtr<WebGLActiveInfo> ret(info);
nsRefPtr<WebGLActiveInfo> ret(info);
return ret.forget();
}

View File

@ -11,7 +11,7 @@
#include <vector>
#include "mozilla/LinkedList.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "mozilla/WeakPtr.h"
#include "nsString.h"
#include "nsWrapperCache.h"
@ -59,8 +59,8 @@ struct LinkedProgramInfo final
MOZ_DECLARE_WEAKREFERENCE_TYPENAME(LinkedProgramInfo)
WebGLProgram* const prog;
std::vector<RefPtr<WebGLActiveInfo>> activeAttribs;
std::vector<RefPtr<WebGLActiveInfo>> activeUniforms;
std::vector<nsRefPtr<WebGLActiveInfo>> activeAttribs;
std::vector<nsRefPtr<WebGLActiveInfo>> activeUniforms;
// Needed for Get{Attrib,Uniform}Location. The keys for these are non-mapped
// user-facing `GLActiveInfo::name`s, without any final "[0]".
@ -68,7 +68,7 @@ struct LinkedProgramInfo final
std::map<nsCString, const WebGLActiveInfo*> uniformMap;
std::map<nsCString, const nsCString>* fragDataMap;
std::vector<RefPtr<UniformBlockInfo>> uniformBlocks;
std::vector<nsRefPtr<UniformBlockInfo>> uniformBlocks;
// Needed for draw call validation.
std::set<GLuint> activeAttribLocs;
@ -98,7 +98,7 @@ struct LinkedProgramInfo final
}
bool FindUniformBlock(const nsCString& baseUserName,
RefPtr<const UniformBlockInfo>* const out_info) const
nsRefPtr<const UniformBlockInfo>* const out_info) const
{
const size_t count = uniformBlocks.size();
for (size_t i = 0; i < count; i++) {
@ -211,7 +211,7 @@ private:
std::vector<nsCString> mTransformFeedbackVaryings;
GLenum mTransformFeedbackBufferMode;
nsCString mLinkLog;
RefPtr<const webgl::LinkedProgramInfo> mMostRecentLinkInfo;
nsRefPtr<const webgl::LinkedProgramInfo> mMostRecentLinkInfo;
// Storage for transform feedback varyings before link.
// (Work around for bug seen on nVidia drivers.)
std::vector<std::string> mTempMappedVaryings;

View File

@ -696,7 +696,7 @@ WebGLTexture::TexImage2D(TexImageTarget texImageTarget, GLint level,
return;
}
RefPtr<gfx::DataSourceSurface> data;
nsRefPtr<gfx::DataSourceSurface> data;
WebGLTexelFormat srcFormat;
nsLayoutUtils::SurfaceFromElementResult res = mContext->SurfaceFromElement(elem);
*out_rv = mContext->SurfaceFromElementResultToImageSurface(res, data, &srcFormat);
@ -1014,7 +1014,7 @@ WebGLTexture::TexSubImage2D(TexImageTarget texImageTarget, GLint level, GLint xO
return;
}
RefPtr<gfx::DataSourceSurface> data;
nsRefPtr<gfx::DataSourceSurface> data;
WebGLTexelFormat srcFormat;
nsLayoutUtils::SurfaceFromElementResult res = mContext->SurfaceFromElement(elem);
*out_rv = mContext->SurfaceFromElementResultToImageSurface(res, data, &srcFormat);

View File

@ -13,7 +13,7 @@
#include "nsRefreshDriver.h"
#include "mozilla/dom/HTMLCanvasElement.h"
#include "mozilla/dom/OffscreenCanvas.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#define NS_ICANVASRENDERINGCONTEXTINTERNAL_IID \
{ 0xb84f2fed, 0x9d4b, 0x430b, \

View File

@ -10,7 +10,7 @@
#include "nsIFile.h"
#include "nsIPrincipal.h"
#include "mozilla/DOMEventTargetHelper.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "mozilla/StaticPtr.h"
#include "mozilla/dom/DOMRequest.h"
#include "nsWeakReference.h"

View File

@ -67,9 +67,9 @@ public:
}
static already_AddRefed<DataSourceSurface>
CopySurface(const RefPtr<SourceSurface>& aSurface)
CopySurface(const nsRefPtr<SourceSurface>& aSurface)
{
RefPtr<DataSourceSurface> data = aSurface->GetDataSurface();
nsRefPtr<DataSourceSurface> data = aSurface->GetDataSurface();
if (!data) {
return nullptr;
}
@ -79,7 +79,7 @@ public:
return nullptr;
}
RefPtr<DataSourceSurface> copy =
nsRefPtr<DataSourceSurface> copy =
Factory::CreateDataSourceSurfaceWithStride(data->GetSize(),
data->GetFormat(),
read.GetStride());
@ -122,12 +122,12 @@ public:
return;
}
RefPtr<SourceSurface> snapshot = mOwningElement->GetSurfaceSnapshot(nullptr);
nsRefPtr<SourceSurface> snapshot = mOwningElement->GetSurfaceSnapshot(nullptr);
if (!snapshot) {
return;
}
RefPtr<DataSourceSurface> copy = CopySurface(snapshot);
nsRefPtr<DataSourceSurface> copy = CopySurface(snapshot);
mOwningElement->SetFrameCapture(copy.forget());
mOwningElement->MarkContextCleanForFrameCapture();
@ -177,7 +177,7 @@ private:
bool mRegistered;
HTMLCanvasElement* const mOwningElement;
RefPtr<nsRefreshDriver> mRefreshDriver;
nsRefPtr<nsRefreshDriver> mRefreshDriver;
};
// ---------------------------------------------------------------------------
@ -1169,7 +1169,7 @@ HTMLCanvasElement::IsFrameCaptureRequested() const
void
HTMLCanvasElement::SetFrameCapture(already_AddRefed<SourceSurface> aSurface)
{
RefPtr<SourceSurface> surface = aSurface;
nsRefPtr<SourceSurface> surface = aSurface;
CairoImage::Data imageData;
imageData.mSize = surface->GetSize();

View File

@ -2765,7 +2765,7 @@ ContentParent::RecvSetClipboard(const IPCDataTransfer& aDataTransfer,
}
nsCString text = item.data().get_nsCString();
mozilla::RefPtr<gfx::DataSourceSurface> image =
nsRefPtr<gfx::DataSourceSurface> image =
new mozilla::gfx::SourceSurfaceRawData();
mozilla::gfx::SourceSurfaceRawData* raw =
static_cast<mozilla::gfx::SourceSurfaceRawData*>(image.get());

View File

@ -8,7 +8,7 @@
#define mozilla_dom_ipc_StructuredCloneData_h
#include <algorithm>
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "mozilla/dom/StructuredCloneHolder.h"
#include "nsISupportsImpl.h"
@ -138,7 +138,7 @@ private:
uint64_t* MOZ_NON_OWNING_REF mExternalData;
size_t mExternalDataLength;
RefPtr<SharedJSAllocatedData> mSharedData;
nsRefPtr<SharedJSAllocatedData> mSharedData;
};
} // namespace ipc

View File

@ -2114,7 +2114,7 @@ TabParent::RecvSetCustomCursor(const nsCString& aCursorData,
if (mTabSetsCursor) {
const gfx::IntSize size(aWidth, aHeight);
mozilla::RefPtr<gfx::DataSourceSurface> customCursor = new mozilla::gfx::SourceSurfaceRawData();
nsRefPtr<gfx::DataSourceSurface> customCursor = new mozilla::gfx::SourceSurfaceRawData();
mozilla::gfx::SourceSurfaceRawData* raw = static_cast<mozilla::gfx::SourceSurfaceRawData*>(customCursor.get());
raw->InitWrappingData(
reinterpret_cast<uint8_t*>(const_cast<nsCString&>(aCursorData).BeginWriting()),
@ -3601,7 +3601,7 @@ TabParent::AddInitialDnDDataTo(DataTransfer* aDataTransfer)
}
void
TabParent::TakeDragVisualization(RefPtr<mozilla::gfx::SourceSurface>& aSurface,
TabParent::TakeDragVisualization(nsRefPtr<mozilla::gfx::SourceSurface>& aSurface,
int32_t& aDragAreaX, int32_t& aDragAreaY)
{
aSurface = mDnDVisualization.forget();

View File

@ -15,7 +15,7 @@
#include "mozilla/dom/TabContext.h"
#include "mozilla/EventForwards.h"
#include "mozilla/dom/File.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "nsCOMPtr.h"
#include "nsIAuthPromptProvider.h"
#include "nsIBrowserDOMWindow.h"
@ -450,7 +450,7 @@ public:
void AddInitialDnDDataTo(DataTransfer* aDataTransfer);
void TakeDragVisualization(RefPtr<mozilla::gfx::SourceSurface>& aSurface,
void TakeDragVisualization(nsRefPtr<mozilla::gfx::SourceSurface>& aSurface,
int32_t& aDragAreaX, int32_t& aDragAreaY);
layout::RenderFrameParent* GetRenderFrame();
@ -566,7 +566,7 @@ private:
};
nsTArray<nsTArray<DataTransferItem>> mInitialDataTransferItems;
mozilla::RefPtr<gfx::DataSourceSurface> mDnDVisualization;
nsRefPtr<gfx::DataSourceSurface> mDnDVisualization;
int32_t mDragAreaX;
int32_t mDragAreaY;

View File

@ -12,7 +12,7 @@
#include "nsThreadUtils.h"
#include "mozilla/dom/AudioChannelBinding.h"
#include "mozilla/Monitor.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "mozilla/TimeStamp.h"
#include "mozilla/UniquePtr.h"
#include "CubebUtils.h"

View File

@ -538,7 +538,7 @@ DOMMediaStream::AddTrack(MediaStreamTrack& aTrack)
{
MOZ_RELEASE_ASSERT(mPlaybackStream);
RefPtr<ProcessedMediaStream> dest = mPlaybackStream->AsProcessedStream();
nsRefPtr<ProcessedMediaStream> dest = mPlaybackStream->AsProcessedStream();
MOZ_ASSERT(dest);
if (!dest) {
return;
@ -573,10 +573,10 @@ DOMMediaStream::AddTrack(MediaStreamTrack& aTrack)
return;
}
RefPtr<DOMMediaStream> addedDOMStream = aTrack.GetStream();
nsRefPtr<DOMMediaStream> addedDOMStream = aTrack.GetStream();
MOZ_RELEASE_ASSERT(addedDOMStream);
RefPtr<MediaStream> owningStream = addedDOMStream->GetOwnedStream();
nsRefPtr<MediaStream> owningStream = addedDOMStream->GetOwnedStream();
MOZ_RELEASE_ASSERT(owningStream);
CombineWithPrincipal(addedDOMStream->mPrincipal);

View File

@ -338,7 +338,7 @@ MediaDecoderReader::RequestVideoData(bool aSkipToNextKeyframe,
// keyframe. Post another task to the decode task queue to decode
// again. We don't just decode straight in a loop here, as that
// would hog the decode task queue.
RefPtr<nsIRunnable> task(new ReRequestVideoWithSkipTask(this, aTimeThreshold));
nsRefPtr<nsIRunnable> task(new ReRequestVideoWithSkipTask(this, aTimeThreshold));
mTaskQueue->Dispatch(task.forget());
return p;
}
@ -374,7 +374,7 @@ MediaDecoderReader::RequestAudioData()
// coming in gstreamer 1.x when there is still video buffer waiting to be
// consumed. (|mVideoSinkBufferCount| > 0)
if (AudioQueue().GetSize() == 0) {
RefPtr<nsIRunnable> task(new ReRequestAudioTask(this));
nsRefPtr<nsIRunnable> task(new ReRequestAudioTask(this));
mTaskQueue->Dispatch(task.forget());
return p;
}

View File

@ -293,7 +293,7 @@ public:
// trigger-happy with notifications anyway.
void DispatchNotifyDataArrived(uint32_t aLength, int64_t aOffset, bool aThrottleUpdates)
{
RefPtr<nsRunnable> r =
nsRefPtr<nsRunnable> r =
NS_NewRunnableMethodWithArg<media::Interval<int64_t>>(this, aThrottleUpdates ? &MediaDecoderReader::ThrottledNotifyDataArrived
: &MediaDecoderReader::NotifyDataArrived,
media::Interval<int64_t>(aOffset, aOffset + aLength));

View File

@ -2230,7 +2230,7 @@ MediaDecoderStateMachine::FinishShutdown()
// dispatch an event to the main thread to release the decoder and
// state machine.
DECODER_LOG("Shutting down state machine task queue");
RefPtr<DecoderDisposer> disposer = new DecoderDisposer(mDecoder, this);
nsRefPtr<DecoderDisposer> disposer = new DecoderDisposer(mDecoder, this);
OwnerThread()->BeginShutdown()->Then(AbstractThread::MainThread(), __func__,
disposer.get(),
&DecoderDisposer::OnTaskQueueShutdown,

View File

@ -828,7 +828,7 @@ MediaFormatReader::ScheduleUpdate(TrackType aTrack)
}
LOGV("SchedulingUpdate(%s)", TrackTypeToStr(aTrack));
decoder.mUpdateScheduled = true;
RefPtr<nsIRunnable> task(
nsRefPtr<nsIRunnable> task(
NS_NewRunnableMethodWithArg<TrackType>(this, &MediaFormatReader::Update, aTrack));
OwnerThread()->Dispatch(task.forget());
}
@ -1262,7 +1262,7 @@ MediaFormatReader::Output(TrackType aTrack, MediaData* aSample)
return;
}
RefPtr<nsIRunnable> task =
nsRefPtr<nsIRunnable> task =
NS_NewRunnableMethodWithArgs<TrackType, MediaData*>(
this, &MediaFormatReader::NotifyNewOutput, aTrack, aSample);
OwnerThread()->Dispatch(task.forget());
@ -1271,7 +1271,7 @@ MediaFormatReader::Output(TrackType aTrack, MediaData* aSample)
void
MediaFormatReader::DrainComplete(TrackType aTrack)
{
RefPtr<nsIRunnable> task =
nsRefPtr<nsIRunnable> task =
NS_NewRunnableMethodWithArg<TrackType>(
this, &MediaFormatReader::NotifyDrainComplete, aTrack);
OwnerThread()->Dispatch(task.forget());
@ -1280,7 +1280,7 @@ MediaFormatReader::DrainComplete(TrackType aTrack)
void
MediaFormatReader::InputExhausted(TrackType aTrack)
{
RefPtr<nsIRunnable> task =
nsRefPtr<nsIRunnable> task =
NS_NewRunnableMethodWithArg<TrackType>(
this, &MediaFormatReader::NotifyInputExhausted, aTrack);
OwnerThread()->Dispatch(task.forget());
@ -1289,7 +1289,7 @@ MediaFormatReader::InputExhausted(TrackType aTrack)
void
MediaFormatReader::Error(TrackType aTrack)
{
RefPtr<nsIRunnable> task =
nsRefPtr<nsIRunnable> task =
NS_NewRunnableMethodWithArg<TrackType>(
this, &MediaFormatReader::NotifyError, aTrack);
OwnerThread()->Dispatch(task.forget());
@ -1407,7 +1407,7 @@ MediaFormatReader::Seek(int64_t aTime, int64_t aUnused)
nsRefPtr<SeekPromise> p = mSeekPromise.Ensure(__func__);
RefPtr<nsIRunnable> task(
nsRefPtr<nsIRunnable> task(
NS_NewRunnableMethod(this, &MediaFormatReader::AttemptSeek));
OwnerThread()->Dispatch(task.forget());

View File

@ -2602,7 +2602,7 @@ MediaManager::Observe(nsISupports* aSubject, const char* aTopic,
}
}
nsRefPtr<nsRunnable> mReply;
RefPtr<MediaEngine> mBackend;
nsRefPtr<MediaEngine> mBackend;
};
// Post ShutdownTask to execute on mMediaThread and pass in a lambda
@ -2615,7 +2615,7 @@ MediaManager::Observe(nsISupports* aSubject, const char* aTopic,
// note that this == sSingleton
nsRefPtr<MediaManager> that(sSingleton);
// Release the backend (and call Shutdown()) from within the MediaManager thread
RefPtr<MediaEngine> temp;
nsRefPtr<MediaEngine> temp;
{
MutexAutoLock lock(mMutex);
temp = mBackend.forget();

View File

@ -533,7 +533,7 @@ private:
Mutex mMutex;
// protected with mMutex:
RefPtr<MediaEngine> mBackend;
nsRefPtr<MediaEngine> mBackend;
static StaticRefPtr<MediaManager> sSingleton;

View File

@ -1679,7 +1679,7 @@ public:
return NS_OK;
}
RefPtr<MediaDecoder> mDecoder;
nsRefPtr<MediaDecoder> mDecoder;
int64_t mNumBytes;
int64_t mOffset;
};
@ -1689,7 +1689,7 @@ void BaseMediaResource::DispatchBytesConsumed(int64_t aNumBytes, int64_t aOffset
if (aNumBytes <= 0) {
return;
}
RefPtr<nsIRunnable> event(new DispatchBytesConsumedEvent(mDecoder, aNumBytes, aOffset));
nsRefPtr<nsIRunnable> event(new DispatchBytesConsumedEvent(mDecoder, aNumBytes, aOffset));
NS_DispatchToMainThread(event);
}

View File

@ -9,7 +9,7 @@
#include "nsIObserver.h"
#include "mozilla/Monitor.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "mozilla/StaticPtr.h"
#include "nsIThread.h"
#include "nsCOMPtr.h"

View File

@ -11,7 +11,7 @@
#include "nsComponentManagerUtils.h"
#include "nsThreadUtils.h"
#include "mozilla/DebugOnly.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "mozilla/SharedThreadPool.h"
namespace mozilla {
@ -29,7 +29,7 @@ MediaTimer::MediaTimer()
// Use the SharedThreadPool to create an nsIThreadPool with a maximum of one
// thread, which is equivalent to an nsIThread for our purposes.
RefPtr<SharedThreadPool> threadPool(
nsRefPtr<SharedThreadPool> threadPool(
SharedThreadPool::Get(NS_LITERAL_CSTRING("MediaTimer"), 1));
mThread = threadPool.get();
mTimer->SetTarget(mThread);

View File

@ -12,7 +12,7 @@
#include "mozilla/CheckedInt.h"
#include "mozilla/MozPromise.h"
#include "mozilla/ReentrantMonitor.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "nsIThread.h"
#include "nsSize.h"

View File

@ -143,7 +143,7 @@ AudioSinkFilter::GetSampleSink()
if (!mInputPin) { \
return E_NOTIMPL; \
} \
RefPtr<IMediaSeeking> pinSeeking = mInputPin->GetConnectedPinSeeking(); \
nsRefPtr<IMediaSeeking> pinSeeking = mInputPin->GetConnectedPinSeeking(); \
if (!pinSeeking) { \
return E_NOTIMPL; \
}

View File

@ -10,7 +10,7 @@
#include "BaseFilter.h"
#include "DirectShowUtils.h"
#include "nsAutoPtr.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
namespace mozilla {

View File

@ -134,11 +134,11 @@ AudioSinkInputPin::Receive(IMediaSample* aSample )
already_AddRefed<IMediaSeeking>
AudioSinkInputPin::GetConnectedPinSeeking()
{
RefPtr<IPin> peer = GetConnected();
nsRefPtr<IPin> peer = GetConnected();
if (!peer)
return nullptr;
RefPtr<IMediaSeeking> seeking;
peer->QueryInterface(static_cast<IMediaSeeking**>(byRef(seeking)));
nsRefPtr<IMediaSeeking> seeking;
peer->QueryInterface(static_cast<IMediaSeeking**>(getter_AddRefs(seeking)));
return seeking.forget();
}

View File

@ -9,7 +9,7 @@
#include "BaseInputPin.h"
#include "DirectShowUtils.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "nsAutoPtr.h"
namespace mozilla {

View File

@ -6,7 +6,7 @@
#include "DirectShowReader.h"
#include "MediaDecoderReader.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "DirectShowUtils.h"
#include "AudioSinkFilter.h"
#include "SourceFilter.h"
@ -103,7 +103,7 @@ DirectShowReader::ReadMetadata(MediaInfo* aInfo,
nullptr,
CLSCTX_INPROC_SERVER,
IID_IGraphBuilder,
reinterpret_cast<void**>(static_cast<IGraphBuilder**>(byRef(mGraph))));
reinterpret_cast<void**>(static_cast<IGraphBuilder**>(getter_AddRefs(mGraph))));
NS_ENSURE_TRUE(SUCCEEDED(hr) && mGraph, NS_ERROR_FAILURE);
rv = ParseMP3Headers(&mMP3FrameParser, mDecoder->GetResource());
@ -119,10 +119,10 @@ DirectShowReader::ReadMetadata(MediaInfo* aInfo,
#endif
// Extract the interface pointers we'll need from the filter graph.
hr = mGraph->QueryInterface(static_cast<IMediaControl**>(byRef(mControl)));
hr = mGraph->QueryInterface(static_cast<IMediaControl**>(getter_AddRefs(mControl)));
NS_ENSURE_TRUE(SUCCEEDED(hr) && mControl, NS_ERROR_FAILURE);
hr = mGraph->QueryInterface(static_cast<IMediaSeeking**>(byRef(mMediaSeeking)));
hr = mGraph->QueryInterface(static_cast<IMediaSeeking**>(getter_AddRefs(mMediaSeeking)));
NS_ENSURE_TRUE(SUCCEEDED(hr) && mMediaSeeking, NS_ERROR_FAILURE);
// Build the graph. Create the filters we need, and connect them. We
@ -140,26 +140,26 @@ DirectShowReader::ReadMetadata(MediaInfo* aInfo,
NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
// The MPEG demuxer.
RefPtr<IBaseFilter> demuxer;
nsRefPtr<IBaseFilter> demuxer;
hr = CreateAndAddFilter(mGraph,
CLSID_MPEG1Splitter,
L"MPEG1Splitter",
byRef(demuxer));
getter_AddRefs(demuxer));
NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
// Platform MP3 decoder.
RefPtr<IBaseFilter> decoder;
nsRefPtr<IBaseFilter> decoder;
// Firstly try to create the MP3 decoder filter that ships with WinXP
// directly. This filter doesn't normally exist on later versions of
// Windows.
hr = CreateAndAddFilter(mGraph,
CLSID_MPEG_LAYER_3_DECODER_FILTER,
L"MPEG Layer 3 Decoder",
byRef(decoder));
getter_AddRefs(decoder));
if (FAILED(hr)) {
// Failed to create MP3 decoder filter. Try to instantiate
// the MP3 decoder DMO.
hr = AddMP3DMOWrapperFilter(mGraph, byRef(decoder));
hr = AddMP3DMOWrapperFilter(mGraph, getter_AddRefs(decoder));
NS_ENSURE_TRUE(SUCCEEDED(hr), NS_ERROR_FAILURE);
}
@ -237,8 +237,8 @@ DirectShowReader::Finish(HRESULT aStatus)
LOG("DirectShowReader::Finish(0x%x)", aStatus);
// Notify the filter graph of end of stream.
RefPtr<IMediaEventSink> eventSink;
HRESULT hr = mGraph->QueryInterface(static_cast<IMediaEventSink**>(byRef(eventSink)));
nsRefPtr<IMediaEventSink> eventSink;
HRESULT hr = mGraph->QueryInterface(static_cast<IMediaEventSink**>(getter_AddRefs(eventSink)));
if (SUCCEEDED(hr) && eventSink) {
eventSink->Notify(EC_COMPLETE, aStatus, 0);
}
@ -301,7 +301,7 @@ DirectShowReader::DecodeAudioData()
// Get the next chunk of audio samples. This blocks until the sample
// arrives, or an error occurs (like the stream is shutdown).
RefPtr<IMediaSample> sample;
nsRefPtr<IMediaSample> sample;
hr = sink->Extract(sample);
if (FAILED(hr) || hr == S_FALSE) {
return Finish(hr);

View File

@ -9,7 +9,7 @@
#include "windows.h" // HRESULT, DWORD
#include "MediaDecoderReader.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "MP3FrameParser.h"
struct IGraphBuilder;
@ -74,16 +74,16 @@ private:
// DirectShow filter graph, and associated playback and seeking
// control interfaces.
RefPtr<IGraphBuilder> mGraph;
RefPtr<IMediaControl> mControl;
RefPtr<IMediaSeeking> mMediaSeeking;
nsRefPtr<IGraphBuilder> mGraph;
nsRefPtr<IMediaControl> mControl;
nsRefPtr<IMediaSeeking> mMediaSeeking;
// Wraps the MediaResource, and feeds undecoded data into the filter graph.
RefPtr<SourceFilter> mSourceFilter;
nsRefPtr<SourceFilter> mSourceFilter;
// Sits at the end of the graph, removing decoded samples from the graph.
// The graph will block while this is blocked, i.e. it will pause decoding.
RefPtr<AudioSinkFilter> mAudioSinkFilter;
nsRefPtr<AudioSinkFilter> mAudioSinkFilter;
// Some MP3s are variable bitrate, so DirectShow's duration estimation
// can make its duration estimation based on the wrong bitrate. So we parse

View File

@ -9,7 +9,7 @@
#include "dmoreg.h"
#include "nsAutoPtr.h"
#include "mozilla/ArrayUtils.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "nsPrintfCString.h"
#define WARN(...) NS_WARNING(nsPrintfCString(__VA_ARGS__).get())
@ -312,8 +312,8 @@ MatchUnconnectedPin(IPin* aPin,
NS_ENSURE_TRUE(aOutMatches, E_POINTER);
// Ensure the pin is unconnected.
RefPtr<IPin> peer;
HRESULT hr = aPin->ConnectedTo(byRef(peer));
nsRefPtr<IPin> peer;
HRESULT hr = aPin->ConnectedTo(getter_AddRefs(peer));
if (hr != VFW_E_NOT_CONNECTED) {
*aOutMatches = false;
return hr;
@ -332,14 +332,14 @@ MatchUnconnectedPin(IPin* aPin,
already_AddRefed<IPin>
GetUnconnectedPin(IBaseFilter* aFilter, PIN_DIRECTION aPinDir)
{
RefPtr<IEnumPins> enumPins;
nsRefPtr<IEnumPins> enumPins;
HRESULT hr = aFilter->EnumPins(byRef(enumPins));
HRESULT hr = aFilter->EnumPins(getter_AddRefs(enumPins));
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
// Test each pin to see if it matches the direction we're looking for.
RefPtr<IPin> pin;
while (S_OK == enumPins->Next(1, byRef(pin), nullptr)) {
nsRefPtr<IPin> pin;
while (S_OK == enumPins->Next(1, getter_AddRefs(pin), nullptr)) {
bool matches = FALSE;
if (SUCCEEDED(MatchUnconnectedPin(pin, aPinDir, &matches)) &&
matches) {
@ -355,10 +355,10 @@ ConnectFilters(IGraphBuilder* aGraph,
IBaseFilter* aOutputFilter,
IBaseFilter* aInputFilter)
{
RefPtr<IPin> output = GetUnconnectedPin(aOutputFilter, PINDIR_OUTPUT);
nsRefPtr<IPin> output = GetUnconnectedPin(aOutputFilter, PINDIR_OUTPUT);
NS_ENSURE_TRUE(output, E_FAIL);
RefPtr<IPin> input = GetUnconnectedPin(aInputFilter, PINDIR_INPUT);
nsRefPtr<IPin> input = GetUnconnectedPin(aInputFilter, PINDIR_INPUT);
NS_ENSURE_TRUE(output, E_FAIL);
return aGraph->Connect(output, input);

View File

@ -81,7 +81,7 @@ SampleSink::Receive(IMediaSample* aSample)
}
HRESULT
SampleSink::Extract(RefPtr<IMediaSample>& aOutSample)
SampleSink::Extract(nsRefPtr<IMediaSample>& aOutSample)
{
ReentrantMonitorAutoEnter mon(mMonitor);
// Loop until we have a sample, or we should abort.

View File

@ -10,7 +10,7 @@
#include "BaseFilter.h"
#include "DirectShowUtils.h"
#include "nsAutoPtr.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "mozilla/ReentrantMonitor.h"
namespace mozilla {
@ -35,7 +35,7 @@ public:
// Retrieves a sample from the sample queue, blocking until one becomes
// available, or until an error occurs. Returns S_FALSE on EOS.
HRESULT Extract(RefPtr<IMediaSample>& aOutSample);
HRESULT Extract(nsRefPtr<IMediaSample>& aOutSample);
// Unblocks any threads waiting in GetSample().
// Clears mSample, which unblocks upstream stream.
@ -54,7 +54,7 @@ public:
private:
// All data in this class is syncronized by mMonitor.
ReentrantMonitor mMonitor;
RefPtr<IMediaSample> mSample;
nsRefPtr<IMediaSample> mSample;
// Format of the audio stream we're receiving.
WAVEFORMATEX mAudioFormat;

View File

@ -6,7 +6,7 @@
#include "SourceFilter.h"
#include "MediaResource.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "DirectShowUtils.h"
#include "MP3FrameParser.h"
#include "mozilla/Logging.h"
@ -56,7 +56,7 @@ public:
MOZ_COUNT_DTOR(ReadRequest);
}
RefPtr<IMediaSample> mSample;
nsRefPtr<IMediaSample> mSample;
DWORD_PTR mDwUser;
uint32_t mOffset;
uint32_t mCount;

View File

@ -14,7 +14,7 @@
#include "nsDeque.h"
#include "nsAutoPtr.h"
#include "DirectShowUtils.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
namespace mozilla {

View File

@ -399,7 +399,7 @@ MediaKeySession::DispatchKeyError(uint32_t aSystemCode)
EME_LOG("MediaKeySession[%p,'%s'] DispatchKeyError() systemCode=%u.",
this, NS_ConvertUTF16toUTF8(mSessionId).get(), aSystemCode);
RefPtr<MediaKeyError> event(new MediaKeyError(this, aSystemCode));
nsRefPtr<MediaKeyError> event(new MediaKeyError(this, aSystemCode));
nsRefPtr<AsyncEventDispatcher> asyncDispatcher =
new AsyncEventDispatcher(this, event);
asyncDispatcher->PostDOMEvent();

View File

@ -11,7 +11,7 @@
#include "nsWrapperCache.h"
#include "nsISupports.h"
#include "mozilla/Attributes.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "nsCOMPtr.h"
#include "nsCycleCollectionParticipant.h"
#include "nsRefPtrHashtable.h"

View File

@ -370,13 +370,13 @@ nsresult VP8TrackEncoder::PrepareRawFrame(VideoChunk &aChunk)
} else {
// Not YCbCr at all. Try to get access to the raw data and convert.
RefPtr<SourceSurface> surf = img->GetAsSourceSurface();
nsRefPtr<SourceSurface> surf = img->GetAsSourceSurface();
if (!surf) {
VP8LOG("Getting surface from %s image failed\n", Stringify(format).c_str());
return NS_ERROR_FAILURE;
}
RefPtr<DataSourceSurface> data = surf->GetDataSurface();
nsRefPtr<DataSourceSurface> data = surf->GetDataSurface();
if (!data) {
VP8LOG("Getting data surface from %s image with %s (%s) surface failed\n",
Stringify(format).c_str(), Stringify(surf->GetType()).c_str(),

View File

@ -6,7 +6,7 @@
#ifndef GMPAudioDecoderParent_h_
#define GMPAudioDecoderParent_h_
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "gmp-audio-decode.h"
#include "gmp-audio-codec.h"
#include "mozilla/gmp/PGMPAudioDecoderParent.h"

View File

@ -7,7 +7,7 @@
#define GMPDecryptorParent_h_
#include "mozilla/gmp/PGMPDecryptorParent.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "gmp-decryption.h"
#include "GMPDecryptorProxy.h"

View File

@ -6,7 +6,7 @@
#ifndef GMPVideoDecoderParent_h_
#define GMPVideoDecoderParent_h_
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "gmp-video-decode.h"
#include "mozilla/gmp/PGMPVideoDecoderParent.h"
#include "GMPMessageUtils.h"

View File

@ -6,7 +6,7 @@
#ifndef GMPVideoEncoderParent_h_
#define GMPVideoEncoderParent_h_
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "gmp-video-encode.h"
#include "mozilla/gmp/PGMPVideoEncoderParent.h"
#include "GMPMessageUtils.h"

View File

@ -262,7 +262,7 @@ DecodedAudioDataSink::InitializeAudioStream()
// AudioStream initialization can block for extended periods in unusual
// circumstances, so we take care to drop the decoder monitor while
// initializing.
RefPtr<AudioStream> audioStream(new AudioStream());
nsRefPtr<AudioStream> audioStream(new AudioStream());
nsresult rv = audioStream->Init(mInfo.mChannels, mInfo.mRate, mChannel);
if (NS_FAILED(rv)) {
audioStream->Shutdown();

View File

@ -70,7 +70,7 @@ private:
return !GetTaskQueue() || GetTaskQueue()->IsCurrentThreadIn();
}
RefPtr<TaskQueue> mTaskQueue;
nsRefPtr<TaskQueue> mTaskQueue;
nsTArray<nsRefPtr<MediaSourceTrackDemuxer>> mDemuxers;
nsTArray<nsRefPtr<TrackBuffersManager>> mSourceBuffers;

View File

@ -335,7 +335,7 @@ private:
{
return !GetTaskQueue() || GetTaskQueue()->IsCurrentThreadIn();
}
RefPtr<TaskQueue> mTaskQueue;
nsRefPtr<TaskQueue> mTaskQueue;
media::TimeInterval mAppendWindow;
media::TimeUnit mTimestampOffset;

View File

@ -41,7 +41,7 @@ struct VideoFrame {
VideoPlane Y;
VideoPlane Cb;
VideoPlane Cr;
mozilla::RefPtr<mozilla::layers::TextureClient> mGraphicBuffer;
nsRefPtr<mozilla::layers::TextureClient> mGraphicBuffer;
VideoFrame() :
mTimeUs(0),

View File

@ -314,7 +314,7 @@ void
MediaCodecReader::DispatchAudioTask()
{
if (mAudioTrack.mTaskQueue) {
RefPtr<nsIRunnable> task =
nsRefPtr<nsIRunnable> task =
NS_NewRunnableMethod(this,
&MediaCodecReader::DecodeAudioDataTask);
mAudioTrack.mTaskQueue->Dispatch(task.forget());
@ -325,7 +325,7 @@ void
MediaCodecReader::DispatchVideoTask(int64_t aTimeThreshold)
{
if (mVideoTrack.mTaskQueue) {
RefPtr<nsIRunnable> task =
nsRefPtr<nsIRunnable> task =
NS_NewRunnableMethodWithArg<int64_t>(this,
&MediaCodecReader::DecodeVideoFrameTask,
aTimeThreshold);
@ -813,7 +813,7 @@ MediaCodecReader::TextureClientRecycleCallback(TextureClient* aClient)
}
if (mVideoTrack.mReleaseBufferTaskQueue->IsEmpty()) {
RefPtr<nsIRunnable> task =
nsRefPtr<nsIRunnable> task =
NS_NewRunnableMethod(this,
&MediaCodecReader::WaitFenceAndReleaseOutputBuffer);
mVideoTrack.mReleaseBufferTaskQueue->Dispatch(task.forget());
@ -916,7 +916,7 @@ MediaCodecReader::DecodeVideoFrameSync(int64_t aTimeThreshold)
}
nsRefPtr<VideoData> v;
RefPtr<TextureClient> textureClient;
nsRefPtr<TextureClient> textureClient;
sp<GraphicBuffer> graphicBuffer;
if (bufferInfo.mBuffer != nullptr) {
MOZ_ASSERT(mStreamSource);

View File

@ -402,7 +402,7 @@ ConvertSourceSurfaceToNV12(const nsRefPtr<SourceSurface>& aSurface, uint8_t* aDe
return NS_ERROR_FAILURE;
}
RefPtr<DataSourceSurface> data = aSurface->GetDataSurface();
nsRefPtr<DataSourceSurface> data = aSurface->GetDataSurface();
if (!data) {
CODEC_ERROR("Getting data surface from %s image with %s (%s) surface failed",
Stringify(format).c_str(), Stringify(aSurface->GetType()).c_str(),

View File

@ -631,7 +631,7 @@ bool OmxDecoder::ReadVideo(VideoFrame *aFrame, int64_t aTimeUs,
unreadable = 0;
}
RefPtr<mozilla::layers::TextureClient> textureClient;
nsRefPtr<mozilla::layers::TextureClient> textureClient;
if ((mVideoBuffer->graphicBuffer().get())) {
textureClient = mNativeWindow->getTextureClientFromBuffer(mVideoBuffer->graphicBuffer().get());
}

View File

@ -12,7 +12,7 @@
#include "mozilla/MozPromise.h"
#include "mozilla/layers/LayersTypes.h"
#include "nsTArray.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include <queue>
namespace mozilla {

View File

@ -7,7 +7,7 @@
#include "MediaDecoderReader.h"
#include "PlatformDecoderModule.h"
#include "nsRect.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "mozilla/CheckedInt.h"
#include "VideoUtils.h"
#include "ImageContainer.h"
@ -72,7 +72,7 @@ public:
// The MediaDataDecoder must delete the sample when we're finished
// with it, so the OutputEvent stores it in an nsAutoPtr and deletes
// it once it's run.
RefPtr<nsIRunnable> r(new OutputEvent(aSample, mCallback, mCreator));
nsRefPtr<nsIRunnable> r(new OutputEvent(aSample, mCallback, mCreator));
mTaskQueue->Dispatch(r.forget());
return NS_OK;
}
@ -89,7 +89,7 @@ public:
private:
nsAutoPtr<BlankMediaDataCreator> mCreator;
RefPtr<FlushableTaskQueue> mTaskQueue;
nsRefPtr<FlushableTaskQueue> mTaskQueue;
MediaDataDecoderCallback* mCallback;
TrackInfo::TrackType mType;
};
@ -158,7 +158,7 @@ private:
gfx::IntRect mPicture;
uint32_t mFrameWidth;
uint32_t mFrameHeight;
RefPtr<layers::ImageContainer> mImageContainer;
nsRefPtr<layers::ImageContainer> mImageContainer;
};

View File

@ -280,7 +280,7 @@ OpusDataDecoder::DoDrain()
nsresult
OpusDataDecoder::Drain()
{
RefPtr<nsIRunnable> runnable(
nsRefPtr<nsIRunnable> runnable(
NS_NewRunnableMethod(this, &OpusDataDecoder::DoDrain));
mTaskQueue->Dispatch(runnable.forget());
return NS_OK;

View File

@ -38,7 +38,7 @@ private:
void DoDrain ();
const AudioInfo& mInfo;
RefPtr<FlushableTaskQueue> mTaskQueue;
nsRefPtr<FlushableTaskQueue> mTaskQueue;
MediaDataDecoderCallback* mCallback;
// Opus decoder state

View File

@ -49,7 +49,7 @@ private:
void OutputDelayedFrames ();
nsRefPtr<ImageContainer> mImageContainer;
RefPtr<FlushableTaskQueue> mTaskQueue;
nsRefPtr<FlushableTaskQueue> mTaskQueue;
MediaDataDecoderCallback* mCallback;
// VPx decoder state

View File

@ -41,7 +41,7 @@ private:
void DoDrain ();
const AudioInfo& mInfo;
RefPtr<FlushableTaskQueue> mTaskQueue;
nsRefPtr<FlushableTaskQueue> mTaskQueue;
MediaDataDecoderCallback* mCallback;
// Vorbis decoder state

View File

@ -51,7 +51,7 @@ SamplesWaitingForKey::NotifyUsable(const CencKeyId& aKeyId)
size_t i = 0;
while (i < mSamples.Length()) {
if (aKeyId == mSamples[i]->mCrypto.mKeyId) {
RefPtr<nsIRunnable> task;
nsRefPtr<nsIRunnable> task;
task = NS_NewRunnableMethodWithArg<nsRefPtr<MediaRawData>>(mDecoder,
&MediaDataDecoder::Input,
nsRefPtr<MediaRawData>(mSamples[i]));

View File

@ -208,7 +208,7 @@ protected:
layers::ImageContainer* mImageContainer;
const VideoInfo& mConfig;
RefPtr<AndroidSurfaceTexture> mSurfaceTexture;
nsRefPtr<AndroidSurfaceTexture> mSurfaceTexture;
nsRefPtr<GLContext> mGLContext;
};

View File

@ -7,7 +7,7 @@
#if !defined(GonkAudioDecoderManager_h_)
#define GonkAudioDecoderManager_h_
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "GonkMediaDataDecoder.h"
using namespace android;

View File

@ -145,7 +145,7 @@ public:
nsresult Shutdown() override;
private:
RefPtr<FlushableTaskQueue> mTaskQueue;
nsRefPtr<FlushableTaskQueue> mTaskQueue;
android::sp<GonkDecoderManager> mManager;
};

View File

@ -185,7 +185,7 @@ GonkVideoDecoderManager::CreateVideoData(int64_t aStreamOffset, VideoData **v)
picture.height = (mFrameInfo.mHeight * mPicture.height) / mInitialFrame.height;
}
RefPtr<mozilla::layers::TextureClient> textureClient;
nsRefPtr<mozilla::layers::TextureClient> textureClient;
if ((mVideoBuffer->graphicBuffer().get())) {
textureClient = mNativeWindow->getTextureClientFromBuffer(mVideoBuffer->graphicBuffer().get());

View File

@ -9,7 +9,7 @@
#include "nsRect.h"
#include "GonkMediaDataDecoder.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "I420ColorConverterHelper.h"
#include "MediaCodecProxy.h"
#include "GonkNativeWindow.h"

View File

@ -98,7 +98,7 @@ private:
nsRefPtr<IDirect3D9Ex> mD3D9;
nsRefPtr<IDirect3DDevice9Ex> mDevice;
nsRefPtr<IDirect3DDeviceManager9> mDeviceManager;
RefPtr<D3D9RecycleAllocator> mTextureClientAllocator;
nsRefPtr<D3D9RecycleAllocator> mTextureClientAllocator;
nsRefPtr<IDirectXVideoDecoderService> mDecoderService;
GUID mDecoderGUID;
UINT32 mResetToken;
@ -490,14 +490,14 @@ public:
private:
HRESULT CreateFormatConverter();
HRESULT CreateOutputSample(RefPtr<IMFSample>& aSample,
HRESULT CreateOutputSample(nsRefPtr<IMFSample>& aSample,
ID3D11Texture2D* aTexture);
RefPtr<ID3D11Device> mDevice;
RefPtr<ID3D11DeviceContext> mContext;
RefPtr<IMFDXGIDeviceManager> mDXGIDeviceManager;
RefPtr<MFTDecoder> mTransform;
RefPtr<D3D11RecycleAllocator> mTextureClientAllocator;
nsRefPtr<ID3D11Device> mDevice;
nsRefPtr<ID3D11DeviceContext> mContext;
nsRefPtr<IMFDXGIDeviceManager> mDXGIDeviceManager;
nsRefPtr<MFTDecoder> mTransform;
nsRefPtr<D3D11RecycleAllocator> mTextureClientAllocator;
GUID mDecoderGUID;
uint32_t mWidth;
uint32_t mHeight;
@ -508,8 +508,8 @@ private:
bool
D3D11DXVA2Manager::SupportsConfig(IMFMediaType* aType, float aFramerate)
{
RefPtr<ID3D11VideoDevice> videoDevice;
HRESULT hr = mDevice->QueryInterface(static_cast<ID3D11VideoDevice**>(byRef(videoDevice)));
nsRefPtr<ID3D11VideoDevice> videoDevice;
HRESULT hr = mDevice->QueryInterface(static_cast<ID3D11VideoDevice**>(getter_AddRefs(videoDevice)));
NS_ENSURE_TRUE(SUCCEEDED(hr), false);
D3D11_VIDEO_DECODER_DESC desc;
@ -581,13 +581,13 @@ D3D11DXVA2Manager::Init(nsACString& aFailureReason)
return E_FAIL;
}
mDevice->GetImmediateContext(byRef(mContext));
mDevice->GetImmediateContext(getter_AddRefs(mContext));
if (!mContext) {
aFailureReason.AssignLiteral("Failed to get immediate context for d3d11 device");
return E_FAIL;
}
hr = wmf::MFCreateDXGIDeviceManager(&mDeviceManagerToken, byRef(mDXGIDeviceManager));
hr = wmf::MFCreateDXGIDeviceManager(&mDeviceManagerToken, getter_AddRefs(mDXGIDeviceManager));
if (!SUCCEEDED(hr)) {
aFailureReason = nsPrintfCString("MFCreateDXGIDeviceManager failed with code %X", hr);
return hr;
@ -612,8 +612,8 @@ D3D11DXVA2Manager::Init(nsACString& aFailureReason)
return hr;
}
RefPtr<ID3D11VideoDevice> videoDevice;
hr = mDevice->QueryInterface(static_cast<ID3D11VideoDevice**>(byRef(videoDevice)));
nsRefPtr<ID3D11VideoDevice> videoDevice;
hr = mDevice->QueryInterface(static_cast<ID3D11VideoDevice**>(getter_AddRefs(videoDevice)));
if (!SUCCEEDED(hr)) {
aFailureReason = nsPrintfCString("QI to ID3D11VideoDevice failed with code %X", hr);
return hr;
@ -646,8 +646,8 @@ D3D11DXVA2Manager::Init(nsACString& aFailureReason)
return E_FAIL;
}
RefPtr<IDXGIDevice> dxgiDevice;
hr = mDevice->QueryInterface(static_cast<IDXGIDevice**>(byRef(dxgiDevice)));
nsRefPtr<IDXGIDevice> dxgiDevice;
hr = mDevice->QueryInterface(static_cast<IDXGIDevice**>(getter_AddRefs(dxgiDevice)));
if (!SUCCEEDED(hr)) {
aFailureReason = nsPrintfCString("QI to IDXGIDevice failed with code %X", hr);
return hr;
@ -684,14 +684,14 @@ D3D11DXVA2Manager::Init(nsACString& aFailureReason)
}
HRESULT
D3D11DXVA2Manager::CreateOutputSample(RefPtr<IMFSample>& aSample, ID3D11Texture2D* aTexture)
D3D11DXVA2Manager::CreateOutputSample(nsRefPtr<IMFSample>& aSample, ID3D11Texture2D* aTexture)
{
RefPtr<IMFSample> sample;
HRESULT hr = wmf::MFCreateSample(byRef(sample));
nsRefPtr<IMFSample> sample;
HRESULT hr = wmf::MFCreateSample(getter_AddRefs(sample));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
RefPtr<IMFMediaBuffer> buffer;
hr = wmf::MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D), aTexture, 0, FALSE, byRef(buffer));
nsRefPtr<IMFMediaBuffer> buffer;
hr = wmf::MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D), aTexture, 0, FALSE, getter_AddRefs(buffer));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
sample->AddBuffer(buffer);
@ -729,13 +729,13 @@ D3D11DXVA2Manager::CopyToImage(IMFSample* aVideoSample,
hr = mTransform->Input(aVideoSample);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
RefPtr<IMFSample> sample;
RefPtr<ID3D11Texture2D> texture = videoImage->GetTexture();
nsRefPtr<IMFSample> sample;
nsRefPtr<ID3D11Texture2D> texture = videoImage->GetTexture();
hr = CreateOutputSample(sample, texture);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
RefPtr<IDXGIKeyedMutex> keyedMutex;
hr = texture->QueryInterface(static_cast<IDXGIKeyedMutex**>(byRef(keyedMutex)));
nsRefPtr<IDXGIKeyedMutex> keyedMutex;
hr = texture->QueryInterface(static_cast<IDXGIKeyedMutex**>(getter_AddRefs(keyedMutex)));
NS_ENSURE_TRUE(SUCCEEDED(hr) && keyedMutex, hr);
hr = keyedMutex->AcquireSync(0, INFINITE);
@ -772,8 +772,8 @@ D3D11DXVA2Manager::ConfigureForSize(uint32_t aWidth, uint32_t aHeight)
mWidth = aWidth;
mHeight = aHeight;
RefPtr<IMFMediaType> inputType;
HRESULT hr = wmf::MFCreateMediaType(byRef(inputType));
nsRefPtr<IMFMediaType> inputType;
HRESULT hr = wmf::MFCreateMediaType(getter_AddRefs(inputType));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
@ -788,7 +788,7 @@ D3D11DXVA2Manager::ConfigureForSize(uint32_t aWidth, uint32_t aHeight)
hr = inputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
RefPtr<IMFAttributes> attr = mTransform->GetAttributes();
nsRefPtr<IMFAttributes> attr = mTransform->GetAttributes();
hr = attr->SetUINT32(MF_XVP_PLAYBACK_MODE, TRUE);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
@ -799,8 +799,8 @@ D3D11DXVA2Manager::ConfigureForSize(uint32_t aWidth, uint32_t aHeight)
hr = MFSetAttributeSize(inputType, MF_MT_FRAME_SIZE, aWidth, aHeight);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
RefPtr<IMFMediaType> outputType;
hr = wmf::MFCreateMediaType(byRef(outputType));
nsRefPtr<IMFMediaType> outputType;
hr = wmf::MFCreateMediaType(getter_AddRefs(outputType));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = outputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);

View File

@ -35,7 +35,7 @@ MFTDecoder::Create(const GUID& aMFTClsID)
nullptr,
CLSCTX_INPROC_SERVER,
IID_IMFTransform,
reinterpret_cast<void**>(static_cast<IMFTransform**>(byRef(mDecoder))));
reinterpret_cast<void**>(static_cast<IMFTransform**>(getter_AddRefs(mDecoder))));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
return S_OK;
@ -71,8 +71,8 @@ MFTDecoder::SetMediaTypes(IMFMediaType* aInputType,
already_AddRefed<IMFAttributes>
MFTDecoder::GetAttributes()
{
RefPtr<IMFAttributes> attr;
HRESULT hr = mDecoder->GetAttributes(byRef(attr));
nsRefPtr<IMFAttributes> attr;
HRESULT hr = mDecoder->GetAttributes(getter_AddRefs(attr));
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
return attr.forget();
}
@ -85,9 +85,9 @@ MFTDecoder::SetDecoderOutputType(ConfigureOutputCallback aCallback, void* aData)
// Iterate the enumerate the output types, until we find one compatible
// with what we need.
HRESULT hr;
RefPtr<IMFMediaType> outputType;
nsRefPtr<IMFMediaType> outputType;
UINT32 typeIndex = 0;
while (SUCCEEDED(mDecoder->GetOutputAvailableType(0, typeIndex++, byRef(outputType)))) {
while (SUCCEEDED(mDecoder->GetOutputAvailableType(0, typeIndex++, getter_AddRefs(outputType)))) {
BOOL resultMatch;
hr = mOutputType->Compare(outputType, MF_ATTRIBUTES_MATCH_OUR_ITEMS, &resultMatch);
if (SUCCEEDED(hr) && resultMatch == TRUE) {
@ -123,19 +123,19 @@ HRESULT
MFTDecoder::CreateInputSample(const uint8_t* aData,
uint32_t aDataSize,
int64_t aTimestamp,
RefPtr<IMFSample>* aOutSample)
nsRefPtr<IMFSample>* aOutSample)
{
NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER);
HRESULT hr;
RefPtr<IMFSample> sample;
hr = wmf::MFCreateSample(byRef(sample));
nsRefPtr<IMFSample> sample;
hr = wmf::MFCreateSample(getter_AddRefs(sample));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
RefPtr<IMFMediaBuffer> buffer;
nsRefPtr<IMFMediaBuffer> buffer;
int32_t bufferSize = std::max<uint32_t>(uint32_t(mInputStreamInfo.cbSize), aDataSize);
UINT32 alignment = (mInputStreamInfo.cbAlignment > 1) ? mInputStreamInfo.cbAlignment - 1 : 0;
hr = wmf::MFCreateAlignedMemoryBuffer(bufferSize, alignment, byRef(buffer));
hr = wmf::MFCreateAlignedMemoryBuffer(bufferSize, alignment, getter_AddRefs(buffer));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
DWORD maxLength = 0;
@ -165,19 +165,19 @@ MFTDecoder::CreateInputSample(const uint8_t* aData,
}
HRESULT
MFTDecoder::CreateOutputSample(RefPtr<IMFSample>* aOutSample)
MFTDecoder::CreateOutputSample(nsRefPtr<IMFSample>* aOutSample)
{
NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER);
HRESULT hr;
RefPtr<IMFSample> sample;
hr = wmf::MFCreateSample(byRef(sample));
nsRefPtr<IMFSample> sample;
hr = wmf::MFCreateSample(getter_AddRefs(sample));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
RefPtr<IMFMediaBuffer> buffer;
nsRefPtr<IMFMediaBuffer> buffer;
int32_t bufferSize = mOutputStreamInfo.cbSize;
UINT32 alignment = (mOutputStreamInfo.cbAlignment > 1) ? mOutputStreamInfo.cbAlignment - 1 : 0;
hr = wmf::MFCreateAlignedMemoryBuffer(bufferSize, alignment, byRef(buffer));
hr = wmf::MFCreateAlignedMemoryBuffer(bufferSize, alignment, getter_AddRefs(buffer));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = sample->AddBuffer(buffer);
@ -189,7 +189,7 @@ MFTDecoder::CreateOutputSample(RefPtr<IMFSample>* aOutSample)
}
HRESULT
MFTDecoder::Output(RefPtr<IMFSample>* aOutput)
MFTDecoder::Output(nsRefPtr<IMFSample>* aOutput)
{
NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER);
@ -198,7 +198,7 @@ MFTDecoder::Output(RefPtr<IMFSample>* aOutput)
MFT_OUTPUT_DATA_BUFFER output = {0};
bool providedSample = false;
RefPtr<IMFSample> sample;
nsRefPtr<IMFSample> sample;
if (*aOutput) {
output.pSample = *aOutput;
providedSample = true;
@ -263,7 +263,7 @@ MFTDecoder::Input(const uint8_t* aData,
{
NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER);
RefPtr<IMFSample> input;
nsRefPtr<IMFSample> input;
HRESULT hr = CreateInputSample(aData, aDataSize, aTimestamp, &input);
NS_ENSURE_TRUE(SUCCEEDED(hr) && input != nullptr, hr);
@ -295,10 +295,10 @@ MFTDecoder::Flush()
}
HRESULT
MFTDecoder::GetOutputMediaType(RefPtr<IMFMediaType>& aMediaType)
MFTDecoder::GetOutputMediaType(nsRefPtr<IMFMediaType>& aMediaType)
{
NS_ENSURE_TRUE(mDecoder, E_POINTER);
return mDecoder->GetOutputCurrentType(0, byRef(aMediaType));
return mDecoder->GetOutputCurrentType(0, getter_AddRefs(aMediaType));
}
} // namespace mozilla

View File

@ -8,7 +8,7 @@
#define MFTDecoder_h_
#include "WMF.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "mozilla/ReentrantMonitor.h"
#include "nsIThread.h"
@ -47,7 +47,7 @@ public:
// Retrieves the media type being output. This may not be valid until
// the first sample is decoded.
HRESULT GetOutputMediaType(RefPtr<IMFMediaType>& aMediaType);
HRESULT GetOutputMediaType(nsRefPtr<IMFMediaType>& aMediaType);
// Submits data into the MFT for processing.
//
@ -62,7 +62,7 @@ public:
HRESULT CreateInputSample(const uint8_t* aData,
uint32_t aDataSize,
int64_t aTimestampUsecs,
RefPtr<IMFSample>* aOutSample);
nsRefPtr<IMFSample>* aOutSample);
// Retrieves output from the MFT. Call this once Input() returns
// MF_E_NOTACCEPTING. Some MFTs with hardware acceleration (the H.264
@ -76,7 +76,7 @@ public:
// - MF_E_TRANSFORM_NEED_MORE_INPUT if no output can be produced
// due to lack of input.
// - S_OK if an output frame is produced.
HRESULT Output(RefPtr<IMFSample>* aOutput);
HRESULT Output(nsRefPtr<IMFSample>* aOutput);
// Sends a flush message to the MFT. This causes it to discard all
// input data. Use before seeking.
@ -90,14 +90,14 @@ public:
private:
HRESULT CreateOutputSample(RefPtr<IMFSample>* aOutSample);
HRESULT CreateOutputSample(nsRefPtr<IMFSample>* aOutSample);
MFT_INPUT_STREAM_INFO mInputStreamInfo;
MFT_OUTPUT_STREAM_INFO mOutputStreamInfo;
RefPtr<IMFTransform> mDecoder;
nsRefPtr<IMFTransform> mDecoder;
RefPtr<IMFMediaType> mOutputType;
nsRefPtr<IMFMediaType> mOutputType;
// True if the IMFTransform allocates the samples that it returns.
bool mMFTProvidesOutputSamples;

View File

@ -118,15 +118,15 @@ WMFAudioMFTManager::Init()
{
NS_ENSURE_TRUE(mStreamType != Unknown, false);
RefPtr<MFTDecoder> decoder(new MFTDecoder());
nsRefPtr<MFTDecoder> decoder(new MFTDecoder());
HRESULT hr = decoder->Create(GetMFTGUID());
NS_ENSURE_TRUE(SUCCEEDED(hr), false);
// Setup input/output media types
RefPtr<IMFMediaType> inputType;
nsRefPtr<IMFMediaType> inputType;
hr = wmf::MFCreateMediaType(byRef(inputType));
hr = wmf::MFCreateMediaType(getter_AddRefs(inputType));
NS_ENSURE_TRUE(SUCCEEDED(hr), false);
hr = inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
@ -151,8 +151,8 @@ WMFAudioMFTManager::Init()
NS_ENSURE_TRUE(SUCCEEDED(hr), false);
}
RefPtr<IMFMediaType> outputType;
hr = wmf::MFCreateMediaType(byRef(outputType));
nsRefPtr<IMFMediaType> outputType;
hr = wmf::MFCreateMediaType(getter_AddRefs(outputType));
NS_ENSURE_TRUE(SUCCEEDED(hr), false);
hr = outputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
@ -185,7 +185,7 @@ WMFAudioMFTManager::UpdateOutputType()
{
HRESULT hr;
RefPtr<IMFMediaType> type;
nsRefPtr<IMFMediaType> type;
hr = mDecoder->GetOutputMediaType(type);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
@ -203,7 +203,7 @@ WMFAudioMFTManager::Output(int64_t aStreamOffset,
nsRefPtr<MediaData>& aOutData)
{
aOutData = nullptr;
RefPtr<IMFSample> sample;
nsRefPtr<IMFSample> sample;
HRESULT hr;
int typeChangeCount = 0;
while (true) {
@ -226,8 +226,8 @@ WMFAudioMFTManager::Output(int64_t aStreamOffset,
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
RefPtr<IMFMediaBuffer> buffer;
hr = sample->ConvertToContiguousBuffer(byRef(buffer));
nsRefPtr<IMFMediaBuffer> buffer;
hr = sample->ConvertToContiguousBuffer(getter_AddRefs(buffer));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
BYTE* data = nullptr; // Note: *data will be owned by the IMFMediaBuffer, we don't need to free it.

View File

@ -9,7 +9,7 @@
#include "WMF.h"
#include "MFTDecoder.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "WMFMediaDataDecoder.h"
extern const GUID CLSID_WebmMfVp8Dec;

View File

@ -130,7 +130,7 @@ CanCreateMFTDecoder(const GUID& aGuid)
if (FAILED(wmf::MFStartup())) {
return false;
}
RefPtr<MFTDecoder> decoder(new MFTDecoder());
nsRefPtr<MFTDecoder> decoder(new MFTDecoder());
bool hasH264 = SUCCEEDED(decoder->Create(aGuid));
wmf::MFShutdown();
return hasH264;

View File

@ -10,7 +10,7 @@
#include "WMF.h"
#include "MFTDecoder.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "PlatformDecoderModule.h"
namespace mozilla {
@ -54,7 +54,7 @@ public:
protected:
// IMFTransform wrapper that performs the decoding.
RefPtr<MFTDecoder> mDecoder;
nsRefPtr<MFTDecoder> mDecoder;
};
// Decodes audio and video using Windows Media Foundation. Samples are decoded
@ -101,7 +101,7 @@ private:
void ProcessShutdown();
RefPtr<FlushableTaskQueue> mTaskQueue;
nsRefPtr<FlushableTaskQueue> mTaskQueue;
MediaDataDecoderCallback* mCallback;
nsAutoPtr<MFTManager> mMFTManager;

View File

@ -7,7 +7,7 @@
#include "WMFUtils.h"
#include <stdint.h>
#include "mozilla/ArrayUtils.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "mozilla/WindowsVersion.h"
#include "mozilla/Logging.h"
#include "nsThreadUtils.h"

View File

@ -205,12 +205,12 @@ WMFVideoMFTManager::InitInternal(bool aForceD3D9)
mUseHwAccel = false; // default value; changed if D3D setup succeeds.
bool useDxva = InitializeDXVA(aForceD3D9);
RefPtr<MFTDecoder> decoder(new MFTDecoder());
nsRefPtr<MFTDecoder> decoder(new MFTDecoder());
HRESULT hr = decoder->Create(GetMFTGUID());
NS_ENSURE_TRUE(SUCCEEDED(hr), false);
RefPtr<IMFAttributes> attr(decoder->GetAttributes());
nsRefPtr<IMFAttributes> attr(decoder->GetAttributes());
UINT32 aware = 0;
if (attr) {
attr->GetUINT32(MF_SA_D3D_AWARE, &aware);
@ -265,8 +265,8 @@ HRESULT
WMFVideoMFTManager::SetDecoderMediaTypes()
{
// Setup the input/output media types.
RefPtr<IMFMediaType> inputType;
HRESULT hr = wmf::MFCreateMediaType(byRef(inputType));
nsRefPtr<IMFMediaType> inputType;
HRESULT hr = wmf::MFCreateMediaType(getter_AddRefs(inputType));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
@ -278,8 +278,8 @@ WMFVideoMFTManager::SetDecoderMediaTypes()
hr = inputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_MixedInterlaceOrProgressive);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
RefPtr<IMFMediaType> outputType;
hr = wmf::MFCreateMediaType(byRef(outputType));
nsRefPtr<IMFMediaType> outputType;
hr = wmf::MFCreateMediaType(getter_AddRefs(outputType));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
hr = outputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
@ -342,7 +342,7 @@ WMFVideoMFTManager::CanUseDXVA(IMFMediaType* aType)
HRESULT
WMFVideoMFTManager::ConfigureVideoFrameGeometry()
{
RefPtr<IMFMediaType> mediaType;
nsRefPtr<IMFMediaType> mediaType;
HRESULT hr = mDecoder->GetOutputMediaType(mediaType);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
@ -429,10 +429,10 @@ WMFVideoMFTManager::CreateBasicVideoFrame(IMFSample* aSample,
*aOutVideoData = nullptr;
HRESULT hr;
RefPtr<IMFMediaBuffer> buffer;
nsRefPtr<IMFMediaBuffer> buffer;
// Must convert to contiguous buffer to use IMD2DBuffer interface.
hr = aSample->ConvertToContiguousBuffer(byRef(buffer));
hr = aSample->ConvertToContiguousBuffer(getter_AddRefs(buffer));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
// Try and use the IMF2DBuffer interface if available, otherwise fallback
@ -440,8 +440,8 @@ WMFVideoMFTManager::CreateBasicVideoFrame(IMFSample* aSample,
// but only some systems (Windows 8?) support it.
BYTE* data = nullptr;
LONG stride = 0;
RefPtr<IMF2DBuffer> twoDBuffer;
hr = buffer->QueryInterface(static_cast<IMF2DBuffer**>(byRef(twoDBuffer)));
nsRefPtr<IMF2DBuffer> twoDBuffer;
hr = buffer->QueryInterface(static_cast<IMF2DBuffer**>(getter_AddRefs(twoDBuffer)));
if (SUCCEEDED(hr)) {
hr = twoDBuffer->Lock2D(&data, &stride);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
@ -566,7 +566,7 @@ HRESULT
WMFVideoMFTManager::Output(int64_t aStreamOffset,
nsRefPtr<MediaData>& aOutData)
{
RefPtr<IMFSample> sample;
nsRefPtr<IMFSample> sample;
HRESULT hr;
aOutData = nullptr;
int typeChangeCount = 0;

View File

@ -11,7 +11,7 @@
#include "MFTDecoder.h"
#include "nsRect.h"
#include "WMFMediaDataDecoder.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
namespace mozilla {
@ -66,10 +66,10 @@ private:
uint32_t mVideoHeight;
nsIntRect mPictureRegion;
RefPtr<layers::ImageContainer> mImageContainer;
nsRefPtr<layers::ImageContainer> mImageContainer;
nsAutoPtr<DXVA2Manager> mDXVA2Manager;
RefPtr<IMFSample> mLastInput;
nsRefPtr<IMFSample> mLastInput;
float mLastDuration;
bool mDXVAEnabled;

View File

@ -536,7 +536,7 @@ class LoadInfoCollectRunner : public nsRunnable
{
public:
LoadInfoCollectRunner(nsRefPtr<LoadMonitor> loadMonitor,
RefPtr<RTCLoadInfo> loadInfo,
nsRefPtr<RTCLoadInfo> loadInfo,
nsIThread *loadInfoThread)
: mThread(loadInfoThread),
mLoadUpdateInterval(loadMonitor->mLoadUpdateInterval),
@ -585,7 +585,7 @@ public:
private:
nsCOMPtr<nsIThread> mThread;
RefPtr<RTCLoadInfo> mLoadInfo;
nsRefPtr<RTCLoadInfo> mLoadInfo;
nsRefPtr<LoadMonitor> mLoadMonitor;
int mLoadUpdateInterval;
int mLoadNoiseCounter;
@ -629,7 +629,7 @@ LoadMonitor::Init(nsRefPtr<LoadMonitor> &self)
{
LOG(("Initializing LoadMonitor"));
RefPtr<RTCLoadInfo> load_info = new RTCLoadInfo();
nsRefPtr<RTCLoadInfo> load_info = new RTCLoadInfo();
nsresult rv = load_info->Init(mLoadUpdateInterval);
if (NS_FAILED(rv)) {

View File

@ -8,7 +8,7 @@
#include "mozilla/Mutex.h"
#include "mozilla/CondVar.h"
#include "mozilla/RefPtr.h"
#include "mozilla/nsRefPtr.h"
#include "mozilla/Atomics.h"
#include "nsAutoPtr.h"
#include "nsCOMPtr.h"

View File

@ -480,7 +480,7 @@ AsyncDecodeWebAudio(const char* aContentType, uint8_t* aBuffer,
return;
}
RefPtr<MediaDecodeTask> task =
nsRefPtr<MediaDecodeTask> task =
new MediaDecodeTask(aContentType, aBuffer, aLength, aDecodeJob);
if (!task->CreateReader()) {
nsCOMPtr<nsIRunnable> event =

Some files were not shown because too many files have changed in this diff Show More