Bug 784739 - Switch from NULL to nullptr in content/; r=ehsan

--HG--
extra : rebase_source : 9237726507e8002479616a98a82646a763932507
This commit is contained in:
Birunthan Mohanathas 2013-10-23 16:34:10 -04:00
parent 7356a80cad
commit 3560f05e45
36 changed files with 111 additions and 110 deletions

View File

@ -78,8 +78,8 @@ private:
// Creates and returns an encoder instance of the type specified in aType.
// aType may change to "image/png" if no instance of the original type could
// be created and we had to fall back to a PNG encoder. A return value of
// NULL should be interpreted as NS_IMAGELIB_ERROR_NO_ENCODER and aType is
// be created and we had to fall back to a PNG encoder. A null return value
// should be interpreted as NS_IMAGELIB_ERROR_NO_ENCODER and aType is
// undefined in this case.
static already_AddRefed<imgIEncoder> GetImageEncoder(nsAString& aType);

View File

@ -520,7 +520,8 @@ DecoderTraits::CreateDecoder(const nsACString& aType, MediaDecoderOwner* aOwner)
}
#endif
#ifdef MOZ_MEDIA_PLUGINS
if (MediaDecoder::IsMediaPluginsEnabled() && GetMediaPluginHost()->FindDecoder(aType, NULL)) {
if (MediaDecoder::IsMediaPluginsEnabled() &&
GetMediaPluginHost()->FindDecoder(aType, nullptr)) {
decoder = new MediaPluginDecoder(aType);
}
#endif

View File

@ -166,14 +166,14 @@ public:
Plane mPlanes[3];
};
// Constructs a VideoData object. If aImage is NULL, creates a new Image
// holding a copy of the YCbCr data passed in aBuffer. If aImage is not NULL,
// it's stored as the underlying video image and aBuffer is assumed to point
// to memory within aImage so no copy is made. aTimecode is a codec specific
// number representing the timestamp of the frame of video data. Returns
// nsnull if an error occurs. This may indicate that memory couldn't be
// allocated to create the VideoData object, or it may indicate some problem
// with the input data (e.g. negative stride).
// Constructs a VideoData object. If aImage is nullptr, creates a new Image
// holding a copy of the YCbCr data passed in aBuffer. If aImage is not
// nullptr, it's stored as the underlying video image and aBuffer is assumed
// to point to memory within aImage so no copy is made. aTimecode is a codec
// specific number representing the timestamp of the frame of video data.
// Returns nullptr if an error occurs. This may indicate that memory couldn't
// be allocated to create the VideoData object, or it may indicate some
// problem with the input data (e.g. negative stride).
static VideoData* Create(VideoInfo& aInfo,
ImageContainer* aContainer,
Image* aImage,

View File

@ -14,7 +14,7 @@
#include <initguid.h>
#include <wmsdkidl.h>
#define DELETE_RESET(p) { delete (p) ; (p) = NULL ;}
#define DELETE_RESET(p) { delete (p) ; (p) = nullptr ;}
DEFINE_GUID(CLSID_MozAudioSinkFilter, 0x1872d8c8, 0xea8d, 0x4c34, 0xae, 0x96, 0x69, 0xde,
0xf1, 0x33, 0x7b, 0x33);

View File

@ -83,7 +83,7 @@ DirectShowReader::ReadMetadata(MediaInfo* aInfo,
// Create the filter graph, reference it by the GraphBuilder interface,
// to make graph building more convenient.
hr = CoCreateInstance(CLSID_FilterGraph,
NULL,
nullptr,
CLSCTX_INPROC_SERVER,
IID_IGraphBuilder,
reinterpret_cast<void**>(static_cast<IGraphBuilder**>(byRef(mGraph))));
@ -217,7 +217,7 @@ DirectShowReader::Finish(HRESULT aStatus)
RefPtr<IMediaEventSink> eventSink;
HRESULT hr = mGraph->QueryInterface(static_cast<IMediaEventSink**>(byRef(eventSink)));
if (SUCCEEDED(hr) && eventSink) {
eventSink->Notify(EC_COMPLETE, aStatus, NULL);
eventSink->Notify(EC_COMPLETE, aStatus, 0);
}
return false;
}

View File

@ -163,7 +163,7 @@ GetGraphNotifyString(long evCode)
CASE(VFW_S_VIDEO_NOT_RENDERED); // Partial success; the video was not rendered.
CASE(E_ABORT); // Operation aborted.
CASE(E_OUTOFMEMORY); // Insufficient memory.
CASE(E_POINTER); // NULL pointer argument.
CASE(E_POINTER); // Null pointer argument.
CASE(VFW_E_CANNOT_CONNECT); // No combination of intermediate filters could be found to make the connection.
CASE(VFW_E_CANNOT_RENDER); // No combination of filters could be found to render the stream.
CASE(VFW_E_NO_ACCEPTABLE_TYPES); // There is no common media type between these pins.
@ -187,7 +187,7 @@ CreateAndAddFilter(IGraphBuilder* aGraph,
nsRefPtr<IBaseFilter> filter;
hr = CoCreateInstance(aFilterClsId,
NULL,
nullptr,
CLSCTX_INPROC_SERVER,
IID_IBaseFilter,
getter_AddRefs(filter));
@ -215,7 +215,7 @@ AddMP3DMOWrapperFilter(IGraphBuilder* aGraph,
// Create the wrapper filter.
nsRefPtr<IBaseFilter> filter;
hr = CoCreateInstance(CLSID_DMOWrapperFilter,
NULL,
nullptr,
CLSCTX_INPROC_SERVER,
IID_IBaseFilter,
getter_AddRefs(filter));
@ -281,7 +281,7 @@ GetUnconnectedPin(IBaseFilter* aFilter, PIN_DIRECTION aPinDir)
// Test each pin to see if it matches the direction we're looking for.
RefPtr<IPin> pin;
while (S_OK == enumPins->Next(1, byRef(pin), NULL)) {
while (S_OK == enumPins->Next(1, byRef(pin), nullptr)) {
bool matches = FALSE;
if (SUCCEEDED(MatchUnconnectedPin(pin, aPinDir, &matches)) &&
matches) {

View File

@ -22,7 +22,7 @@ public:
: mLock(aLock)
{
CriticalSectionAutoEnter lock(*mLock);
mEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
mEvent = CreateEvent(nullptr, FALSE, FALSE, nullptr);
}
~Signal() {

View File

@ -385,7 +385,7 @@ OutputPin::RequestAllocator(IMemAllocator* aPreferred,
CLSCTX_INPROC_SERVER,
IID_IMemAllocator,
getter_AddRefs(allocator));
if(FAILED(hr) || (allocator == NULL)) {
if(FAILED(hr) || (allocator == nullptr)) {
NS_WARNING("Can't create our own DirectShow allocator.");
return hr;
}
@ -451,7 +451,7 @@ OutputPin::WaitForNext(DWORD aTimeout,
NS_ASSERTION(aTimeout == 0 || aTimeout == INFINITE,
"Oops, we don't handle this!");
*aOutSample = NULL;
*aOutSample = nullptr;
*aOutDwUser = 0;
LONGLONG offset = 0;
@ -657,7 +657,7 @@ SourceFilter::GetPin(int n)
NS_ASSERTION(mOutputPin != 0, "GetPin with no pin!");
return static_cast<BasePin*>(mOutputPin);
} else {
return NULL;
return nullptr;
}
}

View File

@ -143,7 +143,7 @@ bool GStreamerFormatHelper::CanHandleMediaType(const nsACString& aMIMEType,
}
const char *type;
NS_CStringGetData(aMIMEType, &type, NULL);
NS_CStringGetData(aMIMEType, &type, nullptr);
GstCaps *caps;
if (aCodecs && !aCodecs->IsEmpty()) {

View File

@ -44,7 +44,7 @@ load_gstreamer()
return true;
}
void *gstreamerLib = NULL;
void *gstreamerLib = nullptr;
guint major = 0;
guint minor = 0;
guint micro, nano;

View File

@ -49,7 +49,7 @@ gst_moz_video_buffer_copy(GstMozVideoBuffer* self)
{
GstMozVideoBuffer* copy;
g_return_val_if_fail(GST_IS_MOZ_VIDEO_BUFFER(self), NULL);
g_return_val_if_fail(GST_IS_MOZ_VIDEO_BUFFER(self), nullptr);
copy = gst_moz_video_buffer_new();
@ -95,7 +95,7 @@ gst_moz_video_buffer_set_data(GstMozVideoBuffer* self, GstMozVideoBufferData* da
GstMozVideoBufferData*
gst_moz_video_buffer_get_data(const GstMozVideoBuffer* self)
{
g_return_val_if_fail(GST_IS_MOZ_VIDEO_BUFFER(self), NULL);
g_return_val_if_fail(GST_IS_MOZ_VIDEO_BUFFER(self), nullptr);
return self->data;
}

View File

@ -199,7 +199,7 @@ void GStreamerReader::PlayBinSourceSetupCb(GstElement* aPlayBin,
GstElement *source;
GStreamerReader* reader = reinterpret_cast<GStreamerReader*>(aUserData);
g_object_get(aPlayBin, "source", &source, NULL);
g_object_get(aPlayBin, "source", &source, nullptr);
reader->PlayBinSourceSetup(GST_APP_SRC(source));
}

View File

@ -309,7 +309,7 @@ bool TheoraState::Init() {
}
mCtx = th_decode_alloc(&mInfo, mSetup);
if (mCtx == NULL) {
if (mCtx == nullptr) {
return mActive = false;
}
@ -838,7 +838,7 @@ OpusState::OpusState(ogg_page* aBosPage) :
mChannelMapping(0),
mStreams(0),
mCoupledStreams(0),
mDecoder(NULL),
mDecoder(nullptr),
mSkip(0),
mPrevPacketGranulepos(0),
mPrevPageGranulepos(0)
@ -852,7 +852,7 @@ OpusState::~OpusState() {
if (mDecoder) {
opus_multistream_decoder_destroy(mDecoder);
mDecoder = NULL;
mDecoder = nullptr;
}
}
@ -893,7 +893,7 @@ bool OpusState::Init(void)
int error;
NS_ASSERTION(mDecoder == NULL, "leaking OpusDecoder");
NS_ASSERTION(mDecoder == nullptr, "leaking OpusDecoder");
mDecoder = opus_multistream_decoder_create(mRate,
mChannels,

View File

@ -313,7 +313,7 @@ nsresult OggReader::ReadMetadata(MediaInfo* aInfo,
mInfo.mAudio.mChannels = mVorbisState->mInfo.channels > 2 ? 2 : mVorbisState->mInfo.channels;
// Copy Vorbis info data for time computations on other threads.
memcpy(&mVorbisInfo, &mVorbisState->mInfo, sizeof(mVorbisInfo));
mVorbisInfo.codec_setup = NULL;
mVorbisInfo.codec_setup = nullptr;
mVorbisSerial = mVorbisState->mSerial;
*aTags = mVorbisState->GetTags();
} else {

View File

@ -35,7 +35,7 @@ sp<OMXCodecProxy> OMXCodecProxy::Create(
const char *mime;
if (!meta->findCString(kKeyMIMEType, &mime)) {
return NULL;
return nullptr;
}
if (!strncasecmp(mime, "video/", 6)) {
@ -57,7 +57,7 @@ OMXCodecProxy::OMXCodecProxy(
mSrcMeta(meta),
mIsEncoder(createEncoder),
mSource(source),
mComponentName(NULL),
mComponentName(nullptr),
mFlags(flags),
mNativeWindow(nativeWindow),
mState(MediaResourceManagerClient::CLIENT_STATE_WAIT_FOR_RESOURCE)
@ -71,7 +71,7 @@ OMXCodecProxy::~OMXCodecProxy()
if (mOMXCodec.get()) {
wp<MediaSource> tmp = mOMXCodec;
mOMXCodec.clear();
while (tmp.promote() != NULL) {
while (tmp.promote() != nullptr) {
// this value come from stagefrigh's AwesomePlayer.
usleep(1000);
}
@ -85,7 +85,7 @@ OMXCodecProxy::~OMXCodecProxy()
mSource.clear();
free(mComponentName);
mComponentName = NULL;
mComponentName = nullptr;
}
MediaResourceManagerClient::State OMXCodecProxy::getState()
@ -102,9 +102,9 @@ void OMXCodecProxy::setEventListener(const wp<OMXCodecProxy::EventListener>& lis
void OMXCodecProxy::notifyStatusChangedLocked()
{
if (mEventListener != NULL) {
if (mEventListener != nullptr) {
sp<EventListener> listener = mEventListener.promote();
if (listener != NULL) {
if (listener != nullptr) {
listener->statusChanged();
}
}
@ -122,7 +122,7 @@ void OMXCodecProxy::requestResource()
mManagerService = mClient->getMediaResourceManagerService();
if (!mManagerService.get()) {
mClient = NULL;
mClient = nullptr;
return;
}
@ -159,7 +159,7 @@ void OMXCodecProxy::statusChanged(int event)
if (!strncasecmp(mime, "video/", 6)) {
sp<MediaSource> codec;
mOMXCodec = OMXCodec::Create(mOMX, mSrcMeta, mIsEncoder, mSource, mComponentName, mFlags, mNativeWindow);
if (mOMXCodec == NULL) {
if (mOMXCodec == nullptr) {
mState = MediaResourceManagerClient::CLIENT_STATE_SHUTDOWN;
notifyStatusChangedLocked();
return;
@ -204,7 +204,7 @@ status_t OMXCodecProxy::start(MetaData *params)
if (mState != MediaResourceManagerClient::CLIENT_STATE_RESOURCE_ASSIGNED) {
return NO_INIT;
}
CHECK(mOMXCodec.get() != NULL);
CHECK(mOMXCodec.get() != nullptr);
return mOMXCodec->start();
}
@ -215,7 +215,7 @@ status_t OMXCodecProxy::stop()
if (mState != MediaResourceManagerClient::CLIENT_STATE_RESOURCE_ASSIGNED) {
return NO_INIT;
}
CHECK(mOMXCodec.get() != NULL);
CHECK(mOMXCodec.get() != nullptr);
return mOMXCodec->stop();
}
@ -227,7 +227,7 @@ sp<MetaData> OMXCodecProxy::getFormat()
sp<MetaData> meta = new MetaData;
return meta;
}
CHECK(mOMXCodec.get() != NULL);
CHECK(mOMXCodec.get() != nullptr);
return mOMXCodec->getFormat();
}
@ -238,7 +238,7 @@ status_t OMXCodecProxy::read(MediaBuffer **buffer, const ReadOptions *options)
if (mState != MediaResourceManagerClient::CLIENT_STATE_RESOURCE_ASSIGNED) {
return NO_INIT;
}
CHECK(mOMXCodec.get() != NULL);
CHECK(mOMXCodec.get() != nullptr);
return mOMXCodec->read(buffer, options);
}
@ -249,7 +249,7 @@ status_t OMXCodecProxy::pause()
if (mState != MediaResourceManagerClient::CLIENT_STATE_RESOURCE_ASSIGNED) {
return NO_INIT;
}
CHECK(mOMXCodec.get() != NULL);
CHECK(mOMXCodec.get() != nullptr);
return mOMXCodec->pause();
}

View File

@ -32,9 +32,9 @@ public:
const sp<IOMX> &omx,
const sp<MetaData> &meta, bool createEncoder,
const sp<MediaSource> &source,
const char *matchComponentName = NULL,
const char *matchComponentName = nullptr,
uint32_t flags = 0,
const sp<ANativeWindow> &nativeWindow = NULL);
const sp<ANativeWindow> &nativeWindow = nullptr);
MediaResourceManagerClient::State getState();
@ -47,13 +47,13 @@ public:
virtual void statusChanged(int event);
// MediaSource
virtual status_t start(MetaData *params = NULL);
virtual status_t start(MetaData *params = nullptr);
virtual status_t stop();
virtual sp<MetaData> getFormat();
virtual status_t read(
MediaBuffer **buffer, const ReadOptions *options = NULL);
MediaBuffer **buffer, const ReadOptions *options = nullptr);
virtual status_t pause();

View File

@ -34,7 +34,7 @@
using namespace MPAPI;
Decoder::Decoder() :
mResource(NULL), mPrivate(NULL)
mResource(nullptr), mPrivate(nullptr)
{
}
@ -199,7 +199,7 @@ MediaPluginHost::MediaPluginHost() {
ALOG("Loading OMX Plugin: %s", name ? name : "nullptr");
if (name) {
char *path = PR_GetLibraryFilePathname("libxul.so", (PRFuncPtr) GetOmxLibraryName);
PRLibrary *lib = NULL;
PRLibrary *lib = nullptr;
if (path) {
nsAutoCString libpath(path);
PR_Free(path);

View File

@ -22,12 +22,12 @@ MediaPluginReader::MediaPluginReader(AbstractMediaDecoder *aDecoder,
const nsACString& aContentType) :
MediaDecoderReader(aDecoder),
mType(aContentType),
mPlugin(NULL),
mPlugin(nullptr),
mHasAudio(false),
mHasVideo(false),
mVideoSeekTimeUs(-1),
mAudioSeekTimeUs(-1),
mLastVideoFrame(NULL)
mLastVideoFrame(nullptr)
{
}
@ -105,11 +105,11 @@ nsresult MediaPluginReader::ResetDecode()
{
if (mLastVideoFrame) {
delete mLastVideoFrame;
mLastVideoFrame = NULL;
mLastVideoFrame = nullptr;
}
if (mPlugin) {
GetMediaPluginHost()->DestroyDecoder(mPlugin);
mPlugin = NULL;
mPlugin = nullptr;
}
return NS_OK;
@ -126,7 +126,7 @@ bool MediaPluginReader::DecodeVideoFrame(bool &aKeyframeSkip,
// Throw away the currently buffered frame if we are seeking.
if (mLastVideoFrame && mVideoSeekTimeUs != -1) {
delete mLastVideoFrame;
mLastVideoFrame = NULL;
mLastVideoFrame = nullptr;
}
ImageBufferCallback bufferCallback(mDecoder->GetImageContainer());
@ -146,7 +146,7 @@ bool MediaPluginReader::DecodeVideoFrame(bool &aKeyframeSkip,
? durationUs
: mLastVideoFrame->mTime;
mVideoQueue.Push(mLastVideoFrame);
mLastVideoFrame = NULL;
mLastVideoFrame = nullptr;
}
return false;
}
@ -266,7 +266,7 @@ bool MediaPluginReader::DecodeVideoFrame(bool &aKeyframeSkip,
// in which case it wouldn't be displayed anyway.
if (mLastVideoFrame->mEndTime < aTimeThreshold) {
delete mLastVideoFrame;
mLastVideoFrame = NULL;
mLastVideoFrame = nullptr;
continue;
}

View File

@ -225,7 +225,7 @@ ServeResourceEvent::Run() {
NS_NAMED_LITERAL_CSTRING(byteRange, "Range: bytes=");
const char* s = strstr(line.get(), byteRange.get());
if (s) {
start = strtoll(s+byteRange.Length(), NULL, 10);
start = strtoll(s+byteRange.Length(), nullptr, 10);
// Clamp 'start' to be between 0 and the resource length.
start = std::max(0ll, std::min(resource->GetLength(), start));

View File

@ -198,7 +198,7 @@ void HRTFDatabaseLoader::shutdown()
{
MOZ_ASSERT(NS_IsMainThread());
if (s_loaderMap) {
// Set s_loaderMap to NULL so that the hashtable is not modified on
// Set s_loaderMap to nullptr so that the hashtable is not modified on
// reference release during enumeration.
nsTHashtable<LoaderByRateEntry>* loaderMap = s_loaderMap;
s_loaderMap = nullptr;

View File

@ -199,7 +199,7 @@ WebMReader::~WebMReader()
nsresult WebMReader::Init(MediaDecoderReader* aCloneDonor)
{
if (vpx_codec_dec_init(&mVP8, vpx_codec_vp8_dx(), NULL, 0)) {
if (vpx_codec_dec_init(&mVP8, vpx_codec_vp8_dx(), nullptr, 0)) {
return NS_ERROR_FAILURE;
}
@ -852,7 +852,7 @@ bool WebMReader::DecodeVideoFrame(bool &aKeyframeSkip,
aKeyframeSkip = false;
}
if (vpx_codec_decode(&mVP8, data, length, NULL, 0)) {
if (vpx_codec_decode(&mVP8, data, length, nullptr, 0)) {
return false;
}
@ -864,7 +864,7 @@ bool WebMReader::DecodeVideoFrame(bool &aKeyframeSkip,
continue;
}
vpx_codec_iter_t iter = NULL;
vpx_codec_iter_t iter = nullptr;
vpx_image_t *img;
while ((img = vpx_codec_get_frame(&mVP8, &iter))) {

View File

@ -221,13 +221,13 @@ protected:
AUDIO = 1
};
// Read a packet from the nestegg file. Returns NULL if all packets for
// Read a packet from the nestegg file. Returns nullptr if all packets for
// the particular track have been read. Pass VIDEO or AUDIO to indicate the
// type of the packet we want to read.
#ifdef MOZ_DASH
nsReturnRef<NesteggPacketHolder> NextPacketInternal(TrackType aTrackType);
// Read a packet from the nestegg file. Returns NULL if all packets for
// Read a packet from the nestegg file. Returns nullptr if all packets for
// the particular track have been read. Pass VIDEO or AUDIO to indicate the
// type of the packet we want to read. If the reader reaches a switch access
// point, this function will get a packet from |mNextReader|.

View File

@ -145,7 +145,7 @@ MediaEngineDefaultVideoSource::Stop(SourceMediaStream *aSource, TrackID aID)
}
mTimer->Cancel();
mTimer = NULL;
mTimer = nullptr;
aSource->EndTrack(aID);
aSource->Finish();
@ -243,7 +243,7 @@ MediaEngineDefaultVideoSource::NotifyPull(MediaStreamGraph* aGraph,
TrackTicks delta = target - aLastEndTime;
if (delta > 0) {
// NULL images are allowed
// nullptr images are allowed
if (image) {
segment.AppendFrame(image.forget(), delta,
gfxIntSize(mOpts.mWidth, mOpts.mHeight));
@ -398,7 +398,7 @@ MediaEngineDefaultAudioSource::Stop(SourceMediaStream *aSource, TrackID aID)
}
mTimer->Cancel();
mTimer = NULL;
mTimer = nullptr;
aSource->EndTrack(aID);
aSource->Finish();

View File

@ -49,7 +49,7 @@ MediaEngineTabVideoSource::StopRunnable::Run()
if (mVideoSource->mTimer) {
mVideoSource->mTimer->Cancel();
mVideoSource->mTimer = NULL;
mVideoSource->mTimer = nullptr;
}
return NS_OK;
}
@ -152,7 +152,7 @@ NotifyPull(MediaStreamGraph*, SourceMediaStream* aSource, mozilla::TrackID aID,
TrackTicks target = TimeToTicksRoundUp(USECS_PER_S, aDesiredTime);
TrackTicks delta = target - aLastEndTime;
if (delta > 0) {
// NULL images are allowed
// nullptr images are allowed
if (image) {
gfxIntSize size = image->GetSize();
segment.AppendFrame(image.forget(), delta, size);

View File

@ -238,8 +238,8 @@ MediaEngineWebRTC::EnumerateVideoDevices(nsTArray<nsRefPtr<MediaEngineVideoSourc
void
MediaEngineWebRTC::EnumerateAudioDevices(nsTArray<nsRefPtr<MediaEngineAudioSource> >* aASources)
{
webrtc::VoEBase* ptrVoEBase = NULL;
webrtc::VoEHardware* ptrVoEHw = NULL;
webrtc::VoEBase* ptrVoEBase = nullptr;
webrtc::VoEHardware* ptrVoEHw = nullptr;
// We spawn threads to handle gUM runnables, so we must protect the member vars
MutexAutoLock lock(mMutex);
@ -250,7 +250,7 @@ MediaEngineWebRTC::EnumerateAudioDevices(nsTArray<nsRefPtr<MediaEngineAudioSourc
JavaVM *jvm = mozilla::AndroidBridge::Bridge()->GetVM();
JNIEnv *env;
jvm->AttachCurrentThread(&env, NULL);
jvm->AttachCurrentThread(&env, nullptr);
if (webrtc::VoiceEngine::SetAndroidObjects(jvm, (void*)context) != 0) {
LOG(("VoiceEngine:SetAndroidObjects Failed"));
@ -357,8 +357,8 @@ MediaEngineWebRTC::Shutdown()
webrtc::VoiceEngine::Delete(mVoiceEngine);
}
mVideoEngine = NULL;
mVoiceEngine = NULL;
mVideoEngine = nullptr;
mVoiceEngine = nullptr;
}
}

View File

@ -129,7 +129,7 @@ public:
, mHeight(0)
, mInitDone(false)
, mInSnapshotMode(false)
, mSnapshotPath(NULL) {
, mSnapshotPath(nullptr) {
MOZ_ASSERT(aVideoEnginePtr);
mState = kReleased;
Init();

View File

@ -140,7 +140,7 @@ MediaEngineWebRTCVideoSource::NotifyPull(MediaStreamGraph* aGraph,
// Don't append if we've already provided a frame that supposedly goes past the current aDesiredTime
// Doing so means a negative delta and thus messes up handling of the graph
if (delta > 0) {
// NULL images are allowed
// nullptr images are allowed
if (image) {
segment.AppendFrame(image.forget(), delta, gfxIntSize(mWidth, mHeight));
} else {
@ -500,7 +500,7 @@ MediaEngineWebRTCVideoSource::Snapshot(uint32_t aDuration, nsIDOMFile** aFile)
NS_ConvertUTF16toUTF8 path(*mSnapshotPath);
if (vieFile->GetCaptureDeviceSnapshot(mCaptureIndex, path.get()) < 0) {
delete mSnapshotPath;
mSnapshotPath = NULL;
mSnapshotPath = nullptr;
return NS_ERROR_FAILURE;
}
@ -512,7 +512,7 @@ MediaEngineWebRTCVideoSource::Snapshot(uint32_t aDuration, nsIDOMFile** aFile)
nsresult rv = NS_NewLocalFile(*mSnapshotPath, false, getter_AddRefs(file));
delete mSnapshotPath;
mSnapshotPath = NULL;
mSnapshotPath = nullptr;
NS_ENSURE_SUCCESS(rv, rv);
@ -540,12 +540,12 @@ MediaEngineWebRTCVideoSource::Init()
(void) mMinFps;
LOG((__FUNCTION__));
if (mVideoEngine == NULL) {
if (mVideoEngine == nullptr) {
return;
}
mViEBase = webrtc::ViEBase::GetInterface(mVideoEngine);
if (mViEBase == NULL) {
if (mViEBase == nullptr) {
return;
}
@ -553,7 +553,7 @@ MediaEngineWebRTCVideoSource::Init()
mViECapture = webrtc::ViECapture::GetInterface(mVideoEngine);
mViERender = webrtc::ViERender::GetInterface(mVideoEngine);
if (mViECapture == NULL || mViERender == NULL) {
if (mViECapture == nullptr || mViERender == nullptr) {
return;
}

View File

@ -477,7 +477,7 @@ SpeechRecognition::NotifyFinalResult(SpeechEvent* aEvent)
srEvent->InitSpeechRecognitionEvent(NS_LITERAL_STRING("result"),
true, false, 0, ilist,
NS_LITERAL_STRING("NOT_IMPLEMENTED"),
NULL);
nullptr);
domEvent->SetTrusted(true);
bool defaultActionEnabled;

View File

@ -167,7 +167,7 @@ nsSpeechTask::SendAudio(const JS::Value& aData, const JS::Value& aLandmarks,
JS::Rooted<JSObject*> darray(aCx, &aData.toObject());
JSAutoCompartment ac(aCx, darray);
JS::Rooted<JSObject*> tsrc(aCx, NULL);
JS::Rooted<JSObject*> tsrc(aCx, nullptr);
// Allow either Int16Array or plain JS Array
if (JS_IsInt16Array(darray)) {

View File

@ -24,18 +24,18 @@ NS_GENERIC_FACTORY_SINGLETON_CONSTRUCTOR(nsPicoService,
NS_DEFINE_NAMED_CID(PICOSERVICE_CID);
static const mozilla::Module::CIDEntry kCIDs[] = {
{ &kPICOSERVICE_CID, true, NULL, nsPicoServiceConstructor },
{ NULL }
{ &kPICOSERVICE_CID, true, nullptr, nsPicoServiceConstructor },
{ nullptr }
};
static const mozilla::Module::ContractIDEntry kContracts[] = {
{ PICOSERVICE_CONTRACTID, &kPICOSERVICE_CID },
{ NULL }
{ nullptr }
};
static const mozilla::Module::CategoryEntry kCategories[] = {
{ "profile-after-change", "Pico Speech Synth", PICOSERVICE_CONTRACTID },
{ NULL }
{ nullptr }
};
static void
@ -49,8 +49,8 @@ static const mozilla::Module kModule = {
kCIDs,
kContracts,
kCategories,
NULL,
NULL,
nullptr,
nullptr,
UnloadPicoModule
};

View File

@ -106,7 +106,7 @@ D3D9DXVA2Manager::Init()
D3DCREATE_MULTITHREADED |
D3DCREATE_MIXED_VERTEXPROCESSING,
&params,
NULL,
nullptr,
getter_AddRefs(device));
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);

View File

@ -212,7 +212,7 @@ WMFByteStream::QueryInterface(REFIID aIId, void **aInterface)
return DoGetInterface(static_cast<IMFAttributes*>(this), aInterface);
}
*aInterface = NULL;
*aInterface = nullptr;
return E_NOINTERFACE;
}
@ -258,7 +258,7 @@ ReadRequest::QueryInterface(REFIID aIId, void **aInterface)
return DoGetInterface(static_cast<IUnknown*>(this), aInterface);
}
*aInterface = NULL;
*aInterface = nullptr;
return E_NOINTERFACE;
}

View File

@ -239,7 +239,7 @@ ConfigureSourceReaderStream(IMFSourceReader *aReader,
// Set the uncompressed format. This can fail if the decoder can't produce
// that type.
return aReader->SetCurrentMediaType(aStreamIndex, NULL, type);
return aReader->SetCurrentMediaType(aStreamIndex, nullptr, type);
}
// Returns the duration of the resource, in microseconds.
@ -333,7 +333,7 @@ GetPictureRegion(IMFMediaType* aMediaType, nsIntRect& aOutPictureRegion)
hr = aMediaType->GetBlob(MF_MT_PAN_SCAN_APERTURE,
(UINT8*)&videoArea,
sizeof(MFVideoArea),
NULL);
nullptr);
}
// If we're not in pan-and-scan mode, or the pan-and-scan region is not set,
@ -342,7 +342,7 @@ GetPictureRegion(IMFMediaType* aMediaType, nsIntRect& aOutPictureRegion)
hr = aMediaType->GetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE,
(UINT8*)&videoArea,
sizeof(MFVideoArea),
NULL);
nullptr);
}
if (hr == MF_E_ATTRIBUTENOTFOUND) {
@ -351,7 +351,7 @@ GetPictureRegion(IMFMediaType* aMediaType, nsIntRect& aOutPictureRegion)
hr = aMediaType->GetBlob(MF_MT_GEOMETRIC_APERTURE,
(UINT8*)&videoArea,
sizeof(MFVideoArea),
NULL);
nullptr);
}
if (SUCCEEDED(hr)) {
@ -796,7 +796,7 @@ WMFReader::CreateBasicVideoFrame(IMFSample* aSample,
hr = twoDBuffer->Lock2D(&data, &stride);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
} else {
hr = buffer->Lock(&data, NULL, NULL);
hr = buffer->Lock(&data, nullptr, nullptr);
NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
stride = mVideoStride;
}

View File

@ -29,7 +29,7 @@ WMFSourceReaderCallback::QueryInterface(REFIID aIId, void **aInterface)
return DoGetInterface(static_cast<WMFSourceReaderCallback*>(this), aInterface);
}
*aInterface = NULL;
*aInterface = nullptr;
return E_NOINTERFACE;
}
@ -58,7 +58,7 @@ WMFSourceReaderCallback::NotifyReadComplete(HRESULT aReadStatus,
LONGLONG aTimestamp,
IMFSample *aSample)
{
// Note: aSample can be NULL on success if more data is required!
// Note: aSample can be nullptr on success if more data is required!
ReentrantMonitorAutoEnter mon(mMonitor);
if (mSample) {

View File

@ -236,7 +236,7 @@ DisableBlockedDecoders(IMFPluginControl* aPluginControl,
HRESULT hr = S_OK;
UINT32 numMFTs = 0;
IMFActivate **ppActivate = NULL;
IMFActivate **ppActivate = nullptr;
hr = wmf::MFTEnumEx(aCategory,
MFT_ENUM_FLAG_ALL,
nullptr, // Input type, nullptr -> match all.
@ -295,11 +295,11 @@ struct WMFModule {
};
static WMFModule sDLLs[] = {
{ L"mfplat.dll", NULL },
{ L"mfreadwrite.dll", NULL },
{ L"propsys.dll", NULL },
{ L"mf.dll", NULL },
{ L"dxva2.dll", NULL }
{ L"mfplat.dll", nullptr },
{ L"mfreadwrite.dll", nullptr },
{ L"propsys.dll", nullptr },
{ L"mf.dll", nullptr },
{ L"dxva2.dll", nullptr }
};
HRESULT
@ -349,7 +349,7 @@ UnloadDLLs()
for (uint32_t i = 0; i < length; i++) {
if (sDLLs[i].handle) {
FreeLibrary(sDLLs[i].handle);
sDLLs[i].handle = NULL;
sDLLs[i].handle = nullptr;
}
sDLLsLoaded = false;
}

View File

@ -276,7 +276,7 @@ SVGFETurbulenceElement::Noise2(int aColorChannel, double aVec[2],
ry0 = t - (int) t;
ry1 = ry0 - 1.0f;
// If stitching, adjust lattice points accordingly.
if (aStitchInfo != NULL) {
if (aStitchInfo != nullptr) {
if (bx0 >= aStitchInfo->mWrapX)
bx0 -= aStitchInfo->mWidth;
if (bx1 >= aStitchInfo->mWrapX)
@ -322,7 +322,7 @@ SVGFETurbulenceElement::Turbulence(int aColorChannel, double* aPoint,
double aTileWidth, double aTileHeight)
{
StitchInfo stitch;
StitchInfo *stitchInfo = NULL; // Not stitching when NULL.
StitchInfo *stitchInfo = nullptr; // Not stitching when nullptr.
// Adjust the base frequencies if necessary for stitching.
if (aDoStitching) {
// When stitching tiled turbulence, the frequencies must be adjusted
@ -363,7 +363,7 @@ SVGFETurbulenceElement::Turbulence(int aColorChannel, double* aPoint,
vec[0] *= 2;
vec[1] *= 2;
ratio *= 2;
if (stitchInfo != NULL) {
if (stitchInfo != nullptr) {
// Update stitch values. Subtracting sPerlinN before the multiplication
// and adding it afterward simplifies to subtracting it once.
stitch.mWidth *= 2;