Bug 784739 - Switch from NULL to nullptr in js/src/jit/ (1/7); r=ehsan

--HG--
extra : rebase_source : bb4a1bebb093c07714aa0384f730177f44465907
Birunthan Mohanathas 2013-09-27 16:28:11 -04:00
parent b6fd8179c4
commit 617a00226d
13 changed files with 189 additions and 186 deletions
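
As background for this mechanical replacement (illustrative only, not part of the patch): NULL is an integer-like macro, so it converts silently to arithmetic types and can pick the wrong overload, while C++11's nullptr has its own type, std::nullptr_t, and only converts to pointer types. A minimal sketch of the difference, assuming a C++11 compiler:

    #include <cstdio>

    static void f(int)    { std::puts("f(int)"); }
    static void f(char *) { std::puts("f(char *)"); }

    int main() {
        f(0);        // 0 is an int: picks f(int)
        f(nullptr);  // nullptr is std::nullptr_t: unambiguously picks f(char *)
        // f(NULL);  // depending on how NULL is defined, this is ambiguous or
        //           // silently picks f(int) -- the surprise nullptr avoids
        return 0;
    }

The hunks below apply the substitution mechanically; no behaviour change is intended.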

@ -58,7 +58,7 @@ class AliasSetIterator
AliasAnalysis::AliasAnalysis(MIRGenerator *mir, MIRGraph &graph)
: mir(mir),
graph_(graph),
loop_(NULL)
loop_(nullptr)
{
}
@ -246,6 +246,6 @@ AliasAnalysis::analyze()
}
}
JS_ASSERT(loop_ == NULL);
JS_ASSERT(loop_ == nullptr);
return true;
}

@ -256,7 +256,7 @@ FunctionName(ParseNode *fn)
{
if (JSAtom *atom = FunctionObject(fn)->atom())
return atom->asPropertyName();
return NULL;
return nullptr;
}
static inline ParseNode *
@ -356,7 +356,7 @@ ParseVarOrConstStatement(AsmJSParser &parser, ParseNode **var)
{
TokenKind tk = PeekToken(parser);
if (tk != TOK_VAR && tk != TOK_CONST) {
*var = NULL;
*var = nullptr;
return true;
}
@ -1263,14 +1263,14 @@ class MOZ_STACK_CLASS ModuleCompiler
masm_(MacroAssembler::AsmJSToken()),
moduleLifo_(LIFO_ALLOC_PRIMARY_CHUNK_SIZE),
moduleFunctionNode_(parser.pc->maybeFunction),
moduleFunctionName_(NULL),
moduleFunctionName_(nullptr),
globals_(cx),
functions_(cx),
funcPtrTables_(cx),
exits_(cx),
standardLibraryMathNames_(cx),
globalAccesses_(cx),
errorString_(NULL),
errorString_(nullptr),
errorOffset_(UINT32_MAX),
usecBefore_(PRMJ_Now()),
slowFunctions_(cx),
@ -1389,7 +1389,7 @@ class MOZ_STACK_CLASS ModuleCompiler
MacroAssembler &masm() { return masm_; }
Label &stackOverflowLabel() { return stackOverflowLabel_; }
Label &operationCallbackLabel() { return operationCallbackLabel_; }
bool hasError() const { return errorString_ != NULL; }
bool hasError() const { return errorString_ != nullptr; }
const AsmJSModule &module() const { return *module_.get(); }
ParseNode *moduleFunctionNode() const { return moduleFunctionNode_; }
@ -1398,14 +1398,14 @@ class MOZ_STACK_CLASS ModuleCompiler
const Global *lookupGlobal(PropertyName *name) const {
if (GlobalMap::Ptr p = globals_.lookup(name))
return p->value;
return NULL;
return nullptr;
}
Func *lookupFunction(PropertyName *name) {
if (GlobalMap::Ptr p = globals_.lookup(name)) {
if (p->value->which() == Global::Function)
return functions_[p->value->funcIndex()];
}
return NULL;
return nullptr;
}
unsigned numFunctions() const {
return functions_.length();
@ -1843,11 +1843,11 @@ class FunctionCompiler
fn_(fn),
locals_(m.cx()),
varInitializers_(m.cx()),
alloc_(NULL),
graph_(NULL),
info_(NULL),
mirGen_(NULL),
curBlock_(NULL),
alloc_(nullptr),
graph_(nullptr),
info_(nullptr),
mirGen_(nullptr),
curBlock_(nullptr),
loopStack_(m.cx()),
breakableStack_(m.cx()),
unlabeledBreaks_(m.cx()),
@ -1899,7 +1899,7 @@ class FunctionCompiler
JS_ASSERT(unlabeledContinues_.empty());
JS_ASSERT(labeledBreaks_.empty());
JS_ASSERT(labeledContinues_.empty());
JS_ASSERT(curBlock_ == NULL);
JS_ASSERT(curBlock_ == nullptr);
}
#endif
}
@ -1935,7 +1935,7 @@ class FunctionCompiler
info_ = lifo_.new_<CompileInfo>(locals_.count(), SequentialExecution);
mirGen_ = lifo_.new_<MIRGenerator>(cx()->compartment(), alloc_, graph_, info_);
if (!newBlock(/* pred = */ NULL, &curBlock_, fn_))
if (!newBlock(/* pred = */ nullptr, &curBlock_, fn_))
return false;
curBlock_->add(MAsmJSCheckOverRecursed::New(&m_.stackOverflowLabel()));
@ -1978,20 +1978,20 @@ class FunctionCompiler
{
if (LocalMap::Ptr p = locals_.lookup(name))
return &p->value;
return NULL;
return nullptr;
}
MDefinition *getLocalDef(const Local &local)
{
if (!curBlock_)
return NULL;
return nullptr;
return curBlock_->getSlot(info().localSlot(local.slot));
}
const ModuleCompiler::Global *lookupGlobal(PropertyName *name) const
{
if (locals_.has(name))
return NULL;
return nullptr;
return m_.lookupGlobal(name);
}
@ -2000,7 +2000,7 @@ class FunctionCompiler
MDefinition *constant(const Value &v)
{
if (!curBlock_)
return NULL;
return nullptr;
JS_ASSERT(v.isNumber());
MConstant *constant = MConstant::New(v);
curBlock_->add(constant);
@ -2011,7 +2011,7 @@ class FunctionCompiler
MDefinition *unary(MDefinition *op)
{
if (!curBlock_)
return NULL;
return nullptr;
T *ins = T::NewAsmJS(op);
curBlock_->add(ins);
return ins;
@ -2021,7 +2021,7 @@ class FunctionCompiler
MDefinition *unary(MDefinition *op, MIRType type)
{
if (!curBlock_)
return NULL;
return nullptr;
T *ins = T::NewAsmJS(op, type);
curBlock_->add(ins);
return ins;
@ -2031,7 +2031,7 @@ class FunctionCompiler
MDefinition *binary(MDefinition *lhs, MDefinition *rhs)
{
if (!curBlock_)
return NULL;
return nullptr;
T *ins = T::New(lhs, rhs);
curBlock_->add(ins);
return ins;
@ -2041,7 +2041,7 @@ class FunctionCompiler
MDefinition *binary(MDefinition *lhs, MDefinition *rhs, MIRType type)
{
if (!curBlock_)
return NULL;
return nullptr;
T *ins = T::NewAsmJS(lhs, rhs, type);
curBlock_->add(ins);
return ins;
@ -2050,7 +2050,7 @@ class FunctionCompiler
MDefinition *mul(MDefinition *lhs, MDefinition *rhs, MIRType type, MMul::Mode mode)
{
if (!curBlock_)
return NULL;
return nullptr;
MMul *ins = MMul::New(lhs, rhs, type, mode);
curBlock_->add(ins);
return ins;
@ -2060,7 +2060,7 @@ class FunctionCompiler
MDefinition *bitwise(MDefinition *lhs, MDefinition *rhs)
{
if (!curBlock_)
return NULL;
return nullptr;
T *ins = T::NewAsmJS(lhs, rhs);
curBlock_->add(ins);
return ins;
@ -2070,7 +2070,7 @@ class FunctionCompiler
MDefinition *bitwise(MDefinition *op)
{
if (!curBlock_)
return NULL;
return nullptr;
T *ins = T::NewAsmJS(op);
curBlock_->add(ins);
return ins;
@ -2079,7 +2079,7 @@ class FunctionCompiler
MDefinition *compare(MDefinition *lhs, MDefinition *rhs, JSOp op, MCompare::CompareType type)
{
if (!curBlock_)
return NULL;
return nullptr;
MCompare *ins = MCompare::NewAsmJS(lhs, rhs, op, type);
curBlock_->add(ins);
return ins;
@ -2095,7 +2095,7 @@ class FunctionCompiler
MDefinition *loadHeap(ArrayBufferView::ViewType vt, MDefinition *ptr, NeedsBoundsCheck chk)
{
if (!curBlock_)
return NULL;
return nullptr;
MAsmJSLoadHeap *load = MAsmJSLoadHeap::New(vt, ptr);
curBlock_->add(load);
if (chk == NO_BOUNDS_CHECK)
@ -2116,7 +2116,7 @@ class FunctionCompiler
MDefinition *loadGlobalVar(const ModuleCompiler::Global &global)
{
if (!curBlock_)
return NULL;
return nullptr;
if (global.varIsLitConstant()) {
JS_ASSERT(global.litConstValue().isNumber());
MConstant *constant = MConstant::New(global.litConstValue());
@ -2242,7 +2242,7 @@ class FunctionCompiler
bool callPrivate(MAsmJSCall::Callee callee, const Call &call, MIRType returnType, MDefinition **def)
{
if (!curBlock_) {
*def = NULL;
*def = nullptr;
return true;
}
MAsmJSCall *ins = MAsmJSCall::New(callee, call.regArgs_, returnType, call.spIncrement_);
@ -2264,7 +2264,7 @@ class FunctionCompiler
const Call &call, MDefinition **def)
{
if (!curBlock_) {
*def = NULL;
*def = nullptr;
return true;
}
@ -2282,7 +2282,7 @@ class FunctionCompiler
bool ffiCall(unsigned exitIndex, const Call &call, MIRType returnType, MDefinition **def)
{
if (!curBlock_) {
*def = NULL;
*def = nullptr;
return true;
}
@ -2308,7 +2308,7 @@ class FunctionCompiler
return;
MAsmJSReturn *ins = MAsmJSReturn::New(expr);
curBlock_->end(ins);
curBlock_ = NULL;
curBlock_ = nullptr;
}
void returnVoid()
@ -2317,14 +2317,14 @@ class FunctionCompiler
return;
MAsmJSVoidReturn *ins = MAsmJSVoidReturn::New();
curBlock_->end(ins);
curBlock_ = NULL;
curBlock_ = nullptr;
}
bool branchAndStartThen(MDefinition *cond, MBasicBlock **thenBlock, MBasicBlock **elseBlock, ParseNode *thenPn, ParseNode* elsePn)
{
if (!curBlock_) {
*thenBlock = NULL;
*elseBlock = NULL;
*thenBlock = nullptr;
*elseBlock = nullptr;
return true;
}
if (!newBlock(curBlock_, thenBlock, thenPn) || !newBlock(curBlock_, elseBlock, elsePn))
@ -2391,7 +2391,7 @@ class FunctionCompiler
MDefinition *popPhiOutput()
{
if (!curBlock_)
return NULL;
return nullptr;
JS_ASSERT(curBlock_->stackDepth() == info().firstStackSlot() + 1);
return curBlock_->pop();
}
@ -2402,7 +2402,7 @@ class FunctionCompiler
return false;
JS_ASSERT_IF(curBlock_, curBlock_->loopDepth() == loopStack_.length() - 1);
if (!curBlock_) {
*loopEntry = NULL;
*loopEntry = nullptr;
return true;
}
*loopEntry = MBasicBlock::NewAsmJS(mirGraph(), info(), curBlock_,
@ -2420,7 +2420,7 @@ class FunctionCompiler
bool branchAndStartLoopBody(MDefinition *cond, MBasicBlock **afterLoop, ParseNode *bodyPn, ParseNode *afterPn)
{
if (!curBlock_) {
*afterLoop = NULL;
*afterLoop = nullptr;
return true;
}
JS_ASSERT(curBlock_->loopDepth() > 0);
@ -2428,7 +2428,7 @@ class FunctionCompiler
if (!newBlock(curBlock_, &body, bodyPn))
return false;
if (cond->isConstant() && ToBoolean(cond->toConstant()->value())) {
*afterLoop = NULL;
*afterLoop = nullptr;
curBlock_->end(MGoto::New(body));
} else {
if (!newBlockWithDepth(curBlock_, curBlock_->loopDepth() - 1, afterLoop, afterPn))
@ -2489,7 +2489,7 @@ class FunctionCompiler
curBlock_->end(MGoto::New(loopEntry));
if (!loopEntry->setBackedgeAsmJS(curBlock_))
return false;
curBlock_ = NULL;
curBlock_ = nullptr;
} else {
MBasicBlock *afterLoop;
if (!newBlock(curBlock_, &afterLoop, afterLoopStmt))
@ -2545,19 +2545,19 @@ class FunctionCompiler
if (!breakableStack_.append(pn))
return false;
if (!curBlock_) {
*switchBlock = NULL;
*switchBlock = nullptr;
return true;
}
curBlock_->end(MTableSwitch::New(expr, low, high));
*switchBlock = curBlock_;
curBlock_ = NULL;
curBlock_ = nullptr;
return true;
}
bool startSwitchCase(MBasicBlock *switchBlock, MBasicBlock **next, ParseNode *pn)
{
if (!switchBlock) {
*next = NULL;
*next = nullptr;
return true;
}
if (!newBlock(switchBlock, next, pn))
@ -2607,9 +2607,9 @@ class FunctionCompiler
MIRGenerator *extractMIR()
{
JS_ASSERT(mirGen_ != NULL);
JS_ASSERT(mirGen_ != nullptr);
MIRGenerator *mirGen = mirGen_;
mirGen_ = NULL;
mirGen_ = nullptr;
return mirGen;
}
@ -2697,7 +2697,7 @@ class FunctionCompiler
}
if (!p->value.append(curBlock_))
return false;
curBlock_ = NULL;
curBlock_ = nullptr;
return true;
}
@ -2790,23 +2790,23 @@ CheckModuleArguments(ModuleCompiler &m, ParseNode *fn)
{
unsigned numFormals;
ParseNode *arg1 = FunctionArgsList(fn, &numFormals);
ParseNode *arg2 = arg1 ? NextNode(arg1) : NULL;
ParseNode *arg3 = arg2 ? NextNode(arg2) : NULL;
ParseNode *arg2 = arg1 ? NextNode(arg1) : nullptr;
ParseNode *arg3 = arg2 ? NextNode(arg2) : nullptr;
if (numFormals > 3)
return m.fail(fn, "asm.js modules takes at most 3 argument");
PropertyName *arg1Name = NULL;
PropertyName *arg1Name = nullptr;
if (numFormals >= 1 && !CheckModuleArgument(m, arg1, &arg1Name))
return false;
m.initGlobalArgumentName(arg1Name);
PropertyName *arg2Name = NULL;
PropertyName *arg2Name = nullptr;
if (numFormals >= 2 && !CheckModuleArgument(m, arg2, &arg2Name))
return false;
m.initImportArgumentName(arg2Name);
PropertyName *arg3Name = NULL;
PropertyName *arg3Name = nullptr;
if (numFormals >= 3 && !CheckModuleArgument(m, arg3, &arg3Name))
return false;
m.initBufferArgumentName(arg3Name);
@ -2848,7 +2848,7 @@ CheckGlobalVariableInitConstant(ModuleCompiler &m, PropertyName *varName, ParseN
static bool
CheckTypeAnnotation(ModuleCompiler &m, ParseNode *coercionNode, AsmJSCoercion *coercion,
ParseNode **coercedExpr = NULL)
ParseNode **coercedExpr = nullptr)
{
switch (coercionNode->getKind()) {
case PNK_BITOR: {
@ -2919,7 +2919,7 @@ CheckNewArrayView(ModuleCompiler &m, PropertyName *varName, ParseNode *newExpr)
return m.failName(base, "expecting '%s.*Array", globalName);
ParseNode *bufArg = NextNode(ctorExpr);
if (!bufArg || NextNode(bufArg) != NULL)
if (!bufArg || NextNode(bufArg) != nullptr)
return m.fail(ctorExpr, "array view constructor takes exactly one argument");
PropertyName *bufferName = m.module().bufferArgumentName();
@ -4119,7 +4119,7 @@ CheckMultiply(FunctionCompiler &f, ParseNode *star, MDefinition **def, Type *typ
static bool
CheckAddOrSub(FunctionCompiler &f, ParseNode *expr, MDefinition **def, Type *type,
unsigned *numAddOrSubOut = NULL)
unsigned *numAddOrSubOut = nullptr)
{
JS_CHECK_RECURSION(f.cx(), return false);
@ -4383,7 +4383,7 @@ CheckExpr(FunctionCompiler &f, ParseNode *expr, MDefinition **def, Type *type)
}
static bool
CheckStatement(FunctionCompiler &f, ParseNode *stmt, LabelVector *maybeLabels = NULL);
CheckStatement(FunctionCompiler &f, ParseNode *stmt, LabelVector *maybeLabels = nullptr);
static bool
CheckExprStatement(FunctionCompiler &f, ParseNode *exprStmt)
@ -4631,7 +4631,7 @@ CheckDefaultAtEnd(FunctionCompiler &f, ParseNode *stmt)
{
for (; stmt; stmt = NextNode(stmt)) {
JS_ASSERT(stmt->isKind(PNK_CASE) || stmt->isKind(PNK_DEFAULT));
if (stmt->isKind(PNK_DEFAULT) && NextNode(stmt) != NULL)
if (stmt->isKind(PNK_DEFAULT) && NextNode(stmt) != nullptr)
return f.fail(stmt, "default label must be at the end");
}
@ -4856,7 +4856,7 @@ ParseFunction(ModuleCompiler &m, ParseNode **fnOut)
return false;
// This flows into FunctionBox, so must be tenured.
RootedFunction fun(m.cx(), NewFunction(m.cx(), NullPtr(), NULL, 0, JSFunction::INTERPRETED,
RootedFunction fun(m.cx(), NewFunction(m.cx(), NullPtr(), nullptr, 0, JSFunction::INTERPRETED,
m.cx()->global(), name, JSFunction::FinalizeKind,
TenuredObject));
if (!fun)
@ -4920,7 +4920,7 @@ CheckFunction(ModuleCompiler &m, LifoAlloc &lifo, MIRGenerator **mir, ModuleComp
if (!f.prepareToEmitMIR(argTypes))
return false;
ParseNode *lastNonEmptyStmt = NULL;
ParseNode *lastNonEmptyStmt = nullptr;
for (; stmtIter; stmtIter = NextNode(stmtIter)) {
if (!CheckStatement(f, stmtIter))
return false;
@ -4969,7 +4969,7 @@ GenerateCode(ModuleCompiler &m, ModuleCompiler::Func &func, MIRGenerator &mir, L
ScopedJSDeletePtr<CodeGenerator> codegen(jit::GenerateCode(&mir, &lir, &m.masm()));
if (!codegen)
return m.fail(NULL, "internal codegen failure (probably out of memory)");
return m.fail(nullptr, "internal codegen failure (probably out of memory)");
if (!m.collectAccesses(mir))
return false;
@ -5021,7 +5021,7 @@ CheckAllFunctionsDefined(ModuleCompiler &m)
{
for (unsigned i = 0; i < m.numFunctions(); i++) {
if (!m.function(i).code()->bound())
return m.failName(NULL, "missing definition of function %s", m.function(i).name());
return m.failName(nullptr, "missing definition of function %s", m.function(i).name());
}
return true;
@ -5115,7 +5115,7 @@ GetFinishedCompilation(ModuleCompiler &m, ParallelGroupState &group)
group.state.wait(WorkerThreadState::CONSUMER);
}
return NULL;
return nullptr;
}
static bool
@ -5168,7 +5168,7 @@ CheckFunctionsParallelImpl(ModuleCompiler &m, ParallelGroupState &group)
for (unsigned i = 0; PeekToken(m.parser()) == TOK_FUNCTION; i++) {
// Get exclusive access to an empty LifoAlloc from the thread group's pool.
AsmJSParallelTask *task = NULL;
AsmJSParallelTask *task = nullptr;
if (!GetUnusedTask(group, i, &task) && !GenerateCodeForFinishedJob(m, group, &task))
return false;
@ -5188,7 +5188,7 @@ CheckFunctionsParallelImpl(ModuleCompiler &m, ParallelGroupState &group)
// Block for all outstanding workers to complete.
while (group.outstandingJobs > 0) {
AsmJSParallelTask *ignored = NULL;
AsmJSParallelTask *ignored = nullptr;
if (!GenerateCodeForFinishedJob(m, group, &ignored))
return false;
}
@ -5301,7 +5301,7 @@ CheckFuncPtrTable(ModuleCompiler &m, ParseNode *var)
unsigned mask = length - 1;
ModuleCompiler::FuncPtrVector elems(m.cx());
const Signature *firstSig = NULL;
const Signature *firstSig = nullptr;
for (ParseNode *elem = ListHead(arrayLiteral); elem; elem = NextNode(elem)) {
if (!elem->isKind(PNK_NAME))
@ -5365,7 +5365,7 @@ CheckModuleExportFunction(ModuleCompiler &m, ParseNode *returnExpr)
if (!func)
return m.failName(returnExpr, "exported function name '%s' not found", funcName);
return m.addExportedFunction(func, /* maybeFieldName = */ NULL);
return m.addExportedFunction(func, /* maybeFieldName = */ nullptr);
}
static bool
@ -5402,8 +5402,8 @@ CheckModuleReturn(ModuleCompiler &m)
if (PeekToken(m.parser()) != TOK_RETURN) {
TokenKind tk = PeekToken(m.parser());
if (tk == TOK_RC || tk == TOK_EOF)
return m.fail(NULL, "expecting return statement");
return m.fail(NULL, "invalid asm.js statement");
return m.fail(nullptr, "expecting return statement");
return m.fail(nullptr, "invalid asm.js statement");
}
ParseNode *returnStmt = m.parser().statement();
@ -6047,7 +6047,7 @@ GenerateFFIIonExit(ModuleCompiler &m, const ModuleCompiler::ExitDescriptor &exit
// Get the pointer to the ion code
Label done, oolConvert;
Label *maybeDebugBreakpoint = NULL;
Label *maybeDebugBreakpoint = nullptr;
#ifdef DEBUG
Label ionFailed;
@ -6413,7 +6413,7 @@ CheckModule(ExclusiveContext *cx, AsmJSParser &parser, ParseNode *stmtList,
TokenKind tk = PeekToken(m.parser());
if (tk != TOK_EOF && tk != TOK_RC)
return m.fail(NULL, "top-level export (return) must be the last statement");
return m.fail(nullptr, "top-level export (return) must be the last statement");
AsmJSStaticLinkData linkData(cx);
if (!FinishModule(m, module, &linkData))

@ -33,7 +33,7 @@ static bool
LinkFail(JSContext *cx, const char *str)
{
JS_ReportErrorFlagsAndNumber(cx, JSREPORT_WARNING, js_GetErrorMessage,
NULL, JSMSG_USE_ASM_LINK_FAIL, str);
nullptr, JSMSG_USE_ASM_LINK_FAIL, str);
return false;
}
@ -138,7 +138,7 @@ ValidateMathBuiltin(JSContext *cx, AsmJSModule::Global &global, HandleValue glob
if (!GetDataProperty(cx, v, field, &v))
return false;
Native native = NULL;
Native native = nullptr;
switch (global.mathBuiltin()) {
case AsmJSMathBuiltin_sin: native = math_sin; break;
case AsmJSMathBuiltin_cos: native = math_cos; break;
@ -274,9 +274,9 @@ DynamicallyLinkModule(JSContext *cx, CallArgs args, AsmJSModule &module)
AsmJSActivation::AsmJSActivation(JSContext *cx, AsmJSModule &module)
: cx_(cx),
module_(module),
errorRejoinSP_(NULL),
profiler_(NULL),
resumePC_(NULL)
errorRejoinSP_(nullptr),
profiler_(nullptr),
resumePC_(nullptr)
{
if (cx->runtime()->spsProfiler.enabled()) {
// Use a profiler string that matches jsMatch regex in
@ -366,7 +366,7 @@ CallAsmJS(JSContext *cx, unsigned argc, Value *vp)
// Eagerly push an IonContext+JitActivation so that the optimized
// asm.js-to-Ion FFI call path (which we want to be very fast) can
// avoid doing so.
jit::IonContext ictx(cx, NULL);
jit::IonContext ictx(cx, nullptr);
JitActivation jitActivation(cx, /* firstFrameIsConstructing = */ false, /* active */ false);
// Call the per-exported-function trampoline created by GenerateEntry.
@ -398,7 +398,7 @@ NewExportedFunction(JSContext *cx, const AsmJSModule::ExportedFunction &func,
JSFunction::NATIVE_FUN, cx->global(), name,
JSFunction::ExtendedFinalizeKind);
if (!fun)
return NULL;
return nullptr;
fun->setExtendedSlot(ASM_MODULE_SLOT, ObjectValue(*moduleObj));
fun->setExtendedSlot(ASM_EXPORT_INDEX_SLOT, Int32Value(exportIndex));
@ -417,7 +417,7 @@ HandleDynamicLinkFailure(JSContext *cx, CallArgs args, AsmJSModule &module, Hand
if (!src)
return false;
RootedFunction fun(cx, NewFunction(cx, NullPtr(), NULL, 0, JSFunction::INTERPRETED,
RootedFunction fun(cx, NewFunction(cx, NullPtr(), nullptr, 0, JSFunction::INTERPRETED,
cx->global(), name, JSFunction::FinalizeKind,
TenuredObject));
if (!fun)
@ -490,10 +490,10 @@ SendFunctionsToVTune(JSContext *cx, AsmJSModule &module)
method.method_load_address = (void *)start;
method.method_size = unsigned(end - start);
method.line_number_size = 0;
method.line_number_table = NULL;
method.line_number_table = nullptr;
method.class_id = 0;
method.class_file_name = NULL;
method.source_file_name = NULL;
method.class_file_name = nullptr;
method.source_file_name = nullptr;
iJIT_NotifyEvent(iJVM_EVENT_TYPE_METHOD_LOAD_FINISHED, (void *)&method);
}
@ -594,20 +594,20 @@ CreateExportObject(JSContext *cx, HandleObject moduleObj)
gc::AllocKind allocKind = gc::GetGCObjectKind(module.numExportedFunctions());
RootedObject obj(cx, NewBuiltinClassInstance(cx, &JSObject::class_, allocKind));
if (!obj)
return NULL;
return nullptr;
for (unsigned i = 0; i < module.numExportedFunctions(); i++) {
const AsmJSModule::ExportedFunction &func = module.exportedFunction(i);
RootedFunction fun(cx, NewExportedFunction(cx, func, moduleObj, i));
if (!fun)
return NULL;
return nullptr;
JS_ASSERT(func.maybeFieldName() != NULL);
JS_ASSERT(func.maybeFieldName() != nullptr);
RootedId id(cx, NameToId(func.maybeFieldName()));
RootedValue val(cx, ObjectValue(*fun));
if (!DefineNativeProperty(cx, obj, id, val, NULL, NULL, JSPROP_ENUMERATE, 0, 0))
return NULL;
if (!DefineNativeProperty(cx, obj, id, val, nullptr, nullptr, JSPROP_ENUMERATE, 0, 0))
return nullptr;
}
return obj;
@ -659,7 +659,7 @@ js::NewAsmJSModuleFunction(ExclusiveContext *cx, JSFunction *origFun, HandleObje
JSFunction::NATIVE_FUN, NullPtr(), name,
JSFunction::ExtendedFinalizeKind, TenuredObject);
if (!moduleFun)
return NULL;
return nullptr;
moduleFun->setExtendedSlot(MODULE_FUN_SLOT, ObjectValue(*moduleObj));
return moduleFun;

@ -63,16 +63,18 @@ AllocateExecutableMemory(ExclusiveContext *cx, size_t totalBytes)
JS_ASSERT(totalBytes % AsmJSPageSize == 0);
#ifdef XP_WIN
void *p = VirtualAlloc(NULL, totalBytes, MEM_COMMIT, PAGE_EXECUTE_READWRITE);
void *p = VirtualAlloc(nullptr, totalBytes, MEM_COMMIT, PAGE_EXECUTE_READWRITE);
if (!p) {
js_ReportOutOfMemory(cx);
return NULL;
return nullptr;
}
#else // assume Unix
void *p = mmap(NULL, totalBytes, PROT_READ | PROT_WRITE | PROT_EXEC, MAP_PRIVATE | MAP_ANON, -1, 0);
void *p = mmap(nullptr, totalBytes,
PROT_READ | PROT_WRITE | PROT_EXEC, MAP_PRIVATE | MAP_ANON,
-1, 0);
if (p == MAP_FAILED) {
js_ReportOutOfMemory(cx);
return NULL;
return nullptr;
}
#endif
@ -249,7 +251,7 @@ AddressOf(AsmJSImmKind kind, ExclusiveContext *cx)
}
MOZ_ASSUME_UNREACHABLE("Bad AsmJSImmKind");
return NULL;
return nullptr;
}
void
@ -275,17 +277,17 @@ AsmJSModule::staticallyLink(const AsmJSStaticLinkData &linkData, ExclusiveContex
for (size_t i = 0; i < exits_.length(); i++) {
exitIndexToGlobalDatum(i).exit = interpExitTrampoline(exits_[i]);
exitIndexToGlobalDatum(i).fun = NULL;
exitIndexToGlobalDatum(i).fun = nullptr;
}
}
AsmJSModule::AsmJSModule(ScriptSource *scriptSource, uint32_t charsBegin)
: globalArgumentName_(NULL),
importArgumentName_(NULL),
bufferArgumentName_(NULL),
: globalArgumentName_(nullptr),
importArgumentName_(nullptr),
bufferArgumentName_(nullptr),
minHeapLength_(AsmJSAllocationGranularity),
code_(NULL),
operationCallbackExit_(NULL),
code_(nullptr),
operationCallbackExit_(nullptr),
linked_(false),
charsBegin_(charsBegin),
scriptSource_(scriptSource)
@ -364,21 +366,21 @@ const Class AsmJSModuleObject::class_ = {
JS_StrictPropertyStub, /* setProperty */
JS_EnumerateStub,
JS_ResolveStub,
NULL, /* convert */
nullptr, /* convert */
AsmJSModuleObject_finalize,
NULL, /* checkAccess */
NULL, /* call */
NULL, /* hasInstance */
NULL, /* construct */
nullptr, /* checkAccess */
nullptr, /* call */
nullptr, /* hasInstance */
nullptr, /* construct */
AsmJSModuleObject_trace
};
AsmJSModuleObject *
AsmJSModuleObject::create(ExclusiveContext *cx, ScopedJSDeletePtr<AsmJSModule> *module)
{
JSObject *obj = NewObjectWithGivenProto(cx, &AsmJSModuleObject::class_, NULL, NULL);
JSObject *obj = NewObjectWithGivenProto(cx, &AsmJSModuleObject::class_, nullptr, nullptr);
if (!obj)
return NULL;
return nullptr;
obj->setReservedSlot(MODULE_SLOT, PrivateValue(module->forget()));
return &obj->as<AsmJSModuleObject>();

@ -417,7 +417,7 @@ class AsmJSModule
}
ScriptSource *scriptSource() const {
JS_ASSERT(scriptSource_ != NULL);
JS_ASSERT(scriptSource_ != nullptr);
return scriptSource_;
}
uint32_t charsBegin() const {
@ -435,7 +435,7 @@ class AsmJSModule
JS_ASSERT(pod.funcPtrTableAndExitBytes_ == 0);
if (pod.numGlobalVars_ == UINT32_MAX)
return false;
Global g(Global::Variable, NULL);
Global g(Global::Variable, nullptr);
g.pod.u.var.initKind_ = Global::InitConstant;
g.pod.u.var.init.constant_ = v;
g.pod.u.var.index_ = *globalIndex = pod.numGlobalVars_++;

@ -147,7 +147,7 @@ InnermostAsmJSActivation()
{
PerThreadData *threadData = TlsPerThreadData.get();
if (!threadData)
return NULL;
return nullptr;
return threadData->asmJSActivationStackFromOwnerThread();
}
@ -157,7 +157,7 @@ RuntimeForCurrentThread()
{
PerThreadData *threadData = TlsPerThreadData.get();
if (!threadData)
return NULL;
return nullptr;
return threadData->runtimeFromMainThread();
}
@ -281,7 +281,7 @@ LookupHeapAccess(const AsmJSModule &module, uint8_t *pc)
size_t targetOffset = pc - module.codeBase();
if (module.numHeapAccesses() == 0)
return NULL;
return nullptr;
size_t low = 0;
size_t high = module.numHeapAccesses() - 1;
@ -300,7 +300,7 @@ LookupHeapAccess(const AsmJSModule &module, uint8_t *pc)
if (targetOffset == module.heapAccess(high).offset())
return &module.heapAccess(high);
return NULL;
return nullptr;
}
#endif
@ -375,10 +375,11 @@ enum { REG_EIP = 14 };
#if defined(ANDROID) && defined(MOZ_LINKER)
// Apparently, on some Android systems, the signal handler is always passed
// NULL as the faulting address. This would cause the asm.js signal handler to
// think that a safe out-of-bounds access was a NULL-deref. This brokenness is
// already detected by ElfLoader (enabled by MOZ_LINKER), so reuse that check
// to disable asm.js compilation on systems where the signal handler is broken.
// nullptr as the faulting address. This would cause the asm.js signal handler
// to think that a safe out-of-bounds access was a nullptr-deref. This
// brokenness is already detected by ElfLoader (enabled by MOZ_LINKER), so
// reuse that check to disable asm.js compilation on systems where the signal
// handler is broken.
extern "C" MFBT_API bool IsSignalHandlingBroken();
#else
static bool IsSignalHandlingBroken() { return false; }
@ -798,12 +799,12 @@ AsmJSMachExceptionHandlerThread(void *threadArg)
MACH_MSG_TIMEOUT_NONE, MACH_PORT_NULL);
}
return NULL;
return nullptr;
}
AsmJSMachExceptionHandler::AsmJSMachExceptionHandler()
: installed_(false),
thread_(NULL),
thread_(nullptr),
port_(MACH_PORT_NULL)
{}
@ -822,7 +823,7 @@ AsmJSMachExceptionHandler::uninstall()
MOZ_CRASH();
installed_ = false;
}
if (thread_ != NULL) {
if (thread_ != nullptr) {
// Break the handler thread out of the mach_msg loop.
mach_msg_header_t msg;
msg.msgh_bits = MACH_MSGH_BITS(MACH_MSG_TYPE_COPY_SEND, 0);
@ -839,8 +840,8 @@ AsmJSMachExceptionHandler::uninstall()
}
// Wait for the handler thread to complete before deallocating the port.
pthread_join(thread_, NULL);
thread_ = NULL;
pthread_join(thread_, nullptr);
thread_ = nullptr;
}
if (port_ != MACH_PORT_NULL) {
DebugOnly<kern_return_t> kret = mach_port_destroy(mach_task_self(), port_);
@ -865,7 +866,7 @@ AsmJSMachExceptionHandler::install(JSRuntime *rt)
goto error;
// Create a thread to block on reading port_.
if (pthread_create(&thread_, NULL, AsmJSMachExceptionHandlerThread, rt))
if (pthread_create(&thread_, nullptr, AsmJSMachExceptionHandlerThread, rt))
goto error;
// Direct exceptions on this thread to port_ (and thus our handler thread).
@ -986,7 +987,7 @@ AsmJSFaultHandler(int signum, siginfo_t *info, void *context)
if (sPrevHandler.sa_flags & SA_SIGINFO)
sPrevHandler.sa_sigaction(signum, info, context);
else if (sPrevHandler.sa_handler == SIG_DFL || sPrevHandler.sa_handler == SIG_IGN)
sigaction(signum, &sPrevHandler, NULL);
sigaction(signum, &sPrevHandler, nullptr);
else
sPrevHandler.sa_handler(signum);
}

@ -48,7 +48,7 @@ BacktrackingAllocator::init()
LiveInterval *hotcodeInterval = new LiveInterval(0);
LBlock *backedge = NULL;
LBlock *backedge = nullptr;
for (size_t i = 0; i < graph.numBlocks(); i++) {
LBlock *block = graph.getBlock(i);
@ -455,7 +455,7 @@ BacktrackingAllocator::processInterval(LiveInterval *interval)
}
fixed = false;
conflict = NULL;
conflict = nullptr;
// If we want, but do not require an interval to be in a specific
// register, only look at that register for allocating and evict
@ -528,13 +528,13 @@ BacktrackingAllocator::processGroup(VirtualRegisterGroup *group)
for (size_t attempt = 0;; attempt++) {
// Search for any available register which the group can be allocated to.
fixed = false;
conflict = NULL;
conflict = nullptr;
for (size_t i = 0; i < AnyRegister::Total; i++) {
bool success;
if (!tryAllocateGroupRegister(registers[i], group, &success, &fixed, &conflict))
return false;
if (success) {
conflict = NULL;
conflict = nullptr;
break;
}
}
@ -631,7 +631,7 @@ BacktrackingAllocator::tryAllocateGroupRegister(PhysicalRegister &r, VirtualRegi
return true;
bool allocatable = true;
LiveInterval *conflicting = NULL;
LiveInterval *conflicting = nullptr;
for (size_t i = 0; i < group->registers.length(); i++) {
VirtualRegister &reg = vregs[group->registers[i]];
@ -769,7 +769,7 @@ BacktrackingAllocator::distributeUses(LiveInterval *interval,
iter++)
{
CodePosition pos = iter->pos;
LiveInterval *addInterval = NULL;
LiveInterval *addInterval = nullptr;
for (size_t i = 0; i < newIntervals.length(); i++) {
LiveInterval *newInterval = newIntervals[i];
if (newInterval->covers(pos)) {
@ -1462,7 +1462,7 @@ BacktrackingAllocator::trySplitAcrossHotcode(LiveInterval *interval, bool *succe
// If this interval has portions that are hot and portions that are cold,
// split it at the boundaries between hot and cold code.
const LiveInterval::Range *hotRange = NULL;
const LiveInterval::Range *hotRange = nullptr;
for (size_t i = 0; i < interval->numRanges(); i++) {
AllocatedRange range(interval, interval->getRange(i)), existing;
@ -1488,7 +1488,7 @@ BacktrackingAllocator::trySplitAcrossHotcode(LiveInterval *interval, bool *succe
return true;
LiveInterval *hotInterval = new LiveInterval(interval->vreg(), 0);
LiveInterval *preInterval = NULL, *postInterval = NULL;
LiveInterval *preInterval = nullptr, *postInterval = nullptr;
// Accumulate the ranges of hot and cold code in the interval. Note that
// we are only comparing with the single hot range found, so the cold code

@ -79,7 +79,7 @@ class BacktrackingVirtualRegister : public VirtualRegister
canonicalSpill_ = alloc;
}
const LAllocation *canonicalSpill() const {
return canonicalSpill_.isUse() ? NULL : &canonicalSpill_;
return canonicalSpill_.isUse() ? nullptr : &canonicalSpill_;
}
void setCanonicalSpillExclude(CodePosition pos) {
@ -111,11 +111,11 @@ class BacktrackingAllocator : public LiveRangeAllocator<BacktrackingVirtualRegis
VirtualRegisterGroup *group;
QueueItem(LiveInterval *interval, size_t priority)
: interval(interval), group(NULL), priority_(priority)
: interval(interval), group(nullptr), priority_(priority)
{}
QueueItem(VirtualRegisterGroup *group, size_t priority)
: interval(NULL), group(group), priority_(priority)
: interval(nullptr), group(group), priority_(priority)
{}
static size_t priority(const QueueItem &v) {
@ -134,7 +134,7 @@ class BacktrackingAllocator : public LiveRangeAllocator<BacktrackingVirtualRegis
const LiveInterval::Range *range;
AllocatedRange()
: interval(NULL), range(NULL)
: interval(nullptr), range(nullptr)
{}
AllocatedRange(LiveInterval *interval, const LiveInterval::Range *range)
@ -216,7 +216,7 @@ class BacktrackingAllocator : public LiveRangeAllocator<BacktrackingVirtualRegis
bool minimalDef(const LiveInterval *interval, LInstruction *ins);
bool minimalUse(const LiveInterval *interval, LInstruction *ins);
bool minimalInterval(const LiveInterval *interval, bool *pfixed = NULL);
bool minimalInterval(const LiveInterval *interval, bool *pfixed = nullptr);
// Heuristic methods.

@ -67,7 +67,7 @@ jit::Bailout(BailoutStack *sp, BaselineBailoutInfo **bailoutInfo)
JS_ASSERT(bailoutInfo);
JSContext *cx = GetIonContext()->cx;
// We don't have an exit frame.
cx->mainThread().ionTop = NULL;
cx->mainThread().ionTop = nullptr;
JitActivationIterator jitActivations(cx->runtime());
IonBailoutIterator iter(jitActivations, sp);
JitActivation *activation = jitActivations.activation()->asJit();
@ -76,12 +76,12 @@ jit::Bailout(BailoutStack *sp, BaselineBailoutInfo **bailoutInfo)
JS_ASSERT(IsBaselineEnabled(cx));
*bailoutInfo = NULL;
*bailoutInfo = nullptr;
uint32_t retval = BailoutIonToBaseline(cx, activation, iter, false, bailoutInfo);
JS_ASSERT(retval == BAILOUT_RETURN_OK ||
retval == BAILOUT_RETURN_FATAL_ERROR ||
retval == BAILOUT_RETURN_OVERRECURSED);
JS_ASSERT_IF(retval == BAILOUT_RETURN_OK, *bailoutInfo != NULL);
JS_ASSERT_IF(retval == BAILOUT_RETURN_OK, *bailoutInfo != nullptr);
if (retval != BAILOUT_RETURN_OK)
EnsureExitFrame(iter.jsFrame());
@ -98,7 +98,7 @@ jit::InvalidationBailout(InvalidationBailoutStack *sp, size_t *frameSizeOut,
JSContext *cx = GetIonContext()->cx;
// We don't have an exit frame.
cx->mainThread().ionTop = NULL;
cx->mainThread().ionTop = nullptr;
JitActivationIterator jitActivations(cx->runtime());
IonBailoutIterator iter(jitActivations, sp);
JitActivation *activation = jitActivations.activation()->asJit();
@ -110,12 +110,12 @@ jit::InvalidationBailout(InvalidationBailoutStack *sp, size_t *frameSizeOut,
JS_ASSERT(IsBaselineEnabled(cx));
*bailoutInfo = NULL;
*bailoutInfo = nullptr;
uint32_t retval = BailoutIonToBaseline(cx, activation, iter, true, bailoutInfo);
JS_ASSERT(retval == BAILOUT_RETURN_OK ||
retval == BAILOUT_RETURN_FATAL_ERROR ||
retval == BAILOUT_RETURN_OVERRECURSED);
JS_ASSERT_IF(retval == BAILOUT_RETURN_OK, *bailoutInfo != NULL);
JS_ASSERT_IF(retval == BAILOUT_RETURN_OK, *bailoutInfo != nullptr);
if (retval != BAILOUT_RETURN_OK) {
IonJSFrameLayout *frame = iter.jsFrame();
@ -124,7 +124,7 @@ jit::InvalidationBailout(InvalidationBailoutStack *sp, size_t *frameSizeOut,
IonSpew(IonSpew_Invalidate, " orig frameSize %u", unsigned(frame->prevFrameLocalSize()));
IonSpew(IonSpew_Invalidate, " orig ra %p", (void *) frame->returnAddress());
frame->replaceCalleeToken(NULL);
frame->replaceCalleeToken(nullptr);
EnsureExitFrame(frame);
IonSpew(IonSpew_Invalidate, " new calleeToken %p", (void *) frame->calleeToken());
@ -159,18 +159,18 @@ jit::ExceptionHandlerBailout(JSContext *cx, const InlineFrameIterator &frame,
{
JS_ASSERT(cx->isExceptionPending());
cx->mainThread().ionTop = NULL;
cx->mainThread().ionTop = nullptr;
JitActivationIterator jitActivations(cx->runtime());
IonBailoutIterator iter(jitActivations, frame.frame());
JitActivation *activation = jitActivations.activation()->asJit();
*bailoutInfo = NULL;
*bailoutInfo = nullptr;
uint32_t retval = BailoutIonToBaseline(cx, activation, iter, true, bailoutInfo, &excInfo);
JS_ASSERT(retval == BAILOUT_RETURN_OK ||
retval == BAILOUT_RETURN_FATAL_ERROR ||
retval == BAILOUT_RETURN_OVERRECURSED);
JS_ASSERT((retval == BAILOUT_RETURN_OK) == (*bailoutInfo != NULL));
JS_ASSERT((retval == BAILOUT_RETURN_OK) == (*bailoutInfo != nullptr));
return retval;
}

@ -86,8 +86,8 @@ struct BaselineStackBuilder
bufferTotal_(initialSize),
bufferAvail_(0),
bufferUsed_(0),
buffer_(NULL),
header_(NULL),
buffer_(nullptr),
header_(nullptr),
framePushed_(0)
{
JS_ASSERT(bufferTotal_ >= HeaderSize());
@ -114,15 +114,15 @@ struct BaselineStackBuilder
header_->valueR0 = UndefinedValue();
header_->setR1 = 0;
header_->valueR1 = UndefinedValue();
header_->resumeFramePtr = NULL;
header_->resumeAddr = NULL;
header_->monitorStub = NULL;
header_->resumeFramePtr = nullptr;
header_->resumeAddr = nullptr;
header_->monitorStub = nullptr;
header_->numFrames = 0;
return true;
}
bool enlarge() {
JS_ASSERT(buffer_ != NULL);
JS_ASSERT(buffer_ != nullptr);
if (bufferTotal_ & mozilla::tl::MulOverflowMask<2>::value)
return false;
size_t newSize = bufferTotal_ * 2;
@ -149,7 +149,7 @@ struct BaselineStackBuilder
BaselineBailoutInfo *takeBuffer() {
JS_ASSERT(header_ == reinterpret_cast<BaselineBailoutInfo *>(buffer_));
buffer_ = NULL;
buffer_ = nullptr;
return header_;
}
@ -161,7 +161,7 @@ struct BaselineStackBuilder
return framePushed_;
}
bool subtract(size_t size, const char *info=NULL) {
bool subtract(size_t size, const char *info = nullptr) {
// enlarge the buffer if need be.
while (size > bufferAvail_) {
if (!enlarge())
@ -319,7 +319,7 @@ struct BaselineStackBuilder
// frame is meaningless, since Ion saves all registers before calling other ion
// frames, and the entry frame saves all registers too.
if (type == IonFrame_OptimizedJS || type == IonFrame_Entry)
return NULL;
return nullptr;
// BaselineStub - Baseline calling into Ion.
// PrevFramePtr needs to point to the BaselineStubFrame's saved frame pointer.
@ -353,7 +353,7 @@ struct BaselineStackBuilder
// If the frame preceding the rectifier is an OptimizedJS frame, then once again
// the frame pointer does not matter.
if (priorType == IonFrame_OptimizedJS)
return NULL;
return nullptr;
// Otherwise, the frame preceding the rectifier is a BaselineStub frame.
// let X = STACK_START_ADDR + IonJSFrameLayout::Size() + PREV_FRAME_SIZE
@ -467,7 +467,7 @@ InitFromBailout(JSContext *cx, HandleScript caller, jsbytecode *callerPC,
AutoValueVector &startFrameFormals, MutableHandleFunction nextCallee,
jsbytecode **callPC, const ExceptionBailoutInfo *excInfo)
{
// If excInfo is non-NULL, we are bailing out to a catch or finally block
// If excInfo is non-nullptr, we are bailing out to a catch or finally block
// and this is the frame where we will resume. Usually the expression stack
// should be empty in this case but there can be iterators on the stack.
uint32_t exprStackSlots;
@ -536,8 +536,8 @@ InitFromBailout(JSContext *cx, HandleScript caller, jsbytecode *callerPC,
}
// Initialize BaselineFrame's scopeChain and argsObj
JSObject *scopeChain = NULL;
ArgumentsObject *argsObj = NULL;
JSObject *scopeChain = nullptr;
ArgumentsObject *argsObj = nullptr;
BailoutKind bailoutKind = iter.bailoutKind();
if (bailoutKind == Bailout_ArgumentCheck) {
// Temporary hack -- skip the (unused) scopeChain, because it could be
@ -566,8 +566,8 @@ InitFromBailout(JSContext *cx, HandleScript caller, jsbytecode *callerPC,
// Get scope chain from function or script.
if (fun) {
// If pcOffset == 0, we may have to push a new call object, so
// we leave scopeChain NULL and enter baseline code before the
// prologue.
// we leave scopeChain nullptr and enter baseline code before
// the prologue.
if (iter.pcOffset() != 0 || iter.resumeAfter())
scopeChain = fun->environment();
} else {
@ -822,8 +822,8 @@ InitFromBailout(JSContext *cx, HandleScript caller, jsbytecode *callerPC,
// If this was the last inline frame, or we are bailing out to a catch or
// finally block in this frame, then unpacking is almost done.
if (!iter.moreFrames() || excInfo) {
// Last frame, so PC for call to next frame is set to NULL.
*callPC = NULL;
// Last frame, so PC for call to next frame is set to nullptr.
*callPC = nullptr;
// If the bailout was a resumeAfter, and the opcode is monitored,
// then the bailed out state should be in a position to enter
@ -917,10 +917,10 @@ InitFromBailout(JSContext *cx, HandleScript caller, jsbytecode *callerPC,
IonSpew(IonSpew_BaselineBailouts, " Adjusted framesize -= %d: %d",
int(sizeof(Value) * numUnsynced), int(frameSize));
// If scopeChain is NULL, then bailout is occurring during argument check.
// If scopeChain is nullptr, then bailout is occurring during argument check.
// In this case, resume into the prologue.
uint8_t *opReturnAddr;
if (scopeChain == NULL) {
if (scopeChain == nullptr) {
// Global and eval scripts expect the scope chain in R1, so only
// resume into the prologue for function scripts.
JS_ASSERT(fun);
@ -942,7 +942,7 @@ InitFromBailout(JSContext *cx, HandleScript caller, jsbytecode *callerPC,
// argument check, then:
// Top SPS profiler entry would be for callee frame.
// Ion would already have pushed an SPS entry for this frame.
// The pc for this entry would be set to NULL.
// The pc for this entry would be set to nullptr.
// Make sure it's set to script->pc.
if (cx->runtime()->spsProfiler.enabled()) {
if (caller && bailoutKind == Bailout_ArgumentCheck) {
@ -1182,8 +1182,8 @@ jit::BailoutIonToBaseline(JSContext *cx, JitActivation *activation, IonBailoutIt
bool invalidate, BaselineBailoutInfo **bailoutInfo,
const ExceptionBailoutInfo *excInfo)
{
JS_ASSERT(bailoutInfo != NULL);
JS_ASSERT(*bailoutInfo == NULL);
JS_ASSERT(bailoutInfo != nullptr);
JS_ASSERT(*bailoutInfo == nullptr);
#if JS_TRACE_LOGGING
TraceLogging::defaultLogger()->log(TraceLogging::INFO_ENGINE_BASELINE);
@ -1266,7 +1266,7 @@ jit::BailoutIonToBaseline(JSContext *cx, JitActivation *activation, IonBailoutIt
// Reconstruct baseline frames using the builder.
RootedScript caller(cx);
jsbytecode *callerPC = NULL;
jsbytecode *callerPC = nullptr;
RootedFunction fun(cx, callee);
RootedScript scr(cx, iter.script());
AutoValueVector startFrameFormals(cx);
@ -1284,11 +1284,11 @@ jit::BailoutIonToBaseline(JSContext *cx, JitActivation *activation, IonBailoutIt
// pass excInfo to InitFromBailout and don't unpack any other frames.
bool handleException = (excInfo && excInfo->frameNo == frameNo);
jsbytecode *callPC = NULL;
RootedFunction nextCallee(cx, NULL);
jsbytecode *callPC = nullptr;
RootedFunction nextCallee(cx, nullptr);
if (!InitFromBailout(cx, caller, callerPC, fun, scr, iter.ionScript(),
snapIter, invalidate, builder, startFrameFormals,
&nextCallee, &callPC, handleException ? excInfo : NULL))
&nextCallee, &callPC, handleException ? excInfo : nullptr))
{
return BAILOUT_RETURN_FATAL_ERROR;
}
@ -1417,10 +1417,10 @@ jit::FinishBailoutToBaseline(BaselineBailoutInfo *bailoutInfo)
// Free the bailout buffer.
js_free(bailoutInfo);
bailoutInfo = NULL;
bailoutInfo = nullptr;
// Ensure the frame has a call object if it needs one. If the scope chain
// is NULL, we will enter baseline code at the prologue so no need to do
// is nullptr, we will enter baseline code at the prologue so no need to do
// anything in that case.
BaselineFrame *topFrame = GetTopBaselineFrame(cx);
if (topFrame->scopeChain() && !EnsureHasScopeObjects(cx, topFrame))
@ -1428,8 +1428,8 @@ jit::FinishBailoutToBaseline(BaselineBailoutInfo *bailoutInfo)
// Create arguments objects for bailed out frames, to maintain the invariant
// that script->needsArgsObj() implies frame->hasArgsObj().
RootedScript innerScript(cx, NULL);
RootedScript outerScript(cx, NULL);
RootedScript innerScript(cx, nullptr);
RootedScript outerScript(cx, nullptr);
JS_ASSERT(cx->currentlyRunningInJit());
IonFrameIterator iter(cx->mainThread().ionTop);

@ -235,10 +235,10 @@ BaselineCompiler::emitPrologue()
// Handle scope chain pre-initialization (in case GC gets run
// during stack check). For global and eval scripts, the scope
// chain is in R1. For function scripts, the scope chain is in
// the callee, NULL is stored for now so that GC doesn't choke on
// a bogus ScopeChain value in the frame.
// the callee, nullptr is stored for now so that GC doesn't choke
// on a bogus ScopeChain value in the frame.
if (function())
masm.storePtr(ImmPtr(NULL), frame.addressOfScopeChain());
masm.storePtr(ImmPtr(nullptr), frame.addressOfScopeChain());
else
masm.storePtr(R1.scratchReg(), frame.addressOfScopeChain());
@ -1822,7 +1822,7 @@ BaselineCompiler::emit_JSOP_GETALIASEDVAR()
Address address = getScopeCoordinateAddress(R0.scratchReg());
masm.loadValue(address, R0);
ICTypeMonitor_Fallback::Compiler compiler(cx, (ICMonitoredFallbackStub *) NULL);
ICTypeMonitor_Fallback::Compiler compiler(cx, (ICMonitoredFallbackStub *) nullptr);
if (!emitOpIC(compiler.getStub(&stubSpace_)))
return false;
@ -2811,7 +2811,7 @@ DoCreateRestParameter(JSContext *cx, BaselineFrame *frame, MutableHandleValue re
unsigned numRest = numActuals > numFormals ? numActuals - numFormals : 0;
Value *rest = frame->argv() + numFormals;
JSObject *obj = NewDenseCopiedArray(cx, numRest, rest, NULL);
JSObject *obj = NewDenseCopiedArray(cx, numRest, rest, nullptr);
if (!obj)
return false;
types::FixRestArgumentsType(cx, obj);

@ -158,7 +158,7 @@ BaselineFrame::initForOsr(StackFrame *fp, uint32_t numStackValues)
// In debug mode there's always at least 1 ICEntry (since there are always
// debug prologue/epilogue calls).
IonFrameIterator iter(cx->mainThread().ionTop);
JS_ASSERT(iter.returnAddress() == NULL);
JS_ASSERT(iter.returnAddress() == nullptr);
BaselineScript *baseline = fp->script()->baselineScript();
iter.current()->setReturnAddress(baseline->returnAddressForIC(baseline->icEntry(0)));

@ -125,7 +125,7 @@ class BaselineFrame
return CalleeTokenToFunction(calleeToken());
}
JSFunction *maybeFun() const {
return isFunctionFrame() ? fun() : NULL;
return isFunctionFrame() ? fun() : nullptr;
}
JSFunction *callee() const {
return CalleeTokenToFunction(calleeToken());
@ -217,7 +217,7 @@ class BaselineFrame
return *blockChain_;
}
StaticBlockObject *maybeBlockChain() const {
return hasBlockChain() ? blockChain_ : NULL;
return hasBlockChain() ? blockChain_ : nullptr;
}
void setBlockChain(StaticBlockObject &block) {
flags_ |= HAS_BLOCKCHAIN;
@ -225,7 +225,7 @@ class BaselineFrame
}
void setBlockChainNull() {
JS_ASSERT(!hasBlockChain());
blockChain_ = NULL;
blockChain_ = nullptr;
}
StaticBlockObject **addressOfBlockChain() {
return &blockChain_;
@ -285,7 +285,7 @@ class BaselineFrame
}
void *maybeHookData() const {
return hasHookData() ? hookData_ : NULL;
return hasHookData() ? hookData_ : nullptr;
}
void setHookData(void *v) {