Bug 1033126 - Convert xpcshell tests to use mozlog.structured format for logging; r=ted

This commit is contained in:
Chris Manchester 2014-10-22 15:53:42 -04:00
parent 5a2067fe47
commit fa3071b32b
13 changed files with 495 additions and 772 deletions

View File

@ -88,8 +88,8 @@ function test(func, msg) {
function run_test() {
tests.forEach(function(t) {
_log("test_info", {source_file: t.filename,
diagnostic: "test group: " + t.msg});
do_print("test group: " + t.msg,
{source_file: t.filename});
t.func();
});
};

View File

@ -29,7 +29,7 @@ var server = null, sock = null;
*/
function TestServer() {
this.listener = ServerSocket(-1, true, -1);
do_print('server: listening on', this.listener.port);
do_print('server: listening on ' + this.listener.port);
this.listener.asyncListen(this);
this.binaryInput = null;
@ -149,4 +149,4 @@ function run_test() {
sock.send(ok.buffer, 0, 2);
sock.send(ok.buffer, 2, 3);
};
}
}

View File

@ -91,7 +91,7 @@ function is_content() {
*/
function TestServer() {
this.listener = ServerSocket(-1, true, -1);
do_print('server: listening on', this.listener.port);
do_print('server: listening on ' + this.listener.port);
this.listener.asyncListen(this);
this.binaryInput = null;

View File

@ -25,7 +25,7 @@ function TestServer() {
// any port (-1), loopback only (true), default backlog (-1)
this.listener = ServerSocket(-1, true, -1);
this.port = this.listener.port;
do_print('server: listening on', this.port);
do_print('server: listening on ' + this.port);
this.listener.asyncListen(this);
}

View File

@ -236,8 +236,7 @@ this.encryptPayload = function encryptPayload(cleartext) {
this.add_identity_test = function(test, testFunction) {
function note(what) {
let msg = "running test " + testFunction.name + " with " + what + " identity manager";
test._log("test_info",
{_message: "TEST-INFO | | " + msg + "\n"});
test.do_print(msg);
}
let ns = {};
Cu.import("resource://services-sync/service.js", ns);

View File

@ -105,7 +105,9 @@ StructuredLogger.prototype.suiteEnd = function () {
};
/**
* Unstructured logging functions
* Unstructured logging functions. The "extra" parameter can always be used to
* log suite specific data. If a "stack" field is provided it is logged at the
* top level of the data object for the benefit of mozlog's formatters.
*/
StructuredLogger.prototype.log = function (level, message, extra=null) {
let data = {
@ -115,6 +117,9 @@ StructuredLogger.prototype.log = function (level, message, extra=null) {
if (extra !== null) {
data.extra = extra;
if ("stack" in extra) {
data.stack = extra.stack;
}
}
this._logData("log", data);

View File

@ -13,8 +13,6 @@
var _quit = false;
var _passed = true;
var _tests_pending = 0;
var _passedChecks = 0, _falsePassedChecks = 0;
var _todoChecks = 0;
var _cleanupFunctions = [];
var _pendingTimers = [];
var _profileInitialized = false;
@ -36,23 +34,19 @@ let Assert = new AssertCls(function(err, message, stack) {
}
});
let _log = function (action, params) {
let _add_params = function (params) {
if (typeof _XPCSHELL_PROCESS != "undefined") {
params.process = _XPCSHELL_PROCESS;
params.xpcshell_process = _XPCSHELL_PROCESS;
}
params.action = action;
params._time = Date.now();
dump("\n" + JSON.stringify(params) + "\n");
};
let _dumpLog = function (raw_msg) {
dump("\n" + raw_msg + "\n");
}
function _dump(str) {
let start = /^TEST-/.test(str) ? "\n" : "";
if (typeof _XPCSHELL_PROCESS == "undefined") {
dump(start + str);
} else {
dump(start + _XPCSHELL_PROCESS + ": " + str);
}
}
let _LoggerClass = Components.utils.import("resource://testing-common/StructuredLog.jsm", null).StructuredLogger;
let _testLogger = new _LoggerClass("xpcshell/head.js", _dumpLog, [_add_params]);
// Disable automatic network detection, so tests work correctly when
// not connected to a network.
@ -181,8 +175,7 @@ function _do_main() {
if (_quit)
return;
_log("test_info",
{_message: "TEST-INFO | (xpcshell/head.js) | running event loop\n"});
_testLogger.info("running event loop");
var thr = Components.classes["@mozilla.org/thread-manager;1"]
.getService().currentThread;
@ -195,40 +188,11 @@ function _do_main() {
}
function _do_quit() {
_log("test_info",
{_message: "TEST-INFO | (xpcshell/head.js) | exiting test\n"});
_testLogger.info("exiting test");
_Promise.Debugging.flushUncaughtErrors();
_quit = true;
}
function _format_exception_stack(stack) {
if (typeof stack == "object" && stack.caller) {
let frame = stack;
let strStack = "";
while (frame != null) {
strStack += frame + "\n";
frame = frame.caller;
}
stack = strStack;
}
// frame is of the form "fname@file:line"
let frame_regexp = new RegExp("(.*)@(.*):(\\d*)", "g");
return stack.split("\n").reduce(function(stack_msg, frame) {
if (frame) {
let parts = frame_regexp.exec(frame);
if (parts) {
let [ _, func, file, line ] = parts;
return stack_msg + "JS frame :: " + file + " :: " +
(func || "anonymous") + " :: line " + line + "\n";
}
else { /* Could be a -e (command line string) style location. */
return stack_msg + "JS frame :: " + frame + "\n";
}
}
return stack_msg;
}, "");
}
/**
* Overrides idleService with a mock. Idle is commonly used for maintenance
* tasks, thus if a test uses a service that requires the idle service, it will
@ -383,8 +347,11 @@ function _execute_test() {
_Promise.Debugging.addUncaughtErrorObserver(function observer({message, date, fileName, stack, lineNumber}) {
let text = " A promise chain failed to handle a rejection: " +
message + " - rejection date: " + date;
_log_message_with_stack("test_unexpected_fail",
text, stack, fileName);
_testLogger.error(text,
{
stack: _format_stack(stack),
source_file: fileName
});
});
// _HEAD_FILES is dynamically defined by <runxpcshelltests.py>.
@ -411,21 +378,20 @@ function _execute_test() {
// possible that this will mask an NS_ERROR_ABORT that happens after a
// do_check failure though.
if (!_quit || e != Components.results.NS_ERROR_ABORT) {
let msgObject = {};
let extra = {};
if (e.fileName) {
msgObject.source_file = e.fileName;
extra.source_file = e.fileName;
if (e.lineNumber) {
msgObject.line_number = e.lineNumber;
extra.line_number = e.lineNumber;
}
} else {
msgObject.source_file = "xpcshell/head.js";
extra.source_file = "xpcshell/head.js";
}
msgObject.diagnostic = _exception_message(e);
let message = _exception_message(e);
if (e.stack) {
msgObject.diagnostic += " - See following stack:\n";
msgObject.stack = _format_exception_stack(e.stack);
extra.stack = _format_stack(e.stack);
}
_log("test_unexpected_fail", msgObject);
_testLogger.error(message, extra);
}
}
@ -445,8 +411,11 @@ function _execute_test() {
} else if (ex.fileName) {
filename = ex.fileName;
}
_log_message_with_stack("test_unexpected_fail",
ex, stack, filename);
_testLogger.error(_exception_message(ex),
{
stack: _format_stack(stack),
source_file: filename
});
};
let func;
@ -477,26 +446,6 @@ function _execute_test() {
if (!_passed)
return;
var truePassedChecks = _passedChecks - _falsePassedChecks;
if (truePassedChecks > 0) {
_log("test_pass",
{_message: "TEST-PASS | (xpcshell/head.js) | " + truePassedChecks + " (+ " +
_falsePassedChecks + ") check(s) passed\n",
source_file: _TEST_FILE,
passed_checks: truePassedChecks});
_log("test_info",
{_message: "TEST-INFO | (xpcshell/head.js) | " + _todoChecks +
" check(s) todo\n",
source_file: _TEST_FILE,
todo_checks: _todoChecks});
} else {
// ToDo: switch to TEST-UNEXPECTED-FAIL when all tests have been updated. (Bug 496443)
_log("test_info",
{_message: "TEST-INFO | (xpcshell/head.js) | No (+ " + _falsePassedChecks +
") checks actually run\n",
source_file: _TEST_FILE});
}
}
/**
@ -505,25 +454,21 @@ function _execute_test() {
* @param aFiles Array of files to load.
*/
function _load_files(aFiles) {
function loadTailFile(element, index, array) {
function load_file(element, index, array) {
try {
load(element);
} catch (e if e instanceof SyntaxError) {
_log("javascript_error",
{_message: "TEST-UNEXPECTED-FAIL | (xpcshell/head.js) | Source file " + element + " contains SyntaxError",
diagnostic: _exception_message(e),
source_file: element,
stack: _format_exception_stack(e.stack)});
} catch (e) {
_log("javascript_error",
{_message: "TEST-UNEXPECTED-FAIL | (xpcshell/head.js) | Source file " + element + " contains an error",
diagnostic: _exception_message(e),
source_file: element,
stack: e.stack ? _format_exception_stack(e.stack) : null});
let extra = {
source_file: element
}
if (e.stack) {
extra.stack = _format_stack(e.stack);
}
_testLogger.error(_exception_message(e), extra);
}
}
aFiles.forEach(loadTailFile);
aFiles.forEach(load_file);
}
function _wrap_with_quotes_if_necessary(val) {
@ -535,13 +480,10 @@ function _wrap_with_quotes_if_necessary(val) {
/**
* Prints a message to the output log.
*/
function do_print(msg) {
var caller_stack = Components.stack.caller;
function do_print(msg, data) {
msg = _wrap_with_quotes_if_necessary(msg);
_log("test_info",
{source_file: caller_stack.filename,
diagnostic: msg});
data = data ? data : null;
_testLogger.info(msg, data);
}
/**
@ -575,16 +517,13 @@ function do_execute_soon(callback, aName) {
// possible that this will mask an NS_ERROR_ABORT that happens after a
// do_check failure though.
if (!_quit || e != Components.results.NS_ERROR_ABORT) {
if (e.stack) {
_log("javascript_error",
{source_file: "xpcshell/head.js",
diagnostic: _exception_message(e) + " - See following stack:",
stack: _format_exception_stack(e.stack)});
} else {
_log("javascript_error",
{source_file: "xpcshell/head.js",
diagnostic: _exception_message(e)});
}
let stack = e.stack ? _format_stack(e.stack) : null;
_testLogger.testStatus(_TEST_NAME,
funcName,
'ERROR',
'OK',
_exception_message(e),
stack);
_do_quit();
}
}
@ -613,9 +552,16 @@ function do_throw(error, stack) {
else if (error.fileName)
filename = error.fileName;
_log_message_with_stack("test_unexpected_fail",
error, stack, filename);
_testLogger.error(_exception_message(error),
{
source_file: filename,
stack: _format_stack(stack)
});
_abort_failed_test();
}
function _abort_failed_test() {
// Called to abort the test run after all failures are logged.
_passed = false;
_do_quit();
throw Components.results.NS_ERROR_ABORT;
@ -635,17 +581,6 @@ function _format_stack(stack) {
return _Task.Debugging.generateReadableStack(normalized, " ");
}
function do_throw_todo(text, stack) {
if (!stack)
stack = Components.stack.caller;
_passed = false;
_log_message_with_stack("test_unexpected_pass",
text, stack, stack.filename);
_do_quit();
throw Components.results.NS_ERROR_ABORT;
}
// Make a nice display string from an object that behaves
// like Error
function _exception_message(ex) {
@ -669,39 +604,27 @@ function _exception_message(ex) {
return "" + ex;
}
function _log_message_with_stack(action, ex, stack, filename, text) {
if (stack) {
_log(action,
{diagnostic: (text ? text : "") +
_exception_message(ex) +
" - See following stack:",
source_file: filename,
stack: _format_stack(stack)});
} else {
_log(action,
{diagnostic: (text ? text : "") +
_exception_message(ex),
source_file: filename});
}
}
function do_report_unexpected_exception(ex, text) {
var caller_stack = Components.stack.caller;
let filename = Components.stack.caller.filename;
text = text ? text + " - " : "";
_passed = false;
_log_message_with_stack("test_unexpected_fail", ex, ex.stack || "",
caller_stack.filename, text + "Unexpected exception ");
_testLogger.error(text + "Unexpected exception " + _exception_message(ex),
{
source_file: filename,
stack: _format_stack(ex.stack)
});
_do_quit();
throw Components.results.NS_ERROR_ABORT;
}
function do_note_exception(ex, text) {
var caller_stack = Components.stack.caller;
text = text ? text + " - " : "";
_log_message_with_stack("test_info", ex, ex.stack,
caller_stack.filename, text + "Swallowed exception ");
let filename = Components.stack.caller.filename;
_testLogger.info(text + "Swallowed exception " + _exception_message(ex),
{
source_file: filename,
stack: _format_stack(ex.stack)
});
}
function _do_check_neq(left, right, stack, todo) {
@ -726,29 +649,46 @@ function do_report_result(passed, text, stack, todo) {
while (stack.filename.contains("head.js") && stack.caller) {
stack = stack.caller;
}
let name = _gRunningTest ? _gRunningTest.name : stack.name;
let message;
if (name) {
message = "[" + name + " : " + stack.lineNumber + "] " + text;
} else {
message = text;
}
if (passed) {
if (todo) {
do_throw_todo(text, stack);
_testLogger.testStatus(_TEST_NAME,
name,
"PASS",
"FAIL",
message,
_format_stack(stack));
_abort_failed_test();
} else {
++_passedChecks;
_log("test_pass",
{source_file: stack.filename,
test_name: stack.name,
line_number: stack.lineNumber,
diagnostic: "[" + stack.name + " : " + stack.lineNumber + "] " +
text + "\n"});
_testLogger.testStatus(_TEST_NAME,
name,
"PASS",
"PASS",
message);
}
} else {
if (todo) {
++_todoChecks;
_log("test_known_fail",
{source_file: stack.filename,
test_name: stack.name,
line_number: stack.lineNumber,
diagnostic: "[" + stack.name + " : " + stack.lineNumber + "] " +
text + "\n"});
_testLogger.testStatus(_TEST_NAME,
name,
"FAIL",
"FAIL",
message);
} else {
do_throw(text, stack);
_testLogger.testStatus(_TEST_NAME,
name,
"FAIL",
"PASS",
message,
_format_stack(stack));
_abort_failed_test();
}
}
}
@ -878,17 +818,15 @@ function todo_check_instanceof(value, constructor,
function do_test_pending(aName) {
++_tests_pending;
_log("test_pending",
{_message: "TEST-INFO | (xpcshell/head.js) | test" +
(aName ? " " + aName : "") +
" pending (" + _tests_pending + ")\n"});
_testLogger.info("(xpcshell/head.js) | test" +
(aName ? " " + aName : "") +
" pending (" + _tests_pending + ")");
}
function do_test_finished(aName) {
_log("test_finish",
{_message: "TEST-INFO | (xpcshell/head.js) | test" +
(aName ? " " + aName : "") +
" finished (" + _tests_pending + ")\n"});
_testLogger.info("(xpcshell/head.js) | test" +
(aName ? " " + aName : "") +
" finished (" + _tests_pending + ")");
if (--_tests_pending == 0)
_do_quit();
}
@ -913,12 +851,8 @@ function do_get_file(path, allowNonexistent) {
// Not using do_throw(): caller will continue.
_passed = false;
var stack = Components.stack.caller;
_log("test_unexpected_fail",
{source_file: stack.filename,
test_name: stack.name,
line_number: stack.lineNumber,
diagnostic: "[" + stack.name + " : " + stack.lineNumber + "] " +
lf.path + " does not exist\n"});
_testLogger.error("[" + stack.name + " : " + stack.lineNumber + "] " +
lf.path + " does not exist");
}
return lf;
@ -940,7 +874,6 @@ function do_load_manifest(path) {
const nsIComponentRegistrar = Components.interfaces.nsIComponentRegistrar;
do_check_true(Components.manager instanceof nsIComponentRegistrar);
// Previous do_check_true() is not a test check.
++_falsePassedChecks;
Components.manager.autoRegister(lf);
}
@ -1038,9 +971,7 @@ function do_get_minidumpdir() {
*/
function do_get_profile() {
if (!runningInParent) {
_log("test_info",
{_message: "TEST-INFO | (xpcshell/head.js) | Ignoring profile creation from child process.\n"});
_testLogger.info("Ignoring profile creation from child process.");
return null;
}
@ -1171,9 +1102,11 @@ function run_test_in_child(testFile, optionalCallback)
var testPath = do_get_file(testFile).path.replace(/\\/g, "/");
do_test_pending("run in child");
sendCommand("_log('child_test_start', {_message: 'CHILD-TEST-STARTED'}); "
+ "const _TEST_FILE=['" + testPath + "']; _execute_test(); "
+ "_log('child_test_end', {_message: 'CHILD-TEST-COMPLETED'});",
sendCommand("_testLogger.info('CHILD-TEST-STARTED'); "
+ "const _TEST_FILE=['" + testPath + "']; "
+ "const _TEST_NAME=" + uneval(_TEST_NAME) + "; "
+ "_execute_test(); "
+ "_testLogger.info('CHILD-TEST-COMPLETED');",
callback);
}
@ -1300,7 +1233,7 @@ function run_next_test()
_Promise.Debugging.flushUncaughtErrors();
let _isTask;
[_isTask, _gRunningTest] = _gTests[_gTestIndex++];
print("TEST-INFO | " + _TEST_FILE + " | Starting " + _gRunningTest.name);
_testLogger.info(_TEST_NAME + " | Starting " + _gRunningTest.name);
do_test_pending(_gRunningTest.name);
if (_isTask) {

View File

@ -6,14 +6,13 @@
from __future__ import unicode_literals, print_function
import mozpack.path
import logging
import argparse
import os
import shutil
import sys
import urllib2
from StringIO import StringIO
from mozlog import structured
from mozbuild.base import (
MachCommandBase,
@ -27,6 +26,9 @@ from mach.decorators import (
Command,
)
_parser = argparse.ArgumentParser()
structured.commandline.add_logging_group(_parser)
ADB_NOT_FOUND = '''
The %s command requires the adb binary to be on your path.
@ -45,11 +47,6 @@ if sys.version_info[0] < 3:
else:
unicode_type = str
# Simple filter to omit the message emitted as a test file begins.
class TestStartFilter(logging.Filter):
def filter(self, record):
return not record.params['msg'].endswith("running test ...")
# This should probably be consolidated with similar classes in other test
# runners.
class InvalidTestPathError(Exception):
@ -69,6 +66,7 @@ class XPCShellRunner(MozbuildObject):
keep_going=False, sequential=False, shuffle=False,
debugger=None, debuggerArgs=None, debuggerInteractive=None,
rerun_failures=False, test_objects=None, verbose=False,
log=None,
# ignore parameters from other platforms' options
**kwargs):
"""Runs an individual xpcshell test."""
@ -86,7 +84,7 @@ class XPCShellRunner(MozbuildObject):
debugger=debugger, debuggerArgs=debuggerArgs,
debuggerInteractive=debuggerInteractive,
rerun_failures=rerun_failures,
verbose=verbose)
verbose=verbose, log=log)
return
elif test_paths:
test_paths = [self._wrap_path_argument(p).relpath() for p in test_paths]
@ -118,6 +116,7 @@ class XPCShellRunner(MozbuildObject):
'rerun_failures': rerun_failures,
'manifest': manifest,
'verbose': verbose,
'log': log,
}
return self._run_xpcshell_harness(**args)
@ -126,25 +125,20 @@ class XPCShellRunner(MozbuildObject):
test_path=None, shuffle=False, interactive=False,
keep_going=False, sequential=False,
debugger=None, debuggerArgs=None, debuggerInteractive=None,
rerun_failures=False, verbose=False):
rerun_failures=False, verbose=False, log=None):
# Obtain a reference to the xpcshell test runner.
import runxpcshelltests
dummy_log = StringIO()
xpcshell = runxpcshelltests.XPCShellTests(log=dummy_log)
xpcshell = runxpcshelltests.XPCShellTests(log=log)
self.log_manager.enable_unstructured()
xpcshell_filter = TestStartFilter()
self.log_manager.terminal_handler.addFilter(xpcshell_filter)
tests_dir = os.path.join(self.topobjdir, '_tests', 'xpcshell')
modules_dir = os.path.join(self.topobjdir, '_tests', 'modules')
# We want output from the test to be written immediately if we are only
# running a single test.
single_test = (test_path is not None or
(manifest and len(manifest.test_paths())==1))
verbose_output = verbose or single_test
sequential = sequential or single_test
args = {
@ -160,15 +154,13 @@ class XPCShellRunner(MozbuildObject):
'testsRootDir': tests_dir,
'testingModulesDir': modules_dir,
'profileName': 'firefox',
'verbose': test_path is not None,
'verbose': verbose or single_test,
'xunitFilename': os.path.join(self.statedir, 'xpchsell.xunit.xml'),
'xunitName': 'xpcshell',
'pluginsPath': os.path.join(self.distdir, 'plugins'),
'debugger': debugger,
'debuggerArgs': debuggerArgs,
'debuggerInteractive': debuggerInteractive,
'on_message': (lambda obj, msg: xpcshell.log.info(msg.decode('utf-8', 'replace'))) \
if verbose_output else None,
}
if test_path is not None:
@ -204,7 +196,6 @@ class XPCShellRunner(MozbuildObject):
result = xpcshell.runTests(**filtered_args)
self.log_manager.terminal_handler.removeFilter(xpcshell_filter)
self.log_manager.disable_unstructured()
if not result and not xpcshell.sequential:
@ -233,7 +224,7 @@ class AndroidXPCShellRunner(MozbuildObject):
def run_test(self,
test_paths, keep_going,
devicemanager, ip, port, remote_test_root, no_setup, local_apk,
test_objects=None,
test_objects=None, log=None,
# ignore parameters from other platforms' options
**kwargs):
# TODO Bug 794506 remove once mach integrates with virtualenv.
@ -288,12 +279,8 @@ class AndroidXPCShellRunner(MozbuildObject):
testdirs = test_paths[0]
options.testPath = test_paths[0]
options.verbose = True
dummy_log = StringIO()
xpcshell = remotexpcshelltests.XPCShellRemote(dm, options, args=testdirs, log=dummy_log)
self.log_manager.enable_unstructured()
xpcshell_filter = TestStartFilter()
self.log_manager.terminal_handler.addFilter(xpcshell_filter)
xpcshell = remotexpcshelltests.XPCShellRemote(dm, options, testdirs, log)
result = xpcshell.runTests(xpcshell='xpcshell',
testClass=remotexpcshelltests.RemoteXPCShellTestThread,
@ -301,8 +288,6 @@ class AndroidXPCShellRunner(MozbuildObject):
mobileArgs=xpcshell.mobileArgs,
**options.__dict__)
self.log_manager.terminal_handler.removeFilter(xpcshell_filter)
self.log_manager.disable_unstructured()
return int(not result)
@ -348,7 +333,7 @@ class B2GXPCShellRunner(MozbuildObject):
return busybox_path
def run_test(self, test_paths, b2g_home=None, busybox=None, device_name=None,
test_objects=None,
test_objects=None, log=None,
# ignore parameters from other platforms' options
**kwargs):
try:
@ -397,7 +382,7 @@ class B2GXPCShellRunner(MozbuildObject):
if not options.busybox:
options.busybox = self._download_busybox(b2g_home, options.emulator)
return runtestsb2g.run_remote_xpcshell(parser, options, args)
return runtestsb2g.run_remote_xpcshell(parser, options, args, log)
def is_platform_supported(cls):
"""Must have a Firefox, Android or B2G build."""
@ -415,7 +400,8 @@ class MachCommands(MachCommandBase):
@Command('xpcshell-test', category='testing',
conditions=[is_platform_supported],
description='Run XPCOM Shell tests (API direct unit testing)')
description='Run XPCOM Shell tests (API direct unit testing)',
parser=_parser)
@CommandArgument('test_paths', default='all', nargs='*', metavar='TEST',
help='Test to run. Can be specified as a single JS file, a directory, '
'or omitted. If omitted, the entire test suite is executed.')
@ -466,6 +452,11 @@ class MachCommands(MachCommandBase):
driver = self._spawn(BuildDriver)
driver.install_tests(remove=False)
structured.commandline.formatter_option_defaults['verbose'] = True
params['log'] = structured.commandline.setup_logging("XPCShellTests",
params,
{"mach": sys.stdout})
if conditions.is_android(self):
xpcshell = self._spawn(AndroidXPCShellRunner)
elif conditions.is_b2g(self):

View File

@ -11,6 +11,8 @@ import runxpcshelltests as xpcshell
import tempfile
from automationutils import replaceBackSlashes
from zipfile import ZipFile
from mozlog import structured
from mozlog.structured import commandline
import shutil
import mozdevice
import mozfile
@ -63,20 +65,20 @@ class RemoteXPCShellTestThread(xpcshell.XPCShellTestThread):
pluginsDir = remoteJoin(self.remoteTmpDir, "plugins")
self.device.pushDir(self.pluginsPath, pluginsDir)
if self.interactive:
self.log.info("TEST-INFO | plugins dir is %s" % pluginsDir)
self.log.info("plugins dir is %s" % pluginsDir)
return pluginsDir
def setupProfileDir(self):
self.device.removeDir(self.profileDir)
self.device.mkDir(self.profileDir)
if self.interactive or self.singleFile:
self.log.info("TEST-INFO | profile dir is %s" % self.profileDir)
self.log.info("profile dir is %s" % self.profileDir)
return self.profileDir
def logCommand(self, name, completeCmd, testdir):
self.log.info("TEST-INFO | %s | full command: %r" % (name, completeCmd))
self.log.info("TEST-INFO | %s | current directory: %r" % (name, self.remoteHere))
self.log.info("TEST-INFO | %s | environment: %s" % (name, self.env))
self.log.info("%s | full command: %r" % (name, completeCmd))
self.log.info("%s | current directory: %r" % (name, self.remoteHere))
self.log.info("%s | environment: %s" % (name, self.env))
def getHeadAndTailFiles(self, test):
"""Override parent method to find files on remote device."""
@ -120,10 +122,7 @@ class RemoteXPCShellTestThread(xpcshell.XPCShellTestThread):
self.remoteDebuggerArgs,
self.xpcsCmd]
def testTimeout(self, test_file, proc):
self.timedout = True
if not self.retry:
self.log.error("TEST-UNEXPECTED-FAIL | %s | Test timed out" % test_file)
def killTimeout(self, proc):
self.kill(proc)
def launchProcess(self, cmd, stdout, stderr, env, cwd, timeout=None):
@ -211,7 +210,7 @@ class RemoteXPCShellTestThread(xpcshell.XPCShellTestThread):
# via devicemanager.
class XPCShellRemote(xpcshell.XPCShellTests, object):
def __init__(self, devmgr, options, args, log=None):
def __init__(self, devmgr, options, args, log):
xpcshell.XPCShellTests.__init__(self, log)
# Add Android version (SDK level) to mozinfo so that manifest entries
@ -575,6 +574,7 @@ def main():
sys.exit(1)
parser = RemoteXPCShellOptions()
structured.commandline.add_logging_group(parser)
options, args = parser.parse_args()
if not options.localAPK:
for file in os.listdir(os.path.join(options.objdir, "dist")):
@ -588,6 +588,9 @@ def main():
sys.exit(1)
options = parser.verifyRemoteOptions(options)
log = commandline.setup_logging("Remote XPCShell",
options,
{"tbpl": sys.stdout})
if len(args) < 1 and options.manifest is None:
print >>sys.stderr, """Usage: %s <test dirs>
@ -609,7 +612,7 @@ def main():
print >>sys.stderr, "Error: You must specify a test filename in interactive mode!"
sys.exit(1)
xpcsh = XPCShellRemote(dm, options, args)
xpcsh = XPCShellRemote(dm, options, args, log)
# we don't run concurrent tests on mobile
options.sequential = True

View File

@ -11,6 +11,8 @@ sys.path.insert(0, os.path.abspath(os.path.realpath(os.path.dirname(sys.argv[0])
import traceback
from remotexpcshelltests import RemoteXPCShellTestThread, XPCShellRemote, RemoteXPCShellOptions
from mozdevice import devicemanagerADB, DMError
from mozlog import structured
from mozlog.structured import commandline
DEVICE_TEST_ROOT = '/data/local/tests'
@ -153,7 +155,7 @@ class B2GOptions(RemoteXPCShellOptions):
self.error("You must specify --emulator if you specify --logdir")
return RemoteXPCShellOptions.verifyRemoteOptions(self, options)
def run_remote_xpcshell(parser, options, args):
def run_remote_xpcshell(parser, options, args, log):
options = parser.verifyRemoteOptions(options)
# Create the Marionette instance
@ -194,7 +196,7 @@ def run_remote_xpcshell(parser, options, args):
if not options.remoteTestRoot:
options.remoteTestRoot = dm.deviceRoot
xpcsh = B2GXPCShellRemote(dm, options, args)
xpcsh = B2GXPCShellRemote(dm, options, args, log)
# we don't run concurrent tests on mobile
options.sequential = True
@ -212,17 +214,16 @@ def run_remote_xpcshell(parser, options, args):
def main():
parser = B2GOptions()
structured.commandline.add_logging_group(parser)
options, args = parser.parse_args()
run_remote_xpcshell(parser, options, args)
log = commandline.setup_logging("Remote XPCShell",
options,
{"tbpl": sys.stdout})
run_remote_xpcshell(parser, options, args, log)
# You usually run this like :
# python runtestsb2g.py --emulator arm --b2gpath $B2GPATH --manifest $MANIFEST [--xre-path $MOZ_HOST_BIN
# --adbpath $ADB_PATH
# ...]
#
# For xUnit output you should also pass in --tests-root-dir ..objdir-gecko/_tests
# --xunit-file ..objdir_gecko/_tests/results.xml
# --xunit-suite-name xpcshell-tests
if __name__ == '__main__':
main()

File diff suppressed because it is too large Load Diff

View File

@ -9,8 +9,8 @@ import sys, os, unittest, tempfile, shutil
import mozinfo
from StringIO import StringIO
from xml.etree.ElementTree import ElementTree
from mozlog import structured
from mozbuild.base import MozbuildObject
os.environ.pop('MOZ_OBJDIR', None)
build_obj = MozbuildObject.from_environment()
@ -29,6 +29,9 @@ else:
if sys.platform == "win32":
xpcshellBin += ".exe"
TEST_PASS_STRING = "TEST-PASS"
TEST_FAIL_STRING = "TEST-UNEXPECTED-FAIL"
SIMPLE_PASSING_TEST = "function run_test() { do_check_true(true); }"
SIMPLE_FAILING_TEST = "function run_test() { do_check_true(false); }"
@ -74,13 +77,30 @@ function run_test () { run_next_test(); }
add_test(function test_child_simple () {
do_test_pending("hang test");
do_load_child_test_harness();
sendCommand("_log('child_test_start', {_message: 'CHILD-TEST-STARTED'}); " +
sendCommand("_testLogger.info('CHILD-TEST-STARTED'); " +
+ "const _TEST_FILE=['test_pass.js']; _execute_test(); ",
do_test_finished);
run_next_test();
});
'''
SIMPLE_LOOPING_TEST = '''
function run_test () { run_next_test(); }
add_test(function test_loop () {
do_test_pending()
});
'''
PASSING_TEST_UNICODE = '''
function run_test () { run_next_test(); }
add_test(function test_unicode_print () {
do_check_eq("\u201c\u201d", "\u201c\u201d");
run_next_test();
});
'''
ADD_TASK_SINGLE = '''
Components.utils.import("resource://gre/modules/Promise.jsm");
@ -264,7 +284,11 @@ class XPCShellTestsTests(unittest.TestCase):
def setUp(self):
self.log = StringIO()
self.tempdir = tempfile.mkdtemp()
self.x = XPCShellTests(log=self.log)
logger = structured.commandline.setup_logging("selftest%s" % id(self),
{},
{"tbpl": self.log})
self.x = XPCShellTests(logger)
self.x.harness_timeout = 15
def tearDown(self):
shutil.rmtree(self.tempdir)
@ -300,7 +324,7 @@ tail =
""" + "\n".join(testlines))
def assertTestResult(self, expected, shuffle=False, xunitFilename=None, verbose=False):
def assertTestResult(self, expected, shuffle=False, verbose=False):
"""
Assert that self.x.runTests with manifest=self.manifest
returns |expected|.
@ -312,7 +336,6 @@ tail =
shuffle=shuffle,
testsRootDir=self.tempdir,
verbose=verbose,
xunitFilename=xunitFilename,
sequential=True),
msg="""Tests should have %s, log:
========
@ -352,8 +375,8 @@ tail =
self.assertEquals(1, self.x.passCount)
self.assertEquals(0, self.x.failCount)
self.assertEquals(0, self.x.todoCount)
self.assertInLog("TEST-PASS")
self.assertNotInLog("TEST-UNEXPECTED-FAIL")
self.assertInLog(TEST_PASS_STRING)
self.assertNotInLog(TEST_FAIL_STRING)
def testFail(self):
"""
@ -367,8 +390,8 @@ tail =
self.assertEquals(0, self.x.passCount)
self.assertEquals(1, self.x.failCount)
self.assertEquals(0, self.x.todoCount)
self.assertInLog("TEST-UNEXPECTED-FAIL")
self.assertNotInLog("TEST-PASS")
self.assertInLog(TEST_FAIL_STRING)
self.assertNotInLog(TEST_PASS_STRING)
@unittest.skipIf(build_obj.defines.get('MOZ_B2G'),
'selftests with child processes fail on b2g desktop builds')
@ -385,10 +408,10 @@ tail =
self.assertEquals(1, self.x.passCount)
self.assertEquals(0, self.x.failCount)
self.assertEquals(0, self.x.todoCount)
self.assertInLog("TEST-PASS")
self.assertInLog(TEST_PASS_STRING)
self.assertInLog("CHILD-TEST-STARTED")
self.assertInLog("CHILD-TEST-COMPLETED")
self.assertNotInLog("TEST-UNEXPECTED-FAIL")
self.assertNotInLog(TEST_FAIL_STRING)
@unittest.skipIf(build_obj.defines.get('MOZ_B2G'),
@ -406,10 +429,10 @@ tail =
self.assertEquals(0, self.x.passCount)
self.assertEquals(1, self.x.failCount)
self.assertEquals(0, self.x.todoCount)
self.assertInLog("TEST-UNEXPECTED-FAIL")
self.assertInLog(TEST_FAIL_STRING)
self.assertInLog("CHILD-TEST-STARTED")
self.assertInLog("CHILD-TEST-COMPLETED")
self.assertNotInLog("TEST-PASS")
self.assertNotInLog(TEST_PASS_STRING)
@unittest.skipIf(build_obj.defines.get('MOZ_B2G'),
'selftests with child processes fail on b2g desktop builds')
@ -427,10 +450,10 @@ tail =
self.assertEquals(0, self.x.passCount)
self.assertEquals(1, self.x.failCount)
self.assertEquals(0, self.x.todoCount)
self.assertInLog("TEST-UNEXPECTED-FAIL")
self.assertInLog(TEST_FAIL_STRING)
self.assertInLog("CHILD-TEST-STARTED")
self.assertNotInLog("CHILD-TEST-COMPLETED")
self.assertNotInLog("TEST-PASS")
self.assertNotInLog(TEST_PASS_STRING)
def testSyntaxError(self):
"""
@ -445,8 +468,36 @@ tail =
self.assertEquals(0, self.x.passCount)
self.assertEquals(1, self.x.failCount)
self.assertEquals(0, self.x.todoCount)
self.assertInLog("TEST-UNEXPECTED-FAIL")
self.assertNotInLog("TEST-PASS")
self.assertInLog(TEST_FAIL_STRING)
self.assertNotInLog(TEST_PASS_STRING)
def testUnicodeInAssertMethods(self):
"""
Check that passing unicode characters through an assertion method works.
"""
self.writeFile("test_unicode_assert.js", PASSING_TEST_UNICODE)
self.writeManifest(["test_unicode_assert.js"])
self.assertTestResult(True, verbose=True)
def testHangingTimeout(self):
"""
Check that a test that never finishes results in the correct error log.
"""
self.writeFile("test_loop.js", SIMPLE_LOOPING_TEST)
self.writeManifest(["test_loop.js"])
old_timeout = self.x.harness_timeout
self.x.harness_timeout = 1
self.assertTestResult(False)
self.assertEquals(1, self.x.testCount)
self.assertEquals(1, self.x.failCount)
self.assertEquals(0, self.x.passCount)
self.assertEquals(0, self.x.todoCount)
self.assertInLog("TEST-UNEXPECTED-TIMEOUT")
self.x.harness_timeout = old_timeout
def testPassFail(self):
"""
@ -461,8 +512,8 @@ tail =
self.assertEquals(1, self.x.passCount)
self.assertEquals(1, self.x.failCount)
self.assertEquals(0, self.x.todoCount)
self.assertInLog("TEST-PASS")
self.assertInLog("TEST-UNEXPECTED-FAIL")
self.assertInLog(TEST_PASS_STRING)
self.assertInLog(TEST_FAIL_STRING)
def testSkip(self):
"""
@ -476,8 +527,8 @@ tail =
self.assertEquals(0, self.x.passCount)
self.assertEquals(0, self.x.failCount)
self.assertEquals(0, self.x.todoCount)
self.assertNotInLog("TEST-UNEXPECTED-FAIL")
self.assertNotInLog("TEST-PASS")
self.assertNotInLog(TEST_FAIL_STRING)
self.assertNotInLog(TEST_PASS_STRING)
def testKnownFail(self):
"""
@ -491,11 +542,11 @@ tail =
self.assertEquals(0, self.x.passCount)
self.assertEquals(0, self.x.failCount)
self.assertEquals(1, self.x.todoCount)
self.assertInLog("TEST-KNOWN-FAIL")
self.assertInLog("TEST-FAIL")
# This should be suppressed because the harness doesn't include
# the full log from the xpcshell run when things pass.
self.assertNotInLog("TEST-UNEXPECTED-FAIL")
self.assertNotInLog("TEST-PASS")
self.assertNotInLog(TEST_FAIL_STRING)
self.assertNotInLog(TEST_PASS_STRING)
def testUnexpectedPass(self):
"""
@ -512,8 +563,6 @@ tail =
# From the outer (Python) harness
self.assertInLog("TEST-UNEXPECTED-PASS")
self.assertNotInLog("TEST-KNOWN-FAIL")
# From the inner (JS) harness
self.assertInLog("TEST-PASS")
def testReturnNonzero(self):
"""
@ -527,8 +576,8 @@ tail =
self.assertEquals(0, self.x.passCount)
self.assertEquals(1, self.x.failCount)
self.assertEquals(0, self.x.todoCount)
self.assertInLog("TEST-UNEXPECTED-FAIL")
self.assertNotInLog("TEST-PASS")
self.assertInLog(TEST_FAIL_STRING)
self.assertNotInLog(TEST_PASS_STRING)
def testAddTestSimple(self):
"""
@ -712,50 +761,6 @@ tail =
self.assertEquals(10, self.x.testCount)
self.assertEquals(10, self.x.passCount)
def testXunitOutput(self):
"""
Check that Xunit XML files are written.
"""
self.writeFile("test_00.js", SIMPLE_PASSING_TEST)
self.writeFile("test_01.js", SIMPLE_FAILING_TEST)
self.writeFile("test_02.js", SIMPLE_PASSING_TEST)
manifest = [
"test_00.js",
"test_01.js",
("test_02.js", "skip-if = true")
]
self.writeManifest(manifest)
filename = os.path.join(self.tempdir, "xunit.xml")
self.assertTestResult(False, xunitFilename=filename)
self.assertTrue(os.path.exists(filename))
self.assertTrue(os.path.getsize(filename) > 0)
tree = ElementTree()
tree.parse(filename)
suite = tree.getroot()
self.assertTrue(suite is not None)
self.assertEqual(suite.get("tests"), "3")
self.assertEqual(suite.get("failures"), "1")
self.assertEqual(suite.get("skip"), "1")
testcases = suite.findall("testcase")
self.assertEqual(len(testcases), 3)
for testcase in testcases:
attributes = testcase.keys()
self.assertTrue("classname" in attributes)
self.assertTrue("name" in attributes)
self.assertTrue("time" in attributes)
self.assertTrue(testcases[1].find("failure") is not None)
self.assertTrue(testcases[2].find("skipped") is not None)
def testDoThrowString(self):
"""
Check that do_throw produces reasonable messages when the
@ -765,9 +770,9 @@ tail =
self.writeManifest(["test_error.js"])
self.assertTestResult(False)
self.assertInLog("TEST-UNEXPECTED-FAIL")
self.assertInLog(TEST_FAIL_STRING)
self.assertInLog("Passing a string to do_throw")
self.assertNotInLog("TEST-PASS")
self.assertNotInLog(TEST_PASS_STRING)
def testDoThrowForeignObject(self):
"""
@ -779,11 +784,11 @@ tail =
self.writeManifest(["test_error.js"])
self.assertTestResult(False)
self.assertInLog("TEST-UNEXPECTED-FAIL")
self.assertInLog(TEST_FAIL_STRING)
self.assertInLog("failure.js")
self.assertInLog("Error object")
self.assertInLog("ERROR STACK")
self.assertNotInLog("TEST-PASS")
self.assertNotInLog(TEST_PASS_STRING)
def testDoReportForeignObject(self):
"""
@ -795,11 +800,11 @@ tail =
self.writeManifest(["test_error.js"])
self.assertTestResult(False)
self.assertInLog("TEST-UNEXPECTED-FAIL")
self.assertInLog(TEST_FAIL_STRING)
self.assertInLog("failure.js")
self.assertInLog("Error object")
self.assertInLog("ERROR STACK")
self.assertNotInLog("TEST-PASS")
self.assertNotInLog(TEST_PASS_STRING)
def testDoReportRefError(self):
"""
@ -810,11 +815,11 @@ tail =
self.writeManifest(["test_error.js"])
self.assertTestResult(False)
self.assertInLog("TEST-UNEXPECTED-FAIL")
self.assertInLog(TEST_FAIL_STRING)
self.assertInLog("test_error.js")
self.assertInLog("obj.noSuchFunction is not a function")
self.assertInLog("run_test@")
self.assertNotInLog("TEST-PASS")
self.assertNotInLog(TEST_PASS_STRING)
def testDoReportSyntaxError(self):
"""
@ -825,12 +830,9 @@ tail =
self.writeManifest(["test_error.js"])
self.assertTestResult(False)
self.assertInLog("TEST-UNEXPECTED-FAIL")
self.assertInLog("test_error.js")
self.assertInLog("test_error.js contains SyntaxError")
self.assertInLog("Diagnostic: SyntaxError: missing formal parameter at")
self.assertInLog(TEST_FAIL_STRING)
self.assertInLog("test_error.js:3")
self.assertNotInLog("TEST-PASS")
self.assertNotInLog(TEST_PASS_STRING)
def testDoReportNonSyntaxError(self):
"""
@ -841,10 +843,10 @@ tail =
self.writeManifest(["test_error.js"])
self.assertTestResult(False)
self.assertInLog("TEST-UNEXPECTED-FAIL")
self.assertInLog("Diagnostic: TypeError: generator function run_test returns a value at")
self.assertInLog(TEST_FAIL_STRING)
self.assertInLog("TypeError: generator function run_test returns a value at")
self.assertInLog("test_error.js:4")
self.assertNotInLog("TEST-PASS")
self.assertNotInLog(TEST_PASS_STRING)
def testAsyncCleanup(self):
"""

View File

@ -35,7 +35,7 @@ FeedListener.prototype = {
handleResult: function(result) {
var feed = result.doc;
try {
do_print(true, "Testing feed " + this.testcase.file.path);
do_print("Testing feed " + this.testcase.file.path);
Assert.ok(isIID(feed, Ci.nsIFeed), "Has feed interface");
if (!eval(this.testcase.expect)) {