Bug 744902 - Add perf testing to Marionette, r=jgriffin, DONTBUILD because NPOTB

This commit is contained in:
Malini Das 2012-06-08 18:33:54 -04:00
parent 98d7066537
commit b92a2b574b
11 changed files with 264 additions and 22 deletions

View File

@ -381,6 +381,12 @@ class Marionette(object):
def get_logs(self):
return self._send_message('getLogs', 'value')
def add_perf_data(self, suite, name, value):
    """Record one perf datapoint on the Marionette server.

    Sends an 'addPerfData' message with the given suite/name/value and
    returns the server's 'ok' acknowledgement.
    """
    return self._send_message('addPerfData', 'ok', suite=suite, name=name, value=value)
def get_perf_data(self):
    """Fetch all perf data accumulated on the server.

    Returns the 'value' field of the 'getPerfData' response; per the
    server side this is a suite -> {name: [values]} mapping.
    """
    return self._send_message('getPerfData', 'value')
def import_script(self, file):
f = open(file, "r")
js = f.read()

View File

@ -90,6 +90,7 @@ whitelist_prefs.forEach(function (pref) {
if self.marionette.session is None:
self.marionette.start_session()
self.loglines = None
self.perfdata = None
def tearDown(self):
if self.marionette.session is not None:
@ -174,6 +175,9 @@ class MarionetteJSTestCase(CommonTestCase):
results = self.marionette.execute_js_script(js, args)
self.loglines = self.marionette.get_logs()
self.perfdata = self.marionette.get_perf_data()
print "in marionette_test"
print self.perfdata
if launch_app:
self.kill_gaia_app(launch_app)
@ -193,8 +197,9 @@ class MarionetteJSTestCase(CommonTestCase):
self.assertEqual(0, results['failed'],
'%d tests failed:\n%s' % (results['failed'], '\n'.join(fails)))
self.assertTrue(results['passed'] + results['failed'] > 0,
'no tests run')
if not self.perfdata:
self.assertTrue(results['passed'] + results['failed'] > 0,
'no tests run')
if self.marionette.session is not None:
self.marionette.delete_session()

View File

@ -13,6 +13,8 @@ import unittest
import socket
import sys
import time
import platform
import datazilla
try:
from manifestparser import TestManifest
@ -28,12 +30,12 @@ except ImportError:
from marionette import Marionette
from marionette_test import MarionetteJSTestCase
class MarionetteTestResult(unittest._TextTestResult):
def __init__(self, *args):
    # Extend the base result with a pass counter and a slot for perf
    # results; perfdata stays None until getPerfData() finds any.
    super(MarionetteTestResult, self).__init__(*args)
    self.passed = 0
    self.perfdata = None
def addSuccess(self, test):
super(MarionetteTestResult, self).addSuccess(test)
@ -65,6 +67,13 @@ class MarionetteTestResult(unittest._TextTestResult):
print ' '.join(line)
print 'END LOG:'
def getPerfData(self, test):
    """Gather perf data from each test case in *test* into self.perfdata.

    The first test case carrying perfdata seeds a datazilla result object;
    perfdata from subsequent test cases is merged in via join_results.
    NOTE(review): iterates test._tests directly — assumes *test* is a
    suite, not a single test case; confirm against callers.
    """
    for testcase in test._tests:
        if testcase.perfdata:
            if not self.perfdata:
                self.perfdata = datazilla.dzResult(testcase.perfdata)
            else:
                self.perfdata.join_results(testcase.perfdata)
class MarionetteTextTestRunner(unittest.TextTestRunner):
@ -92,6 +101,7 @@ class MarionetteTextTestRunner(unittest.TextTestRunner):
timeTaken = stopTime - startTime
result.printErrors()
result.printLogs(test)
result.getPerfData(test)
if hasattr(result, 'separator2'):
self.stream.writeln(result.separator2)
run = result.testsRun
@ -155,6 +165,7 @@ class MarionetteTestRunner(object):
self.baseurl = None
self.marionette = None
self.logcat_dir = logcat_dir
self.perfrequest = None
self.reset_test_stats()
@ -172,6 +183,7 @@ class MarionetteTestRunner(object):
self.failed = 0
self.todo = 0
self.failures = []
self.perfrequest = None
def start_httpd(self):
host = iface.get_lan_ip()
@ -256,8 +268,10 @@ class MarionetteTestRunner(object):
def run_tests(self, tests, testtype=None):
self.reset_test_stats()
starttime = datetime.utcnow()
for test in tests:
self.run_test(test, testtype)
while options.repeat >=0 :
for test in tests:
self.run_test(test, testtype)
options.repeat -= 1
self.logger.info('\nSUMMARY\n-------')
self.logger.info('passed: %d' % self.passed)
self.logger.info('failed: %d' % self.failed)
@ -265,6 +279,12 @@ class MarionetteTestRunner(object):
elapsedtime = datetime.utcnow() - starttime
if self.autolog:
self.post_to_autolog(elapsedtime)
if self.perfrequest and options.perf:
try:
self.perfrequest.submit()
except Exception, e:
print "Could not submit to datazilla"
print e
if self.marionette.emulator:
self.marionette.emulator.close()
self.marionette.emulator = None
@ -308,6 +328,22 @@ class MarionetteTestRunner(object):
manifest = TestManifest()
manifest.read(filepath)
if options.perf:
if options.perfserv is None:
options.perfserv = manifest.get("perfserv")[0]
machine_name = socket.gethostname()
try:
manifest.has_key("machine_name")
machine_name = manifest.get("machine_name")[0]
except:
self.logger.info("Using machine_name: %s" % machine_name)
os_name = platform.system()
os_version = platform.release()
self.perfrequest = datazilla.dzRequest(server=options.perfserv, machine_name=machine_name, os=os_name, os_version=os_version,
platform=manifest.get("platform")[0], build_name=manifest.get("build_name")[0],
version=manifest.get("version")[0], revision=self.revision,
branch=manifest.get("branch")[0], id=os.getenv('BUILD_ID'), test_date=int(time.time()))
manifest_tests = manifest.get(**testargs)
for i in manifest_tests:
@ -333,6 +369,8 @@ class MarionetteTestRunner(object):
if suite.countTestCases():
results = MarionetteTextTestRunner(verbosity=3).run(suite)
self.failed += len(results.failures) + len(results.errors)
if results.perfdata:
self.perfrequest.add_dzresult(results.perfdata)
if hasattr(results, 'skipped'):
self.todo += len(results.skipped) + len(results.expectedFailures)
self.passed += results.passed
@ -358,7 +396,7 @@ if __name__ == "__main__":
help = "send test results to autolog")
parser.add_option("--revision",
action = "store", dest = "revision",
help = "git revision for autolog submissions")
help = "git revision for autolog/perfdata submissions")
parser.add_option("--testgroup",
action = "store", dest = "testgroup",
help = "testgroup names for autolog submissions")
@ -398,7 +436,16 @@ if __name__ == "__main__":
parser.add_option('--profile', dest='profile', action='store',
help='profile to use when launching the gecko process. If not '
'passed, then a profile will be constructed and used.')
parser.add_option('--perf', dest='perf', action='store_true',
default = False,
help='send performance data to perf data server')
parser.add_option('--perf-server', dest='perfserv', action='store',
default=None,
help='dataserver for perf data submission. Entering this value '
'will overwrite the perfserv value in any passed .ini files.')
parser.add_option('--repeat', dest='repeat', action='store', type=int,
default=0, help='number of times to repeat the test(s).')
options, tests = parser.parse_args()
if not tests:
@ -429,5 +476,3 @@ if __name__ == "__main__":
if runner.failed > 0:
sys.exit(10)

View File

@ -0,0 +1,57 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
from marionette_test import MarionetteTestCase
class TestPerf(MarionetteTestCase):
    """Exercise the perf-data API from both the client and script contexts.

    Each test verifies that repeated datapoints for the same (suite, name)
    accumulate in order, and that new names/suites get their own lists.
    """

    def _check_perf(self, data, suite, name, index, expected):
        # Assert that data[suite][name][index] == expected.
        # Uses the 'in' operator rather than the deprecated dict.has_key().
        self.assertTrue(suite in data)
        self.assertTrue(name in data[suite])
        self.assertEqual(expected, data[suite][name][index])

    def test_perf_basic(self):
        # Datapoints added via the client API accumulate per (suite, name).
        self.marionette.add_perf_data("testgroup", "testperf", 10)
        self._check_perf(self.marionette.get_perf_data(),
                         "testgroup", "testperf", 0, 10)
        self.marionette.add_perf_data("testgroup", "testperf", 20)
        self._check_perf(self.marionette.get_perf_data(),
                         "testgroup", "testperf", 1, 20)
        self.marionette.add_perf_data("testgroup", "testperf2", 20)
        self._check_perf(self.marionette.get_perf_data(),
                         "testgroup", "testperf2", 0, 20)
        self.marionette.add_perf_data("testgroup2", "testperf3", 30)
        self._check_perf(self.marionette.get_perf_data(),
                         "testgroup2", "testperf3", 0, 30)

    def test_perf_script(self):
        # Same accumulation semantics when the data is added from a script
        # running inside the sandbox via the exported addPerfData().
        self.marionette.execute_script("addPerfData('testgroup', 'testperf', 10);")
        self._check_perf(self.marionette.get_perf_data(),
                         "testgroup", "testperf", 0, 10)
        self.marionette.execute_script("addPerfData('testgroup', 'testperf', 20);")
        self._check_perf(self.marionette.get_perf_data(),
                         "testgroup", "testperf", 1, 20)
        self.marionette.execute_script("addPerfData('testgroup', 'testperf2', 20);")
        self._check_perf(self.marionette.get_perf_data(),
                         "testgroup", "testperf2", 0, 20)
        self.marionette.execute_script("addPerfData('testgroup2', 'testperf3', 30);")
        self._check_perf(self.marionette.get_perf_data(),
                         "testgroup2", "testperf3", 0, 30)
class TestPerfChrome(TestPerf):
    # Re-run all TestPerf cases with the session switched to chrome context.
    def setUp(self):
        MarionetteTestCase.setUp(self)
        self.marionette.set_context("chrome")

View File

@ -10,6 +10,7 @@ b2g = false
[test_text.py]
b2g = false
[test_perf.py]
[test_log.py]
[test_emulator.py]
[test_execute_async_script.py]

View File

@ -42,6 +42,10 @@ else
git clone git://github.com/mozilla/mozbase.git
cd mozbase
python setup_development.py
cd ..
git clone git://github.com/mozilla/datazilla_client.git
cd datazilla_client
python setup.py develop
fi
# update the marionette_client

View File

@ -9,6 +9,7 @@ marionette.jar:
content/marionette-elements.js (marionette-elements.js)
content/marionette-log-obj.js (marionette-log-obj.js)
content/marionette-simpletest.js (marionette-simpletest.js)
content/marionette-perf.js (marionette-perf.js)
content/EventUtils.js (EventUtils.js)
content/ChromeUtils.js (ChromeUtils.js)

View File

@ -14,6 +14,7 @@ let loader = Cc["@mozilla.org/moz/jssubscript-loader;1"]
.getService(Ci.mozIJSSubScriptLoader);
loader.loadSubScript("chrome://marionette/content/marionette-simpletest.js");
loader.loadSubScript("chrome://marionette/content/marionette-log-obj.js");
loader.loadSubScript("chrome://marionette/content/marionette-perf.js");
Cu.import("chrome://marionette/content/marionette-elements.js");
let utils = {};
loader.loadSubScript("chrome://marionette/content/EventUtils.js", utils);
@ -121,6 +122,7 @@ function MarionetteDriverActor(aConnection)
this.scriptTimeout = null;
this.timer = null;
this.marionetteLog = new MarionetteLogObj();
this.marionettePerf = new MarionettePerfData();
this.command_id = null;
this.mainFrame = null; //topmost chrome frame
this.curFrame = null; //subframe that currently has focus
@ -131,7 +133,7 @@ function MarionetteDriverActor(aConnection)
this.messageManager.addMessageListener("Marionette:done", this);
this.messageManager.addMessageListener("Marionette:error", this);
this.messageManager.addMessageListener("Marionette:log", this);
this.messageManager.addMessageListener("Marionette:testLog", this);
this.messageManager.addMessageListener("Marionette:shareData", this);
this.messageManager.addMessageListener("Marionette:register", this);
this.messageManager.addMessageListener("Marionette:goUrl", this);
this.messageManager.addMessageListener("Marionette:runEmulatorCmd", this);
@ -392,6 +394,21 @@ MarionetteDriverActor.prototype = {
this.sendResponse(this.marionetteLog.getLogs());
},
/**
* Log some performance data
*/
addPerfData: function MDA_addPerfData(aRequest) {
this.marionettePerf.addPerfData(aRequest.suite, aRequest.name, aRequest.value);
this.sendOk();
},
/**
* Retrieve the performance data
*/
getPerfData: function MDA_getPerfData() {
this.sendResponse(this.marionettePerf.getPerfData());
},
/**
* Sets the context of the subsequent commands to be either 'chrome' or 'content'
*
@ -523,7 +540,7 @@ MarionetteDriverActor.prototype = {
}
let curWindow = this.getCurrentWindow();
let marionette = new Marionette(this, curWindow, "chrome", this.marionetteLog);
let marionette = new Marionette(this, curWindow, "chrome", this.marionetteLog, this.marionettePerf);
let _chromeSandbox = this.createExecuteSandbox(curWindow, marionette, aRequest.args);
if (!_chromeSandbox)
return;
@ -630,7 +647,7 @@ MarionetteDriverActor.prototype = {
let curWindow = this.getCurrentWindow();
let original_onerror = curWindow.onerror;
let that = this;
let marionette = new Marionette(this, curWindow, "chrome", this.marionetteLog);
let marionette = new Marionette(this, curWindow, "chrome", this.marionetteLog, this.marionettePerf);
marionette.command_id = this.command_id;
function chromeAsyncReturnFunc(value, status) {
@ -1201,7 +1218,7 @@ MarionetteDriverActor.prototype = {
this.messageManager.removeMessageListener("Marionette:done", this);
this.messageManager.removeMessageListener("Marionette:error", this);
this.messageManager.removeMessageListener("Marionette:log", this);
this.messageManager.removeMessageListener("Marionette:testLog", this);
this.messageManager.removeMessageListener("Marionette:shareData", this);
this.messageManager.removeMessageListener("Marionette:register", this);
this.messageManager.removeMessageListener("Marionette:goUrl", this);
this.messageManager.removeMessageListener("Marionette:runEmulatorCmd", this);
@ -1290,9 +1307,14 @@ MarionetteDriverActor.prototype = {
//log server-side messages
logger.info(message.json.message);
break;
case "Marionette:testLog":
case "Marionette:shareData":
//log messages from tests
this.marionetteLog.addLogs(message.json.value);
if (message.json.log) {
this.marionetteLog.addLogs(message.json.log);
}
if (message.json.perf) {
this.marionettePerf.appendPerfData(message.json.perf);
}
break;
case "Marionette:runEmulatorCmd":
this.sendToClient(message.json);
@ -1331,6 +1353,8 @@ MarionetteDriverActor.prototype.requestTypes = {
"newSession": MarionetteDriverActor.prototype.newSession,
"log": MarionetteDriverActor.prototype.log,
"getLogs": MarionetteDriverActor.prototype.getLogs,
"addPerfData": MarionetteDriverActor.prototype.addPerfData,
"getPerfData": MarionetteDriverActor.prototype.getPerfData,
"setContext": MarionetteDriverActor.prototype.setContext,
"executeScript": MarionetteDriverActor.prototype.execute,
"setScriptTimeout": MarionetteDriverActor.prototype.setScriptTimeout,

View File

@ -13,6 +13,7 @@ let loader = Cc["@mozilla.org/moz/jssubscript-loader;1"]
loader.loadSubScript("chrome://marionette/content/marionette-simpletest.js");
loader.loadSubScript("chrome://marionette/content/marionette-log-obj.js");
loader.loadSubScript("chrome://marionette/content/marionette-perf.js");
Cu.import("chrome://marionette/content/marionette-elements.js");
Cu.import("resource://gre/modules/FileUtils.jsm");
Cu.import("resource://gre/modules/NetUtil.jsm");
@ -27,6 +28,7 @@ loader.loadSubScript("chrome://specialpowers/content/specialpowersAPI.js");
loader.loadSubScript("chrome://specialpowers/content/specialpowers.js");
let marionetteLogObj = new MarionetteLogObj();
let marionettePerf = new MarionettePerfData();
let isB2G = false;
@ -248,7 +250,7 @@ function createExecuteContentSandbox(aWindow) {
sandbox.__proto__ = sandbox.window;
sandbox.testUtils = utils;
let marionette = new Marionette(this, aWindow, "content", marionetteLogObj);
let marionette = new Marionette(this, aWindow, "content", marionetteLogObj, marionettePerf);
sandbox.marionette = marionette;
marionette.exports.forEach(function(fn) {
sandbox[fn] = marionette[fn].bind(marionette);
@ -270,9 +272,10 @@ function createExecuteContentSandbox(aWindow) {
curWindow.clearTimeout(i);
}
sendSyncMessage("Marionette:testLog",
{value: elementManager.wrapValue(marionetteLogObj.getLogs())});
sendSyncMessage("Marionette:shareData", {log: elementManager.wrapValue(marionetteLogObj.getLogs()),
perf: elementManager.wrapValue(marionettePerf.getPerfData())});
marionetteLogObj.clearLogs();
marionettePerf.clearPerfData();
if (status == 0){
sendResponse({value: elementManager.wrapValue(value), status: status}, asyncTestCommandId);
}
@ -323,8 +326,10 @@ function executeScript(msg, directInject) {
script = data + script;
}
let res = Cu.evalInSandbox(script, sandbox, "1.8");
sendSyncMessage("Marionette:testLog", {value: elementManager.wrapValue(marionetteLogObj.getLogs())});
sendSyncMessage("Marionette:shareData", {log: elementManager.wrapValue(marionetteLogObj.getLogs()),
perf: elementManager.wrapValue(marionettePerf.getPerfData())});
marionetteLogObj.clearLogs();
marionettePerf.clearPerfData();
if (res == undefined || res.passed == undefined) {
sendError("Marionette.finish() not called", 17, null);
}
@ -352,8 +357,10 @@ function executeScript(msg, directInject) {
scriptSrc = data + scriptSrc;
}
let res = Cu.evalInSandbox(scriptSrc, sandbox, "1.8");
sendSyncMessage("Marionette:testLog", {value: elementManager.wrapValue(marionetteLogObj.getLogs())});
sendSyncMessage("Marionette:shareData", {log: elementManager.wrapValue(marionetteLogObj.getLogs()),
perf: elementManager.wrapValue(marionettePerf.getPerfData())});
marionetteLogObj.clearLogs();
marionettePerf.clearPerfData();
sendResponse({value: elementManager.wrapValue(res)});
}
}

View File

@ -0,0 +1,83 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

/**
 * Accumulates performance datapoints, keyed as
 * perfData[testSuite][testName] -> [value, ...].
 */
function MarionettePerfData() {
  this.perfData = {};
}

MarionettePerfData.prototype = {
  /**
   * Add performance data.
   *
   * Datapoints within a testSuite get rolled up into
   * one value in Datazilla. You can then drill down to
   * individual (testName,data) pairs.
   *
   * If the testSuite and testName exist, the data will
   * be added to this dataset.
   *
   * @param testSuite String
   *        name of the test suite
   * @param testName String
   *        name of the test
   * @param object data
   *        data value to store
   */
  addPerfData: function Marionette__addPerfData(testSuite, testName, data) {
    // Property access coerces keys to strings, so the original mix of
    // bare testName and testName.toString() was redundant; use one
    // consistent path per nesting level.
    if (!this.perfData[testSuite]) {
      this.perfData[testSuite] = {};
    }
    if (this.perfData[testSuite][testName]) {
      this.perfData[testSuite][testName].push(data);
    }
    else {
      this.perfData[testSuite][testName] = [data];
    }
  },

  /**
   * Join another set of performance data to this set.
   * Used by the actor to join data gathered from the listener.
   *
   * @param object data
   *        The performance data to join
   */
  appendPerfData: function Marionette__appendPerfData(data) {
    for (var suite in data) {
      if (data.hasOwnProperty(suite)) {
        if (this.perfData[suite]) {
          // Merge per-test value lists for suites we already track.
          for (var test in data[suite]) {
            if (this.perfData[suite][test]) {
              this.perfData[suite][test] = this.perfData[suite][test].concat(data[suite][test]);
            }
            else {
              this.perfData[suite][test] = data[suite][test];
            }
          }
        }
        else {
          // Unknown suite: adopt the incoming mapping wholesale.
          this.perfData[suite] = data[suite];
        }
      }
    }
  },

  /**
   * Retrieve the performance data.
   *
   * @return object
   *         The suite -> {testName: [values]} mapping (not a copy)
   */
  getPerfData: function Marionette__getPerfData() {
    return this.perfData;
  },

  /**
   * Clears the current performance data.
   */
  clearPerfData: function Marionette_clearPerfData() {
    this.perfData = {};
  }
};

View File

@ -5,18 +5,19 @@
* The Marionette object, passed to the script context.
*/
function Marionette(scope, window, context, logObj) {
/**
 * The Marionette object, passed to the script context.
 *
 * @param scope    the actor/listener that owns this instance
 * @param window   the window the script runs against
 * @param context  "chrome" or "content"
 * @param logObj   shared MarionetteLogObj for test log messages
 * @param perfData shared MarionettePerfData store for perf datapoints
 */
function Marionette(scope, window, context, logObj, perfData) {
this.scope = scope;
this.window = window;
this.tests = [];
this.logObj = logObj;
this.perfData = perfData;
this.context = context;
this.timeout = 0;
}
Marionette.prototype = {
exports: ['ok', 'is', 'isnot', 'log', 'getLogs', 'generate_results', 'waitFor',
'runEmulatorCmd'],
'runEmulatorCmd', 'addPerfData', 'getPerfData'],
ok: function Marionette__ok(condition, name, diag) {
let test = {'result': !!condition, 'name': name, 'diag': diag};
@ -38,6 +39,14 @@ Marionette.prototype = {
this.ok(pass, name, diag);
},
// Record one perf datapoint in the shared store passed at construction.
addPerfData: function Marionette__addPerfData(testSuite, testName, data) {
this.perfData.addPerfData(testSuite, testName, data);
},
// Return the raw suite -> {name: [values]} mapping (not a copy).
getPerfData: function Marionette__getPerfData() {
return this.perfData.perfData;
},
log: function Marionette__log(msg, level) {
dump("MARIONETTE LOG: " + (level ? level : "INFO") + ": " + msg + "\n");
if (this.logObj != null) {