#
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Mozilla Foundation.
# Portions created by the Initial Developer are Copyright (C) 2008
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Robert Sayre <sayrer@gmail.com>
#   Jeff Walden <jwalden+bmo@mit.edu>
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****

import codecs
from datetime import datetime
import itertools
import logging
import os
import re
import shutil
import signal
import subprocess
import sys
import threading

from automationutils import checkForCrashes

"""
Runs the browser from a script, and provides useful utilities
for setting up the browser environment.
"""

SCRIPT_DIR = os.path.abspath(os.path.realpath(os.path.dirname(sys.argv[0])))

__all__ = [
  "UNIXISH",
  "IS_WIN32",
  "IS_MAC",
  "log",
  "runApp",
  "Process",
  "initializeProfile",
  "DIST_BIN",
  "DEFAULT_APP",
  "CERTS_SRC_DIR",
  "environment",
  "dumpLeakLog",
  "processLeakLog",
  "IS_TEST_BUILD",
  "IS_DEBUG_BUILD",
  "SYMBOLS_PATH",
]

# These are generated in mozilla/build/Makefile.in
#expand DIST_BIN = __XPC_BIN_PATH__
#expand IS_WIN32 = len("__WIN32__") != 0
#expand IS_MAC = __IS_MAC__ != 0
#ifdef IS_CYGWIN
#expand IS_CYGWIN = __IS_CYGWIN__ == 1
#else
IS_CYGWIN = False
#endif
#expand IS_CAMINO = __IS_CAMINO__ != 0
#expand BIN_SUFFIX = __BIN_SUFFIX__

UNIXISH = not IS_WIN32 and not IS_MAC

#expand DEFAULT_APP = "./" + __BROWSER_PATH__
#expand CERTS_SRC_DIR = __CERTS_SRC_DIR__
#expand IS_TEST_BUILD = __IS_TEST_BUILD__
#expand IS_DEBUG_BUILD = __IS_DEBUG_BUILD__
#expand SYMBOLS_PATH = __SYMBOLS_PATH__

###########
# LOGGING #
###########

# We use the logging system here primarily because it'll handle multiple
# threads, which is needed to process the output of the server and application
# processes simultaneously.
log = logging.getLogger()
handler = logging.StreamHandler(sys.stdout)
log.setLevel(logging.INFO)
log.addHandler(handler)

#################
# SUBPROCESSING #
#################

class Process(subprocess.Popen):
  """
  Represents our view of a subprocess.
  It adds a kill() method which allows it to be stopped explicitly.
  """

  def kill(self):
    if IS_WIN32:
      import platform
      pid = "%i" % self.pid
      if platform.release() == "2000":
        # Windows 2000 needs 'kill.exe' from the 'Windows 2000 Resource Kit tools'. (See bug 475455.)
        try:
          subprocess.Popen(["kill", "-f", pid]).wait()
        except:
          log.info("TEST-UNEXPECTED-FAIL | automation.py | Missing 'kill' utility to kill process with pid=%s. Kill it manually!", pid)
      else:
        # Windows XP and later.
        subprocess.Popen(["taskkill", "/F", "/PID", pid]).wait()
    else:
      os.kill(self.pid, signal.SIGKILL)

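# A minimal usage sketch (illustrative only; the helper command and config
# file name below are hypothetical, not part of this harness): Process is
# used like subprocess.Popen, with kill() available for explicit teardown of
# helper daemons the harness starts.
#
#   helper = Process(["some-helper", "helper.cfg"], env = environment())
#   ...
#   helper.kill()
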
#################
# PROFILE SETUP #
#################

class SyntaxError(Exception):
  "Signifies a syntax error on a particular line in server-locations.txt."

  def __init__(self, lineno, msg = None):
    self.lineno = lineno
    self.msg = msg

  def __str__(self):
    s = "Syntax error on line " + str(self.lineno)
    if self.msg:
      s += ": %s." % self.msg
    else:
      s += "."
    return s

class Location:
  "Represents a location line in server-locations.txt."

  def __init__(self, scheme, host, port, options):
    self.scheme = scheme
    self.host = host
    self.port = port
    self.options = options

def readLocations(locationsPath = "server-locations.txt"):
  """
  Reads the locations at which the Mochitest HTTP server is available from
  server-locations.txt.
  """

  locationFile = codecs.open(locationsPath, "r", "UTF-8")

  # Perhaps more detail than necessary, but it's the easiest way to make sure
  # we get exactly the format we want. See server-locations.txt for the exact
  # format guaranteed here.
  lineRe = re.compile(r"^(?P<scheme>[a-z][-a-z0-9+.]*)"
                      r"://"
                      r"(?P<host>"
                      r"\d+\.\d+\.\d+\.\d+"
                      r"|"
                      r"(?:[a-z0-9](?:[-a-z0-9]*[a-z0-9])?\.)*"
                      r"[a-z](?:[-a-z0-9]*[a-z0-9])?"
                      r")"
                      r":"
                      r"(?P<port>\d+)"
                      r"(?:"
                      r"\s+"
                      r"(?P<options>\S+(?:,\S+)*)"
                      r")?$")
  locations = []
  lineno = 0
  seenPrimary = False
  for line in locationFile:
    lineno += 1
    if line.startswith("#") or line == "\n":
      continue

    match = lineRe.match(line)
    if not match:
      raise SyntaxError(lineno)

    options = match.group("options")
    if options:
      options = options.split(",")
      if "primary" in options:
        if seenPrimary:
          raise SyntaxError(lineno, "multiple primary locations")
        seenPrimary = True
    else:
      options = []

    locations.append(Location(match.group("scheme"), match.group("host"),
                              match.group("port"), options))

  if not seenPrimary:
    raise SyntaxError(lineno + 1, "missing primary location")

  return locations

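# A sketch of the input readLocations() accepts, assuming a
# server-locations.txt along these (illustrative) lines:
#
#   http://mochi.test:8888       primary,privileged
#   https://example.com:443      privileged,nocert
#
# The first line would parse into
# Location("http", "mochi.test", "8888", ["primary", "privileged"]).
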
def initializeProfile(profileDir):
  "Sets up the standard testing profile."

  # Start with a clean slate.
  shutil.rmtree(profileDir, True)
  os.mkdir(profileDir)

  prefs = []

  part = """\
user_pref("browser.dom.window.dump.enabled", true);
user_pref("dom.allow_scripts_to_close_windows", true);
user_pref("dom.disable_open_during_load", false);
user_pref("dom.max_script_run_time", 0); // no slow script dialogs
user_pref("dom.max_chrome_script_run_time", 0);
user_pref("signed.applets.codebase_principal_support", true);
user_pref("security.warn_submit_insecure", false);
user_pref("browser.shell.checkDefaultBrowser", false);
user_pref("shell.checkDefaultClient", false);
user_pref("browser.warnOnQuit", false);
user_pref("accessibility.typeaheadfind.autostart", false);
user_pref("javascript.options.showInConsole", true);
user_pref("layout.debug.enable_data_xbl", true);
user_pref("browser.EULA.override", true);
user_pref("javascript.options.jit.content", true);
user_pref("gfx.color_management.force_srgb", true);
user_pref("network.manage-offline-status", false);
user_pref("test.mousescroll", true);
user_pref("security.default_personal_cert", "Select Automatically"); // Needed so the client auth test can run without any dialogs
user_pref("network.http.prompt-temp-redirect", false);
user_pref("svg.smil.enabled", true); // Needed for SMIL mochitests until bug 482402 lands
user_pref("media.cache_size", 100);
user_pref("security.warn_viewing_mixed", false);

user_pref("geo.wifi.uri", "http://localhost:8888/tests/dom/tests/mochitest/geolocation/network_geolocation.sjs");
user_pref("camino.warn_when_closing", false); // Camino-only, harmless to others

// Make url-classifier updates so rare that they won't affect tests
user_pref("urlclassifier.updateinterval", 172800);
// Point the url-classifier to the local testing server for fast failures
user_pref("browser.safebrowsing.provider.0.gethashURL", "http://localhost:8888/safebrowsing-dummy/gethash");
user_pref("browser.safebrowsing.provider.0.keyURL", "http://localhost:8888/safebrowsing-dummy/newkey");
user_pref("browser.safebrowsing.provider.0.lookupURL", "http://localhost:8888/safebrowsing-dummy/lookup");
user_pref("browser.safebrowsing.provider.0.updateURL", "http://localhost:8888/safebrowsing-dummy/update");
"""
  prefs.append(part)

  locations = readLocations()

  # Grant God-power to all the privileged servers on which tests run.
  privileged = filter(lambda loc: "privileged" in loc.options, locations)
  for (i, l) in itertools.izip(itertools.count(1), privileged):
    part = """
user_pref("capability.principal.codebase.p%(i)d.granted",
          "UniversalXPConnect UniversalBrowserRead UniversalBrowserWrite \
           UniversalPreferencesRead UniversalPreferencesWrite \
           UniversalFileRead");
user_pref("capability.principal.codebase.p%(i)d.id", "%(origin)s");
user_pref("capability.principal.codebase.p%(i)d.subjectName", "");
""" % { "i": i,
        "origin": (l.scheme + "://" + l.host + ":" + l.port) }
    prefs.append(part)

  # We need to proxy every server but the primary one.
  origins = ["'%s://%s:%s'" % (l.scheme, l.host, l.port)
             for l in filter(lambda l: "primary" not in l.options, locations)]
  origins = ", ".join(origins)

  pacURL = """data:text/plain,
function FindProxyForURL(url, host)
{
  var origins = [%(origins)s];
  var regex = new RegExp('^([a-z][-a-z0-9+.]*)' +
                         '://' +
                         '(?:[^/@]*@)?' +
                         '(.*?)' +
                         '(?::(\\\\\\\\d+))?/');
  var matches = regex.exec(url);
  if (!matches)
    return 'DIRECT';
  var isHttp = matches[1] == 'http';
  var isHttps = matches[1] == 'https';
  if (!matches[3])
  {
    if (isHttp) matches[3] = '80';
    if (isHttps) matches[3] = '443';
  }

  var origin = matches[1] + '://' + matches[2] + ':' + matches[3];
  if (origins.indexOf(origin) < 0)
    return 'DIRECT';
  if (isHttp)
    return 'PROXY 127.0.0.1:8888';
  if (isHttps)
    return 'PROXY 127.0.0.1:4443';
  return 'DIRECT';
}""" % { "origins": origins }
  pacURL = "".join(pacURL.splitlines())

  part = """
user_pref("network.proxy.type", 2);
user_pref("network.proxy.autoconfig_url", "%(pacURL)s");

user_pref("camino.use_system_proxy_settings", false); // Camino-only, harmless to others
""" % {"pacURL": pacURL}
  prefs.append(part)

  # Write the preferences.
  prefsFile = open(profileDir + "/" + "user.js", "a")
  prefsFile.write("".join(prefs))
  prefsFile.close()

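# For a privileged location like the illustrative example above, the loop in
# initializeProfile() appends a user.js fragment of roughly this shape
# (a sketch, not verbatim output):
#
#   user_pref("capability.principal.codebase.p1.granted",
#             "UniversalXPConnect UniversalBrowserRead ...");
#   user_pref("capability.principal.codebase.p1.id", "http://mochi.test:8888");
#   user_pref("capability.principal.codebase.p1.subjectName", "");
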
def fillCertificateDB(profileDir, certPath, utilityPath, xrePath):
  pwfilePath = os.path.join(profileDir, ".crtdbpw")

  pwfile = open(pwfilePath, "w")
  pwfile.write("\n")
  pwfile.close()

  # Create the head of the ssltunnel configuration file.
  sslTunnelConfigPath = os.path.join(profileDir, "ssltunnel.cfg")
  sslTunnelConfig = open(sslTunnelConfigPath, "w")

  sslTunnelConfig.write("httpproxy:1\n")
  sslTunnelConfig.write("certdbdir:%s\n" % certPath)
  sslTunnelConfig.write("forward:127.0.0.1:8888\n")
  sslTunnelConfig.write("listen:*:4443:pgo server certificate\n")

  # Configure the automatic certificate, and bind custom certificates and
  # client authentication to the non-primary locations.
  locations = readLocations()
  locations.pop(0)
  for loc in locations:
    if loc.scheme == "https" and "nocert" not in loc.options:
      customCertRE = re.compile("^cert=(?P<nickname>[0-9a-zA-Z_ ]+)")
      clientAuthRE = re.compile("^clientauth=(?P<clientauth>[a-z]+)")
      for option in loc.options:
        match = customCertRE.match(option)
        if match:
          customcert = match.group("nickname")
          sslTunnelConfig.write("listen:%s:%s:4443:%s\n" %
                                (loc.host, loc.port, customcert))

        match = clientAuthRE.match(option)
        if match:
          clientauth = match.group("clientauth")
          sslTunnelConfig.write("clientauth:%s:%s:4443:%s\n" %
                                (loc.host, loc.port, clientauth))

  sslTunnelConfig.close()

  # Pre-create the certificate database for the profile.
  env = environment(xrePath = xrePath)
  certutil = os.path.join(utilityPath, "certutil" + BIN_SUFFIX)
  pk12util = os.path.join(utilityPath, "pk12util" + BIN_SUFFIX)

  status = Process([certutil, "-N", "-d", profileDir, "-f", pwfilePath], env = env).wait()
  if status != 0:
    return status

  # Walk the cert directory and add custom CAs and client certs.
  files = os.listdir(certPath)
  for item in files:
    root, ext = os.path.splitext(item)
    if ext == ".ca":
      trustBits = "CT,,"
      if root.endswith("-object"):
        trustBits = "CT,,CT"
      Process([certutil, "-A", "-i", os.path.join(certPath, item),
               "-d", profileDir, "-f", pwfilePath, "-n", root, "-t", trustBits],
              env = env).wait()
    if ext == ".client":
      Process([pk12util, "-i", os.path.join(certPath, item), "-w",
               pwfilePath, "-d", profileDir],
              env = env).wait()

  os.unlink(pwfilePath)
  return 0

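# For illustration, the ssltunnel.cfg written above starts with the fixed
# header lines and then carries one extra line per https location that uses
# a cert= or clientauth= option; roughly:
#
#   httpproxy:1
#   certdbdir:<certPath>
#   forward:127.0.0.1:8888
#   listen:*:4443:pgo server certificate
#   listen:<host>:<port>:4443:<nickname>        (for cert= options)
#   clientauth:<host>:<port>:4443:<mode>        (for clientauth= options)
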
def environment(env = None, xrePath = DIST_BIN, crashreporter = True):
  if env is None:
    env = dict(os.environ)

  ldLibraryPath = os.path.abspath(os.path.join(SCRIPT_DIR, xrePath))
  if UNIXISH or IS_MAC:
    envVar = "LD_LIBRARY_PATH"
    if IS_MAC:
      envVar = "DYLD_LIBRARY_PATH"
    if envVar in env:
      ldLibraryPath = ldLibraryPath + ":" + env[envVar]
    env[envVar] = ldLibraryPath
  elif IS_WIN32:
    env["PATH"] = env["PATH"] + ";" + ldLibraryPath

  if crashreporter:
    env['MOZ_CRASHREPORTER_NO_REPORT'] = '1'
    env['MOZ_CRASHREPORTER'] = '1'

  return env

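# A small sketch of how environment() is typically combined with Process
# (illustrative only; no particular binary or arguments are implied):
#
#   env = environment(xrePath = DIST_BIN)
#   proc = Process([DEFAULT_APP], env = env)
#   proc.wait()
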
###############
# RUN THE APP #
###############

def dumpLeakLog(leakLogFile, filter = False):
  """Process the leak log, without parsing it.

  Use this function if you want the raw log only.
  Use it preferably with the |XPCOM_MEM_LEAK_LOG| environment variable.
  """

  # Don't warn (nor "info") if the log file is not there.
  if not os.path.exists(leakLogFile):
    return

  leaks = open(leakLogFile, "r")
  leakReport = leaks.read()
  leaks.close()

  # Only |XPCOM_MEM_LEAK_LOG| reports can be actually filtered out.
  # Only check whether an actual leak was reported.
  if filter and not "0 TOTAL " in leakReport:
    return

  # Simply copy the log.
  log.info(leakReport.rstrip("\n"))

def processLeakLog(leakLogFile, leakThreshold = 0):
  """Process the leak log, parsing it.

  Use this function if you want an additional PASS/FAIL summary.
  It must be used with the |XPCOM_MEM_BLOAT_LOG| environment variable.
  """

  if not os.path.exists(leakLogFile):
    log.info("WARNING refcount logging is off, so leaks can't be detected!")
    return

  #     Per-Inst     Leaked     Total      Rem ...
  #   0 TOTAL            17       192 419115886   2 ...
  # 833 nsTimerImpl      60       120     24726   2 ...
  lineRe = re.compile(r"^\s*\d+\s+(?P<name>\S+)\s+"
                      r"(?P<size>-?\d+)\s+(?P<bytesLeaked>-?\d+)\s+"
                      r"-?\d+\s+(?P<numLeaked>-?\d+)")

  leaks = open(leakLogFile, "r")
  for line in leaks:
    matches = lineRe.match(line)
    if (matches and
        int(matches.group("numLeaked")) == 0 and
        matches.group("name") != "TOTAL"):
      continue
    log.info(line.rstrip())
  leaks.close()

  leaks = open(leakLogFile, "r")
  seenTotal = False
  prefix = "TEST-PASS"
  for line in leaks:
    matches = lineRe.match(line)
    if not matches:
      continue
    name = matches.group("name")
    size = int(matches.group("size"))
    bytesLeaked = int(matches.group("bytesLeaked"))
    numLeaked = int(matches.group("numLeaked"))
    if size < 0 or bytesLeaked < 0 or numLeaked < 0:
      log.info("TEST-UNEXPECTED-FAIL | runtests-leaks | negative leaks caught!")
      if name == "TOTAL":
        seenTotal = True
    elif name == "TOTAL":
      seenTotal = True
      # Check for leaks.
      if bytesLeaked < 0 or bytesLeaked > leakThreshold:
        prefix = "TEST-UNEXPECTED-FAIL"
        leakLog = "TEST-UNEXPECTED-FAIL | runtests-leaks | leaked" \
                  " %d bytes during test execution" % bytesLeaked
      elif bytesLeaked > 0:
        leakLog = "TEST-PASS | runtests-leaks | WARNING leaked" \
                  " %d bytes during test execution" % bytesLeaked
      else:
        leakLog = "TEST-PASS | runtests-leaks | no leaks detected!"
      # Remind the threshold if it is not 0, which is the default/goal.
      if leakThreshold != 0:
        leakLog += " (threshold set at %d bytes)" % leakThreshold
      # Log the information.
      log.info(leakLog)
    else:
      if numLeaked != 0:
        if numLeaked > 1:
          instance = "instances"
          rest = " each (%s bytes total)" % matches.group("bytesLeaked")
        else:
          instance = "instance"
          rest = ""
        log.info("%(prefix)s | runtests-leaks | leaked %(numLeaked)d %(instance)s of %(name)s "
                 "with size %(size)s bytes%(rest)s" %
                 { "prefix": prefix,
                   "numLeaked": numLeaked,
                   "instance": instance,
                   "name": name,
                   "size": matches.group("size"),
                   "rest": rest })
  if not seenTotal:
    log.info("TEST-UNEXPECTED-FAIL | runtests-leaks | missing output line for total leaks!")
  leaks.close()

def runApp(testURL, env, app, profileDir, extraArgs,
           runSSLTunnel = False, utilityPath = DIST_BIN,
           xrePath = DIST_BIN, certPath = CERTS_SRC_DIR,
           debuggerInfo = None, symbolsPath = SYMBOLS_PATH):
  "Run the app, log the duration it took to execute, return the status code."

  if IS_TEST_BUILD and runSSLTunnel:
    # Create the certificate database for the profile.
    certificateStatus = fillCertificateDB(profileDir, certPath, utilityPath, xrePath)
    if certificateStatus != 0:
      log.info("TEST-UNEXPECTED-FAIL | automation.py | Certificate integration failed")
      return certificateStatus

    # Start ssltunnel to provide https:// URL capability.
    ssltunnel = os.path.join(utilityPath, "ssltunnel" + BIN_SUFFIX)
    ssltunnelProcess = Process([ssltunnel, os.path.join(profileDir, "ssltunnel.cfg")], env = environment(xrePath = xrePath))
    log.info("INFO | automation.py | SSL tunnel pid: %d", ssltunnelProcess.pid)

  # Now run with the profile we created.
  cmd = app
  if IS_MAC and not IS_CAMINO and not cmd.endswith("-bin"):
    cmd += "-bin"
  cmd = os.path.abspath(cmd)

  args = []

  if debuggerInfo:
    args.extend(debuggerInfo["args"])
    args.append(cmd)
    cmd = os.path.abspath(debuggerInfo["path"])

  if IS_MAC:
    args.append("-foreground")

  if IS_CYGWIN:
    # The browser expects a Windows-style profile path.
    import commands
    profileDirectory = commands.getoutput("cygpath -w \"" + profileDir + "/\"")
  else:
    profileDirectory = profileDir + "/"

  args.extend(("-no-remote", "-profile", profileDirectory))
  if testURL is not None:
    if IS_CAMINO:
      args.extend(("-url", testURL))
    else:
      args.append(testURL)
  args.extend(extraArgs)

  startTime = datetime.now()

  # Don't redirect stdout and stderr if an interactive debugger is attached.
  if debuggerInfo and debuggerInfo["interactive"]:
    outputPipe = None
  else:
    outputPipe = subprocess.PIPE

  proc = Process([cmd] + args,
                 env = environment(env, xrePath = xrePath,
                                   crashreporter = not debuggerInfo),
                 stdout = outputPipe,
                 stderr = subprocess.STDOUT)
  log.info("INFO | automation.py | Application pid: %d", proc.pid)

  if outputPipe is None:
    log.info("TEST-INFO: Not logging stdout or stderr due to debugger connection")
  else:
    line = proc.stdout.readline()
    while line != "":
      log.info(line.rstrip())
      line = proc.stdout.readline()

  status = proc.wait()
  if status != 0:
    log.info("TEST-UNEXPECTED-FAIL | automation.py | Exited with code %d during test run", status)
  log.info("INFO | automation.py | Application ran for: %s", str(datetime.now() - startTime))

  if checkForCrashes(os.path.join(profileDir, "minidumps"), symbolsPath):
    status = -1

  if IS_TEST_BUILD and runSSLTunnel:
    ssltunnelProcess.kill()

  return status
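
# End-to-end, a caller is expected to do something like the following
# (a sketch only; the profile path, test URL, and leak log location are
# made up for illustration):
#
#   profileDir = "/tmp/mochitest-profile"
#   initializeProfile(profileDir)
#   status = runApp("http://localhost:8888/tests/index.html", dict(os.environ),
#                   DEFAULT_APP, profileDir, [], runSSLTunnel = True)
#   processLeakLog(os.path.join(profileDir, "leaks.log"))
#   sys.exit(status)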