2008-02-21 13:08:39 -08:00
|
|
|
#
|
2012-05-21 04:12:37 -07:00
|
|
|
# This Source Code Form is subject to the terms of the Mozilla Public
|
|
|
|
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
|
|
|
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
2008-02-21 13:08:39 -08:00
|
|
|
|
2011-10-14 08:45:58 -07:00
|
|
|
from __future__ import with_statement
|
2008-06-06 23:43:15 -07:00
|
|
|
import codecs
|
2010-01-06 16:45:46 -08:00
|
|
|
from datetime import datetime, timedelta
|
2008-02-21 13:08:39 -08:00
|
|
|
import itertools
|
2008-04-07 22:18:45 -07:00
|
|
|
import logging
|
2008-02-21 13:08:39 -08:00
|
|
|
import os
|
2008-06-06 23:43:15 -07:00
|
|
|
import re
|
2009-10-13 13:56:24 -07:00
|
|
|
import select
|
2009-01-12 11:23:28 -08:00
|
|
|
import shutil
|
2008-04-07 22:18:45 -07:00
|
|
|
import signal
|
2009-01-12 11:23:28 -08:00
|
|
|
import subprocess
|
2008-02-21 13:08:39 -08:00
|
|
|
import sys
|
2008-04-07 22:18:45 -07:00
|
|
|
import threading
|
2009-11-20 11:48:56 -08:00
|
|
|
import tempfile
|
2010-08-19 16:12:46 -07:00
|
|
|
import sqlite3
|
2012-07-19 23:40:15 -07:00
|
|
|
from string import Template
|
2009-05-11 12:54:39 -07:00
|
|
|
|
2010-03-18 11:14:14 -07:00
|
|
|
# Directory containing this script, with symlinks resolved, so that sibling
# helper modules can be imported regardless of how the script was invoked.
SCRIPT_DIR = os.path.abspath(os.path.realpath(os.path.dirname(sys.argv[0])))
# Make automationutils (which lives next to this script) importable.
sys.path.insert(0, SCRIPT_DIR)

import automationutils
|
|
|
|
|
2010-03-13 09:56:24 -08:00
|
|
|
# Default host and ports for the Mochitest web server; can be overridden per
# run via Automation.setServerInfo().
_DEFAULT_WEB_SERVER = "127.0.0.1"
_DEFAULT_HTTP_PORT = 8888
_DEFAULT_SSL_PORT = 4443
_DEFAULT_WEBSOCKET_PORT = 9988

# from nsIPrincipal.idl
# App status levels used for the test web apps installed into the profile.
_APP_STATUS_NOT_INSTALLED = 0
_APP_STATUS_INSTALLED = 1
_APP_STATUS_PRIVILEGED = 2
_APP_STATUS_CERTIFIED = 3
|
|
|
|
|
2010-01-15 09:22:54 -08:00
|
|
|
#expand _DIST_BIN = __XPC_BIN_PATH__
|
|
|
|
#expand _IS_WIN32 = len("__WIN32__") != 0
|
|
|
|
#expand _IS_MAC = __IS_MAC__ != 0
|
|
|
|
#expand _IS_LINUX = __IS_LINUX__ != 0
|
2008-02-21 13:08:39 -08:00
|
|
|
#ifdef IS_CYGWIN
|
2010-01-15 09:22:54 -08:00
|
|
|
#expand _IS_CYGWIN = __IS_CYGWIN__ == 1
|
2008-02-21 13:08:39 -08:00
|
|
|
#else
|
2010-01-15 09:22:54 -08:00
|
|
|
_IS_CYGWIN = False
|
2008-02-21 13:08:39 -08:00
|
|
|
#endif
|
2010-01-15 09:22:54 -08:00
|
|
|
#expand _IS_CAMINO = __IS_CAMINO__ != 0
|
|
|
|
#expand _BIN_SUFFIX = __BIN_SUFFIX__
|
|
|
|
#expand _PERL = __PERL__
|
2008-02-21 13:08:39 -08:00
|
|
|
|
2010-01-15 09:22:54 -08:00
|
|
|
#expand _DEFAULT_APP = "./" + __BROWSER_PATH__
|
|
|
|
#expand _CERTS_SRC_DIR = __CERTS_SRC_DIR__
|
|
|
|
#expand _IS_TEST_BUILD = __IS_TEST_BUILD__
|
|
|
|
#expand _IS_DEBUG_BUILD = __IS_DEBUG_BUILD__
|
|
|
|
#expand _CRASHREPORTER = __CRASHREPORTER__ == 1
|
2013-01-22 07:48:02 -08:00
|
|
|
#expand _IS_ASAN = __IS_ASAN__ == 1
|
2008-04-07 22:18:45 -07:00
|
|
|
|
2010-03-18 11:14:14 -07:00
|
|
|
|
|
|
|
if _IS_WIN32:
|
|
|
|
import ctypes, ctypes.wintypes, time, msvcrt
|
|
|
|
else:
|
|
|
|
import errno
|
|
|
|
|
|
|
|
|
2010-03-18 13:13:33 -07:00
|
|
|
# We use the logging system here primarily because it'll handle multiple
# threads, which is needed to process the output of the server and application
# processes simultaneously.
_log = logging.getLogger()
# Log to stdout so the test harness's output parsers see our messages.
handler = logging.StreamHandler(sys.stdout)
_log.setLevel(logging.INFO)
_log.addHandler(handler)
|
|
|
|
|
|
|
|
|
2008-04-07 22:18:45 -07:00
|
|
|
#################
|
|
|
|
# PROFILE SETUP #
|
|
|
|
#################
|
2008-02-21 13:08:39 -08:00
|
|
|
|
2008-06-06 23:43:15 -07:00
|
|
|
class SyntaxError(Exception):
  """Signifies a syntax error on a particular line in server-locations.txt.

  NOTE: deliberately shadows the builtin SyntaxError within this module.
  """

  def __init__(self, lineno, msg = None):
    # 1-based line number at which the error was detected.
    self.lineno = lineno
    # Optional human-readable detail about the error.
    self.msg = msg

  def __str__(self):
    suffix = ": %s." % self.msg if self.msg else "."
    return "Syntax error on line %s%s" % (self.lineno, suffix)
|
|
|
|
|
|
|
|
|
|
|
|
class Location:
  """Represents a single location line parsed from server-locations.txt."""

  def __init__(self, scheme, host, port, options):
    # Record each parsed component of the location line on the instance.
    parsed = {"scheme": scheme, "host": host,
              "port": port, "options": options}
    for attrName, attrValue in parsed.items():
      setattr(self, attrName, attrValue)
|
|
|
|
|
2010-01-15 09:22:54 -08:00
|
|
|
class Automation(object):
  """
  Runs the browser from a script, and provides useful utilities
  for setting up the browser environment.
  """

  # The _-prefixed values below are filled in by the build preprocessor from
  # the "#expand" directives earlier in this file, so they are only defined
  # in the generated automation.py, not in this .in template.
  DIST_BIN = _DIST_BIN
  IS_WIN32 = _IS_WIN32
  IS_MAC = _IS_MAC
  IS_LINUX = _IS_LINUX
  IS_CYGWIN = _IS_CYGWIN
  IS_CAMINO = _IS_CAMINO
  BIN_SUFFIX = _BIN_SUFFIX
  PERL = _PERL

  # True on Linux/Unix-like platforms (not Windows, not Mac).
  UNIXISH = not IS_WIN32 and not IS_MAC

  DEFAULT_APP = _DEFAULT_APP
  CERTS_SRC_DIR = _CERTS_SRC_DIR
  IS_TEST_BUILD = _IS_TEST_BUILD
  IS_DEBUG_BUILD = _IS_DEBUG_BUILD
  CRASHREPORTER = _CRASHREPORTER
  IS_ASAN = _IS_ASAN

  # timeout, in seconds
  DEFAULT_TIMEOUT = 60.0
  # Default server host/ports; setServerInfo() can override per run.
  DEFAULT_WEB_SERVER = _DEFAULT_WEB_SERVER
  DEFAULT_HTTP_PORT = _DEFAULT_HTTP_PORT
  DEFAULT_SSL_PORT = _DEFAULT_SSL_PORT
  DEFAULT_WEBSOCKET_PORT = _DEFAULT_WEBSOCKET_PORT
|
2008-06-06 23:43:15 -07:00
|
|
|
|
2010-01-15 09:22:54 -08:00
|
|
|
def __init__(self):
|
2010-03-18 13:13:33 -07:00
|
|
|
self.log = _log
|
2010-06-10 18:46:18 -07:00
|
|
|
self.lastTestSeen = "automation.py"
|
2010-08-22 18:27:26 -07:00
|
|
|
self.haveDumpedScreen = False
|
2010-01-15 09:22:54 -08:00
|
|
|
|
2010-06-16 22:38:55 -07:00
|
|
|
def setServerInfo(self,
|
|
|
|
webServer = _DEFAULT_WEB_SERVER,
|
|
|
|
httpPort = _DEFAULT_HTTP_PORT,
|
|
|
|
sslPort = _DEFAULT_SSL_PORT,
|
2010-07-28 10:55:36 -07:00
|
|
|
webSocketPort = _DEFAULT_WEBSOCKET_PORT):
|
2010-03-13 09:56:24 -08:00
|
|
|
self.webServer = webServer
|
2010-03-12 13:53:37 -08:00
|
|
|
self.httpPort = httpPort
|
|
|
|
self.sslPort = sslPort
|
2010-06-16 22:38:55 -07:00
|
|
|
self.webSocketPort = webSocketPort
|
2010-03-12 13:53:37 -08:00
|
|
|
|
2010-01-15 09:22:54 -08:00
|
|
|
@property
|
|
|
|
def __all__(self):
|
|
|
|
return [
|
|
|
|
"UNIXISH",
|
|
|
|
"IS_WIN32",
|
|
|
|
"IS_MAC",
|
|
|
|
"log",
|
|
|
|
"runApp",
|
|
|
|
"Process",
|
|
|
|
"addCommonOptions",
|
|
|
|
"initializeProfile",
|
|
|
|
"DIST_BIN",
|
|
|
|
"DEFAULT_APP",
|
|
|
|
"CERTS_SRC_DIR",
|
|
|
|
"environment",
|
|
|
|
"IS_TEST_BUILD",
|
|
|
|
"IS_DEBUG_BUILD",
|
|
|
|
"DEFAULT_TIMEOUT",
|
|
|
|
]
|
|
|
|
|
|
|
|
class Process(subprocess.Popen):
|
|
|
|
"""
|
|
|
|
Represents our view of a subprocess.
|
|
|
|
It adds a kill() method which allows it to be stopped explicitly.
|
|
|
|
"""
|
|
|
|
|
2010-03-18 13:13:33 -07:00
|
|
|
def __init__(self,
|
|
|
|
args,
|
|
|
|
bufsize=0,
|
|
|
|
executable=None,
|
|
|
|
stdin=None,
|
|
|
|
stdout=None,
|
|
|
|
stderr=None,
|
|
|
|
preexec_fn=None,
|
|
|
|
close_fds=False,
|
|
|
|
shell=False,
|
|
|
|
cwd=None,
|
|
|
|
env=None,
|
|
|
|
universal_newlines=False,
|
|
|
|
startupinfo=None,
|
|
|
|
creationflags=0):
|
2010-09-23 09:19:31 -07:00
|
|
|
args = automationutils.wrapCommand(args)
|
|
|
|
print "args: %s" % args
|
2010-03-18 13:13:33 -07:00
|
|
|
subprocess.Popen.__init__(self, args, bufsize, executable,
|
|
|
|
stdin, stdout, stderr,
|
|
|
|
preexec_fn, close_fds,
|
|
|
|
shell, cwd, env,
|
|
|
|
universal_newlines, startupinfo, creationflags)
|
|
|
|
self.log = _log
|
|
|
|
|
2010-01-15 09:22:54 -08:00
|
|
|
def kill(self):
|
|
|
|
if Automation().IS_WIN32:
|
|
|
|
import platform
|
|
|
|
pid = "%i" % self.pid
|
|
|
|
if platform.release() == "2000":
|
|
|
|
# Windows 2000 needs 'kill.exe' from the
|
|
|
|
#'Windows 2000 Resource Kit tools'. (See bug 475455.)
|
|
|
|
try:
|
|
|
|
subprocess.Popen(["kill", "-f", pid]).wait()
|
|
|
|
except:
|
|
|
|
self.log.info("TEST-UNEXPECTED-FAIL | automation.py | Missing 'kill' utility to kill process with pid=%s. Kill it manually!", pid)
|
|
|
|
else:
|
|
|
|
# Windows XP and later.
|
|
|
|
subprocess.Popen(["taskkill", "/F", "/PID", pid]).wait()
|
|
|
|
else:
|
|
|
|
os.kill(self.pid, signal.SIGKILL)
|
|
|
|
|
|
|
|
def readLocations(self, locationsPath = "server-locations.txt"):
|
|
|
|
"""
|
|
|
|
Reads the locations at which the Mochitest HTTP server is available from
|
|
|
|
server-locations.txt.
|
|
|
|
"""
|
|
|
|
|
|
|
|
locationFile = codecs.open(locationsPath, "r", "UTF-8")
|
|
|
|
|
|
|
|
# Perhaps more detail than necessary, but it's the easiest way to make sure
|
|
|
|
# we get exactly the format we want. See server-locations.txt for the exact
|
|
|
|
# format guaranteed here.
|
|
|
|
lineRe = re.compile(r"^(?P<scheme>[a-z][-a-z0-9+.]*)"
|
2008-06-06 23:43:15 -07:00
|
|
|
r"://"
|
|
|
|
r"(?P<host>"
|
|
|
|
r"\d+\.\d+\.\d+\.\d+"
|
|
|
|
r"|"
|
|
|
|
r"(?:[a-z0-9](?:[-a-z0-9]*[a-z0-9])?\.)*"
|
|
|
|
r"[a-z](?:[-a-z0-9]*[a-z0-9])?"
|
|
|
|
r")"
|
|
|
|
r":"
|
|
|
|
r"(?P<port>\d+)"
|
|
|
|
r"(?:"
|
|
|
|
r"\s+"
|
2008-09-05 06:35:58 -07:00
|
|
|
r"(?P<options>\S+(?:,\S+)*)"
|
2008-06-06 23:43:15 -07:00
|
|
|
r")?$")
|
2010-01-15 09:22:54 -08:00
|
|
|
locations = []
|
|
|
|
lineno = 0
|
|
|
|
seenPrimary = False
|
|
|
|
for line in locationFile:
|
|
|
|
lineno += 1
|
|
|
|
if line.startswith("#") or line == "\n":
|
|
|
|
continue
|
2008-06-06 23:43:15 -07:00
|
|
|
|
2010-01-15 09:22:54 -08:00
|
|
|
match = lineRe.match(line)
|
|
|
|
if not match:
|
|
|
|
raise SyntaxError(lineno)
|
|
|
|
|
|
|
|
options = match.group("options")
|
|
|
|
if options:
|
|
|
|
options = options.split(",")
|
|
|
|
if "primary" in options:
|
|
|
|
if seenPrimary:
|
|
|
|
raise SyntaxError(lineno, "multiple primary locations")
|
|
|
|
seenPrimary = True
|
|
|
|
else:
|
|
|
|
options = []
|
2008-06-06 23:43:15 -07:00
|
|
|
|
2010-01-15 09:22:54 -08:00
|
|
|
locations.append(Location(match.group("scheme"), match.group("host"),
|
|
|
|
match.group("port"), options))
|
2008-06-06 23:43:15 -07:00
|
|
|
|
2010-01-15 09:22:54 -08:00
|
|
|
if not seenPrimary:
|
|
|
|
raise SyntaxError(lineno + 1, "missing primary location")
|
2008-06-06 23:43:15 -07:00
|
|
|
|
2010-01-15 09:22:54 -08:00
|
|
|
return locations
|
2008-06-06 23:43:15 -07:00
|
|
|
|
2010-08-19 16:12:46 -07:00
|
|
|
def setupPermissionsDatabase(self, profileDir, permissions):
|
|
|
|
# Open database and create table
|
|
|
|
permDB = sqlite3.connect(os.path.join(profileDir, "permissions.sqlite"))
|
|
|
|
cursor = permDB.cursor();
|
2012-08-23 11:39:41 -07:00
|
|
|
|
|
|
|
cursor.execute("PRAGMA user_version=3");
|
|
|
|
|
2010-08-21 09:48:01 -07:00
|
|
|
# SQL copied from nsPermissionManager.cpp
|
2013-01-04 10:41:34 -08:00
|
|
|
cursor.execute("""CREATE TABLE IF NOT EXISTS moz_hosts (
|
|
|
|
id INTEGER PRIMARY KEY,
|
|
|
|
host TEXT,
|
|
|
|
type TEXT,
|
|
|
|
permission INTEGER,
|
|
|
|
expireType INTEGER,
|
|
|
|
expireTime INTEGER,
|
|
|
|
appId INTEGER,
|
|
|
|
isInBrowserElement INTEGER)""")
|
2010-08-19 16:12:46 -07:00
|
|
|
|
|
|
|
# Insert desired permissions
|
|
|
|
for perm in permissions.keys():
|
2010-09-03 15:53:28 -07:00
|
|
|
for host,allow in permissions[perm]:
|
2013-01-04 10:41:34 -08:00
|
|
|
cursor.execute("INSERT INTO moz_hosts values(NULL, ?, ?, ?, 0, 0, 0, 0)",
|
|
|
|
(host, perm, 1 if allow else 2))
|
2010-08-19 16:12:46 -07:00
|
|
|
|
|
|
|
# Commit and close
|
|
|
|
permDB.commit()
|
|
|
|
cursor.close()
|
|
|
|
|
2012-07-19 23:40:15 -07:00
|
|
|
  def setupTestApps(self, profileDir, apps):
    """
    Registers the given test web apps in profileDir/webapps/webapps.json and
    writes a manifest.webapp file for each app.

    Each entry of `apps` is a dict with keys: name, csp, origin, manifestURL,
    description, appStatus.  Existing entries in an already-present
    webapps.json (the b2g case) are preserved and test apps are appended
    after them, with localIds continuing past the existing maximum.
    """
    # Template for one entry in webapps.json; $-placeholders are filled from
    # each app dict.
    webappJSONTemplate = Template(""""$id": {
  "origin": "$origin",
  "installOrigin": "$origin",
  "receipt": null,
  "installTime": 132333986000,
  "manifestURL": "$manifestURL",
  "localId": $localId,
  "id": "$id",
  "appStatus": $appStatus,
  "csp": "$csp"
}""")

    # Template for each app's manifest.webapp file.
    manifestTemplate = Template("""{
  "name": "$name",
  "csp": "$csp",
  "description": "$description",
  "launch_path": "/",
  "developer": {
    "name": "Mozilla",
    "url": "https://mozilla.org/"
  },
  "permissions": [
  ],
  "locales": {
    "en-US": {
      "name": "$name",
      "description": "$description"
    }
  },
  "default_locale": "en-US",
  "icons": {
  }
}
""")

    # Create webapps/webapps.json
    webappsDir = os.path.join(profileDir, "webapps")
    if not os.access(webappsDir, os.F_OK):
      os.mkdir(webappsDir)

    # Matches lines of the form: <indent>"key": <value>
    lineRe = re.compile(r'(.*?)"(.*?)": (.*)')

    webappsJSONFilename = os.path.join(webappsDir, "webapps.json")
    webappsJSON = []
    if os.access(webappsJSONFilename, os.F_OK):
      # If there is an existing webapps.json file (which will be the case for
      # b2g), we parse the data in the existing file before appending test
      # test apps to it.
      startId = 1
      webappsJSONFile = open(webappsJSONFilename, "r")
      contents = webappsJSONFile.read()

      for app_content in contents.split('},'):
        app = {}
        # ghetto json parser needed due to lack of json/simplejson on test slaves
        for line in app_content.split('\n'):
          m = lineRe.match(line)
          if m:
            value = m.groups()[2]
            # remove any trailing commas
            if value[-1:] == ',':
              value = value[:-1]
            # set the app name from a line that looks like this:
            # "name.gaiamobile.org": {
            if value == '{':
              app['id'] = m.groups()[1]
            # parse string, None, bool and int types
            elif value[0:1] == '"':
              app[m.groups()[1]] = value[1:-1]
            elif value == "null":
              app[m.groups()[1]] = None
            elif value == "true":
              app[m.groups()[1]] = True
            elif value == "false":
              app[m.groups()[1]] = False
            else:
              app[m.groups()[1]] = int(value)
        if app:
          webappsJSON.append(app)

      webappsJSONFile.close()

    # Normalize pre-existing entries and find the first free localId for the
    # test apps we are about to append.
    startId = 1
    for app in webappsJSON:
      if app['localId'] >= startId:
        startId = app['localId'] + 1
      if not app.get('csp'):
        app['csp'] = ''
      if not app.get('appStatus'):
        app['appStatus'] = 3

    for localId, app in enumerate(apps):
      app['localId'] = localId + startId # localId must be from 1..N
      if not app.get('id'):
        app['id'] = app['name']
      webappsJSON.append(app)

    # Serialize all entries (existing + test apps) back to webapps.json.
    contents = []
    for app in webappsJSON:
      contents.append(webappJSONTemplate.substitute(app))
    contents = '{\n' + ',\n'.join(contents) + '\n}\n'

    webappsJSONFile = open(webappsJSONFilename, "w")
    webappsJSONFile.write(contents)
    webappsJSONFile.close()

    # Create manifest file for each app.
    for app in apps:
      manifest = manifestTemplate.substitute(app)

      manifestDir = os.path.join(webappsDir, app['name'])
      os.mkdir(manifestDir)

      manifestFile = open(os.path.join(manifestDir, "manifest.webapp"), "a")
      manifestFile.write(manifest)
      manifestFile.close()
|
|
|
|
|
2013-01-04 10:41:34 -08:00
|
|
|
def initializeProfile(self, profileDir, extraPrefs=[],
|
|
|
|
useServerLocations=False,
|
|
|
|
initialProfile=None):
|
2010-03-12 13:53:37 -08:00
|
|
|
" Sets up the standard testing profile."
|
2008-06-06 23:43:15 -07:00
|
|
|
|
2010-03-12 13:53:37 -08:00
|
|
|
prefs = []
|
2010-01-15 09:22:54 -08:00
|
|
|
# Start with a clean slate.
|
|
|
|
shutil.rmtree(profileDir, True)
|
2013-01-04 10:41:34 -08:00
|
|
|
|
|
|
|
if initialProfile:
|
|
|
|
shutil.copytree(initialProfile, profileDir)
|
|
|
|
else:
|
|
|
|
os.mkdir(profileDir)
|
2008-02-21 13:08:39 -08:00
|
|
|
|
2010-08-19 16:12:46 -07:00
|
|
|
# Set up permissions database
|
|
|
|
locations = self.readLocations()
|
|
|
|
self.setupPermissionsDatabase(profileDir,
|
2010-09-03 15:53:28 -07:00
|
|
|
{'allowXULXBL':[(l.host, 'noxul' not in l.options) for l in locations]});
|
2010-08-19 16:12:46 -07:00
|
|
|
|
2010-01-15 09:22:54 -08:00
|
|
|
part = """\
|
2010-12-09 14:47:21 -08:00
|
|
|
user_pref("browser.console.showInPanel", true);
|
2008-02-21 13:08:39 -08:00
|
|
|
user_pref("browser.dom.window.dump.enabled", true);
|
2011-06-07 12:11:37 -07:00
|
|
|
user_pref("browser.firstrun.show.localepicker", false);
|
2011-06-10 15:14:02 -07:00
|
|
|
user_pref("browser.firstrun.show.uidiscovery", false);
|
2012-05-16 03:07:50 -07:00
|
|
|
user_pref("browser.startup.page", 0); // use about:blank, not browser.startup.homepage
|
2011-08-30 20:06:58 -07:00
|
|
|
user_pref("browser.ui.layout.tablet", 0); // force tablet UI off
|
2008-05-14 06:27:47 -07:00
|
|
|
user_pref("dom.allow_scripts_to_close_windows", true);
|
2008-02-21 13:08:39 -08:00
|
|
|
user_pref("dom.disable_open_during_load", false);
|
2013-02-11 15:14:35 -08:00
|
|
|
user_pref("dom.experimental_forms", true); // on for testing
|
2008-02-21 13:08:39 -08:00
|
|
|
user_pref("dom.max_script_run_time", 0); // no slow script dialogs
|
2011-11-11 07:37:24 -08:00
|
|
|
user_pref("hangmonitor.timeout", 0); // no hang monitor
|
2009-05-05 16:11:19 -07:00
|
|
|
user_pref("dom.max_chrome_script_run_time", 0);
|
2009-10-22 10:49:21 -07:00
|
|
|
user_pref("dom.popup_maximum", -1);
|
2011-01-24 16:23:08 -08:00
|
|
|
user_pref("dom.send_after_paint_to_content", true);
|
2010-09-16 18:24:14 -07:00
|
|
|
user_pref("dom.successive_dialog_time_limit", 0);
|
2008-02-21 13:08:39 -08:00
|
|
|
user_pref("signed.applets.codebase_principal_support", true);
|
|
|
|
user_pref("browser.shell.checkDefaultBrowser", false);
|
2008-09-05 05:03:30 -07:00
|
|
|
user_pref("shell.checkDefaultClient", false);
|
2008-02-21 13:08:39 -08:00
|
|
|
user_pref("browser.warnOnQuit", false);
|
2008-03-03 23:24:26 -08:00
|
|
|
user_pref("accessibility.typeaheadfind.autostart", false);
|
2008-03-04 14:12:06 -08:00
|
|
|
user_pref("javascript.options.showInConsole", true);
|
2010-09-11 11:41:39 -07:00
|
|
|
user_pref("devtools.errorconsole.enabled", true);
|
2013-02-08 12:51:09 -08:00
|
|
|
user_pref("devtools.debugger.remote-port", 6023);
|
2008-04-28 16:56:07 -07:00
|
|
|
user_pref("layout.debug.enable_data_xbl", true);
|
2008-05-05 13:43:44 -07:00
|
|
|
user_pref("browser.EULA.override", true);
|
2011-11-08 13:28:49 -08:00
|
|
|
user_pref("javascript.options.jit_hardening", true);
|
2008-09-09 21:13:23 -07:00
|
|
|
user_pref("gfx.color_management.force_srgb", true);
|
2009-01-12 21:52:00 -08:00
|
|
|
user_pref("network.manage-offline-status", false);
|
2012-06-11 22:15:55 -07:00
|
|
|
user_pref("dom.min_background_timeout_value", 1000);
|
2009-05-05 21:30:39 -07:00
|
|
|
user_pref("test.mousescroll", true);
|
2009-01-15 11:19:15 -08:00
|
|
|
user_pref("security.default_personal_cert", "Select Automatically"); // Need to client auth test be w/o any dialogs
|
2009-02-24 11:46:51 -08:00
|
|
|
user_pref("network.http.prompt-temp-redirect", false);
|
2009-03-31 17:52:56 -07:00
|
|
|
user_pref("media.cache_size", 100);
|
2009-04-08 01:45:32 -07:00
|
|
|
user_pref("security.warn_viewing_mixed", false);
|
2010-08-12 19:07:18 -07:00
|
|
|
user_pref("app.update.enabled", false);
|
2012-09-11 19:13:14 -07:00
|
|
|
user_pref("app.update.staging.enabled", false);
|
2010-10-26 14:06:20 -07:00
|
|
|
user_pref("browser.panorama.experienced_first_run", true); // Assume experienced
|
2012-10-25 07:57:51 -07:00
|
|
|
user_pref("dom.w3c_touch_events.enabled", 1);
|
2013-01-03 22:54:26 -08:00
|
|
|
user_pref("dom.undo_manager.enabled", true);
|
2013-01-02 07:31:10 -08:00
|
|
|
// Set a future policy version to avoid the telemetry prompt.
|
|
|
|
user_pref("toolkit.telemetry.prompted", 999);
|
|
|
|
user_pref("toolkit.telemetry.notifiedOptOut", 999);
|
2011-11-23 18:48:24 -08:00
|
|
|
// Existing tests assume there is no font size inflation.
|
|
|
|
user_pref("font.size.inflation.emPerLine", 0);
|
|
|
|
user_pref("font.size.inflation.minTwips", 0);
|
2010-06-18 08:54:22 -07:00
|
|
|
|
|
|
|
// Only load extensions from the application and user profile
|
|
|
|
// AddonManager.SCOPE_PROFILE + AddonManager.SCOPE_APPLICATION
|
|
|
|
user_pref("extensions.enabledScopes", 5);
|
2010-10-12 12:15:04 -07:00
|
|
|
// Disable metadata caching for installed add-ons by default
|
|
|
|
user_pref("extensions.getAddons.cache.enabled", false);
|
2011-01-19 14:56:01 -08:00
|
|
|
// Disable intalling any distribution add-ons
|
|
|
|
user_pref("extensions.installDistroAddons", false);
|
2008-05-06 10:52:26 -07:00
|
|
|
|
2010-06-24 16:36:31 -07:00
|
|
|
user_pref("extensions.testpilot.runStudies", false);
|
2013-01-18 07:27:44 -08:00
|
|
|
user_pref("extensions.testpilot.alreadyCustomizedToolbar", true);
|
2008-05-06 10:52:26 -07:00
|
|
|
|
2010-03-12 13:53:37 -08:00
|
|
|
user_pref("geo.wifi.uri", "http://%(server)s/tests/dom/tests/mochitest/geolocation/network_geolocation.sjs");
|
2009-08-14 16:12:09 -07:00
|
|
|
user_pref("geo.wifi.testing", true);
|
2010-08-31 09:15:52 -07:00
|
|
|
user_pref("geo.ignore.location_filter", true);
|
2009-08-14 16:12:09 -07:00
|
|
|
|
2008-05-06 10:52:26 -07:00
|
|
|
user_pref("camino.warn_when_closing", false); // Camino-only, harmless to others
|
2009-06-17 03:47:08 -07:00
|
|
|
|
|
|
|
// Make url-classifier updates so rare that they won't affect tests
|
|
|
|
user_pref("urlclassifier.updateinterval", 172800);
|
|
|
|
// Point the url-classifier to the local testing server for fast failures
|
2012-08-01 15:52:47 -07:00
|
|
|
user_pref("browser.safebrowsing.gethashURL", "http://%(server)s/safebrowsing-dummy/gethash");
|
|
|
|
user_pref("browser.safebrowsing.keyURL", "http://%(server)s/safebrowsing-dummy/newkey");
|
|
|
|
user_pref("browser.safebrowsing.updateURL", "http://%(server)s/safebrowsing-dummy/update");
|
2010-09-14 16:51:12 -07:00
|
|
|
// Point update checks to the local testing server for fast failures
|
|
|
|
user_pref("extensions.update.url", "http://%(server)s/extensions-dummy/updateURL");
|
2012-03-07 00:18:28 -08:00
|
|
|
user_pref("extensions.update.background.url", "http://%(server)s/extensions-dummy/updateBackgroundURL");
|
2010-09-14 16:51:12 -07:00
|
|
|
user_pref("extensions.blocklist.url", "http://%(server)s/extensions-dummy/blocklistURL");
|
2011-12-16 12:04:28 -08:00
|
|
|
user_pref("extensions.hotfix.url", "http://%(server)s/extensions-dummy/hotfixURL");
|
2012-10-19 07:27:19 -07:00
|
|
|
// Turn off extension updates so they don't bother tests
|
|
|
|
user_pref("extensions.update.enabled", false);
|
2010-12-25 08:34:28 -08:00
|
|
|
// Make sure opening about:addons won't hit the network
|
|
|
|
user_pref("extensions.webservice.discoverURL", "http://%(server)s/extensions-dummy/discoveryURL");
|
2011-12-14 17:00:19 -08:00
|
|
|
// Make sure AddonRepository won't hit the network
|
|
|
|
user_pref("extensions.getAddons.maxResults", 0);
|
|
|
|
user_pref("extensions.getAddons.get.url", "http://%(server)s/extensions-dummy/repositoryGetURL");
|
2012-01-31 17:22:42 -08:00
|
|
|
user_pref("extensions.getAddons.getWithPerformance.url", "http://%(server)s/extensions-dummy/repositoryGetWithPerformanceURL");
|
2011-12-14 17:00:19 -08:00
|
|
|
user_pref("extensions.getAddons.search.browseURL", "http://%(server)s/extensions-dummy/repositoryBrowseURL");
|
|
|
|
user_pref("extensions.getAddons.search.url", "http://%(server)s/extensions-dummy/repositorySearchURL");
|
2012-12-20 17:25:26 -08:00
|
|
|
// Make sure that opening the plugins check page won't hit the network
|
|
|
|
user_pref("plugins.update.url", "http://%(server)s/plugins-dummy/updateCheckURL");
|
2012-08-23 11:45:28 -07:00
|
|
|
|
2012-10-26 15:15:31 -07:00
|
|
|
// Existing tests don't wait for the notification button security delay
|
|
|
|
user_pref("security.notification_enable_delay", 0);
|
|
|
|
|
2012-08-23 11:45:28 -07:00
|
|
|
// Make enablePrivilege continue to work for test code. :-(
|
2012-10-18 05:03:37 -07:00
|
|
|
user_pref("security.turn_off_all_security_so_that_viruses_can_take_over_this_computer", true);
|
2012-09-30 08:07:51 -07:00
|
|
|
|
|
|
|
// Get network events.
|
|
|
|
user_pref("network.activity.blipIntervalMilliseconds", 250);
|
2013-01-11 19:05:35 -08:00
|
|
|
|
|
|
|
// Don't allow the Data Reporting service to prompt for policy acceptance.
|
|
|
|
user_pref("datareporting.policy.dataSubmissionPolicyBypassAcceptance", true);
|
2013-01-21 11:43:06 -08:00
|
|
|
|
2013-02-14 12:24:21 -08:00
|
|
|
// Point Firefox Health Report at a local server. We don't care if it actually
|
|
|
|
// works. It just can't hit the default production endpoint.
|
|
|
|
user_pref("datareporting.healthreport.documentServerURI", "http://%(server)s/healthreport/");
|
|
|
|
|
2013-01-21 11:43:06 -08:00
|
|
|
// Make sure CSS error reporting is enabled for tests
|
|
|
|
user_pref("layout.css.report_errors", true);
|
2010-03-12 13:53:37 -08:00
|
|
|
""" % { "server" : self.webServer + ":" + str(self.httpPort) }
|
2010-01-15 09:22:54 -08:00
|
|
|
prefs.append(part)
|
2008-02-21 13:08:39 -08:00
|
|
|
|
2012-08-23 11:45:28 -07:00
|
|
|
if useServerLocations:
|
2010-03-12 13:53:37 -08:00
|
|
|
# We need to proxy every server but the primary one.
|
|
|
|
origins = ["'%s://%s:%s'" % (l.scheme, l.host, l.port)
|
|
|
|
for l in filter(lambda l: "primary" not in l.options, locations)]
|
|
|
|
origins = ", ".join(origins)
|
2008-02-21 13:08:39 -08:00
|
|
|
|
2010-03-12 13:53:37 -08:00
|
|
|
pacURL = """data:text/plain,
|
2008-02-21 13:08:39 -08:00
|
|
|
function FindProxyForURL(url, host)
|
|
|
|
{
|
2008-06-06 23:43:15 -07:00
|
|
|
var origins = [%(origins)s];
|
|
|
|
var regex = new RegExp('^([a-z][-a-z0-9+.]*)' +
|
|
|
|
'://' +
|
|
|
|
'(?:[^/@]*@)?' +
|
|
|
|
'(.*?)' +
|
|
|
|
'(?::(\\\\\\\\d+))?/');
|
2008-02-21 13:08:39 -08:00
|
|
|
var matches = regex.exec(url);
|
|
|
|
if (!matches)
|
|
|
|
return 'DIRECT';
|
2008-06-06 23:43:15 -07:00
|
|
|
var isHttp = matches[1] == 'http';
|
2008-09-05 06:35:58 -07:00
|
|
|
var isHttps = matches[1] == 'https';
|
2010-06-16 22:38:55 -07:00
|
|
|
var isWebSocket = matches[1] == 'ws';
|
2011-05-21 18:27:52 -07:00
|
|
|
var isWebSocketSSL = matches[1] == 'wss';
|
2008-06-06 23:43:15 -07:00
|
|
|
if (!matches[3])
|
2008-09-05 06:35:58 -07:00
|
|
|
{
|
2010-06-16 22:38:55 -07:00
|
|
|
if (isHttp | isWebSocket) matches[3] = '80';
|
2011-05-21 18:27:52 -07:00
|
|
|
if (isHttps | isWebSocketSSL) matches[3] = '443';
|
2008-09-05 06:35:58 -07:00
|
|
|
}
|
2010-06-16 22:38:55 -07:00
|
|
|
if (isWebSocket)
|
|
|
|
matches[1] = 'http';
|
2011-05-21 18:27:52 -07:00
|
|
|
if (isWebSocketSSL)
|
|
|
|
matches[1] = 'https';
|
2010-06-16 22:38:55 -07:00
|
|
|
|
2008-06-06 23:43:15 -07:00
|
|
|
var origin = matches[1] + '://' + matches[2] + ':' + matches[3];
|
|
|
|
if (origins.indexOf(origin) < 0)
|
|
|
|
return 'DIRECT';
|
|
|
|
if (isHttp)
|
2010-03-13 09:56:24 -08:00
|
|
|
return 'PROXY %(remote)s:%(httpport)s';
|
2011-05-21 18:27:52 -07:00
|
|
|
if (isHttps || isWebSocket || isWebSocketSSL)
|
2010-03-13 09:56:24 -08:00
|
|
|
return 'PROXY %(remote)s:%(sslport)s';
|
2008-02-21 13:08:39 -08:00
|
|
|
return 'DIRECT';
|
2010-03-13 09:56:24 -08:00
|
|
|
}""" % { "origins": origins,
|
|
|
|
"remote": self.webServer,
|
|
|
|
"httpport":self.httpPort,
|
2010-07-28 10:55:36 -07:00
|
|
|
"sslport": self.sslPort }
|
2010-03-12 13:53:37 -08:00
|
|
|
pacURL = "".join(pacURL.splitlines())
|
2008-02-21 13:08:39 -08:00
|
|
|
|
2010-03-12 13:53:37 -08:00
|
|
|
part += """
|
2008-02-21 13:08:39 -08:00
|
|
|
user_pref("network.proxy.type", 2);
|
|
|
|
user_pref("network.proxy.autoconfig_url", "%(pacURL)s");
|
2008-05-06 10:52:26 -07:00
|
|
|
|
|
|
|
user_pref("camino.use_system_proxy_settings", false); // Camino-only, harmless to others
|
2008-02-21 13:08:39 -08:00
|
|
|
""" % {"pacURL": pacURL}
|
2010-03-12 13:53:37 -08:00
|
|
|
prefs.append(part)
|
2012-09-25 12:51:59 -07:00
|
|
|
else:
|
|
|
|
part = 'user_pref("network.proxy.type", 0);\n'
|
|
|
|
prefs.append(part)
|
2009-10-14 10:55:25 -07:00
|
|
|
|
2010-01-15 09:22:54 -08:00
|
|
|
for v in extraPrefs:
|
2012-04-25 10:34:53 -07:00
|
|
|
thispref = v.split("=", 1)
|
2010-01-15 09:22:54 -08:00
|
|
|
if len(thispref) < 2:
|
|
|
|
print "Error: syntax error in --setpref=" + v
|
|
|
|
sys.exit(1)
|
|
|
|
part = 'user_pref("%s", %s);\n' % (thispref[0], thispref[1])
|
|
|
|
prefs.append(part)
|
|
|
|
|
|
|
|
# write the preferences
|
|
|
|
prefsFile = open(profileDir + "/" + "user.js", "a")
|
|
|
|
prefsFile.write("".join(prefs))
|
|
|
|
prefsFile.close()
|
|
|
|
|
2012-07-19 23:40:15 -07:00
|
|
|
apps = [
|
|
|
|
{
|
2012-07-20 02:14:25 -07:00
|
|
|
'name': 'http_example_org',
|
2012-10-19 03:43:17 -07:00
|
|
|
'csp': '',
|
2012-07-19 23:40:15 -07:00
|
|
|
'origin': 'http://example.org',
|
|
|
|
'manifestURL': 'http://example.org/manifest.webapp',
|
2012-09-25 16:28:17 -07:00
|
|
|
'description': 'http://example.org App',
|
|
|
|
'appStatus': _APP_STATUS_INSTALLED
|
2012-07-19 23:40:15 -07:00
|
|
|
},
|
|
|
|
{
|
2012-07-20 02:14:25 -07:00
|
|
|
'name': 'https_example_com',
|
2012-10-19 03:43:17 -07:00
|
|
|
'csp': '',
|
2012-07-19 23:40:15 -07:00
|
|
|
'origin': 'https://example.com',
|
|
|
|
'manifestURL': 'https://example.com/manifest.webapp',
|
2012-09-25 16:28:17 -07:00
|
|
|
'description': 'https://example.com App',
|
|
|
|
'appStatus': _APP_STATUS_INSTALLED
|
2012-07-19 23:40:15 -07:00
|
|
|
},
|
|
|
|
{
|
2012-07-20 02:14:25 -07:00
|
|
|
'name': 'http_test1_example_org',
|
2012-10-19 03:43:17 -07:00
|
|
|
'csp': '',
|
2012-07-19 23:40:15 -07:00
|
|
|
'origin': 'http://test1.example.org',
|
|
|
|
'manifestURL': 'http://test1.example.org/manifest.webapp',
|
2012-09-25 16:28:17 -07:00
|
|
|
'description': 'http://test1.example.org App',
|
|
|
|
'appStatus': _APP_STATUS_INSTALLED
|
2012-07-19 23:40:15 -07:00
|
|
|
},
|
|
|
|
{
|
2012-07-20 02:14:25 -07:00
|
|
|
'name': 'http_test1_example_org_8000',
|
2012-10-19 03:43:17 -07:00
|
|
|
'csp': '',
|
2012-07-19 23:40:15 -07:00
|
|
|
'origin': 'http://test1.example.org:8000',
|
|
|
|
'manifestURL': 'http://test1.example.org:8000/manifest.webapp',
|
2012-09-25 16:28:17 -07:00
|
|
|
'description': 'http://test1.example.org:8000 App',
|
|
|
|
'appStatus': _APP_STATUS_INSTALLED
|
2012-07-19 23:40:15 -07:00
|
|
|
},
|
|
|
|
{
|
2012-07-20 02:14:25 -07:00
|
|
|
'name': 'http_sub1_test1_example_org',
|
2012-10-19 03:43:17 -07:00
|
|
|
'csp': '',
|
2012-07-19 23:40:15 -07:00
|
|
|
'origin': 'http://sub1.test1.example.org',
|
|
|
|
'manifestURL': 'http://sub1.test1.example.org/manifest.webapp',
|
2012-09-25 16:28:17 -07:00
|
|
|
'description': 'http://sub1.test1.example.org App',
|
|
|
|
'appStatus': _APP_STATUS_INSTALLED
|
|
|
|
},
|
|
|
|
{
|
|
|
|
'name': 'https_example_com_privileged',
|
2012-10-19 03:43:17 -07:00
|
|
|
'csp': '',
|
2012-09-25 16:28:17 -07:00
|
|
|
'origin': 'https://example.com',
|
|
|
|
'manifestURL': 'https://example.com/manifest_priv.webapp',
|
|
|
|
'description': 'https://example.com Privileged App',
|
|
|
|
'appStatus': _APP_STATUS_PRIVILEGED
|
|
|
|
},
|
|
|
|
{
|
|
|
|
'name': 'https_example_com_certified',
|
2012-10-19 03:43:17 -07:00
|
|
|
'csp': '',
|
2012-09-25 16:28:17 -07:00
|
|
|
'origin': 'https://example.com',
|
|
|
|
'manifestURL': 'https://example.com/manifest_cert.webapp',
|
|
|
|
'description': 'https://example.com Certified App',
|
|
|
|
'appStatus': _APP_STATUS_CERTIFIED
|
2012-07-19 23:40:15 -07:00
|
|
|
},
|
2012-10-19 03:43:17 -07:00
|
|
|
{
|
|
|
|
'name': 'https_example_csp_certified',
|
|
|
|
'csp': "default-src *; script-src 'self'; object-src 'none'; style-src 'self' 'unsafe-inline'",
|
|
|
|
'origin': 'https://example.com',
|
|
|
|
'manifestURL': 'https://example.com/manifest_csp_cert.webapp',
|
|
|
|
'description': 'https://example.com Certified App with manifest policy',
|
|
|
|
'appStatus': _APP_STATUS_CERTIFIED
|
2013-01-04 10:41:34 -08:00
|
|
|
},
|
2012-10-19 03:43:17 -07:00
|
|
|
{
|
|
|
|
'name': 'https_example_csp_installed',
|
|
|
|
'csp': "default-src *; script-src 'self'; object-src 'none'; style-src 'self' 'unsafe-inline'",
|
|
|
|
'origin': 'https://example.com',
|
|
|
|
'manifestURL': 'https://example.com/manifest_csp_inst.webapp',
|
|
|
|
'description': 'https://example.com Installed App with manifest policy',
|
|
|
|
'appStatus': _APP_STATUS_INSTALLED
|
2013-01-04 10:41:34 -08:00
|
|
|
},
|
2012-10-19 03:43:17 -07:00
|
|
|
{
|
|
|
|
'name': 'https_example_csp_privileged',
|
|
|
|
'csp': "default-src *; script-src 'self'; object-src 'none'; style-src 'self' 'unsafe-inline'",
|
|
|
|
'origin': 'https://example.com',
|
|
|
|
'manifestURL': 'https://example.com/manifest_csp_priv.webapp',
|
|
|
|
'description': 'https://example.com Privileged App with manifest policy',
|
|
|
|
'appStatus': _APP_STATUS_PRIVILEGED
|
2013-01-04 10:41:34 -08:00
|
|
|
},
|
2012-12-17 08:38:40 -08:00
|
|
|
{
|
|
|
|
'name': 'https_a_domain_certified',
|
2013-01-04 10:41:34 -08:00
|
|
|
'csp': "",
|
2012-12-17 08:38:40 -08:00
|
|
|
'origin': 'https://acertified.com',
|
|
|
|
'manifestURL': 'https://acertified.com/manifest.webapp',
|
|
|
|
'description': 'https://acertified.com Certified App',
|
|
|
|
'appStatus': _APP_STATUS_CERTIFIED
|
2013-01-04 10:41:34 -08:00
|
|
|
},
|
2012-12-17 08:38:40 -08:00
|
|
|
{
|
|
|
|
'name': 'https_a_domain_privileged',
|
|
|
|
'csp': "",
|
|
|
|
'origin': 'https://aprivileged.com',
|
|
|
|
'manifestURL': 'https://aprivileged.com/manifest.webapp',
|
|
|
|
'description': 'https://aprivileged.com Privileged App ',
|
|
|
|
'appStatus': _APP_STATUS_PRIVILEGED
|
2013-01-04 10:41:34 -08:00
|
|
|
},
|
2012-07-19 23:40:15 -07:00
|
|
|
];
|
|
|
|
self.setupTestApps(profileDir, apps)
|
|
|
|
|
2010-01-15 09:22:54 -08:00
|
|
|
def addCommonOptions(self, parser):
  """Adds command-line options which are common to mochitest and reftest."""
  parser.add_option(
    "--setpref",
    action="append",
    type="string",
    default=[],
    dest="extraPrefs",
    metavar="PREF=VALUE",
    help="defines an extra user preference")
|
|
|
|
|
|
|
|
def fillCertificateDB(self, profileDir, certPath, utilityPath, xrePath):
  """
  Create and populate the NSS certificate database for |profileDir| and
  write the ssltunnel configuration file used to serve https:// locations.

  profileDir  -- profile directory; receives the cert DB and ssltunnel.cfg
  certPath    -- directory containing *.ca and *.client certificate files
  utilityPath -- directory containing the certutil/pk12util binaries
  xrePath     -- XRE directory used to build the subprocess environment
  Returns 0 on success, or certutil's non-zero exit status on failure.
  """
  # certutil reads the DB password from a file; use an empty password.
  pwfilePath = os.path.join(profileDir, ".crtdbpw")

  pwfile = open(pwfilePath, "w")
  pwfile.write("\n")
  pwfile.close()

  # Create head of the ssltunnel configuration file
  sslTunnelConfigPath = os.path.join(profileDir, "ssltunnel.cfg")
  sslTunnelConfig = open(sslTunnelConfigPath, "w")

  sslTunnelConfig.write("httpproxy:1\n")
  sslTunnelConfig.write("certdbdir:%s\n" % certPath)
  sslTunnelConfig.write("forward:127.0.0.1:%s\n" % self.httpPort)
  sslTunnelConfig.write("websocketserver:%s:%s\n" % (self.webServer, self.webSocketPort))
  sslTunnelConfig.write("listen:*:%s:pgo server certificate\n" % self.sslPort)

  # Configure automatic certificate and bind custom certificates, client authentication
  locations = self.readLocations()
  # Skip the first location; presumably the primary server entry needs no
  # tunnel line of its own -- TODO confirm against readLocations().
  locations.pop(0)
  for loc in locations:
    if loc.scheme == "https" and "nocert" not in loc.options:
      # Per-location options of the forms cert=<nickname>,
      # clientauth=<mode> and redir=<host>.
      customCertRE = re.compile("^cert=(?P<nickname>[0-9a-zA-Z_ ]+)")
      clientAuthRE = re.compile("^clientauth=(?P<clientauth>[a-z]+)")
      redirRE      = re.compile("^redir=(?P<redirhost>[0-9a-zA-Z_ .]+)")
      for option in loc.options:
        match = customCertRE.match(option)
        if match:
          customcert = match.group("nickname");
          sslTunnelConfig.write("listen:%s:%s:%s:%s\n" %
                    (loc.host, loc.port, self.sslPort, customcert))

        match = clientAuthRE.match(option)
        if match:
          clientauth = match.group("clientauth");
          sslTunnelConfig.write("clientauth:%s:%s:%s:%s\n" %
                    (loc.host, loc.port, self.sslPort, clientauth))

        match = redirRE.match(option)
        if match:
          redirhost = match.group("redirhost")
          sslTunnelConfig.write("redirhost:%s:%s:%s:%s\n" %
                    (loc.host, loc.port, self.sslPort, redirhost))

  sslTunnelConfig.close()

  # Pre-create the certification database for the profile
  env = self.environment(xrePath = xrePath)
  certutil = os.path.join(utilityPath, "certutil" + self.BIN_SUFFIX)
  pk12util = os.path.join(utilityPath, "pk12util" + self.BIN_SUFFIX)

  # certutil -N creates a fresh DB; failure here aborts the whole setup.
  status = self.Process([certutil, "-N", "-d", profileDir, "-f", pwfilePath], env = env).wait()
  if status != 0:
    return status

  # Walk the cert directory and add custom CAs and client certs
  files = os.listdir(certPath)
  for item in files:
    root, ext = os.path.splitext(item)
    if ext == ".ca":
      # Trust bits: CT,, trusts the CA for SSL; a "-object" suffix also
      # requests object-signing trust (CT,,CT).
      trustBits = "CT,,"
      if root.endswith("-object"):
        trustBits = "CT,,CT"
      # NOTE(review): exit statuses of these two helpers are ignored;
      # a bad certificate file fails silently -- confirm intended.
      self.Process([certutil, "-A", "-i", os.path.join(certPath, item),
                  "-d", profileDir, "-f", pwfilePath, "-n", root, "-t", trustBits],
                  env = env).wait()
    if ext == ".client":
      self.Process([pk12util, "-i", os.path.join(certPath, item), "-w",
                  pwfilePath, "-d", profileDir],
                  env = env).wait()

  # Remove the temporary password file now that the DB is built.
  os.unlink(pwfilePath)
  return 0
|
|
|
|
|
|
|
|
def environment(self, env = None, xrePath = None, crashreporter = True):
  """
  Return a copy of |env| (default: a copy of os.environ) prepared for
  launching the application.

  env           -- base environment dict; never mutated when None (a copy
                   of os.environ is used), but mutated in place otherwise
                   (callers pass throwaway dicts)
  xrePath       -- XRE directory added to the library search path;
                   defaults to self.DIST_BIN
  crashreporter -- when True, enable the crash reporter in no-report mode;
                   when False, disable it entirely
  Returns the prepared environment dict.
  """
  if xrePath is None:
    xrePath = self.DIST_BIN
  if env is None:
    env = dict(os.environ)

  ldLibraryPath = os.path.abspath(os.path.join(SCRIPT_DIR, xrePath))
  if self.UNIXISH or self.IS_MAC:
    envVar = "LD_LIBRARY_PATH"
    if self.IS_MAC:
      envVar = "DYLD_LIBRARY_PATH"
    else: # unixish
      env['MOZILLA_FIVE_HOME'] = xrePath
    if envVar in env:
      # Prepend our directory but preserve any existing search path.
      ldLibraryPath = ldLibraryPath + ":" + env[envVar]
    env[envVar] = ldLibraryPath
  elif self.IS_WIN32:
    env["PATH"] = env["PATH"] + ";" + ldLibraryPath

  if crashreporter:
    env['MOZ_CRASHREPORTER_NO_REPORT'] = '1'
    env['MOZ_CRASHREPORTER'] = '1'
  else:
    env['MOZ_CRASHREPORTER_DISABLE'] = '1'

  # Suppress platform crash dialogs so a crashing app cannot hang automation.
  env['GNOME_DISABLE_CRASH_DIALOG'] = '1'
  env['XRE_NO_WINDOWS_CRASH_DIALOG'] = '1'
  env['NS_TRACE_MALLOC_DISABLE_STACKS'] = '1'

  # ASan specific environment stuff
  if self.IS_ASAN and (self.IS_LINUX or self.IS_MAC):
    try:
      # Second line, second column of `free` output: total memory in KB.
      # NOTE(review): `free` is Linux-only; on Mac this path ends up in
      # the except clauses below -- confirm that is intended.
      totalMemory = int(os.popen("free").readlines()[1].split()[1])

      # Only 2 GB RAM or less available? Use custom ASan options to reduce
      # the amount of resources required to do the tests. Standard options
      # will otherwise lead to OOM conditions on the current test slaves.
      if totalMemory <= 1024 * 1024 * 2:
        self.log.info("INFO | automation.py | ASan running in low-memory configuration")
        env["ASAN_OPTIONS"] = "quarantine_size=50331648:redzone=64"
    except OSError as err:
      self.log.info("Failed determine available memory, disabling ASan low-memory configuration: %s", err.strerror)
    except Exception:
      # Was a bare `except:`; narrowed so KeyboardInterrupt/SystemExit
      # still propagate while keeping the best-effort behavior.
      self.log.info("Failed determine available memory, disabling ASan low-memory configuration")

  return env
|
|
|
|
|
|
|
|
if IS_WIN32:
  # Win32 flavor of the process helpers: bind the two kernel32 entry points
  # used by readWithTimeout once, as attributes (referenced below via
  # self.PeekNamedPipe / self.GetLastError).
  PeekNamedPipe = ctypes.windll.kernel32.PeekNamedPipe
  GetLastError = ctypes.windll.kernel32.GetLastError
|
|
|
def readWithTimeout(self, f, timeout):
  """Try to read a line of output from the file object |f|.
  |f| must be a pipe, like the |stdout| member of a subprocess.Popen
  object created with stdout=PIPE. If no output
  is received within |timeout| seconds, return a blank line.
  Returns a tuple (line, did_timeout), where |did_timeout| is True
  if the read timed out, and False otherwise."""
  if timeout is None:
    # shortcut to allow callers to pass in "None" for no timeout.
    return (f.readline(), False)
  # Poll the underlying OS pipe handle so we never block in readline().
  x = msvcrt.get_osfhandle(f.fileno())
  l = ctypes.c_long()
  done = time.time() + timeout
  while time.time() < done:
    # PeekNamedPipe reports how many bytes are available without consuming
    # them; a zero return value means the call itself failed.
    if self.PeekNamedPipe(x, None, 0, None, ctypes.byref(l), None) == 0:
      err = self.GetLastError()
      if err == 38 or err == 109: # ERROR_HANDLE_EOF || ERROR_BROKEN_PIPE
        return ('', False)
      else:
        # NOTE(review): uses module-level |log|, not self.log -- confirm
        # a module logger named `log` exists, else this raises NameError.
        log.error("readWithTimeout got error: %d", err)
    if l.value > 0:
      # we're assuming that the output is line-buffered,
      # which is not unreasonable
      return (f.readline(), False)
    time.sleep(0.01)
  return ('', True)
|
2010-01-13 13:53:26 -08:00
|
|
|
|
2010-01-15 09:22:54 -08:00
|
|
|
def isPidAlive(self, pid):
  """Return True if process |pid| is still running (Win32 implementation)."""
  # GetExitCodeProcess reports STILL_ACTIVE (259) for a running process.
  STILL_ACTIVE = 259
  PROCESS_QUERY_LIMITED_INFORMATION = 0x1000
  pHandle = ctypes.windll.kernel32.OpenProcess(PROCESS_QUERY_LIMITED_INFORMATION, 0, pid)
  if not pHandle:
    # OpenProcess failed: the process is gone (or inaccessible -- either
    # way we treat it as not alive).
    return False
  pExitCode = ctypes.wintypes.DWORD()
  ctypes.windll.kernel32.GetExitCodeProcess(pHandle, ctypes.byref(pExitCode))
  ctypes.windll.kernel32.CloseHandle(pHandle)
  return pExitCode.value == STILL_ACTIVE
|
2010-01-15 09:22:54 -08:00
|
|
|
|
|
|
|
def killPid(self, pid):
  """Forcibly terminate process |pid| (Win32 implementation)."""
  PROCESS_TERMINATE = 0x0001
  pHandle = ctypes.windll.kernel32.OpenProcess(PROCESS_TERMINATE, 0, pid)
  if not pHandle:
    # Can't open the process -- most likely already gone; nothing to do.
    return
  success = ctypes.windll.kernel32.TerminateProcess(pHandle, 1)
  # NOTE(review): |success| is ignored, so a failed TerminateProcess goes
  # unreported -- confirm this is intentional.
  ctypes.windll.kernel32.CloseHandle(pHandle)
|
2010-01-06 14:03:29 -08:00
|
|
|
|
2008-02-21 13:08:39 -08:00
|
|
|
else:
|
2010-01-15 09:22:54 -08:00
|
|
|
|
|
|
|
def readWithTimeout(self, f, timeout):
  """Try to read a line of output from the file object |f|. If no output
  is received within |timeout| seconds, return a blank line.
  Returns a tuple (line, did_timeout), where |did_timeout| is True
  if the read timed out, and False otherwise."""
  # Wait for the descriptor to become readable rather than blocking
  # indefinitely inside readline().
  readable, _, _ = select.select([f], [], [], timeout)
  if readable:
    return (f.readline(), False)
  return ('', True)
|
|
|
|
|
|
|
|
def isPidAlive(self, pid):
  """Return True if |pid| names a live, not-yet-reaped process."""
  try:
    # kill(pid, 0) checks for a valid PID without actually sending a signal
    # The method throws OSError if the PID is invalid, which we catch below.
    os.kill(pid, 0)

    # Wait on it to see if it's a zombie. This can throw OSError.ECHILD if
    # the process terminates before we get to this point.
    reapedPid, _ = os.waitpid(pid, os.WNOHANG)
  except OSError as err:
    # Catch the errors we might expect from os.kill/os.waitpid,
    # and re-raise any others
    if err.errno in (errno.ESRCH, errno.ECHILD):
      return False
    raise
  # waitpid returns 0 while the child is still running.
  return reapedPid == 0
|
|
|
|
|
|
|
|
def killPid(self, pid):
  """Forcibly terminate process |pid| with an un-catchable SIGKILL."""
  os.kill(pid, signal.SIGKILL)
|
|
|
|
|
2011-11-02 07:56:35 -07:00
|
|
|
def dumpScreen(self, utilityPath):
  """
  Capture a full-screen screenshot with the platform's capture tool and log
  it as a base64 "data:image/png" URI.  Sets |haveDumpedScreen| so callers
  can avoid taking more than one screenshot per run.
  """
  self.haveDumpedScreen = True

  # Need to figure out what tool and whether it write to a file or stdout
  if self.UNIXISH:
    utility = [os.path.join(utilityPath, "screentopng")]
    imgoutput = 'stdout'
  elif self.IS_MAC:
    utility = ['/usr/sbin/screencapture', '-C', '-x', '-t', 'png']
    imgoutput = 'file'
  elif self.IS_WIN32:
    utility = [os.path.join(utilityPath, "screenshot.exe")]
    imgoutput = 'file'
  else:
    # Previously fell through with |utility| unbound and crashed with a
    # NameError; bail out explicitly on unsupported platforms instead.
    self.log.info("Screenshots are not supported on this platform")
    return

  # Run the capture correctly for the type of capture
  try:
    if imgoutput == 'file':
      # NOTE(review): the temp file is never deleted -- presumably kept
      # around deliberately for post-mortem inspection; confirm.
      tmpfd, imgfilename = tempfile.mkstemp(prefix='mozilla-test-fail_')
      os.close(tmpfd)
      dumper = self.Process(utility + [imgfilename])
    elif imgoutput == 'stdout':
      dumper = self.Process(utility, bufsize=-1,
                            stdout=subprocess.PIPE, close_fds=True)
  except OSError as err:
    self.log.info("Failed to start %s for screenshot: %s",
                  utility[0], err.strerror)
    return

  # Check whether the capture utility ran successfully
  dumper_out, dumper_err = dumper.communicate()
  if dumper.returncode != 0:
    self.log.info("%s exited with code %d", utility, dumper.returncode)
    return

  try:
    if imgoutput == 'stdout':
      image = dumper_out
    elif imgoutput == 'file':
      with open(imgfilename, 'rb') as imgfile:
        image = imgfile.read()
  except IOError as err:
    # Previously execution fell through and hit a NameError on the unbound
    # |image| below; return instead -- there is nothing to log.
    self.log.info("Failed to read image from %s", imgoutput)
    return

  import base64
  encoded = base64.b64encode(image)
  self.log.info("SCREENSHOT: data:image/png;base64,%s", encoded)
|
2010-08-22 18:27:26 -07:00
|
|
|
|
2010-03-20 23:08:49 -07:00
|
|
|
def killAndGetStack(self, proc, utilityPath, debuggerInfo):
  """Kill the process, preferrably in a way that gets us a stack trace.

  proc         -- the process object to kill
  utilityPath  -- directory holding helper binaries (crashinject.exe etc.)
  debuggerInfo -- truthy when a debugger is attached; suppresses both the
                  screenshot and the crash-triggering kill paths
  """
  # At most one screenshot per run; later failures just reference it.
  if not debuggerInfo:
    if self.haveDumpedScreen:
      self.log.info("Not taking screenshot here: see the one that was previously logged")
    else:
      self.dumpScreen(utilityPath)

  if self.CRASHREPORTER and not debuggerInfo:
    if self.UNIXISH:
      # ABRT will get picked up by Breakpad's signal handler
      os.kill(proc.pid, signal.SIGABRT)
      return
    elif self.IS_WIN32:
      # We should have a "crashinject" program in our utility path
      crashinject = os.path.normpath(os.path.join(utilityPath, "crashinject.exe"))
      if os.path.exists(crashinject) and subprocess.Popen([crashinject, str(proc.pid)]).wait() == 0:
        return
    #TODO: kill the process such that it triggers Breakpad on OS X (bug 525296)
  # Fallback: plain kill, no stack trace.
  self.log.info("Can't trigger Breakpad, just killing process")
  proc.kill()
|
|
|
|
|
2012-08-03 03:36:59 -07:00
|
|
|
def waitForFinish(self, proc, utilityPath, timeout, maxTime, startTime, debuggerInfo, symbolsPath):
  """ Look for timeout or crashes and return the status after the process terminates """
  # At most one of these two stack-fixing mechanisms is ever active:
  # a per-line Python function, or an external filter process.
  stackFixerProcess = None
  stackFixerFunction = None
  didTimeout = False   # no output for |timeout| seconds
  hitMaxTime = False   # total runtime exceeded |maxTime| seconds
  if proc.stdout is None:
    self.log.info("TEST-INFO: Not logging stdout or stderr due to debugger connection")
  else:
    logsource = proc.stdout

    if self.IS_DEBUG_BUILD and (self.IS_MAC or self.IS_LINUX) and symbolsPath and os.path.exists(symbolsPath):
      # Run each line through a function in fix_stack_using_bpsyms.py (uses breakpad symbol files)
      # This method is preferred for Tinderbox builds, since native symbols may have been stripped.
      sys.path.insert(0, utilityPath)
      import fix_stack_using_bpsyms as stackFixerModule
      stackFixerFunction = lambda line: stackFixerModule.fixSymbols(line, symbolsPath)
      del sys.path[0]
    elif self.IS_DEBUG_BUILD and self.IS_MAC and False:
      # Run each line through a function in fix_macosx_stack.py (uses atos)
      # NOTE(review): "and False" makes this branch dead code -- confirm it
      # is intentionally disabled.
      sys.path.insert(0, utilityPath)
      import fix_macosx_stack as stackFixerModule
      stackFixerFunction = lambda line: stackFixerModule.fixSymbols(line)
      del sys.path[0]
    elif self.IS_DEBUG_BUILD and self.IS_LINUX:
      # Run logsource through fix-linux-stack.pl (uses addr2line)
      # This method is preferred for developer machines, so we don't have to run "make buildsymbols".
      stackFixerProcess = self.Process([self.PERL, os.path.join(utilityPath, "fix-linux-stack.pl")],
                                       stdin=logsource,
                                       stdout=subprocess.PIPE)
      logsource = stackFixerProcess.stdout

    # Pump output one line at a time until EOF or a no-output timeout.
    (line, didTimeout) = self.readWithTimeout(logsource, timeout)
    while line != "" and not didTimeout:
      if stackFixerFunction:
        line = stackFixerFunction(line)
      # Decode with "ignore" so malformed bytes cannot break the logger.
      self.log.info(line.rstrip().decode("UTF-8", "ignore"))
      # Track the current test so later failures can be attributed to it.
      if "TEST-START" in line and "|" in line:
        self.lastTestSeen = line.split("|")[1].strip()
      if not debuggerInfo and "TEST-UNEXPECTED-FAIL" in line and "Test timed out" in line:
        if self.haveDumpedScreen:
          self.log.info("Not taking screenshot here: see the one that was previously logged")
        else:
          self.dumpScreen(utilityPath)

      (line, didTimeout) = self.readWithTimeout(logsource, timeout)
      if not hitMaxTime and maxTime and datetime.now() - startTime > timedelta(seconds = maxTime):
        # Kill the application, but continue reading from stack fixer so as not to deadlock on stackFixerProcess.wait().
        hitMaxTime = True
        self.log.info("TEST-UNEXPECTED-FAIL | %s | application ran for longer than allowed maximum time of %d seconds", self.lastTestSeen, int(maxTime))
        self.killAndGetStack(proc, utilityPath, debuggerInfo)
    if didTimeout:
      self.log.info("TEST-UNEXPECTED-FAIL | %s | application timed out after %d seconds with no output", self.lastTestSeen, int(timeout))
      self.killAndGetStack(proc, utilityPath, debuggerInfo)

  status = proc.wait()
  if status == 0:
    self.lastTestSeen = "Main app process exited normally"
  if status != 0 and not didTimeout and not hitMaxTime:
    self.log.info("TEST-UNEXPECTED-FAIL | %s | Exited with code %d during test run", self.lastTestSeen, status)
  if stackFixerProcess is not None:
    fixerStatus = stackFixerProcess.wait()
    if fixerStatus != 0 and not didTimeout and not hitMaxTime:
      self.log.info("TEST-UNEXPECTED-FAIL | automation.py | Stack fixer process exited with code %d during test run", fixerStatus)
  return status
|
2010-01-15 09:22:54 -08:00
|
|
|
|
2010-01-19 11:45:04 -08:00
|
|
|
def buildCommandLine(self, app, debuggerInfo, profileDir, testURL, extraArgs):
  """ build the application command line """

  command = os.path.abspath(app)
  if self.IS_MAC and not self.IS_CAMINO and os.path.exists(command + "-bin"):
    # Prefer 'app-bin' in case 'app' is a shell script.
    # We can remove this hack once bug 673899 etc are fixed.
    command = command + "-bin"

  arguments = []

  # Under a debugger, the debugger becomes the executable and the
  # application is demoted to one of its arguments.
  if debuggerInfo:
    arguments.extend(debuggerInfo["args"])
    arguments.append(command)
    command = os.path.abspath(debuggerInfo["path"])

  if self.IS_MAC:
    arguments.append("-foreground")

  if self.IS_CYGWIN:
    profileDirectory = commands.getoutput("cygpath -w \"" + profileDir + "/\"")
  else:
    profileDirectory = profileDir + "/"

  arguments.extend(("-no-remote", "-profile", profileDirectory))
  if testURL is not None:
    if self.IS_CAMINO:
      arguments.extend(("-url", testURL))
    else:
      arguments.append(testURL)
  arguments.extend(extraArgs)
  return command, arguments
|
|
|
|
|
|
|
|
def checkForZombies(self, processLog):
  """ Look for hung processes """
  # A missing PID log is itself treated as a failure.
  if not os.path.exists(processLog):
    self.log.info('Automation Error: PID log not found: %s', processLog)
    # Whilst no hung process was found, the run should still display as a failure
    return True

  self.log.info('INFO | automation.py | Reading PID log: %s', processLog)
  pidPattern = re.compile(r'launched child process (\d+)$')
  childPids = []
  logFile = open(processLog)
  try:
    for logLine in logFile:
      self.log.info(logLine.rstrip())
      pidMatch = pidPattern.search(logLine)
      if pidMatch:
        childPids.append(int(pidMatch.group(1)))
  finally:
    logFile.close()

  # Any child still alive after shutdown is a zombie: report and kill it.
  foundZombie = False
  for childPid in childPids:
    self.log.info("INFO | automation.py | Checking for orphan process with PID: %d", childPid)
    if self.isPidAlive(childPid):
      foundZombie = True
      self.log.info("TEST-UNEXPECTED-FAIL | automation.py | child process %d still alive after shutdown", childPid)
      self.killPid(childPid)
  return foundZombie
|
2010-01-15 09:22:54 -08:00
|
|
|
|
2011-09-21 07:27:16 -07:00
|
|
|
def checkForCrashes(self, profileDir, symbolsPath):
  """
  Scan <profileDir>/minidumps for crash dumps via
  automationutils.checkForCrashes, attributing any crash to the last test
  seen.  Returns that helper's result (truthy when a crash was found --
  see the status handling in runApp).
  """
  return automationutils.checkForCrashes(os.path.join(profileDir, "minidumps"), symbolsPath, self.lastTestSeen)
|
2011-09-21 07:27:16 -07:00
|
|
|
|
2010-01-19 11:45:04 -08:00
|
|
|
def runApp(self, testURL, env, app, profileDir, extraArgs,
           runSSLTunnel = False, utilityPath = None,
           xrePath = None, certPath = None,
           debuggerInfo = None, symbolsPath = None,
           timeout = -1, maxTime = None, onLaunch = None):
  """
  Run the app, log the duration it took to execute, return the status code.
  Kills the app if it runs for longer than |maxTime| seconds, or outputs nothing for |timeout| seconds.

  testURL      -- URL to open, or None
  env          -- base environment for the child (copied, never mutated)
  app          -- path to the application binary
  profileDir   -- profile directory to run against
  extraArgs    -- extra command-line arguments appended after the URL
  runSSLTunnel -- when True (and a test build), set up certs + ssltunnel
  onLaunch     -- optional callback fired right after the app is launched
  Returns the app's exit status; forced to 1 on crash or zombie children.
  """

  if utilityPath == None:
    utilityPath = self.DIST_BIN
  if xrePath == None:
    xrePath = self.DIST_BIN
  if certPath == None:
    certPath = self.CERTS_SRC_DIR
  if timeout == -1:
    timeout = self.DEFAULT_TIMEOUT

  # copy env so we don't munge the caller's environment
  env = dict(env);
  env["NO_EM_RESTART"] = "1"
  # The app writes launched-child PIDs here; scanned by checkForZombies.
  tmpfd, processLog = tempfile.mkstemp(suffix='pidlog')
  os.close(tmpfd)
  env["MOZ_PROCESS_LOG"] = processLog

  if self.IS_TEST_BUILD and runSSLTunnel:
    # create certificate database for the profile
    certificateStatus = self.fillCertificateDB(profileDir, certPath, utilityPath, xrePath)
    if certificateStatus != 0:
      self.log.info("TEST-UNEXPECTED-FAIL | automation.py | Certificate integration failed")
      return certificateStatus

    # start ssltunnel to provide https:// URLs capability
    ssltunnel = os.path.join(utilityPath, "ssltunnel" + self.BIN_SUFFIX)
    ssltunnelProcess = self.Process([ssltunnel,
                                     os.path.join(profileDir, "ssltunnel.cfg")],
                                     env = self.environment(xrePath = xrePath))
    self.log.info("INFO | automation.py | SSL tunnel pid: %d", ssltunnelProcess.pid)

  cmd, args = self.buildCommandLine(app, debuggerInfo, profileDir, testURL, extraArgs)
  startTime = datetime.now()

  if debuggerInfo and debuggerInfo["interactive"]:
    # If an interactive debugger is attached, don't redirect output,
    # don't use timeouts, and don't capture ctrl-c.
    timeout = None
    maxTime = None
    outputPipe = None
    signal.signal(signal.SIGINT, lambda sigid, frame: None)
  else:
    outputPipe = subprocess.PIPE

  self.lastTestSeen = "automation.py"
  # stderr is folded into stdout so waitForFinish sees a single stream.
  proc = self.Process([cmd] + args,
               env = self.environment(env, xrePath = xrePath,
                                 crashreporter = not debuggerInfo),
               stdout = outputPipe,
               stderr = subprocess.STDOUT)
  self.log.info("INFO | automation.py | Application pid: %d", proc.pid)

  if onLaunch is not None:
    # Allow callers to specify an onLaunch callback to be fired after the
    # app is launched.
    onLaunch()

  status = self.waitForFinish(proc, utilityPath, timeout, maxTime, startTime, debuggerInfo, symbolsPath)
  self.log.info("INFO | automation.py | Application ran for: %s", str(datetime.now() - startTime))

  # Do a final check for zombie child processes.
  zombieProcesses = self.checkForZombies(processLog)

  crashed = self.checkForCrashes(profileDir, symbolsPath)

  # Crashes and zombies override a clean exit status.
  if crashed or zombieProcesses:
    status = 1

  if os.path.exists(processLog):
    os.unlink(processLog)

  if self.IS_TEST_BUILD and runSSLTunnel:
    ssltunnelProcess.kill()

  return status
|
2010-06-24 02:32:01 -07:00
|
|
|
|
2011-10-14 08:45:58 -07:00
|
|
|
def getExtensionIDFromRDF(self, rdfSource):
  """
  Retrieves the extension id from an install.rdf file (or string).

  rdfSource -- either an open file-like object containing install.rdf XML,
               or a string holding the XML itself.
  Returns the text of the first top-level <Description>/<em:id> element,
  or None if no such element exists.
  """
  from xml.dom.minidom import parse, parseString, Node

  # Duck-type on .read() so any file-like source works, not just the exact
  # builtin `file` type (e.g. codecs readers, StringIO).
  if hasattr(rdfSource, "read"):
    document = parse(rdfSource)
  else:
    document = parseString(rdfSource)

  # Find the <em:id> element. There can be multiple <em:id> tags
  # within <em:targetApplication> tags, so we have to check this way.
  for rdfChild in document.documentElement.childNodes:
    if rdfChild.nodeType == Node.ELEMENT_NODE and rdfChild.tagName == "Description":
      for descChild in rdfChild.childNodes:
        if descChild.nodeType == Node.ELEMENT_NODE and descChild.tagName == "em:id":
          return descChild.childNodes[0].data

  return None
|
|
|
|
|
2011-04-14 04:03:47 -07:00
|
|
|
def installExtension(self, extensionSource, profileDir, extensionID = None):
  """
  Copies an extension into the extensions directory of the given profile.
  extensionSource - the source location of the extension files. This can be either
                    a directory or a path to an xpi file.
  profileDir - the profile directory we are copying into. We will create the
               "extensions" directory there if it doesn't exist.
  extensionID - the id of the extension to be used as the containing directory for the
                extension, if extensionSource is a directory, i.e.
                this is the name of the folder in the <profileDir>/extensions/<extensionID>
  """
  if not os.path.isdir(profileDir):
    self.log.info("INFO | automation.py | Cannot install extension, invalid profileDir at: %s", profileDir)
    return

  installRDFFilename = "install.rdf"

  # Extensions are dropped into extensions/staged for the app to pick up.
  extensionsRootDir = os.path.join(profileDir, "extensions", "staged")
  if not os.path.isdir(extensionsRootDir):
    os.makedirs(extensionsRootDir)

  if os.path.isfile(extensionSource):
    # An xpi file: unpack it with the project's zip helper.
    reader = automationutils.ZipFileReader(extensionSource)

    for filename in reader.namelist():
      # Sanity check the zip file.
      if os.path.isabs(filename):
        self.log.info("INFO | automation.py | Cannot install extension, bad files in xpi")
        return

      # We may need to dig the extensionID out of the zip file...
      if extensionID is None and filename == installRDFFilename:
        extensionID = self.getExtensionIDFromRDF(reader.read(filename))

    # We must know the extensionID now.
    if extensionID is None:
      self.log.info("INFO | automation.py | Cannot install extension, missing extensionID")
      return

    # Make the extension directory.
    extensionDir = os.path.join(extensionsRootDir, extensionID)
    os.mkdir(extensionDir)

    # Extract all files.
    reader.extractall(extensionDir)

  elif os.path.isdir(extensionSource):
    # An unpacked extension directory: read the id from install.rdf if the
    # caller didn't supply one.
    if extensionID is None:
      filename = os.path.join(extensionSource, installRDFFilename)
      if os.path.isfile(filename):
        with open(filename, "r") as installRDF:
          extensionID = self.getExtensionIDFromRDF(installRDF)

      if extensionID is None:
        self.log.info("INFO | automation.py | Cannot install extension, missing extensionID")
        return

    # Copy extension tree into its own directory.
    # "destination directory must not already exist".
    shutil.copytree(extensionSource, os.path.join(extensionsRootDir, extensionID))

  else:
    self.log.info("INFO | automation.py | Cannot install extension, invalid extensionSource at: %s", extensionSource)
|
2013-01-03 17:37:26 -08:00
|
|
|
|
|
|
|
def elf_arm(self, filename):
  """
  Return True if |filename| is an ELF binary whose e_machine field is
  EM_ARM (40), i.e. an ARM executable/library.
  """
  import struct
  # Only the first 20 bytes are needed: the 16-byte e_ident, e_type, and
  # e_machine.  Use `with` so the handle is closed (was previously leaked).
  with open(filename, 'rb') as f:
    data = f.read(20)
  # Too short to hold an ELF header, or wrong magic: not an ARM ELF.
  if len(data) < 20 or data[:4] != b"\x7fELF":
    return False
  # e_machine is a little-endian uint16 at offset 18; 40 == EM_ARM.
  return struct.unpack_from("<H", data, 18)[0] == 40
|
|
|
|
|