#
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Mozilla Foundation.
# Portions created by the Initial Developer are Copyright (C) 2008
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Robert Sayre
#   Jeff Walden
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****

from __future__ import with_statement
import codecs
from datetime import datetime, timedelta
import itertools
import logging
import os
import re
import select
import shutil
import signal
import subprocess
import sys
import threading
import tempfile
import sqlite3

SCRIPT_DIR = os.path.abspath(os.path.realpath(os.path.dirname(sys.argv[0])))
sys.path.insert(0, SCRIPT_DIR)
import automationutils

_DEFAULT_WEB_SERVER = "127.0.0.1"
_DEFAULT_HTTP_PORT = 8888
_DEFAULT_SSL_PORT = 4443
_DEFAULT_WEBSOCKET_PORT = 9988

#expand _DIST_BIN = __XPC_BIN_PATH__
#expand _IS_WIN32 = len("__WIN32__") != 0
#expand _IS_MAC = __IS_MAC__ != 0
#expand _IS_LINUX = __IS_LINUX__ != 0
#ifdef IS_CYGWIN
#expand _IS_CYGWIN = __IS_CYGWIN__ == 1
#else
_IS_CYGWIN = False
#endif
#expand _IS_CAMINO = __IS_CAMINO__ != 0
#expand _BIN_SUFFIX = __BIN_SUFFIX__
#expand _PERL = __PERL__

#expand _DEFAULT_APP = "./" + __BROWSER_PATH__
#expand _CERTS_SRC_DIR = __CERTS_SRC_DIR__
#expand _IS_TEST_BUILD = __IS_TEST_BUILD__
#expand _IS_DEBUG_BUILD = __IS_DEBUG_BUILD__
#expand _CRASHREPORTER = __CRASHREPORTER__ == 1

if _IS_WIN32:
  import ctypes, ctypes.wintypes, time, msvcrt
else:
  import errno

# We use the logging system here primarily because it'll handle multiple
# threads, which is needed to process the output of the server and application
# processes simultaneously.
_log = logging.getLogger()
handler = logging.StreamHandler(sys.stdout)
_log.setLevel(logging.INFO)
_log.addHandler(handler)


#################
# PROFILE SETUP #
#################

class SyntaxError(Exception):
  "Signifies a syntax error on a particular line in server-locations.txt."

  def __init__(self, lineno, msg = None):
    self.lineno = lineno
    self.msg = msg

  def __str__(self):
    s = "Syntax error on line " + str(self.lineno)
    if self.msg:
      s += ": %s." % self.msg
    else:
      s += "."
    return s


class Location:
  "Represents a location line in server-locations.txt."

  def __init__(self, scheme, host, port, options):
    self.scheme = scheme
    self.host = host
    self.port = port
    self.options = options


class Automation(object):
  """
  Runs the browser from a script, and provides useful utilities for setting
  up the browser environment.
  """

  DIST_BIN = _DIST_BIN
  IS_WIN32 = _IS_WIN32
  IS_MAC = _IS_MAC
  IS_LINUX = _IS_LINUX
  IS_CYGWIN = _IS_CYGWIN
  IS_CAMINO = _IS_CAMINO
  BIN_SUFFIX = _BIN_SUFFIX
  PERL = _PERL

  UNIXISH = not IS_WIN32 and not IS_MAC

  DEFAULT_APP = _DEFAULT_APP
  CERTS_SRC_DIR = _CERTS_SRC_DIR
  IS_TEST_BUILD = _IS_TEST_BUILD
  IS_DEBUG_BUILD = _IS_DEBUG_BUILD
  CRASHREPORTER = _CRASHREPORTER

  # timeout, in seconds
  DEFAULT_TIMEOUT = 60.0

  DEFAULT_WEB_SERVER = _DEFAULT_WEB_SERVER
  DEFAULT_HTTP_PORT = _DEFAULT_HTTP_PORT
  DEFAULT_SSL_PORT = _DEFAULT_SSL_PORT
  DEFAULT_WEBSOCKET_PORT = _DEFAULT_WEBSOCKET_PORT

  def __init__(self):
    self.log = _log
    self.lastTestSeen = "automation.py"
    self.haveDumpedScreen = False

  def setServerInfo(self,
                    webServer = _DEFAULT_WEB_SERVER,
                    httpPort = _DEFAULT_HTTP_PORT,
                    sslPort = _DEFAULT_SSL_PORT,
                    webSocketPort = _DEFAULT_WEBSOCKET_PORT):
    self.webServer = webServer
    self.httpPort = httpPort
    self.sslPort = sslPort
    self.webSocketPort = webSocketPort

  @property
  def __all__(self):
    return [
      "UNIXISH",
      "IS_WIN32",
      "IS_MAC",
      "log",
      "runApp",
      "Process",
      "addCommonOptions",
      "initializeProfile",
      "DIST_BIN",
      "DEFAULT_APP",
      "CERTS_SRC_DIR",
      "environment",
      "IS_TEST_BUILD",
      "IS_DEBUG_BUILD",
      "DEFAULT_TIMEOUT",
    ]

  class Process(subprocess.Popen):
    """
    Represents our view of a subprocess.
    It adds a kill() method which allows it to be stopped explicitly.
    """

    def __init__(self,
                 args,
                 bufsize=0,
                 executable=None,
                 stdin=None,
                 stdout=None,
                 stderr=None,
                 preexec_fn=None,
                 close_fds=False,
                 shell=False,
                 cwd=None,
                 env=None,
                 universal_newlines=False,
                 startupinfo=None,
                 creationflags=0):
      args = automationutils.wrapCommand(args)
      print "args: %s" % args
      subprocess.Popen.__init__(self, args, bufsize, executable,
                                stdin, stdout, stderr,
                                preexec_fn, close_fds,
                                shell, cwd, env,
                                universal_newlines, startupinfo, creationflags)
      self.log = _log

    def kill(self):
      if Automation().IS_WIN32:
        import platform
        pid = "%i" % self.pid
        if platform.release() == "2000":
          # Windows 2000 needs 'kill.exe' from the
          # 'Windows 2000 Resource Kit tools'. (See bug 475455.)
          try:
            subprocess.Popen(["kill", "-f", pid]).wait()
          except:
            self.log.info("TEST-UNEXPECTED-FAIL | automation.py | Missing 'kill' utility to kill process with pid=%s. Kill it manually!", pid)
        else:
          # Windows XP and later.
          subprocess.Popen(["taskkill", "/F", "/PID", pid]).wait()
      else:
        os.kill(self.pid, signal.SIGKILL)

  def readLocations(self, locationsPath = "server-locations.txt"):
    """
    Reads the locations at which the Mochitest HTTP server is available from
    server-locations.txt.
    """
    locationFile = codecs.open(locationsPath, "r", "UTF-8")

    # Perhaps more detail than necessary, but it's the easiest way to make sure
    # we get exactly the format we want.  See server-locations.txt for the exact
    # format guaranteed here.
    lineRe = re.compile(r"^(?P<scheme>[a-z][-a-z0-9+.]*)"
                        r"://"
                        r"(?P<host>"
                          r"\d+\.\d+\.\d+\.\d+"
                          r"|"
                          r"(?:[a-z0-9](?:[-a-z0-9]*[a-z0-9])?\.)*"
                          r"[a-z](?:[-a-z0-9]*[a-z0-9])?"
r")" r":" r"(?P\d+)" r"(?:" r"\s+" r"(?P\S+(?:,\S+)*)" r")?$") locations = [] lineno = 0 seenPrimary = False for line in locationFile: lineno += 1 if line.startswith("#") or line == "\n": continue match = lineRe.match(line) if not match: raise SyntaxError(lineno) options = match.group("options") if options: options = options.split(",") if "primary" in options: if seenPrimary: raise SyntaxError(lineno, "multiple primary locations") seenPrimary = True else: options = [] locations.append(Location(match.group("scheme"), match.group("host"), match.group("port"), options)) if not seenPrimary: raise SyntaxError(lineno + 1, "missing primary location") return locations def setupPermissionsDatabase(self, profileDir, permissions): # Open database and create table permDB = sqlite3.connect(os.path.join(profileDir, "permissions.sqlite")) cursor = permDB.cursor(); # SQL copied from nsPermissionManager.cpp cursor.execute("""CREATE TABLE moz_hosts ( id INTEGER PRIMARY KEY, host TEXT, type TEXT, permission INTEGER, expireType INTEGER, expireTime INTEGER)""") # Insert desired permissions c = 0 for perm in permissions.keys(): for host,allow in permissions[perm]: c += 1 cursor.execute("INSERT INTO moz_hosts values(?, ?, ?, ?, 0, 0)", (c, host, perm, 1 if allow else 2)) # Commit and close permDB.commit() cursor.close() def initializeProfile(self, profileDir, extraPrefs = [], useServerLocations = False): " Sets up the standard testing profile." prefs = [] # Start with a clean slate. shutil.rmtree(profileDir, True) os.mkdir(profileDir) # Set up permissions database locations = self.readLocations() self.setupPermissionsDatabase(profileDir, {'allowXULXBL':[(l.host, 'noxul' not in l.options) for l in locations]}); part = """\ user_pref("browser.console.showInPanel", true); user_pref("browser.dom.window.dump.enabled", true); user_pref("browser.firstrun.show.localepicker", false); user_pref("browser.firstrun.show.uidiscovery", false); user_pref("browser.ui.layout.tablet", 0); // force tablet UI off user_pref("dom.allow_scripts_to_close_windows", true); user_pref("dom.disable_open_during_load", false); user_pref("dom.max_script_run_time", 0); // no slow script dialogs user_pref("hangmonitor.timeout", 0); // no hang monitor user_pref("dom.max_chrome_script_run_time", 0); user_pref("dom.popup_maximum", -1); user_pref("dom.send_after_paint_to_content", true); user_pref("dom.successive_dialog_time_limit", 0); user_pref("signed.applets.codebase_principal_support", true); user_pref("security.warn_submit_insecure", false); user_pref("browser.shell.checkDefaultBrowser", false); user_pref("shell.checkDefaultClient", false); user_pref("browser.warnOnQuit", false); user_pref("accessibility.typeaheadfind.autostart", false); user_pref("javascript.options.showInConsole", true); user_pref("devtools.errorconsole.enabled", true); user_pref("layout.debug.enable_data_xbl", true); user_pref("browser.EULA.override", true); user_pref("javascript.options.jit_hardening", true); user_pref("gfx.color_management.force_srgb", true); user_pref("network.manage-offline-status", false); user_pref("test.mousescroll", true); user_pref("security.default_personal_cert", "Select Automatically"); // Need to client auth test be w/o any dialogs user_pref("network.http.prompt-temp-redirect", false); user_pref("media.cache_size", 100); user_pref("security.warn_viewing_mixed", false); user_pref("app.update.enabled", false); user_pref("browser.panorama.experienced_first_run", true); // Assume experienced user_pref("dom.w3c_touch_events.enabled", true); 
user_pref("toolkit.telemetry.prompted", 2); // Existing tests assume there is no font size inflation. user_pref("font.size.inflation.emPerLine", 0); user_pref("font.size.inflation.minTwips", 0); // Only load extensions from the application and user profile // AddonManager.SCOPE_PROFILE + AddonManager.SCOPE_APPLICATION user_pref("extensions.enabledScopes", 5); // Disable metadata caching for installed add-ons by default user_pref("extensions.getAddons.cache.enabled", false); // Disable intalling any distribution add-ons user_pref("extensions.installDistroAddons", false); user_pref("extensions.testpilot.runStudies", false); user_pref("geo.wifi.uri", "http://%(server)s/tests/dom/tests/mochitest/geolocation/network_geolocation.sjs"); user_pref("geo.wifi.testing", true); user_pref("geo.ignore.location_filter", true); user_pref("camino.warn_when_closing", false); // Camino-only, harmless to others // Make url-classifier updates so rare that they won't affect tests user_pref("urlclassifier.updateinterval", 172800); // Point the url-classifier to the local testing server for fast failures user_pref("browser.safebrowsing.provider.0.gethashURL", "http://%(server)s/safebrowsing-dummy/gethash"); user_pref("browser.safebrowsing.provider.0.keyURL", "http://%(server)s/safebrowsing-dummy/newkey"); user_pref("browser.safebrowsing.provider.0.updateURL", "http://%(server)s/safebrowsing-dummy/update"); // Point update checks to the local testing server for fast failures user_pref("extensions.update.url", "http://%(server)s/extensions-dummy/updateURL"); user_pref("extensions.blocklist.url", "http://%(server)s/extensions-dummy/blocklistURL"); user_pref("extensions.hotfix.url", "http://%(server)s/extensions-dummy/hotfixURL"); // Make sure opening about:addons won't hit the network user_pref("extensions.webservice.discoverURL", "http://%(server)s/extensions-dummy/discoveryURL"); // Make sure AddonRepository won't hit the network user_pref("extensions.getAddons.maxResults", 0); user_pref("extensions.getAddons.get.url", "http://%(server)s/extensions-dummy/repositoryGetURL"); user_pref("extensions.getAddons.getWithPerformance.url", "http://%(server)s/extensions-dummy/repositoryGetWithPerformanceURL"); user_pref("extensions.getAddons.search.browseURL", "http://%(server)s/extensions-dummy/repositoryBrowseURL"); user_pref("extensions.getAddons.search.url", "http://%(server)s/extensions-dummy/repositorySearchURL"); """ % { "server" : self.webServer + ":" + str(self.httpPort) } prefs.append(part) if useServerLocations == False: part = """ user_pref("capability.principal.codebase.p1.granted", "UniversalXPConnect"); user_pref("capability.principal.codebase.p1.id", "%(origin)s"); user_pref("capability.principal.codebase.p1.subjectName", ""); """ % { "origin": "http://" + self.webServer + ":" + str(self.httpPort) } prefs.append(part) else: # Grant God-power to all the privileged servers on which tests run. privileged = filter(lambda loc: "privileged" in loc.options, locations) for (i, l) in itertools.izip(itertools.count(1), privileged): part = """ user_pref("capability.principal.codebase.p%(i)d.granted", "UniversalXPConnect"); user_pref("capability.principal.codebase.p%(i)d.id", "%(origin)s"); user_pref("capability.principal.codebase.p%(i)d.subjectName", ""); """ % { "i": i, "origin": (l.scheme + "://" + l.host + ":" + str(l.port)) } prefs.append(part) # We need to proxy every server but the primary one. 
      origins = ["'%s://%s:%s'" % (l.scheme, l.host, l.port)
                 for l in filter(lambda l: "primary" not in l.options, locations)]
      origins = ", ".join(origins)
      pacURL = """data:text/plain,
function FindProxyForURL(url, host)
{
  var origins = [%(origins)s];
  var regex = new RegExp('^([a-z][-a-z0-9+.]*)' +
                         '://' +
                         '(?:[^/@]*@)?' +
                         '(.*?)' +
                         '(?::(\\\\\\\\d+))?/');
  var matches = regex.exec(url);
  if (!matches)
    return 'DIRECT';
  var isHttp = matches[1] == 'http';
  var isHttps = matches[1] == 'https';
  var isWebSocket = matches[1] == 'ws';
  var isWebSocketSSL = matches[1] == 'wss';
  if (!matches[3])
  {
    if (isHttp | isWebSocket)
      matches[3] = '80';
    if (isHttps | isWebSocketSSL)
      matches[3] = '443';
  }
  if (isWebSocket)
    matches[1] = 'http';
  if (isWebSocketSSL)
    matches[1] = 'https';
  var origin = matches[1] + '://' + matches[2] + ':' + matches[3];
  if (origins.indexOf(origin) < 0)
    return 'DIRECT';
  if (isHttp)
    return 'PROXY %(remote)s:%(httpport)s';
  if (isHttps || isWebSocket || isWebSocketSSL)
    return 'PROXY %(remote)s:%(sslport)s';
  return 'DIRECT';
}""" % { "origins": origins,
         "remote": self.webServer,
         "httpport": self.httpPort,
         "sslport": self.sslPort }
      pacURL = "".join(pacURL.splitlines())

      part += """
user_pref("network.proxy.type", 2);
user_pref("network.proxy.autoconfig_url", "%(pacURL)s");
user_pref("camino.use_system_proxy_settings", false); // Camino-only, harmless to others
""" % { "pacURL": pacURL }
      prefs.append(part)

    for v in extraPrefs:
      thispref = v.split("=")
      if len(thispref) < 2:
        print "Error: syntax error in --setpref=" + v
        sys.exit(1)
      part = 'user_pref("%s", %s);\n' % (thispref[0], thispref[1])
      prefs.append(part)

    # write the preferences
    prefsFile = open(profileDir + "/" + "user.js", "a")
    prefsFile.write("".join(prefs))
    prefsFile.close()

  def addCommonOptions(self, parser):
    "Adds command-line options which are common to mochitest and reftest."
    parser.add_option("--setpref",
                      action = "append", type = "string",
                      default = [],
                      dest = "extraPrefs", metavar = "PREF=VALUE",
                      help = "defines an extra user preference")

  def fillCertificateDB(self, profileDir, certPath, utilityPath, xrePath):
    pwfilePath = os.path.join(profileDir, ".crtdbpw")

    pwfile = open(pwfilePath, "w")
    pwfile.write("\n")
    pwfile.close()

    # Create head of the ssltunnel configuration file
    sslTunnelConfigPath = os.path.join(profileDir, "ssltunnel.cfg")
    sslTunnelConfig = open(sslTunnelConfigPath, "w")

    sslTunnelConfig.write("httpproxy:1\n")
    sslTunnelConfig.write("certdbdir:%s\n" % certPath)
    sslTunnelConfig.write("forward:127.0.0.1:%s\n" % self.httpPort)
    sslTunnelConfig.write("websocketserver:%s:%s\n" % (self.webServer, self.webSocketPort))
    sslTunnelConfig.write("listen:*:%s:pgo server certificate\n" % self.sslPort)

    # Configure automatic certificate and bind custom certificates, client authentication
    locations = self.readLocations()
    locations.pop(0)
    for loc in locations:
      if loc.scheme == "https" and "nocert" not in loc.options:
        customCertRE = re.compile("^cert=(?P<nickname>[0-9a-zA-Z_ ]+)")
        clientAuthRE = re.compile("^clientauth=(?P<clientauth>[a-z]+)")
        redirRE      = re.compile("^redir=(?P<redirhost>[0-9a-zA-Z_ .]+)")
        for option in loc.options:
          match = customCertRE.match(option)
          if match:
            customcert = match.group("nickname")
            sslTunnelConfig.write("listen:%s:%s:%s:%s\n" %
                                  (loc.host, loc.port, self.sslPort, customcert))

          match = clientAuthRE.match(option)
          if match:
            clientauth = match.group("clientauth")
            sslTunnelConfig.write("clientauth:%s:%s:%s:%s\n" %
                                  (loc.host, loc.port, self.sslPort, clientauth))

          match = redirRE.match(option)
          if match:
            redirhost = match.group("redirhost")
            sslTunnelConfig.write("redirhost:%s:%s:%s:%s\n" %
                                  (loc.host, loc.port, self.sslPort, redirhost))

    sslTunnelConfig.close()

    # Pre-create the certification database for the profile
    env = self.environment(xrePath = xrePath)
    certutil = os.path.join(utilityPath, "certutil" + self.BIN_SUFFIX)
    pk12util = os.path.join(utilityPath, "pk12util" + self.BIN_SUFFIX)

    status = self.Process([certutil, "-N", "-d", profileDir, "-f", pwfilePath], env = env).wait()
    if status != 0:
      return status

    # Walk the cert directory and add custom CAs and client certs
    files = os.listdir(certPath)
    for item in files:
      root, ext = os.path.splitext(item)
      if ext == ".ca":
        trustBits = "CT,,"
        if root.endswith("-object"):
          trustBits = "CT,,CT"
        self.Process([certutil, "-A", "-i", os.path.join(certPath, item),
                      "-d", profileDir, "-f", pwfilePath, "-n", root, "-t", trustBits],
                     env = env).wait()
      if ext == ".client":
        self.Process([pk12util, "-i", os.path.join(certPath, item),
                      "-w", pwfilePath, "-d", profileDir],
                     env = env).wait()

    os.unlink(pwfilePath)
    return 0

  def environment(self, env = None, xrePath = None, crashreporter = True):
    if xrePath == None:
      xrePath = self.DIST_BIN
    if env == None:
      env = dict(os.environ)

    ldLibraryPath = os.path.abspath(os.path.join(SCRIPT_DIR, xrePath))
    if self.UNIXISH or self.IS_MAC:
      envVar = "LD_LIBRARY_PATH"
      if self.IS_MAC:
        envVar = "DYLD_LIBRARY_PATH"
      else: # unixish
        env['MOZILLA_FIVE_HOME'] = xrePath
      if envVar in env:
        ldLibraryPath = ldLibraryPath + ":" + env[envVar]
      env[envVar] = ldLibraryPath
    elif self.IS_WIN32:
      env["PATH"] = env["PATH"] + ";" + ldLibraryPath

    if crashreporter:
      env['MOZ_CRASHREPORTER_NO_REPORT'] = '1'
      env['MOZ_CRASHREPORTER'] = '1'
    else:
      env['MOZ_CRASHREPORTER_DISABLE'] = '1'

    env['GNOME_DISABLE_CRASH_DIALOG'] = '1'
    env['XRE_NO_WINDOWS_CRASH_DIALOG'] = '1'
    env['NS_TRACE_MALLOC_DISABLE_STACKS'] = '1'
    return env

  if IS_WIN32:
    PeekNamedPipe = ctypes.windll.kernel32.PeekNamedPipe
    GetLastError = ctypes.windll.kernel32.GetLastError

    def readWithTimeout(self, f, timeout):
      """Try to read a line of output from the file object |f|.  |f| must be
      a pipe, like the |stdout| member of a subprocess.Popen object created
      with stdout=PIPE.  If no output is received within |timeout| seconds,
      return a blank line.
      Returns a tuple (line, did_timeout), where |did_timeout| is True
      if the read timed out, and False otherwise."""
      if timeout is None:
        # shortcut to allow callers to pass in "None" for no timeout.
        return (f.readline(), False)
      x = msvcrt.get_osfhandle(f.fileno())
      l = ctypes.c_long()
      done = time.time() + timeout
      while time.time() < done:
        if self.PeekNamedPipe(x, None, 0, None, ctypes.byref(l), None) == 0:
          err = self.GetLastError()
          if err == 38 or err == 109: # ERROR_HANDLE_EOF || ERROR_BROKEN_PIPE
            return ('', False)
          else:
            self.log.error("readWithTimeout got error: %d", err)
        if l.value > 0:
          # we're assuming that the output is line-buffered,
          # which is not unreasonable
          return (f.readline(), False)
        time.sleep(0.01)
      return ('', True)

    def isPidAlive(self, pid):
      STILL_ACTIVE = 259
      PROCESS_QUERY_LIMITED_INFORMATION = 0x1000
      pHandle = ctypes.windll.kernel32.OpenProcess(PROCESS_QUERY_LIMITED_INFORMATION, 0, pid)
      if not pHandle:
        return False
      pExitCode = ctypes.wintypes.DWORD()
      ctypes.windll.kernel32.GetExitCodeProcess(pHandle, ctypes.byref(pExitCode))
      ctypes.windll.kernel32.CloseHandle(pHandle)
      return pExitCode.value == STILL_ACTIVE

    def killPid(self, pid):
      PROCESS_TERMINATE = 0x0001
      pHandle = ctypes.windll.kernel32.OpenProcess(PROCESS_TERMINATE, 0, pid)
      if not pHandle:
        return
      success = ctypes.windll.kernel32.TerminateProcess(pHandle, 1)
      ctypes.windll.kernel32.CloseHandle(pHandle)

  else:

    def readWithTimeout(self, f, timeout):
      """Try to read a line of output from the file object |f|.  If no output
      is received within |timeout| seconds, return a blank line.
      Returns a tuple (line, did_timeout), where |did_timeout| is True
      if the read timed out, and False otherwise."""
      (r, w, e) = select.select([f], [], [], timeout)
      if len(r) == 0:
        return ('', True)
      return (f.readline(), False)

    def isPidAlive(self, pid):
      try:
        # kill(pid, 0) checks for a valid PID without actually sending a signal.
        # The method throws OSError if the PID is invalid, which we catch below.
        os.kill(pid, 0)

        # Wait on it to see if it's a zombie.  This can throw OSError.ECHILD if
        # the process terminates before we get to this point.
        wpid, wstatus = os.waitpid(pid, os.WNOHANG)
        return wpid == 0

      except OSError, err:
        # Catch the errors we might expect from os.kill/os.waitpid,
        # and re-raise any others
        if err.errno == errno.ESRCH or err.errno == errno.ECHILD:
          return False
        raise

    def killPid(self, pid):
      os.kill(pid, signal.SIGKILL)

  def dumpScreen(self, utilityPath):
    self.haveDumpedScreen = True

    # Need to figure out which tool to use and whether it writes to a file or to stdout
    if self.UNIXISH:
      utility = [os.path.join(utilityPath, "screentopng")]
      imgoutput = 'stdout'
    elif self.IS_MAC:
      utility = ['/usr/sbin/screencapture', '-C', '-x', '-t', 'png']
      imgoutput = 'file'
    elif self.IS_WIN32:
      self.log.info("If you fixed bug 589668, you'd get a screenshot here")
      return

    # Run the capture correctly for the type of capture
    try:
      if imgoutput == 'file':
        tmpfd, imgfilename = tempfile.mkstemp(prefix='mozilla-test-fail_')
        os.close(tmpfd)
        dumper = self.Process(utility + [imgfilename])
      elif imgoutput == 'stdout':
        dumper = self.Process(utility, bufsize=-1,
                              stdout=subprocess.PIPE, close_fds=True)
    except OSError, err:
      self.log.info("Failed to start %s for screenshot: %s",
                    utility[0], err.strerror)
      return

    # Check whether the capture utility ran successfully
    dumper_out, dumper_err = dumper.communicate()
    if dumper.returncode != 0:
      self.log.info("%s exited with code %d", utility, dumper.returncode)
      return

    try:
      if imgoutput == 'stdout':
        image = dumper_out
      elif imgoutput == 'file':
        with open(imgfilename) as imgfile:
          image = imgfile.read()
    except IOError, err:
      self.log.info("Failed to read image from %s", imgoutput)
      # Without image data there is nothing to log.
      return

    import base64
    encoded = base64.b64encode(image)
    self.log.info("SCREENSHOT: data:image/png;base64,%s", encoded)

  def killAndGetStack(self, proc, utilityPath, debuggerInfo):
    """Kill the process, preferably in a way that gets us a stack trace."""
    if not debuggerInfo and not self.haveDumpedScreen:
      self.dumpScreen(utilityPath)

    if self.CRASHREPORTER and not debuggerInfo:
      if self.UNIXISH:
        # ABRT will get picked up by Breakpad's signal handler
        os.kill(proc.pid, signal.SIGABRT)
        return
      elif self.IS_WIN32:
        # We should have a "crashinject" program in our utility path
        crashinject = os.path.normpath(os.path.join(utilityPath, "crashinject.exe"))
        if os.path.exists(crashinject) and subprocess.Popen([crashinject, str(proc.pid)]).wait() == 0:
          return
    # TODO: kill the process such that it triggers Breakpad on OS X (bug 525296)
    self.log.info("Can't trigger Breakpad, just killing process")
    proc.kill()

  def waitForFinish(self, proc, utilityPath, timeout, maxTime, startTime, debuggerInfo, symbolsPath):
    """ Look for timeout or crashes and return the status after the process terminates """
    stackFixerProcess = None
    stackFixerFunction = None
    didTimeout = False
    hitMaxTime = False
    if proc.stdout is None:
      self.log.info("TEST-INFO: Not logging stdout or stderr due to debugger connection")
    else:
      logsource = proc.stdout
      if self.IS_DEBUG_BUILD and (self.IS_MAC or self.IS_LINUX) and symbolsPath and os.path.exists(symbolsPath):
        # Run each line through a function in fix_stack_using_bpsyms.py (uses breakpad symbol files).
        # This method is preferred for Tinderbox builds, since native symbols may have been stripped.
        sys.path.insert(0, utilityPath)
        import fix_stack_using_bpsyms as stackFixerModule
        stackFixerFunction = lambda line: stackFixerModule.fixSymbols(line, symbolsPath)
        del sys.path[0]
      elif self.IS_DEBUG_BUILD and self.IS_MAC and False:
        # Run each line through a function in fix_macosx_stack.py (uses atos)
        sys.path.insert(0, utilityPath)
        import fix_macosx_stack as stackFixerModule
        stackFixerFunction = lambda line: stackFixerModule.fixSymbols(line)
        del sys.path[0]
      elif self.IS_DEBUG_BUILD and self.IS_LINUX:
        # Run logsource through fix-linux-stack.pl (uses addr2line).
        # This method is preferred for developer machines, so we don't have to run "make buildsymbols".
        stackFixerProcess = self.Process([self.PERL, os.path.join(utilityPath, "fix-linux-stack.pl")],
                                         stdin=logsource,
                                         stdout=subprocess.PIPE)
        logsource = stackFixerProcess.stdout

      (line, didTimeout) = self.readWithTimeout(logsource, timeout)
      while line != "" and not didTimeout:
        if "TEST-START" in line and "|" in line:
          self.lastTestSeen = line.split("|")[1].strip()
        if stackFixerFunction:
          line = stackFixerFunction(line)
        self.log.info(line.rstrip().decode("UTF-8", "ignore"))
        if not debuggerInfo and not self.haveDumpedScreen and "TEST-UNEXPECTED-FAIL" in line and "Test timed out" in line:
          self.dumpScreen(utilityPath)

        (line, didTimeout) = self.readWithTimeout(logsource, timeout)

        if not hitMaxTime and maxTime and datetime.now() - startTime > timedelta(seconds = maxTime):
          # Kill the application, but continue reading from the stack fixer
          # so as not to deadlock on stackFixerProcess.wait().
          hitMaxTime = True
          self.log.info("TEST-UNEXPECTED-FAIL | %s | application ran for longer than allowed maximum time of %d seconds", self.lastTestSeen, int(maxTime))
          self.killAndGetStack(proc, utilityPath, debuggerInfo)

      if didTimeout:
        self.log.info("TEST-UNEXPECTED-FAIL | %s | application timed out after %d seconds with no output", self.lastTestSeen, int(timeout))
        self.killAndGetStack(proc, utilityPath, debuggerInfo)

    status = proc.wait()
    if status == 0:
      self.lastTestSeen = "Main app process exited normally"
    if status != 0 and not didTimeout and not hitMaxTime:
      self.log.info("TEST-UNEXPECTED-FAIL | %s | Exited with code %d during test run", self.lastTestSeen, status)
    if stackFixerProcess is not None:
      fixerStatus = stackFixerProcess.wait()
      if fixerStatus != 0 and not didTimeout and not hitMaxTime:
        self.log.info("TEST-UNEXPECTED-FAIL | automation.py | Stack fixer process exited with code %d during test run", fixerStatus)

    return status

  def buildCommandLine(self, app, debuggerInfo, profileDir, testURL, extraArgs):
    """ build the application command line """

    cmd = os.path.abspath(app)
    if self.IS_MAC and not self.IS_CAMINO and os.path.exists(cmd + "-bin"):
      # Prefer 'app-bin' in case 'app' is a shell script.
      # We can remove this hack once bug 673899 etc are fixed.
cmd += "-bin" args = [] if debuggerInfo: args.extend(debuggerInfo["args"]) args.append(cmd) cmd = os.path.abspath(debuggerInfo["path"]) if self.IS_MAC: args.append("-foreground") if self.IS_CYGWIN: profileDirectory = commands.getoutput("cygpath -w \"" + profileDir + "/\"") else: profileDirectory = profileDir + "/" args.extend(("-no-remote", "-profile", profileDirectory)) if testURL is not None: if self.IS_CAMINO: args.extend(("-url", testURL)) else: args.append((testURL)) args.extend(extraArgs) return cmd, args def checkForZombies(self, processLog): """ Look for hung processes """ if not os.path.exists(processLog): self.log.info('INFO | automation.py | PID log not found: %s', processLog) else: self.log.info('INFO | automation.py | Reading PID log: %s', processLog) processList = [] pidRE = re.compile(r'launched child process (\d+)$') processLogFD = open(processLog) for line in processLogFD: self.log.info(line.rstrip()) m = pidRE.search(line) if m: processList.append(int(m.group(1))) processLogFD.close() for processPID in processList: self.log.info("INFO | automation.py | Checking for orphan process with PID: %d", processPID) if self.isPidAlive(processPID): self.log.info("TEST-UNEXPECTED-FAIL | automation.py | child process %d still alive after shutdown", processPID) self.killPid(processPID) def checkForCrashes(self, profileDir, symbolsPath): automationutils.checkForCrashes(os.path.join(profileDir, "minidumps"), symbolsPath, self.lastTestSeen) def runApp(self, testURL, env, app, profileDir, extraArgs, runSSLTunnel = False, utilityPath = None, xrePath = None, certPath = None, debuggerInfo = None, symbolsPath = None, timeout = -1, maxTime = None): """ Run the app, log the duration it took to execute, return the status code. Kills the app if it runs for longer than |maxTime| seconds, or outputs nothing for |timeout| seconds. """ if utilityPath == None: utilityPath = self.DIST_BIN if xrePath == None: xrePath = self.DIST_BIN if certPath == None: certPath = self.CERTS_SRC_DIR if timeout == -1: timeout = self.DEFAULT_TIMEOUT # copy env so we don't munge the caller's environment env = dict(env); env["NO_EM_RESTART"] = "1" tmpfd, processLog = tempfile.mkstemp(suffix='pidlog') os.close(tmpfd) env["MOZ_PROCESS_LOG"] = processLog if self.IS_TEST_BUILD and runSSLTunnel: # create certificate database for the profile certificateStatus = self.fillCertificateDB(profileDir, certPath, utilityPath, xrePath) if certificateStatus != 0: self.log.info("TEST-UNEXPECTED-FAIL | automation.py | Certificate integration failed") return certificateStatus # start ssltunnel to provide https:// URLs capability ssltunnel = os.path.join(utilityPath, "ssltunnel" + self.BIN_SUFFIX) ssltunnelProcess = self.Process([ssltunnel, os.path.join(profileDir, "ssltunnel.cfg")], env = self.environment(xrePath = xrePath)) self.log.info("INFO | automation.py | SSL tunnel pid: %d", ssltunnelProcess.pid) cmd, args = self.buildCommandLine(app, debuggerInfo, profileDir, testURL, extraArgs) startTime = datetime.now() if debuggerInfo and debuggerInfo["interactive"]: # If an interactive debugger is attached, don't redirect output, # don't use timeouts, and don't capture ctrl-c. 
      timeout = None
      maxTime = None
      outputPipe = None
      signal.signal(signal.SIGINT, lambda sigid, frame: None)
    else:
      outputPipe = subprocess.PIPE

    self.lastTestSeen = "automation.py"
    proc = self.Process([cmd] + args,
                        env = self.environment(env, xrePath = xrePath,
                                               crashreporter = not debuggerInfo),
                        stdout = outputPipe,
                        stderr = subprocess.STDOUT)
    self.log.info("INFO | automation.py | Application pid: %d", proc.pid)

    status = self.waitForFinish(proc, utilityPath, timeout, maxTime, startTime, debuggerInfo, symbolsPath)
    self.log.info("INFO | automation.py | Application ran for: %s", str(datetime.now() - startTime))

    # Do a final check for zombie child processes.
    self.checkForZombies(processLog)
    self.checkForCrashes(profileDir, symbolsPath)

    if os.path.exists(processLog):
      os.unlink(processLog)

    if self.IS_TEST_BUILD and runSSLTunnel:
      ssltunnelProcess.kill()

    return status

  def getExtensionIDFromRDF(self, rdfSource):
    """
    Retrieves the extension id from an install.rdf file (or string).
    """
    from xml.dom.minidom import parse, parseString, Node

    if isinstance(rdfSource, file):
      document = parse(rdfSource)
    else:
      document = parseString(rdfSource)

    # Find the <em:id> element.  There can be multiple <Description> tags
    # inside the root <RDF> tag, so we have to check this way.
    for rdfChild in document.documentElement.childNodes:
      if rdfChild.nodeType == Node.ELEMENT_NODE and rdfChild.tagName == "Description":
        for descChild in rdfChild.childNodes:
          if descChild.nodeType == Node.ELEMENT_NODE and descChild.tagName == "em:id":
            return descChild.childNodes[0].data

    return None

  def installExtension(self, extensionSource, profileDir, extensionID = None):
    """
    Copies an extension into the extensions directory of the given profile.
    extensionSource - the source location of the extension files.  This can
                      be either a directory or a path to an xpi file.
    profileDir      - the profile directory we are copying into.  We will
                      create the "extensions" directory there if it doesn't
                      exist.
    extensionID     - the id of the extension to be used as the containing
                      directory for the extension, if extensionSource is a
                      directory, i.e. this is the name of the folder created
                      under the profile's extensions/ directory.
    """
    if not os.path.isdir(profileDir):
      self.log.info("INFO | automation.py | Cannot install extension, invalid profileDir at: %s", profileDir)
      return

    installRDFFilename = "install.rdf"

    extensionsRootDir = os.path.join(profileDir, "extensions", "staged")
    if not os.path.isdir(extensionsRootDir):
      os.makedirs(extensionsRootDir)

    if os.path.isfile(extensionSource):
      reader = automationutils.ZipFileReader(extensionSource)

      for filename in reader.namelist():
        # Sanity check the zip file.
        if os.path.isabs(filename):
          self.log.info("INFO | automation.py | Cannot install extension, bad files in xpi")
          return

        # We may need to dig the extensionID out of the zip file...
        if extensionID is None and filename == installRDFFilename:
          extensionID = self.getExtensionIDFromRDF(reader.read(filename))

      # We must know the extensionID now.
      if extensionID is None:
        self.log.info("INFO | automation.py | Cannot install extension, missing extensionID")
        return

      # Make the extension directory.
      extensionDir = os.path.join(extensionsRootDir, extensionID)
      os.mkdir(extensionDir)

      # Extract all files.
      reader.extractall(extensionDir)

    elif os.path.isdir(extensionSource):
      if extensionID is None:
        filename = os.path.join(extensionSource, installRDFFilename)
        if os.path.isfile(filename):
          with open(filename, "r") as installRDF:
            extensionID = self.getExtensionIDFromRDF(installRDF)

        if extensionID is None:
          self.log.info("INFO | automation.py | Cannot install extension, missing extensionID")
          return

      # Copy extension tree into its own directory.
      # "destination directory must not already exist".
      shutil.copytree(extensionSource, os.path.join(extensionsRootDir, extensionID))

    else:
      self.log.info("INFO | automation.py | Cannot install extension, invalid extensionSource at: %s", extensionSource)
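
# ----------------------------------------------------------------------------
# Minimal usage sketch (illustrative only; not part of the original module).
# Real harnesses such as runtests.py import Automation and drive it directly.
# The test URL below, and the assumption that a server-locations.txt file
# describing the test web server exists in the working directory, are
# hypothetical placeholders for this example.
# ----------------------------------------------------------------------------
if __name__ == "__main__":
  automation = Automation()
  # Use the default server settings (127.0.0.1:8888, SSL tunnel on 4443).
  automation.setServerInfo()

  profileDir = tempfile.mkdtemp(prefix = "automationprofile")
  try:
    # Build a throwaway profile containing the standard testing preferences.
    automation.initializeProfile(profileDir)

    # Launch the browser against a local page, mirror its output to the log,
    # and kill it if it hangs or produces no output for DEFAULT_TIMEOUT seconds.
    status = automation.runApp("http://127.0.0.1:8888/index.html",
                               automation.environment(),
                               automation.DEFAULT_APP,
                               profileDir,
                               [],
                               timeout = automation.DEFAULT_TIMEOUT)
    automation.log.info("INFO | automation.py | Example run exited with status %s", status)
  finally:
    shutil.rmtree(profileDir, True)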