mirror of
https://gitlab.winehq.org/wine/wine-gecko.git
synced 2024-09-13 09:24:08 -07:00
Merge m-c to inbound on a CLOSED TREE
This commit is contained in:
commit
5689996e99
@ -55,7 +55,6 @@ SEARCH_PATHS = [
|
||||
'testing/mozbase/mozsystemmonitor',
|
||||
'testing/mozbase/mozinfo',
|
||||
'testing/mozbase/moztest',
|
||||
'testing/mozbase/mozversion',
|
||||
'testing/mozbase/manifestdestiny',
|
||||
'xpcom/idl-parser',
|
||||
]
|
||||
|
@ -253,14 +253,13 @@ class B2GRemoteReftest(RefTest):
|
||||
sys.exit(5)
|
||||
|
||||
# Delete any bundled extensions
|
||||
if profileDir:
|
||||
extensionDir = os.path.join(profileDir, 'extensions', 'staged')
|
||||
for filename in os.listdir(extensionDir):
|
||||
try:
|
||||
self._devicemanager._checkCmd(['shell', 'rm', '-rf',
|
||||
os.path.join(self.bundlesDir, filename)])
|
||||
except DMError:
|
||||
pass
|
||||
extensionDir = os.path.join(profileDir, 'extensions', 'staged')
|
||||
for filename in os.listdir(extensionDir):
|
||||
try:
|
||||
self._devicemanager._checkCmdAs(['shell', 'rm', '-rf',
|
||||
os.path.join(self.bundlesDir, filename)])
|
||||
except DMError:
|
||||
pass
|
||||
|
||||
# Restore the original profiles.ini.
|
||||
if self.originalProfilesIni:
|
||||
@ -277,8 +276,8 @@ class B2GRemoteReftest(RefTest):
|
||||
self._devicemanager.removeDir(self.remoteTestRoot)
|
||||
|
||||
# Restore the original user.js.
|
||||
self._devicemanager._checkCmd(['shell', 'rm', '-f', self.userJS])
|
||||
self._devicemanager._checkCmd(['shell', 'dd', 'if=%s.orig' % self.userJS, 'of=%s' % self.userJS])
|
||||
self._devicemanager._checkCmdAs(['shell', 'rm', '-f', self.userJS])
|
||||
self._devicemanager._checkCmdAs(['shell', 'dd', 'if=%s.orig' % self.userJS, 'of=%s' % self.userJS])
|
||||
|
||||
# We've restored the original profile, so reboot the device so that
|
||||
# it gets picked up.
|
||||
@ -442,9 +441,9 @@ class B2GRemoteReftest(RefTest):
|
||||
# Copy the extensions to the B2G bundles dir.
|
||||
extensionDir = os.path.join(profileDir, 'extensions', 'staged')
|
||||
# need to write to read-only dir
|
||||
self._devicemanager._checkCmd(['remount'])
|
||||
self._devicemanager._checkCmdAs(['remount'])
|
||||
for filename in os.listdir(extensionDir):
|
||||
self._devicemanager._checkCmd(['shell', 'rm', '-rf',
|
||||
self._devicemanager._checkCmdAs(['shell', 'rm', '-rf',
|
||||
os.path.join(self.bundlesDir, filename)])
|
||||
try:
|
||||
self._devicemanager.pushDir(extensionDir, self.bundlesDir)
|
||||
@ -454,8 +453,8 @@ class B2GRemoteReftest(RefTest):
|
||||
|
||||
# In B2G, user.js is always read from /data/local, not the profile
|
||||
# directory. Backup the original user.js first so we can restore it.
|
||||
self._devicemanager._checkCmd(['shell', 'rm', '-f', '%s.orig' % self.userJS])
|
||||
self._devicemanager._checkCmd(['shell', 'dd', 'if=%s' % self.userJS, 'of=%s.orig' % self.userJS])
|
||||
self._devicemanager._checkCmdAs(['shell', 'rm', '-f', '%s.orig' % self.userJS])
|
||||
self._devicemanager._checkCmdAs(['shell', 'dd', 'if=%s' % self.userJS, 'of=%s.orig' % self.userJS])
|
||||
self._devicemanager.pushFile(os.path.join(profileDir, "user.js"), self.userJS)
|
||||
|
||||
self.updateProfilesIni(self.remoteProfile)
|
||||
|
@ -24,7 +24,6 @@ MOZBASE_PACKAGES = \
|
||||
moznetwork \
|
||||
mozsystemmonitor \
|
||||
moztest \
|
||||
mozversion \
|
||||
$(NULL)
|
||||
|
||||
MOZBASE_EXTRAS = \
|
||||
|
@ -1,5 +1,3 @@
|
||||
# Mozbase
|
||||
|
||||
Mozbase is a set of easy-to-use Python packages forming a supplemental standard
|
||||
library for Mozilla. It provides consistency and reduces redundancy in
|
||||
automation and other system-level software. All of Mozilla's test harnesses use
|
||||
|
@ -1,153 +0,0 @@
|
||||
# Makefile for Sphinx documentation
|
||||
#
|
||||
|
||||
# You can set these variables from the command line.
|
||||
SPHINXOPTS =
|
||||
SPHINXBUILD = sphinx-build
|
||||
PAPER =
|
||||
BUILDDIR = _build
|
||||
|
||||
# Internal variables.
|
||||
PAPEROPT_a4 = -D latex_paper_size=a4
|
||||
PAPEROPT_letter = -D latex_paper_size=letter
|
||||
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||
# the i18n builder cannot share the environment and doctrees with the others
|
||||
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||
|
||||
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
|
||||
|
||||
help:
|
||||
@echo "Please use \`make <target>' where <target> is one of"
|
||||
@echo " html to make standalone HTML files"
|
||||
@echo " dirhtml to make HTML files named index.html in directories"
|
||||
@echo " singlehtml to make a single large HTML file"
|
||||
@echo " pickle to make pickle files"
|
||||
@echo " json to make JSON files"
|
||||
@echo " htmlhelp to make HTML files and a HTML help project"
|
||||
@echo " qthelp to make HTML files and a qthelp project"
|
||||
@echo " devhelp to make HTML files and a Devhelp project"
|
||||
@echo " epub to make an epub"
|
||||
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
|
||||
@echo " latexpdf to make LaTeX files and run them through pdflatex"
|
||||
@echo " text to make text files"
|
||||
@echo " man to make manual pages"
|
||||
@echo " texinfo to make Texinfo files"
|
||||
@echo " info to make Texinfo files and run them through makeinfo"
|
||||
@echo " gettext to make PO message catalogs"
|
||||
@echo " changes to make an overview of all changed/added/deprecated items"
|
||||
@echo " linkcheck to check all external links for integrity"
|
||||
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
|
||||
|
||||
clean:
|
||||
-rm -rf $(BUILDDIR)/*
|
||||
|
||||
html:
|
||||
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
|
||||
|
||||
dirhtml:
|
||||
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
|
||||
|
||||
singlehtml:
|
||||
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
|
||||
|
||||
pickle:
|
||||
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
|
||||
@echo
|
||||
@echo "Build finished; now you can process the pickle files."
|
||||
|
||||
json:
|
||||
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
|
||||
@echo
|
||||
@echo "Build finished; now you can process the JSON files."
|
||||
|
||||
htmlhelp:
|
||||
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run HTML Help Workshop with the" \
|
||||
".hhp project file in $(BUILDDIR)/htmlhelp."
|
||||
|
||||
qthelp:
|
||||
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
|
||||
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
|
||||
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/MozBase.qhcp"
|
||||
@echo "To view the help file:"
|
||||
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/MozBase.qhc"
|
||||
|
||||
devhelp:
|
||||
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
|
||||
@echo
|
||||
@echo "Build finished."
|
||||
@echo "To view the help file:"
|
||||
@echo "# mkdir -p $$HOME/.local/share/devhelp/MozBase"
|
||||
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/MozBase"
|
||||
@echo "# devhelp"
|
||||
|
||||
epub:
|
||||
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
|
||||
@echo
|
||||
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
|
||||
|
||||
latex:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo
|
||||
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
|
||||
@echo "Run \`make' in that directory to run these through (pdf)latex" \
|
||||
"(use \`make latexpdf' here to do that automatically)."
|
||||
|
||||
latexpdf:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo "Running LaTeX files through pdflatex..."
|
||||
$(MAKE) -C $(BUILDDIR)/latex all-pdf
|
||||
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
||||
|
||||
text:
|
||||
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
|
||||
@echo
|
||||
@echo "Build finished. The text files are in $(BUILDDIR)/text."
|
||||
|
||||
man:
|
||||
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
|
||||
@echo
|
||||
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
|
||||
|
||||
texinfo:
|
||||
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||
@echo
|
||||
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
|
||||
@echo "Run \`make' in that directory to run these through makeinfo" \
|
||||
"(use \`make info' here to do that automatically)."
|
||||
|
||||
info:
|
||||
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||
@echo "Running Texinfo files through makeinfo..."
|
||||
make -C $(BUILDDIR)/texinfo info
|
||||
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
|
||||
|
||||
gettext:
|
||||
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
|
||||
@echo
|
||||
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
|
||||
|
||||
changes:
|
||||
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
|
||||
@echo
|
||||
@echo "The overview file is in $(BUILDDIR)/changes."
|
||||
|
||||
linkcheck:
|
||||
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
|
||||
@echo
|
||||
@echo "Link check complete; look for any errors in the above output " \
|
||||
"or in $(BUILDDIR)/linkcheck/output.txt."
|
||||
|
||||
doctest:
|
||||
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
|
||||
@echo "Testing of doctests in the sources finished, look at the " \
|
||||
"results in $(BUILDDIR)/doctest/output.txt."
|
@ -1,248 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# MozBase documentation build configuration file, created by
|
||||
# sphinx-quickstart on Mon Oct 22 14:02:17 2012.
|
||||
#
|
||||
# This file is execfile()d with the current directory set to its containing dir.
|
||||
#
|
||||
# Note that not all possible configuration values are present in this
|
||||
# autogenerated file.
|
||||
#
|
||||
# All configuration values have a default; values that are commented out
|
||||
# serve to show the default.
|
||||
|
||||
import sys, os
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
here = os.path.dirname(os.path.abspath(__file__))
|
||||
parent = os.path.dirname(here)
|
||||
for item in os.listdir(parent):
|
||||
path = os.path.join(parent, item)
|
||||
if (not os.path.isdir(path)) or (not os.path.exists(os.path.join(path, 'setup.py'))):
|
||||
continue
|
||||
sys.path.insert(0, path)
|
||||
|
||||
# -- General configuration -----------------------------------------------------
|
||||
|
||||
# If your documentation needs a minimal Sphinx version, state it here.
|
||||
#needs_sphinx = '1.0'
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be extensions
|
||||
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
||||
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo', 'sphinx.ext.viewcode']
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
|
||||
# The suffix of source filenames.
|
||||
source_suffix = '.rst'
|
||||
|
||||
# The encoding of source files.
|
||||
#source_encoding = 'utf-8-sig'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = u'MozBase'
|
||||
copyright = u'2012, Mozilla Automation and Tools team'
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
# built documents.
|
||||
#
|
||||
# The short X.Y version.
|
||||
version = '1'
|
||||
# The full version, including alpha/beta/rc tags.
|
||||
release = '1'
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
#language = None
|
||||
|
||||
# There are two options for replacing |today|: either, you set today to some
|
||||
# non-false value, then it is used:
|
||||
#today = ''
|
||||
# Else, today_fmt is used as the format for a strftime call.
|
||||
#today_fmt = '%B %d, %Y'
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
exclude_patterns = ['_build']
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all documents.
|
||||
#default_role = None
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
#add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
#add_module_names = True
|
||||
|
||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||
# output. They are ignored by default.
|
||||
#show_authors = False
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
|
||||
# A list of ignored prefixes for module index sorting.
|
||||
#modindex_common_prefix = []
|
||||
|
||||
|
||||
# -- Options for HTML output ---------------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
html_theme = 'default'
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
# documentation.
|
||||
#html_theme_options = {}
|
||||
|
||||
# Add any paths that contain custom themes here, relative to this directory.
|
||||
#html_theme_path = []
|
||||
|
||||
# The name for this set of Sphinx documents. If None, it defaults to
|
||||
# "<project> v<release> documentation".
|
||||
html_title = "mozbase documentation"
|
||||
|
||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||
#html_short_title = None
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top
|
||||
# of the sidebar.
|
||||
#html_logo = None
|
||||
|
||||
# The name of an image file (within the static path) to use as favicon of the
|
||||
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||
# pixels large.
|
||||
#html_favicon = None
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ['_static']
|
||||
|
||||
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
||||
# using the given strftime format.
|
||||
#html_last_updated_fmt = '%b %d, %Y'
|
||||
|
||||
# If true, SmartyPants will be used to convert quotes and dashes to
|
||||
# typographically correct entities.
|
||||
#html_use_smartypants = True
|
||||
|
||||
# Custom sidebar templates, maps document names to template names.
|
||||
#html_sidebars = {}
|
||||
|
||||
# Additional templates that should be rendered to pages, maps page names to
|
||||
# template names.
|
||||
#html_additional_pages = {}
|
||||
|
||||
# If false, no module index is generated.
|
||||
#html_domain_indices = True
|
||||
|
||||
# If false, no index is generated.
|
||||
#html_use_index = True
|
||||
|
||||
# If true, the index is split into individual pages for each letter.
|
||||
#html_split_index = False
|
||||
|
||||
# If true, links to the reST sources are added to the pages.
|
||||
#html_show_sourcelink = True
|
||||
|
||||
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
||||
#html_show_sphinx = True
|
||||
|
||||
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
||||
#html_show_copyright = True
|
||||
|
||||
# If true, an OpenSearch description file will be output, and all pages will
|
||||
# contain a <link> tag referring to it. The value of this option must be the
|
||||
# base URL from which the finished HTML is served.
|
||||
#html_use_opensearch = ''
|
||||
|
||||
# This is the file name suffix for HTML files (e.g. ".xhtml").
|
||||
#html_file_suffix = None
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'MozBasedoc'
|
||||
|
||||
|
||||
# -- Options for LaTeX output --------------------------------------------------
|
||||
|
||||
latex_elements = {
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
#'papersize': 'letterpaper',
|
||||
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#'pointsize': '10pt',
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
#'preamble': '',
|
||||
}
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title, author, documentclass [howto/manual]).
|
||||
latex_documents = [
|
||||
('index', 'MozBase.tex', u'MozBase Documentation',
|
||||
u'Mozilla Automation and Tools team', 'manual'),
|
||||
]
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top of
|
||||
# the title page.
|
||||
#latex_logo = None
|
||||
|
||||
# For "manual" documents, if this is true, then toplevel headings are parts,
|
||||
# not chapters.
|
||||
#latex_use_parts = False
|
||||
|
||||
# If true, show page references after internal links.
|
||||
#latex_show_pagerefs = False
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#latex_show_urls = False
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#latex_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#latex_domain_indices = True
|
||||
|
||||
|
||||
# -- Options for manual page output --------------------------------------------
|
||||
|
||||
# One entry per manual page. List of tuples
|
||||
# (source start file, name, description, authors, manual section).
|
||||
man_pages = [
|
||||
('index', 'mozbase', u'MozBase Documentation',
|
||||
[u'Mozilla Automation and Tools team'], 1)
|
||||
]
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#man_show_urls = False
|
||||
|
||||
|
||||
# -- Options for Texinfo output ------------------------------------------------
|
||||
|
||||
# Grouping the document tree into Texinfo files. List of tuples
|
||||
# (source start file, target name, title, author,
|
||||
# dir menu entry, description, category)
|
||||
texinfo_documents = [
|
||||
('index', 'MozBase', u'MozBase Documentation',
|
||||
u'Mozilla Automation and Tools team', 'MozBase', 'One line description of project.',
|
||||
'Miscellaneous'),
|
||||
]
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#texinfo_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#texinfo_domain_indices = True
|
||||
|
||||
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
||||
#texinfo_show_urls = 'footnote'
|
@ -1,11 +0,0 @@
|
||||
Device management
|
||||
-----------------
|
||||
|
||||
Mozbase provides a module called `mozdevice` for the purposes of
|
||||
running automated tests or scripts on a device (e.g. an Android- or
|
||||
FirefoxOS-based phone) connected to a workstation.
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
mozdevice
|
@ -1,13 +0,0 @@
|
||||
Getting information on the system under test
|
||||
============================================
|
||||
|
||||
It's often necessary to get some information about the system we're
|
||||
testing, for example to turn on or off some platform specific
|
||||
behaviour.
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
mozinfo
|
||||
moznetwork
|
||||
mozversion
|
@ -1,57 +0,0 @@
|
||||
.. MozBase documentation master file, created by
|
||||
sphinx-quickstart on Mon Oct 22 14:02:17 2012.
|
||||
You can adapt this file completely to your liking, but it should at least
|
||||
contain the root `toctree` directive.
|
||||
|
||||
mozbase
|
||||
=======
|
||||
|
||||
Mozbase is a set of easy-to-use Python packages forming a supplemental standard
|
||||
library for Mozilla. It provides consistency and reduces redundancy in
|
||||
automation and other system-level software. All of Mozilla's test harnesses use
|
||||
mozbase to some degree, including Talos_, mochitest_, reftest_, Autophone_, and
|
||||
Eideticker_.
|
||||
|
||||
.. _Talos: https://wiki.mozilla.org/Talos
|
||||
|
||||
.. _mochitest: https://developer.mozilla.org/en-US/docs/Mochitest
|
||||
|
||||
.. _reftest: https://developer.mozilla.org/en-US/docs/Creating_reftest-based_unit_tests
|
||||
|
||||
.. _Autophone: https://wiki.mozilla.org/Auto-tools/Projects/AutoPhone
|
||||
|
||||
.. _Eideticker: https://wiki.mozilla.org/Project_Eideticker
|
||||
|
||||
In the course of writing automated tests at Mozilla, we found that
|
||||
the same tasks came up over and over, regardless of the specific nature of
|
||||
what we were testing. We figured that consolidating this code into a set of
|
||||
libraries would save us a good deal of time, and so we spent some effort
|
||||
factoring out the best-of-breed automation code into something we named
|
||||
"mozbase" (usually written all in lower case except at the beginning of a
|
||||
sentence).
|
||||
|
||||
This is the main documentation for users of mozbase. There is also a
|
||||
project_ wiki page with notes on development practices and administration.
|
||||
|
||||
.. _project: https://wiki.mozilla.org/Auto-tools/Projects/Mozbase
|
||||
|
||||
The documentation is organized by category, then by module. Figure out what you
|
||||
want to do then dive in!
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
manifestdestiny
|
||||
gettinginfo
|
||||
setuprunning
|
||||
mozhttpd
|
||||
loggingreporting
|
||||
devicemanagement
|
||||
|
||||
Indices and tables
|
||||
==================
|
||||
|
||||
* :ref:`genindex`
|
||||
* :ref:`modindex`
|
||||
* :ref:`search`
|
||||
|
@ -1,12 +0,0 @@
|
||||
Logging and reporting
|
||||
=====================
|
||||
|
||||
Ideally output between different types of testing system should be as
|
||||
uniform as possible, as well as making it easy to make things more or
|
||||
less verbose. We created some libraries to make doing this easy.
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
mozlog
|
||||
|
@ -1,190 +0,0 @@
|
||||
@ECHO OFF
|
||||
|
||||
REM Command file for Sphinx documentation
|
||||
|
||||
if "%SPHINXBUILD%" == "" (
|
||||
set SPHINXBUILD=sphinx-build
|
||||
)
|
||||
set BUILDDIR=_build
|
||||
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
|
||||
set I18NSPHINXOPTS=%SPHINXOPTS% .
|
||||
if NOT "%PAPER%" == "" (
|
||||
set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
|
||||
set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
|
||||
)
|
||||
|
||||
if "%1" == "" goto help
|
||||
|
||||
if "%1" == "help" (
|
||||
:help
|
||||
echo.Please use `make ^<target^>` where ^<target^> is one of
|
||||
echo. html to make standalone HTML files
|
||||
echo. dirhtml to make HTML files named index.html in directories
|
||||
echo. singlehtml to make a single large HTML file
|
||||
echo. pickle to make pickle files
|
||||
echo. json to make JSON files
|
||||
echo. htmlhelp to make HTML files and a HTML help project
|
||||
echo. qthelp to make HTML files and a qthelp project
|
||||
echo. devhelp to make HTML files and a Devhelp project
|
||||
echo. epub to make an epub
|
||||
echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
|
||||
echo. text to make text files
|
||||
echo. man to make manual pages
|
||||
echo. texinfo to make Texinfo files
|
||||
echo. gettext to make PO message catalogs
|
||||
echo. changes to make an overview over all changed/added/deprecated items
|
||||
echo. linkcheck to check all external links for integrity
|
||||
echo. doctest to run all doctests embedded in the documentation if enabled
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "clean" (
|
||||
for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
|
||||
del /q /s %BUILDDIR%\*
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "html" (
|
||||
%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The HTML pages are in %BUILDDIR%/html.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "dirhtml" (
|
||||
%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "singlehtml" (
|
||||
%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "pickle" (
|
||||
%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can process the pickle files.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "json" (
|
||||
%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can process the JSON files.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "htmlhelp" (
|
||||
%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can run HTML Help Workshop with the ^
|
||||
.hhp project file in %BUILDDIR%/htmlhelp.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "qthelp" (
|
||||
%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can run "qcollectiongenerator" with the ^
|
||||
.qhcp project file in %BUILDDIR%/qthelp, like this:
|
||||
echo.^> qcollectiongenerator %BUILDDIR%\qthelp\MozBase.qhcp
|
||||
echo.To view the help file:
|
||||
echo.^> assistant -collectionFile %BUILDDIR%\qthelp\MozBase.ghc
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "devhelp" (
|
||||
%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "epub" (
|
||||
%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The epub file is in %BUILDDIR%/epub.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "latex" (
|
||||
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "text" (
|
||||
%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The text files are in %BUILDDIR%/text.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "man" (
|
||||
%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The manual pages are in %BUILDDIR%/man.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "texinfo" (
|
||||
%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "gettext" (
|
||||
%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "changes" (
|
||||
%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.The overview file is in %BUILDDIR%/changes.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "linkcheck" (
|
||||
%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Link check complete; look for any errors in the above output ^
|
||||
or in %BUILDDIR%/linkcheck/output.txt.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "doctest" (
|
||||
%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Testing of doctests in the sources finished, look at the ^
|
||||
results in %BUILDDIR%/doctest/output.txt.
|
||||
goto end
|
||||
)
|
||||
|
||||
:end
|
@ -1,489 +0,0 @@
|
||||
Managing lists of tests
|
||||
=======================
|
||||
|
||||
We don't always want to run all tests, all the time. Sometimes a test
|
||||
may be broken, in other cases we only want to run a test on a specific
|
||||
platform or build of Mozilla. To handle these cases (and more), we
|
||||
created a python library to create and use test "manifests", which
|
||||
codify this information.
|
||||
|
||||
:mod:`manifestdestiny` --- Create and manage test manifests
|
||||
-----------------------------------------------------------
|
||||
|
||||
manifestdestiny lets you easily create and use test manifests, to
|
||||
control which tests are run under what circumstances.
|
||||
|
||||
What ManifestDestiny gives you:
|
||||
|
||||
* manifests are ordered lists of tests
|
||||
* tests may have an arbitrary number of key, value pairs
|
||||
* the parser returns an ordered list of test data structures, which
|
||||
are just dicts with some keys. For example, a test with no
|
||||
user-specified metadata looks like this:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
[{'expected': 'pass',
|
||||
'path': '/home/mozilla/mozmill/src/ManifestDestiny/manifestdestiny/tests/testToolbar/testBackForwardButtons.js',
|
||||
'relpath': 'testToolbar/testBackForwardButtons.js',
|
||||
'name': 'testBackForwardButtons.js',
|
||||
'here': '/home/mozilla/mozmill/src/ManifestDestiny/manifestdestiny/tests',
|
||||
'manifest': '/home/mozilla/mozmill/src/ManifestDestiny/manifestdestiny/tests/manifest.ini',}]
|
||||
|
||||
The keys displayed here (path, relpath, name, here, and manifest) are
|
||||
reserved keys for ManifestDestiny and any consuming APIs. You can add
|
||||
additional key, value metadata to each test.
|
||||
|
||||
Why have test manifests?
|
||||
````````````````````````
|
||||
|
||||
It is desirable to have a unified format for test manifests for testing
|
||||
[mozilla-central](http://hg.mozilla.org/mozilla-central), etc.
|
||||
|
||||
* It is desirable to be able to selectively enable or disable tests based on platform or other conditions. This should be easy to do. Currently, since many of the harnesses just crawl directories, there is no effective way of disabling a test except for removal from mozilla-central
|
||||
* It is desirable to do this in a universal way so that enabling and disabling tests as well as other tasks are easily accessible to a wider audience than just those intimately familiar with the specific test framework.
|
||||
* It is desirable to have other metadata on top of the test. For instance, let's say a test is marked as skipped. It would be nice to give the reason why.
|
||||
|
||||
|
||||
Most Mozilla test harnesses work by crawling a directory structure.
|
||||
While this is straight-forward, manifests offer several practical
|
||||
advantages:
|
||||
|
||||
* ability to turn a test off easily: if a test is broken on m-c
|
||||
currently, the only way to turn it off, generally speaking, is just
|
||||
removing the test. Often this is undesirable, as if the test should
|
||||
be dismissed because other people want to land and it can't be
|
||||
investigated in real time (is it a failure? is the test bad? is no
|
||||
one around that knows the test?), then backing out a test is at best
|
||||
problematic. With a manifest, a test may be disabled without
|
||||
removing it from the tree and a bug filed with the appropriate
|
||||
reason:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
[test_broken.js]
|
||||
disabled = https://bugzilla.mozilla.org/show_bug.cgi?id=123456
|
||||
|
||||
* ability to run different (subsets of) tests on different
|
||||
platforms. Traditionally, we've done a bit of magic or had the test
|
||||
know what platform it would or would not run on. With manifests, you
|
||||
can mark what platforms a test will or will not run on and change
|
||||
these without changing the test.
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
[test_works_on_windows_only.js]
|
||||
run-if = os == 'win'
|
||||
|
||||
* ability to markup tests with metadata. We have a large, complicated,
|
||||
and always changing infrastructure. key, value metadata may be used
|
||||
as an annotation to a test and appropriately curated and mined. For
|
||||
instance, we could mark certain tests as randomorange with a bug
|
||||
number, if it were desirable.
|
||||
|
||||
* ability to have sane and well-defined test-runs. You can keep
|
||||
different manifests for different test runs and ``[include:]``
|
||||
(sub)manifests as appropriate to your needs.
|
||||
|
||||
Manifest Format
|
||||
```````````````
|
||||
|
||||
Manifests are .ini files with the section names denoting the path
|
||||
relative to the manifest:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
[foo.js]
|
||||
[bar.js]
|
||||
[fleem.js]
|
||||
|
||||
The sections are read in order. In addition, tests may include
|
||||
arbitrary key, value metadata to be used by the harness. You may also
|
||||
have a `[DEFAULT]` section that will give key, value pairs that will
|
||||
be inherited by each test unless overridden:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
[DEFAULT]
|
||||
type = restart
|
||||
|
||||
[lilies.js]
|
||||
color = white
|
||||
|
||||
[daffodils.js]
|
||||
color = yellow
|
||||
type = other
|
||||
# override type from DEFAULT
|
||||
|
||||
[roses.js]
|
||||
color = red
|
||||
|
||||
You can also include other manifests:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
[include:subdir/anothermanifest.ini]
|
||||
|
||||
Manifests are included relative to the directory of the manifest with
|
||||
the `[include:]` directive unless they are absolute paths.
|
||||
|
||||
By default you can use both '#' and ';' as comment characters. Comments
|
||||
must start on a new line, inline comments are not supported.
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
[roses.js]
|
||||
# a valid comment
|
||||
; another valid comment
|
||||
color = red # not a valid comment
|
||||
|
||||
In the example above, the 'color' property will have the value 'red #
|
||||
not a valid comment'.
|
||||
|
||||
Manifest Conditional Expressions
|
||||
````````````````````````````````
|
||||
The conditional expressions used in manifests are parsed using the *ExpressionParser* class.
|
||||
|
||||
.. autoclass:: manifestparser.ExpressionParser
|
||||
|
||||
Consumers of this module are expected to pass in a value dictionary
|
||||
for evaluating conditional expressions. A common pattern is to pass
|
||||
the dictionary from the :mod:`mozinfo` module.
|
||||
|
||||
Data
|
||||
````
|
||||
|
||||
Manifest Destiny gives tests as a list of dictionaries (in python
|
||||
terms).
|
||||
|
||||
* path: full path to the test
|
||||
* relpath: relative path starting from the root manifest location
|
||||
* name: file name of the test
|
||||
* here: the parent directory of the manifest
|
||||
* manifest: the path to the manifest containing the test
|
||||
|
||||
This data corresponds to a one-line manifest:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
[testToolbar/testBackForwardButtons.js]
|
||||
|
||||
If additional key, values were specified, they would be in this dict
|
||||
as well.
|
||||
|
||||
Outside of the reserved keys, the remaining key, values
|
||||
are up to convention to use. There is a (currently very minimal)
|
||||
generic integration layer in ManifestDestiny for use of all harnesses,
|
||||
`manifestparser.TestManifest`.
|
||||
For instance, if the 'disabled' key is present, you can get the set of
|
||||
tests without disabled (various other queries are doable as well).
|
||||
|
||||
Since the system is convention-based, the harnesses may do whatever
|
||||
they want with the data. They may ignore it completely, they may use
|
||||
the provided integration layer, or they may provide their own
|
||||
integration layer. This should allow whatever sort of logic is
|
||||
desired. For instance, if in yourtestharness you wanted to run only on
|
||||
mondays for a certain class of tests:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
tests = []
|
||||
for test in manifests.tests:
|
||||
if 'runOnDay' in test:
|
||||
if calendar.day_name[calendar.weekday(*datetime.datetime.now().timetuple()[:3])].lower() == test['runOnDay'].lower():
|
||||
tests.append(test)
|
||||
else:
|
||||
tests.append(test)
|
||||
|
||||
To recap:
|
||||
* the manifests allow you to specify test data
|
||||
* the parser gives you this data
|
||||
* you can use it however you want or process it further as you need
|
||||
|
||||
Tests are denoted by sections in an .ini file (see
|
||||
http://hg.mozilla.org/automation/ManifestDestiny/file/tip/manifestdestiny/tests/mozmill-example.ini).
|
||||
|
||||
Additional manifest files may be included with an `[include:]` directive:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
[include:path-to-additional-file.manifest]
|
||||
|
||||
The path to included files is relative to the current manifest.
|
||||
|
||||
The `[DEFAULT]` section contains variables that all tests inherit from.
|
||||
|
||||
Included files will inherit the top-level variables but may override
|
||||
in their own `[DEFAULT]` section.
|
||||
|
||||
ManifestDestiny Architecture
|
||||
````````````````````````````
|
||||
|
||||
There is a two- or three-layered approach to the ManifestDestiny
|
||||
architecture, depending on your needs:
|
||||
|
||||
1. ManifestParser: this is a generic parser for .ini manifests that
|
||||
facilitates the `[include:]` logic and the inheritance of
|
||||
metadata. Despite the internal variable being called `self.tests`
|
||||
(an oversight), this layer has nothing in particular to do with tests.
|
||||
|
||||
2. TestManifest: this is a harness-agnostic integration layer that is
|
||||
test-specific. TestManifest facilitates `skip-if` and `run-if` logic.
|
||||
|
||||
3. Optionally, a harness will have an integration layer that inherits
|
||||
from TestManifest if more harness-specific customization is desired at
|
||||
the manifest level.
|
||||
|
||||
See the source code at https://github.com/mozilla/mozbase/tree/master/manifestdestiny
|
||||
and
|
||||
https://github.com/mozilla/mozbase/blob/master/manifestdestiny/manifestparser.py
|
||||
in particular.
|
||||
|
||||
Using Manifests
|
||||
```````````````
|
||||
|
||||
A test harness will normally call `TestManifest.active_tests`:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
def active_tests(self, exists=True, disabled=True, **tags):
|
||||
|
||||
The manifests are passed to the `__init__` or `read` methods with
|
||||
appropriate arguments. `active_tests` then allows you to select the
|
||||
tests you want:
|
||||
|
||||
- exists : return only existing tests
|
||||
- disabled : whether to return disabled tests; if not these will be
|
||||
filtered out; if True (the default), the `disabled` key of a
|
||||
test's metadata will be present and will be set to the reason that a
|
||||
test is disabled
|
||||
- tags : keys and values to filter on (e.g. `os='linux'`)
|
||||
|
||||
`active_tests` looks for tests with `skip-if`
|
||||
and `run-if`. If the condition is or is not fulfilled,
|
||||
respectively, the test is marked as disabled. For instance, if you
|
||||
pass `**dict(os='linux')` as `**tags`, if a test contains a line
|
||||
`skip-if = os == 'linux'` this test will be disabled, or
|
||||
`run-if = os == 'win'` in which case the test will also be disabled. It
|
||||
is up to the harness to pass in tags appropriate to its usage.
|
||||
|
||||
Creating Manifests
|
||||
``````````````````
|
||||
|
||||
ManifestDestiny comes with a console script, `manifestparser create`, that
|
||||
may be used to create a seed manifest structure from a directory of
|
||||
files. Run `manifestparser help create` for usage information.
|
||||
|
||||
Copying Manifests
|
||||
`````````````````
|
||||
|
||||
To copy tests and manifests from a source:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
manifestparser [options] copy from_manifest to_directory -tag1 -tag2 --key1=value1 --key2=value2 ...
|
||||
|
||||
Updating Tests
|
||||
``````````````
|
||||
|
||||
To update the tests associated with a manifest from a source
|
||||
directory:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
manifestparser [options] update manifest from_directory -tag1 -tag2 --key1=value1 --key2=value2 ...
|
||||
|
||||
Usage example
|
||||
`````````````
|
||||
|
||||
Here is an example of how to create manifests for a directory tree and
|
||||
update the tests listed in the manifests from an external source.
|
||||
|
||||
Creating Manifests
|
||||
``````````````````
|
||||
|
||||
Let's say you want to make a series of manifests for a given directory structure containing `.js` test files:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
testing/mozmill/tests/firefox/
|
||||
testing/mozmill/tests/firefox/testAwesomeBar/
|
||||
testing/mozmill/tests/firefox/testPreferences/
|
||||
testing/mozmill/tests/firefox/testPrivateBrowsing/
|
||||
testing/mozmill/tests/firefox/testSessionStore/
|
||||
testing/mozmill/tests/firefox/testTechnicalTools/
|
||||
testing/mozmill/tests/firefox/testToolbar/
|
||||
testing/mozmill/tests/firefox/restartTests
|
||||
|
||||
You can use `manifestparser create` to do this:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
$ manifestparser help create
|
||||
Usage: manifestparser.py [options] create directory <directory> <...>
|
||||
|
||||
create a manifest from a list of directories
|
||||
|
||||
Options:
|
||||
-p PATTERN, --pattern=PATTERN
|
||||
glob pattern for files
|
||||
-i IGNORE, --ignore=IGNORE
|
||||
directories to ignore
|
||||
-w IN_PLACE, --in-place=IN_PLACE
|
||||
Write .ini files in place; filename to write to
|
||||
|
||||
We only want `.js` files and we want to skip the `restartTests` directory.
|
||||
We also want to write a manifest per directory, so I use the `--in-place`
|
||||
option to write the manifests:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
manifestparser create . -i restartTests -p '*.js' -w manifest.ini
|
||||
|
||||
This creates a manifest.ini per directory that we care about with the JS test files:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
testing/mozmill/tests/firefox/manifest.ini
|
||||
testing/mozmill/tests/firefox/testAwesomeBar/manifest.ini
|
||||
testing/mozmill/tests/firefox/testPreferences/manifest.ini
|
||||
testing/mozmill/tests/firefox/testPrivateBrowsing/manifest.ini
|
||||
testing/mozmill/tests/firefox/testSessionStore/manifest.ini
|
||||
testing/mozmill/tests/firefox/testTechnicalTools/manifest.ini
|
||||
testing/mozmill/tests/firefox/testToolbar/manifest.ini
|
||||
|
||||
The top-level `manifest.ini` merely has `[include:]` references to the sub manifests:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
[include:testAwesomeBar/manifest.ini]
|
||||
[include:testPreferences/manifest.ini]
|
||||
[include:testPrivateBrowsing/manifest.ini]
|
||||
[include:testSessionStore/manifest.ini]
|
||||
[include:testTechnicalTools/manifest.ini]
|
||||
[include:testToolbar/manifest.ini]
|
||||
|
||||
Each sub-level manifest contains the (`.js`) test files relative to it.
|
||||
|
||||
Updating the tests from manifests
|
||||
`````````````````````````````````
|
||||
|
||||
You may need to update tests as given in manifests from a different source directory.
|
||||
`manifestparser update` was made for just this purpose:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
Usage: manifestparser [options] update manifest directory -tag1 -tag2 --key1=value1 --key2=value2 ...
|
||||
|
||||
update the tests as listed in a manifest from a directory
|
||||
|
||||
To update from a directory of tests in `~/mozmill/src/mozmill-tests/firefox/` run:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
manifestparser update manifest.ini ~/mozmill/src/mozmill-tests/firefox/
|
||||
|
||||
Tests
|
||||
`````
|
||||
|
||||
ManifestDestiny includes a suite of tests:
|
||||
|
||||
https://github.com/mozilla/mozbase/tree/master/manifestdestiny/tests
|
||||
|
||||
`test_manifest.txt` is a doctest that may be helpful in figuring out
|
||||
how to use the API. Tests are run via `python test.py`.
|
||||
|
||||
Bugs
|
||||
````
|
||||
|
||||
Please file any bugs or feature requests at
|
||||
|
||||
https://bugzilla.mozilla.org/enter_bug.cgi?product=Testing&component=ManifestParser
|
||||
|
||||
Or contact jhammel @mozilla.org or in #ateam on irc.mozilla.org
|
||||
|
||||
CLI
|
||||
```
|
||||
|
||||
Run `manifestparser help` for usage information.
|
||||
|
||||
To create a manifest from a set of directories:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
manifestparser [options] create directory <directory> <...> [create-options]
|
||||
|
||||
To output a manifest of tests:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
manifestparser [options] write manifest <manifest> <...> -tag1 -tag2 --key1=value1 --key2=value2 ...
|
||||
|
||||
To copy tests and manifests from a source:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
manifestparser [options] copy from_manifest to_manifest -tag1 -tag2 --key1=value1 --key2=value2 ...
|
||||
|
||||
To update the tests associated with a manifest from a source
|
||||
directory:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
manifestparser [options] update manifest from_directory -tag1 -tag2 --key1=value1 --key2=value2 ...
|
||||
|
||||
Design Considerations
|
||||
`````````````````````
|
||||
|
||||
Contrary to some opinion, manifestparser.py and the associated .ini
|
||||
format were not magically plucked from the sky but were descended upon
|
||||
through several design considerations.
|
||||
|
||||
* test manifests should be ordered. While python 2.6 and greater has
|
||||
a ConfigParser that can use an ordered dictionary, it is a
|
||||
requirement that we support python 2.4 for the build + testing
|
||||
environment. To that end, a `read_ini` function was implemented
|
||||
in manifestparser.py that should be the equivalent of the .ini
|
||||
dialect used by ConfigParser.
|
||||
|
||||
* the manifest format should be easily human readable/writable. While
|
||||
there was initially some thought of using JSON, there was pushback
|
||||
that JSON was not easily editable. An ideal manifest format would
|
||||
degenerate to a line-separated list of files. While .ini format
|
||||
requires an additional `[]` per line, and while there have been
|
||||
complaints about this, hopefully this is good enough.
|
||||
|
||||
* python does not have an in-built YAML parser. Since it was
|
||||
undesirable for manifestparser.py to have any dependencies, YAML was
|
||||
dismissed as a format.
|
||||
|
||||
* we could have used a proprietary format but decided against it.
|
||||
Everyone knows .ini and there are good tools to deal with it.
|
||||
However, since read_ini is the only function that transforms a
|
||||
manifest to a list of key, value pairs, while the implications for
|
||||
changing the format impacts downstream code, doing so should be
|
||||
programmatically simple.
|
||||
|
||||
* there should be a single file that may easily be
|
||||
transported. Traditionally, test harnesses have lived in
|
||||
mozilla-central. This is less true these days and it is increasingly
|
||||
likely that more tests will not live in mozilla-central going
|
||||
forward. So `manifestparser.py` should be highly consumable. To
|
||||
this end, it is a single file, as appropriate to mozilla-central,
|
||||
which is also a working python package deployed to PyPI for easy
|
||||
installation.
|
||||
|
||||
Historical Reference
|
||||
````````````````````
|
||||
|
||||
Date-ordered list of links about how manifests came to be where they are today:
|
||||
|
||||
* https://wiki.mozilla.org/Auto-tools/Projects/UniversalManifest
|
||||
* http://alice.nodelman.net/blog/post/2010/05/
|
||||
* http://alice.nodelman.net/blog/post/universal-manifest-for-unit-tests-a-proposal/
|
||||
* https://elvis314.wordpress.com/2010/07/05/improving-personal-hygiene-by-adjusting-mochitests/
|
||||
* https://elvis314.wordpress.com/2010/07/27/types-of-data-we-care-about-in-a-manifest/
|
||||
* https://bugzilla.mozilla.org/show_bug.cgi?id=585106
|
||||
* http://elvis314.wordpress.com/2011/05/20/converting-xpcshell-from-listing-directories-to-a-manifest/
|
||||
* https://bugzilla.mozilla.org/show_bug.cgi?id=616999
|
||||
* https://developer.mozilla.org/en/Writing_xpcshell-based_unit_tests#Adding_your_tests_to_the_xpcshell_manifest
|
@ -1,8 +0,0 @@
|
||||
:mod:`mozcrash` --- Print stack traces from minidumps left behind by crashed processes
|
||||
======================================================================================
|
||||
|
||||
Gets stack traces out of processes that have crashed and left behind
|
||||
a minidump file using the Google Breakpad library.
|
||||
|
||||
.. automodule:: mozcrash
|
||||
:members: check_for_crashes
|
@ -1,121 +0,0 @@
|
||||
:mod:`mozdevice` --- Interact with remote devices
|
||||
=================================================
|
||||
|
||||
Mozdevice provides an interface to interact with a remote device such
|
||||
as an Android- or FirefoxOS-based phone connected to a
|
||||
host machine. Currently there are two implementations of the interface: one
|
||||
uses a custom TCP-based protocol to communicate with a server running
|
||||
on the device, another uses Android's adb utility.
|
||||
|
||||
.. automodule:: mozdevice
|
||||
|
||||
DeviceManager interface
|
||||
-----------------------
|
||||
.. autoclass:: DeviceManager
|
||||
|
||||
Here's an example script which lists the files in '/mnt/sdcard' and sees if a
|
||||
process called 'org.mozilla.fennec' is running. In this example, we're
|
||||
instantiating the DeviceManagerADB implementation, but we could just
|
||||
as easily have used DeviceManagerSUT (assuming the device had an agent
|
||||
running speaking the SUT protocol).
|
||||
|
||||
::
|
||||
|
||||
import mozdevice
|
||||
|
||||
dm = mozdevice.DeviceManagerADB()
|
||||
print dm.listFiles("/mnt/sdcard")
|
||||
if dm.processExist("org.mozilla.fennec"):
|
||||
print "Fennec is running"
|
||||
|
||||
Informational methods
|
||||
`````````````````````
|
||||
.. automethod:: DeviceManager.getInfo(self, directive=None)
|
||||
.. automethod:: DeviceManager.getCurrentTime(self)
|
||||
.. automethod:: DeviceManager.getIP
|
||||
.. automethod:: DeviceManager.saveScreenshot
|
||||
.. automethod:: DeviceManager.recordLogcat
|
||||
.. automethod:: DeviceManager.getLogcat
|
||||
|
||||
File management methods
|
||||
```````````````````````
|
||||
.. automethod:: DeviceManager.pushFile(self, localFilename, remoteFilename, retryLimit=1)
|
||||
.. automethod:: DeviceManager.pushDir(self, localDirname, remoteDirname, retryLimit=1)
|
||||
.. automethod:: DeviceManager.pullFile(self, remoteFilename)
|
||||
.. automethod:: DeviceManager.getFile(self, remoteFilename, localFilename)
|
||||
.. automethod:: DeviceManager.getDirectory(self, remoteDirname, localDirname, checkDir=True)
|
||||
.. automethod:: DeviceManager.validateFile(self, remoteFilename, localFilename)
|
||||
.. automethod:: DeviceManager.mkDir(self, remoteDirname)
|
||||
.. automethod:: DeviceManager.mkDirs(self, filename)
|
||||
.. automethod:: DeviceManager.dirExists(self, dirpath)
|
||||
.. automethod:: DeviceManager.fileExists(self, filepath)
|
||||
.. automethod:: DeviceManager.listFiles(self, rootdir)
|
||||
.. automethod:: DeviceManager.removeFile(self, filename)
|
||||
.. automethod:: DeviceManager.removeDir(self, remoteDirname)
|
||||
.. automethod:: DeviceManager.chmodDir(self, remoteDirname, mask="777")
|
||||
.. automethod:: DeviceManager.getDeviceRoot(self)
|
||||
.. automethod:: DeviceManager.getAppRoot(self, packageName=None)
|
||||
.. automethod:: DeviceManager.getTestRoot(self, harnessName)
|
||||
.. automethod:: DeviceManager.getTempDir(self)
|
||||
|
||||
Process management methods
|
||||
``````````````````````````
|
||||
.. automethod:: DeviceManager.shell(self, cmd, outputfile, env=None, cwd=None, timeout=None, root=False)
|
||||
.. automethod:: DeviceManager.shellCheckOutput(self, cmd, env=None, cwd=None, timeout=None, root=False)
|
||||
.. automethod:: DeviceManager.getProcessList(self)
|
||||
.. automethod:: DeviceManager.processExist(self, processName)
|
||||
.. automethod:: DeviceManager.killProcess(self, processName)
|
||||
|
||||
System control methods
|
||||
``````````````````````
|
||||
.. automethod:: DeviceManager.reboot(self, ipAddr=None, port=30000)
|
||||
|
||||
Application management methods
|
||||
``````````````````````````````
|
||||
.. automethod:: DeviceManager.uninstallAppAndReboot(self, appName, installPath=None)
|
||||
.. automethod:: DeviceManager.installApp(self, appBundlePath, destPath=None)
|
||||
.. automethod:: DeviceManager.uninstallApp(self, appName, installPath=None)
|
||||
.. automethod:: DeviceManager.updateApp(self, appBundlePath, processName=None, destPath=None, ipAddr=None, port=30000)
|
||||
|
||||
DeviceManagerADB implementation
|
||||
-------------------------------
|
||||
|
||||
.. autoclass:: mozdevice.DeviceManagerADB
|
||||
|
||||
ADB-specific methods
|
||||
````````````````````
|
||||
DeviceManagerADB has several methods that are not present in all
|
||||
DeviceManager implementations. Please do not use them in code that
|
||||
is meant to be interoperable.
|
||||
|
||||
.. automethod:: DeviceManagerADB.forward
|
||||
.. automethod:: DeviceManagerADB.remount
|
||||
.. automethod:: DeviceManagerADB.devices
|
||||
|
||||
DeviceManagerSUT implementation
|
||||
-------------------------------
|
||||
|
||||
.. autoclass:: mozdevice.DeviceManagerSUT
|
||||
|
||||
SUT-specific methods
|
||||
````````````````````
|
||||
DeviceManagerSUT has several methods that are only used in specific
|
||||
tests and are not present in all DeviceManager implementations. Please
|
||||
do not use them in code that is meant to be interoperable.
|
||||
|
||||
.. automethod:: DeviceManagerSUT.unpackFile
|
||||
.. automethod:: DeviceManagerSUT.adjustResolution
|
||||
|
||||
Android extensions
|
||||
------------------
|
||||
|
||||
For Android, we provide two variants of the `DeviceManager` interface
|
||||
with extensions useful for that platform. These classes are called
|
||||
DroidADB and DroidSUT. They inherit all methods from DeviceManagerADB
|
||||
and DeviceManagerSUT. Here is the interface for DroidADB:
|
||||
|
||||
.. automethod:: mozdevice.DroidADB.launchApplication
|
||||
.. automethod:: mozdevice.DroidADB.launchFennec
|
||||
.. automethod:: mozdevice.DroidADB.getInstalledApps
|
||||
|
||||
These methods are also found in the DroidSUT class.
|
@ -1,10 +0,0 @@
|
||||
:mod:`mozfile` --- File utilities for use in Mozilla testing
|
||||
============================================================
|
||||
|
||||
mozfile is a convenience library for taking care of some common file-related
|
||||
tasks in automated testing, such as extracting files or recursively removing
|
||||
directories.
|
||||
|
||||
.. automodule:: mozfile
|
||||
:members: extract, extract_tarball, extract_zip, rmtree
|
||||
|
@ -1,18 +0,0 @@
|
||||
Serving up content to be consumed by the browser
|
||||
================================================
|
||||
|
||||
I know, right? ANOTHER Python HTTP server? In all seriousness, we
|
||||
weren't able to find anything out there that was fast enough, flexible
|
||||
enough, and easy-to-use enough for our needs. So we created our own.
|
||||
|
||||
:mod:`mozhttpd` --- Simple webserver
|
||||
------------------------------------
|
||||
|
||||
.. automodule:: mozhttpd
|
||||
:members:
|
||||
|
||||
Interface
|
||||
`````````
|
||||
|
||||
.. autoclass:: MozHttpd
|
||||
:members:
|
@ -1,71 +0,0 @@
|
||||
:mod:`mozinfo` --- Get system information
|
||||
=========================================
|
||||
|
||||
Throughout `mozmill <https://developer.mozilla.org/en/Mozmill>`_
|
||||
and other Mozilla python code, checking the underlying
|
||||
platform is done in many different ways. The various checks needed
|
||||
lead to a lot of copy+pasting, leaving the reader to wonder....is this
|
||||
specific check necessary for (e.g.) an operating system? Because
|
||||
information is not consolidated, checks are not done consistently, nor
|
||||
is it defined what we are checking for.
|
||||
|
||||
`mozinfo <https://github.com/mozilla/mozbase/tree/master/mozinfo>`_
|
||||
proposes to solve this problem. mozinfo is a bridge interface,
|
||||
making the underlying (complex) plethora of OS and architecture
|
||||
combinations conform to a subset of values of relevance to
|
||||
Mozilla software. The current implementation exposes relevant keys and
|
||||
values such as: ``os``, ``version``, ``bits``, and ``processor``. Additionally, the
|
||||
service pack in use is available on the windows platform.
|
||||
|
||||
|
||||
API Usage
|
||||
---------
|
||||
|
||||
mozinfo is a python package. Downloading the software and running
|
||||
``python setup.py develop`` will allow you to do ``import mozinfo``
|
||||
from python.
|
||||
`mozinfo.py <https://raw.github.com/mozilla/mozbase/master/mozinfo/mozinfo/mozinfo.py>`_
|
||||
is the only file contained in this package,
|
||||
so if you need a single-file solution, you can just download or call
|
||||
this file through the web.
|
||||
|
||||
The top level attributes (``os``, ``version``, ``bits``, ``processor``) are
|
||||
available as module globals::
|
||||
|
||||
if mozinfo.os == 'win': ...
|
||||
|
||||
In addition, mozinfo exports a dictionary, ``mozinfo.info``, that
|
||||
contains these values. mozinfo also exports:
|
||||
|
||||
- ``choices``: a dictionary of possible values for os, bits, and
|
||||
processor
|
||||
- ``main``: the console_script entry point for mozinfo
|
||||
- ``unknown``: a singleton denoting a value that cannot be determined
|
||||
|
||||
``unknown`` has the string representation ``"UNKNOWN"``.
|
||||
``unknown`` will evaluate as ``False`` in python::
|
||||
|
||||
if not mozinfo.os: ... # unknown!
|
||||
|
||||
|
||||
Command Line Usage
|
||||
------------------
|
||||
|
||||
mozinfo comes with a command line program, ``mozinfo`` which may be used to
|
||||
diagnose one's current system.
|
||||
|
||||
Example output::
|
||||
|
||||
os: linux
|
||||
version: Ubuntu 10.10
|
||||
bits: 32
|
||||
processor: x86
|
||||
|
||||
Three of these fields, os, bits, and processor, have a finite set of
|
||||
choices. You may display the value of these choices using
|
||||
``mozinfo --os``, ``mozinfo --bits``, and ``mozinfo --processor``.
|
||||
``mozinfo --help`` documents command-line usage.
|
||||
|
||||
|
||||
.. automodule:: mozinfo
|
||||
:members:
|
@ -1,47 +0,0 @@
|
||||
:mod:`mozlog` --- Easy, configurable and uniform logging
|
||||
========================================================
|
||||
|
||||
Mozlog is a python package intended to simplify and standardize logs
|
||||
in the Mozilla universe. It wraps around python's logging module and
|
||||
adds some additional functionality.
|
||||
|
||||
.. automodule:: mozlog
|
||||
:members: getLogger
|
||||
|
||||
.. autoclass:: MozLogger
|
||||
:members: testStart, testEnd, testPass, testFail, testKnownFail
|
||||
|
||||
Examples
|
||||
--------
|
||||
|
||||
Log to stdout::
|
||||
|
||||
import mozlog
|
||||
log = mozlog.getLogger('MODULE_NAME')
|
||||
log.setLevel(mozlog.INFO)
|
||||
log.info('This message will be printed to stdout')
|
||||
log.debug("This won't")
|
||||
log.testPass('A test has passed')
|
||||
mozlog.shutdown()
|
||||
|
||||
Log to a file::
|
||||
|
||||
import mozlog
|
||||
log = mozlog.getLogger('MODULE_NAME', handler=mozlog.FileHandler('path/to/log/file'))
|
||||
log.warning('Careful!')
|
||||
log.testKnownFail('We know the cause for this failure')
|
||||
mozlog.shutdown()
|
||||
|
||||
Log from an existing object using the LoggingMixin::
|
||||
|
||||
import mozlog
|
||||
class Loggable(mozlog.LoggingMixin):
|
||||
"""Trivial class inheriting from LoggingMixin"""
|
||||
def say_hello(self):
|
||||
self.info("hello")
|
||||
|
||||
loggable = Loggable()
|
||||
loggable.say_hello()
|
||||
|
||||
|
||||
.. _logging: http://docs.python.org/library/logging.html
|
@ -1,9 +0,0 @@
|
||||
:mod:`moznetwork` --- Get network information
|
||||
=============================================
|
||||
|
||||
.. automodule:: moznetwork
|
||||
|
||||
.. automethod:: moznetwork.get_ip
|
||||
|
||||
.. autoclass:: moznetwork.NetworkError
|
||||
|
@ -1,20 +0,0 @@
|
||||
:mod:`mozprocess` --- Launch and manage processes
|
||||
=================================================
|
||||
|
||||
Mozprocess is a process-handling module that provides some additional
|
||||
features beyond those available with python's subprocess:
|
||||
|
||||
* better handling of child processes, especially on Windows
|
||||
* the ability to timeout the process after some absolute period, or some
|
||||
period without any data written to stdout/stderr
|
||||
* the ability to specify output handlers that will be called
|
||||
for each line of output produced by the process
|
||||
* the ability to specify handlers that will be called on process timeout
|
||||
and normal process termination
|
||||
|
||||
|
||||
.. module:: mozprocess
|
||||
.. autoclass:: ProcessHandlerMixin
|
||||
:members: __init__, timedOut, commandline, run, kill, readWithTimeout, processOutputLine, onTimeout, onFinish, processOutput, wait
|
||||
.. autoclass:: ProcessHandler
|
||||
:members:
|
@ -1,99 +0,0 @@
|
||||
:mod:`mozprofile` --- Create and modify Mozilla application profiles
|
||||
====================================================================
|
||||
|
||||
Mozprofile_ is a python tool for creating and managing profiles for Mozilla's
|
||||
applications (Firefox, Thunderbird, etc.). In addition to creating profiles,
|
||||
mozprofile can install addons_ and set preferences_. Mozprofile can be utilized
|
||||
from the command line or as an API.
|
||||
|
||||
The preferred way of setting up profile data (addons, permissions, preferences
|
||||
etc) is by passing them to the profile_ constructor.
|
||||
|
||||
Addons
|
||||
------
|
||||
|
||||
.. automodule:: mozprofile.addons
|
||||
:members:
|
||||
|
||||
Addons may be installed individually or from a manifest.
|
||||
|
||||
Example::
|
||||
|
||||
from mozprofile import FirefoxProfile
|
||||
|
||||
# create new profile to pass to mozmill/mozrunner
|
||||
profile = FirefoxProfile(addons=["adblock.xpi"])
|
||||
|
||||
Command Line Interface
|
||||
----------------------
|
||||
|
||||
.. automodule:: mozprofile.cli
|
||||
:members:
|
||||
|
||||
The profile to be operated on may be specified with the ``--profile``
|
||||
switch. If a profile is not specified, one will be created in a
|
||||
temporary directory which will be echoed to the terminal::
|
||||
|
||||
(mozmill)> mozprofile
|
||||
/tmp/tmp4q1iEU.mozrunner
|
||||
(mozmill)> ls /tmp/tmp4q1iEU.mozrunner
|
||||
user.js
|
||||
|
||||
To run mozprofile from the command line enter:
|
||||
``mozprofile --help`` for a list of options.
|
||||
|
||||
Permissions
|
||||
-----------
|
||||
|
||||
.. automodule:: mozprofile.permissions
|
||||
:members:
|
||||
|
||||
You can set permissions by creating a ``ServerLocations`` object that you pass
|
||||
to the ``Profile`` constructor. Hosts can be added to it with
|
||||
``add_host(host, port)``. ``port`` can be 0.
|
||||
|
||||
Preferences
|
||||
-----------
|
||||
|
||||
.. automodule:: mozprofile.prefs
|
||||
:members:
|
||||
|
||||
Preferences can be set in several ways:
|
||||
|
||||
- using the API: You can make a dictionary with the preferences and pass it to
|
||||
the ``Profile`` constructor. You can also add more preferences with the
|
||||
``Profile.set_preferences`` method.
|
||||
- using a JSON blob file: ``mozprofile --preferences myprefs.json``
|
||||
- using a ``.ini`` file: ``mozprofile --preferences myprefs.ini``
|
||||
- via the command line: ``mozprofile --pref key:value --pref key:value [...]``
|
||||
|
||||
When setting preferences from an ``.ini`` file or the ``--pref`` switch,
|
||||
the value will be interpolated as an integer or a boolean
|
||||
(``true``/``false``) if possible.
|
||||
|
||||
Profile
|
||||
--------------------
|
||||
|
||||
.. automodule:: mozprofile.profile
|
||||
:members:
|
||||
|
||||
Resources
|
||||
-----------
|
||||
Other Mozilla programs offer additional and overlapping functionality
|
||||
for profiles. There is also substantive documentation on profiles and
|
||||
their management.
|
||||
|
||||
- ProfileManager_: XULRunner application for managing profiles. Has a GUI and CLI.
|
||||
- python-profilemanager_: python CLI interface similar to ProfileManager
|
||||
- profile documentation_
|
||||
|
||||
|
||||
.. _Mozprofile: https://github.com/mozilla/mozbase/tree/master/mozprofile
|
||||
.. _addons: https://developer.mozilla.org/en/addons
|
||||
.. _preferences: https://developer.mozilla.org/En/A_Brief_Guide_to_Mozilla_Preferences
|
||||
.. _mozprofile.profile: https://github.com/mozilla/mozbase/tree/master/mozprofile/mozprofile/profile.py
|
||||
.. _AddonManager: https://github.com/mozilla/mozbase/tree/master/mozprofile/mozprofile/addons.py
|
||||
.. _here: https://github.com/mozilla/mozbase/blob/master/mozprofile/mozprofile/permissions.py
|
||||
.. _ProfileManager: https://developer.mozilla.org/en/Profile_Manager
|
||||
.. _python-profilemanager: http://k0s.org/mozilla/hg/profilemanager/
|
||||
.. _documentation: http://support.mozilla.com/en-US/kb/Profiles
|
@ -1,78 +0,0 @@
|
||||
:mod:`mozversion` --- Get application information
|
||||
=================================================
|
||||
|
||||
`mozversion <https://github.com/mozilla/mozbase/tree/master/mozversion>`_
|
||||
provides version information such as the application name and the changesets
|
||||
that it has been built from. This is commonly used in reporting or for
|
||||
conditional logic based on the application under test.
|
||||
|
||||
API Usage
|
||||
---------
|
||||
|
||||
.. automodule:: mozversion
|
||||
:members: get_version
|
||||
|
||||
|
||||
Command Line Usage
|
||||
------------------
|
||||
|
||||
mozversion comes with a command line program, ``mozversion`` which may be used to
|
||||
get version information from an application.
|
||||
|
||||
Usage::
|
||||
|
||||
mozversion [options]
|
||||
|
||||
Options
|
||||
```````
|
||||
|
||||
---binary
|
||||
'''''''''
|
||||
|
||||
This is the path to the target application binary. If this is omitted then
|
||||
the current directory is checked for the existence of an application.ini file.
|
||||
If not found, then it is assumed the target application is a remote Firefox OS
|
||||
instance.
|
||||
|
||||
|
||||
---sources
|
||||
''''''''''
|
||||
|
||||
The path to the sources.xml that accompanies the target application (Firefox OS
|
||||
only). If this is omitted then the current directory is checked for the
|
||||
existence of a sources.xml file.
|
||||
|
||||
Examples
|
||||
````````
|
||||
|
||||
Firefox::
|
||||
|
||||
$ mozversion --binary=/path/to/firefox-bin
|
||||
application_buildid: 20131205075310
|
||||
application_changeset: 39faf812aaec
|
||||
application_name: Firefox
|
||||
application_repository: http://hg.mozilla.org/releases/mozilla-release
|
||||
application_version: 26.0
|
||||
platform_buildid: 20131205075310
|
||||
platform_changeset: 39faf812aaec
|
||||
platform_repository: http://hg.mozilla.org/releases/mozilla-release
|
||||
|
||||
Firefox OS::
|
||||
|
||||
$ mozversion --sources=/path/to/sources.xml
|
||||
application_buildid: 20140106040201
|
||||
application_changeset: 14ac61461f2a
|
||||
application_name: B2G
|
||||
application_repository: http://hg.mozilla.org/mozilla-central
|
||||
application_version: 29.0a1
|
||||
build_changeset: 59605a7c026ff06cc1613af3938579b1dddc6cfe
|
||||
device_firmware_date: 1380051975
|
||||
device_firmware_version_incremental: 139
|
||||
device_firmware_version_release: 4.0.4
|
||||
device_id: msm7627a
|
||||
gaia_changeset: 9a222ac02db176e47299bb37112ae40aeadbeca7
|
||||
gaia_date: 1389005812
|
||||
gecko_changeset: 3a2d8af198510726b063a217438fcf2591f4dfcf
|
||||
platform_buildid: 20140106040201
|
||||
platform_changeset: 14ac61461f2a
|
||||
platform_repository: http://hg.mozilla.org/mozilla-central
|
@ -1 +0,0 @@
|
||||
marionette_client
|
@ -1,15 +0,0 @@
|
||||
Set up and running
|
||||
------------------
|
||||
|
||||
Activities under this domain include installing the software, creating
|
||||
a profile (a set of configuration settings), running a program in a
|
||||
controlled environment such that it can be shut down safely, and
|
||||
correctly handling the case where the system crashes.
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
mozfile
|
||||
mozprofile
|
||||
mozprocess
|
||||
mozcrash
|
390
testing/mozbase/generate_diff.py
Normal file
390
testing/mozbase/generate_diff.py
Normal file
@ -0,0 +1,390 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
"""
|
||||
Given a list of packages and the versions to mirror,
|
||||
generate a diff appropriate for mirroring
|
||||
https://github.com/mozilla/mozbase
|
||||
to http://hg.mozilla.org/mozilla-central/file/tip/testing/mozbase
|
||||
|
||||
If a package version is not given, the latest version will be used.
|
||||
|
||||
Note that this shells out to `cp` for simplicity, so you should run this
|
||||
somewhere that has the `cp` command available.
|
||||
|
||||
Your mozilla-central repository must have no outstanding changes before this
|
||||
script is run. The repository must also have no untracked
|
||||
files that show up in `hg st`.
|
||||
|
||||
See: https://bugzilla.mozilla.org/show_bug.cgi?id=702832
|
||||
"""
|
||||
|
||||
import imp
|
||||
import optparse
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
from pkg_resources import parse_version
|
||||
from subprocess import check_call as call
|
||||
|
||||
# globals
# absolute directory containing this script (testing/mozbase in the tree)
here = os.path.dirname(os.path.abspath(__file__))
# upstream repository that gets mirrored into mozilla-central
MOZBASE = 'git://github.com/mozilla/mozbase.git'
# matches e.g. PACKAGE_VERSION = '0.1.2' in a package's setup.py
version_regex = r"""PACKAGE_VERSION *= *['"]([0-9.]+)["'].*"""
# sibling helper module, loaded by path so this script works when run directly
setup_development = imp.load_source('setup_development',
                                    os.path.join(here, 'setup_development.py'))
# directory name of the package whose setup.py is currently being exec'd
# by the setup() monkey-patch below; None outside that window
current_package = None
# maps package directory name -> kwargs captured from its setup() call
current_package_info = {}
|
||||
|
||||
def error(msg):
    """Print *msg* to stderr and abort the process with exit status 1."""
    # Diagnostics belong on stderr; the original wrote to sys.stdout,
    # which pollutes any output (e.g. a diff) the caller may be piping.
    sys.stderr.write("%s\n" % msg)
    sys.exit(1)
|
||||
|
||||
def remove(path):
    """Delete *path*: a directory tree is removed recursively, a file directly."""
    if not os.path.isdir(path):
        os.remove(path)
    else:
        shutil.rmtree(path)
|
||||
|
||||
### git functions
|
||||
|
||||
def latest_commit(git_dir):
    """Return the hash of the most recent commit in the git checkout *git_dir*."""
    # %H prints the full commit hash; HEAD^..HEAD limits output to one commit
    proc = subprocess.Popen(['git', 'log', '--pretty=format:%H', 'HEAD^..HEAD'],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            cwd=git_dir)
    out, _ = proc.communicate()
    return out.strip()
|
||||
|
||||
def tags(git_dir):
    """Return the list of all tag names in the git checkout *git_dir*."""
    proc = subprocess.Popen(['git', 'tag'],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            cwd=git_dir)
    out, _ = proc.communicate()
    return [tag.strip() for tag in out.strip().splitlines()]
|
||||
|
||||
def checkout(git_dir, tag):
    """Check out ref *tag* (tag, branch or commit) in the git checkout *git_dir*."""
    proc = subprocess.Popen(['git', 'checkout', tag], cwd=git_dir)
    # wait for git to finish; output goes straight to the terminal
    proc.communicate()
|
||||
|
||||
|
||||
### hg functions
|
||||
|
||||
def untracked_files(hg_dir):
    """Return the paths `hg st` reports as untracked ('?') in *hg_dir*."""
    proc = subprocess.Popen(['hg', 'st'],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            cwd=hg_dir)
    out, _ = proc.communicate()
    # each status line is "<flag> <path>"; keep only the '?' (untracked) ones
    entries = (line.strip().split(None, 1) for line in out.strip().splitlines())
    return [path for flag, path in entries if flag == '?']
|
||||
|
||||
def revert(hg_dir, excludes=()):
    """Revert the hg checkout *hg_dir* and delete untracked files.

    Untracked files whose full path is listed in *excludes* are kept.
    """
    call(['hg', 'revert', '--no-backup', '--all'], cwd=hg_dir)
    # `hg revert` leaves untracked files behind; remove them explicitly
    for name in untracked_files(hg_dir):
        full_path = os.path.join(hg_dir, name)
        if full_path not in excludes:
            os.remove(full_path)
|
||||
|
||||
###
|
||||
|
||||
def generate_packages_txt():
    """
    generate a packages.txt file appropriate for
    http://mxr.mozilla.org/mozilla-central/source/build/virtualenv/populate_virtualenv.py

    See also:
    http://mxr.mozilla.org/mozilla-central/source/build/virtualenv/packages.txt
    """
    # path of this directory relative to topsrcdir
    prefix = 'testing/mozbase/'

    # one "<package>.pth:<relative path>" line per mozbase package, sorted
    manifest_lines = ["%s.pth:%s%s\n" % (name, prefix, name)
                      for name in sorted(setup_development.mozbase_packages)]

    target = os.path.join(here, 'packages.txt')
    with open(target, 'wb') as fh:
        fh.writelines(manifest_lines)
|
||||
|
||||
### version-related functions
|
||||
|
||||
def parse_versions(*args):
    """return a list of 2-tuples of (directory, version)

    Each argument is either "directory" (version defaults to None)
    or "directory=version"; only the first '=' separates the two.
    """
    parsed = []
    for spec in args:
        if '=' not in spec:
            parsed.append((spec, None))
        else:
            name, _, ver = spec.partition('=')
            parsed.append((name, ver))
    return parsed
|
||||
|
||||
def version_tag(directory, version):
    """return a version tag string given the directory name of the package"""
    # tags in the mozbase git repository follow "<package name>-<version>";
    # the package name comes from the setup() metadata captured earlier
    pkg_name = current_package_info[directory]['name']
    return '%s-%s' % (pkg_name, version)
|
||||
|
||||
def setup(**kwargs):
    """monkey-patch function for setuptools.setup"""
    # While a package's setup.py is exec'd (see main()), this module is
    # temporarily installed in sys.modules as 'setuptools', so the
    # setup() call lands here; record the keyword arguments as that
    # package's metadata instead of actually installing anything.
    assert current_package
    current_package_info[current_package] = kwargs
|
||||
|
||||
def checkout_tag(src, directory, version):
    """
    front end to checkout + version_tag;
    if version is None, checkout HEAD
    """
    # None means "track the development tip" rather than a release tag
    ref = 'master' if version is None else version_tag(directory, version)
    checkout(src, ref)
|
||||
|
||||
def check_consistency(*package_info):
    """checks consistency between a set of packages

    Each argument is a dict of setup() keyword arguments as captured by
    the setup() monkey-patch. Raises Exception listing every internal
    dependency whose version constraint is not satisfied by the set.
    """

    # set versions and dependencies per package
    versions = {}
    dependencies = {}
    for package in package_info:
        name = package['name']
        versions[name] = package['version']
        for dep in package.get('install_requires', []):
            dependencies.setdefault(name, []).append(dep)

    # Use generic rich comparisons rather than the unbound tuple methods
    # (tuple.__eq__ etc.) of the original: modern pkg_resources.parse_version
    # returns Version objects, not tuples, so the tuple methods would fail.
    func_map = {'==': lambda a, b: a == b,
                '<=': lambda a, b: a <= b,
                '>=': lambda a, b: a >= b}

    # check dependencies
    errors = []
    for package, deps in dependencies.items():
        for dep in deps:
            parsed = setup_development.dependency_info(dep)
            if parsed['Name'] not in versions:
                # external dependency; nothing to cross-check
                continue
            if parsed.get('Version') is None:
                # no version specified for dependency
                continue

            # check versions
            func = func_map[parsed['Type']]
            comparison = func(parse_version(versions[parsed['Name']]),
                              parse_version(parsed['Version']))

            if not comparison:
                # an error
                errors.append("Dependency for package '%s' failed: %s-%s not %s %s" % (package, parsed['Name'], versions[parsed['Name']], parsed['Type'], parsed['Version']))

    # raise an Exception if errors exist
    if errors:
        raise Exception('\n'.join(errors))
|
||||
|
||||
###
|
||||
|
||||
def main(args=sys.argv[1:]):
    """command line entry point

    Mirrors the requested mozbase packages from the upstream git
    repository into this hg checkout and writes the resulting `hg diff`
    to a file. NOTE: Python 2 script (print statements, file()).
    """

    # parse command line options
    usage = '%prog [options] package1[=version1] <package2=version2> <...>'
    class PlainDescriptionFormatter(optparse.IndentedHelpFormatter):
        """description formatter for console script entry point"""
        def format_description(self, description):
            if description:
                return description.strip() + '\n'
            else:
                return ''
    parser = optparse.OptionParser(usage=usage,
                                   description=__doc__,
                                   formatter=PlainDescriptionFormatter())
    parser.add_option('-o', '--output', dest='output',
                      help="specify the output file; otherwise will be in the current directory with a name based on the hash")
    parser.add_option('--develop', dest='develop',
                      action='store_true', default=False,
                      help="use development (master) version of packages")
    parser.add_option('--no-check', dest='check',
                      action='store_false', default=True,
                      help="Do not check current repository state")
    parser.add_option('--packages', dest='output_packages',
                      default=False, action='store_true',
                      help="generate packages.txt and exit")
    options, args = parser.parse_args(args)
    if options.output_packages:
        generate_packages_txt()
        parser.exit()
    if args:
        versions = parse_versions(*args)
    else:
        # no packages requested: show help and stop
        parser.print_help()
        parser.exit()
    output = options.output

    # gather info from current mozbase packages by exec'ing each setup.py
    # with setuptools replaced by this module (see setup() above)
    global current_package
    setuptools = sys.modules.get('setuptools')
    sys.modules['setuptools'] = sys.modules[__name__]
    try:
        for package in setup_development.mozbase_packages:
            current_package = package
            imp.load_source('setup', os.path.join(here, package, 'setup.py'))
    finally:
        # always restore the real setuptools, even on failure
        current_package = None
        sys.modules.pop('setuptools')
        if setuptools:
            sys.modules['setuptools'] = setuptools
    assert set(current_package_info.keys()) == set(setup_development.mozbase_packages)

    # check consistency of current set of packages
    check_consistency(*current_package_info.values())

    # calculate hg root
    hg_root = os.path.dirname(os.path.dirname(here)) # testing/mozbase
    hg_dir = os.path.join(hg_root, '.hg')
    assert os.path.exists(hg_dir) and os.path.isdir(hg_dir)

    # ensure there are no outstanding changes to m-c
    process = subprocess.Popen(['hg', 'diff'], cwd=here, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = process.communicate()
    if stdout.strip() and options.check:
        error("Outstanding changes in %s; aborting" % hg_root)

    # ensure that there are no untracked files in testing/mozbase
    untracked = untracked_files(hg_root)
    if untracked and options.check:
        error("Untracked files in %s:\n %s\naborting" % (hg_root, '\n'.join([' %s' % i for i in untracked])))

    tempdir = tempfile.mkdtemp()
    try:

        # download mozbase
        call(['git', 'clone', MOZBASE], cwd=tempdir)
        src = os.path.join(tempdir, 'mozbase')
        assert os.path.isdir(src)
        if output is None:
            # default output name is "<latest upstream commit hash>.diff"
            commit_hash = latest_commit(src)
            output = os.path.join(os.getcwd(), '%s.diff' % commit_hash)

        # get the tags
        _tags = tags(src)

        # ensure all directories and tags are available
        for index, (directory, version) in enumerate(versions):

            setup_py = os.path.join(src, directory, 'setup.py')
            assert os.path.exists(setup_py), "'%s' not found" % setup_py

            if not version:

                if options.develop:
                    # use master of package; keep version=None
                    continue

                # choose maximum version from setup.py
                with file(setup_py) as f:
                    for line in f.readlines():
                        line = line.strip()
                        match = re.match(version_regex, line)
                        if match:
                            version = match.groups()[0]
                            versions[index] = (directory, version)
                            print "Using %s=%s" % (directory, version)
                            break
                    else:
                        # for/else: no line matched version_regex
                        error("Cannot find PACKAGE_VERSION in %s" % setup_py)

            tag = version_tag(directory, version)
            if tag not in _tags:
                error("Tag for '%s' -- %s -- not in tags:\n%s" % (directory, version, '\n'.join(sorted(_tags))))

        # ensure that the versions to mirror are compatible with what is in m-c
        # NOTE(review): old_package_info is captured but never used afterwards
        old_package_info = current_package_info.copy()
        setuptools = sys.modules.get('setuptools')
        sys.modules['setuptools'] = sys.modules[__name__]
        try:
            for directory, version in versions:

                # checkout appropriate revision of mozbase
                checkout_tag(src, directory, version)

                # update the package information
                setup_py = os.path.join(src, directory, 'setup.py')
                current_package = directory
                imp.load_source('setup', setup_py)
        finally:
            current_package = None
            sys.modules.pop('setuptools')
            if setuptools:
                sys.modules['setuptools'] = setuptools
        checkout(src, 'master')
        check_consistency(*current_package_info.values())

        # copy mozbase directories to m-c
        for directory, version in versions:

            # checkout appropriate revision of mozbase
            checkout_tag(src, directory, version)

            # replace the directory
            remove(os.path.join(here, directory))
            call(['cp', '-r', directory, here], cwd=src)

        # regenerate mozbase's packages.txt
        generate_packages_txt()

        # generate the diff and write to output file
        command = ['hg', 'addremove']
        # TODO: don't add untracked files via `hg addremove --exclude...`
        call(command, cwd=hg_root)
        process = subprocess.Popen(['hg', 'diff'],
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   cwd=hg_root)
        stdout, stderr = process.communicate()
        with file(output, 'w') as f:
            f.write(stdout)
            f.close()

        # ensure that the diff you just wrote isn't deleted
        untracked.append(os.path.abspath(output))

    finally:
        # cleanup: restore the repository (unless --no-check) and drop
        # the temporary clone
        if options.check:
            revert(hg_root, untracked)
        shutil.rmtree(tempdir)

    print "Diff at %s" % output

if __name__ == '__main__':
    main()
|
@ -318,7 +318,7 @@ def read_ini(fp, variables=None, default='DEFAULT',
|
||||
fp = file(fp)
|
||||
|
||||
# read the lines
|
||||
for (linenum, line) in enumerate(fp.readlines(), start=1):
|
||||
for line in fp.readlines():
|
||||
|
||||
stripped = line.strip()
|
||||
|
||||
@ -379,13 +379,8 @@ def read_ini(fp, variables=None, default='DEFAULT',
|
||||
value = '%s%s%s' % (value, os.linesep, stripped)
|
||||
current_section[key] = value
|
||||
else:
|
||||
# something bad happened!
|
||||
if hasattr(fp, 'name'):
|
||||
filename = fp.name
|
||||
else:
|
||||
filename = 'unknown'
|
||||
raise Exception("Error parsing manifest file '%s', line %s" %
|
||||
(filename, linenum))
|
||||
# something bad happen!
|
||||
raise Exception("Not sure what you're trying to do")
|
||||
|
||||
# interpret the variables
|
||||
def interpret_variables(global_dict, local_dict):
|
||||
|
@ -64,7 +64,6 @@ class ExpressionParserTest(unittest.TestCase):
|
||||
self.assertTrue(parse("true && (true || false)"))
|
||||
self.assertTrue(parse("(true && false) || (true && (true || false))"))
|
||||
|
||||
|
||||
def test_comments(self):
|
||||
# comments in expressions work accidentally, via an implementation
|
||||
# detail - the '#' character doesn't match any of the regular
|
||||
|
@ -2,4 +2,4 @@
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
|
||||
# You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
from mozversion import cli, get_version
|
||||
from b2gmixin import DeviceADB, DeviceSUT
|
195
testing/mozbase/mozb2g/mozb2g/b2gmixin.py
Normal file
195
testing/mozbase/mozb2g/mozb2g/b2gmixin.py
Normal file
@ -0,0 +1,195 @@
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
|
||||
# You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
from __future__ import with_statement
|
||||
import datetime
|
||||
import os
|
||||
import re
|
||||
import tempfile
|
||||
import time
|
||||
import shutil
|
||||
import socket
|
||||
import subprocess
|
||||
|
||||
from marionette import Marionette
|
||||
from mozdevice import DeviceManagerADB, DeviceManagerSUT, DMError
|
||||
|
||||
class B2GMixin(object):
    """B2G-specific behaviour layered onto a mozdevice DeviceManager.

    The concrete classes (DeviceADB / DeviceSUT below) supply the device
    transport primitives this mixin calls: shellCheckOutput, getFile,
    pushFile and getIP.
    """

    # host-side temp directory holding the custom profile, while one is active
    profileDir = None
    # location of the user preferences file on the device
    userJS = "/data/local/user.js"
    # lazily-created Marionette client (see setupMarionette)
    marionette = None

    def __init__(self, host=None, marionetteHost=None, marionettePort=2828,
                 **kwargs):
        # (allowing marionetteHost to be specified seems a bit
        # counter-intuitive since we normally get it below from the ip
        # address, however we currently need it to be able to connect
        # via adb port forwarding and localhost)
        #
        # Default to None when neither argument is given: the original
        # left the attribute unset in that case, so setupMarionette()'s
        # "if not self.marionetteHost" check raised AttributeError
        # instead of falling back to DHCP + device IP discovery.
        self.marionetteHost = marionetteHost or host or None
        self.marionettePort = marionettePort

    def cleanup(self):
        """
        If a user profile was setup on the device, restore it to the original.
        """
        if self.profileDir:
            self.restoreProfile()

    def waitForPort(self, timeout):
        """Waits for the marionette server to respond, until the timeout specified.

        :param timeout: Timeout parameter in seconds.
        """
        print("waiting for port")
        starttime = datetime.datetime.now()
        while datetime.datetime.now() - starttime < datetime.timedelta(seconds=timeout):
            try:
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                print("trying %s %s" % (self.marionettePort, self.marionetteHost))
                sock.connect((self.marionetteHost, self.marionettePort))
                data = sock.recv(16)
                sock.close()
                # the marionette server greets new connections with a
                # JSON blob containing a "from" key
                if '"from"' in data:
                    return True
            except socket.error:
                # not listening yet; retry after a pause
                pass
            except Exception as e:
                raise DMError("Could not connect to marionette: %s" % e)
            time.sleep(1)
        raise DMError("Could not communicate with Marionette port")

    def setupMarionette(self, scriptTimeout=60000):
        """
        Starts a marionette session.
        If no host was given at init, the ip of the device will be retrieved
        and networking will be established.

        :param scriptTimeout: Marionette script timeout in milliseconds.
        """
        if not self.marionetteHost:
            self.setupDHCP()
            self.marionetteHost = self.getIP()
        if not self.marionette:
            self.marionette = Marionette(self.marionetteHost, self.marionettePort)
        if not self.marionette.session:
            self.waitForPort(30)
            self.marionette.start_session()

        self.marionette.set_script_timeout(scriptTimeout)

    def restartB2G(self):
        """
        Restarts the b2g process on the device.
        """
        # restart b2g so we start with a clean slate
        if self.marionette and self.marionette.session:
            self.marionette.delete_session()
        self.shellCheckOutput(['stop', 'b2g'])
        # Wait for a bit to make sure B2G has completely shut down.
        tries = 10
        while "b2g" in self.shellCheckOutput(['ps', 'b2g']) and tries > 0:
            tries -= 1
            time.sleep(1)
        if tries == 0:
            raise DMError("Could not kill b2g process")
        self.shellCheckOutput(['start', 'b2g'])

    def setupProfile(self, prefs=None):
        """Sets up the user profile on the device.

        :param prefs: String of user_prefs to add to the profile. Defaults to a standard b2g testing profile.
        """
        # currently we have no custom prefs to set (when bug 800138 is fixed,
        # we will probably want to enable marionette on an external ip by
        # default)
        if not prefs:
            prefs = ""

        # remove previous user.js if there is one
        if not self.profileDir:
            self.profileDir = tempfile.mkdtemp()
        our_userJS = os.path.join(self.profileDir, "user.js")
        if os.path.exists(our_userJS):
            os.remove(our_userJS)
        # copy the device's user.js to the host; best-effort, the device
        # may not have one yet
        try:
            self.getFile(self.userJS, our_userJS)
        except subprocess.CalledProcessError:
            pass
        # if we successfully copied the profile, make a backup of the file
        # on the device so restoreProfile() can put it back
        if os.path.exists(our_userJS):
            self.shellCheckOutput(['dd', 'if=%s' % self.userJS, 'of=%s.orig' % self.userJS])
        with open(our_userJS, 'a') as user_file:
            user_file.write("%s" % prefs)

        self.pushFile(our_userJS, self.userJS)
        self.restartB2G()
        self.setupMarionette()

    def setupDHCP(self, interfaces=['eth0', 'wlan0']):
        """Sets up networking.

        :param interfaces: Network connection types to try. Defaults to eth0 and wlan0.
        """
        # NOTE: mutable default is safe here -- *interfaces* is only read.
        # first netcfg line is a header; column 0 of each row is the interface
        all_interfaces = [line.split()[0] for line in \
                          self.shellCheckOutput(['netcfg']).splitlines()[1:]]
        interfaces_to_try = filter(lambda i: i in interfaces, all_interfaces)

        tries = 5
        print("Setting up DHCP...")
        while tries > 0:
            print("attempts left: %d" % tries)
            try:
                for interface in interfaces_to_try:
                    self.shellCheckOutput(['netcfg', interface, 'dhcp'],
                                          timeout=10)
                    if self.getIP(interfaces=[interface]):
                        return
            except DMError:
                pass
            time.sleep(1)
            tries -= 1
        raise DMError("Could not set up network connection")

    def restoreProfile(self):
        """
        Restores the original user profile on the device.
        """
        if not self.profileDir:
            raise DMError("There is no profile to restore")
        # if we made a device-side backup in setupProfile(), copy it back
        our_userJS = os.path.join(self.profileDir, "user.js")
        if os.path.exists(our_userJS):
            self.shellCheckOutput(['dd', 'if=%s.orig' % self.userJS, 'of=%s' % self.userJS])
        shutil.rmtree(self.profileDir)
        self.profileDir = None

    def getAppInfo(self):
        """
        Returns the appinfo, with an additional "date" key.

        :rtype: dictionary
        """
        if not self.marionette or not self.marionette.session:
            self.setupMarionette()
        self.marionette.set_context("chrome")
        appinfo = self.marionette.execute_script("""
                    var appInfo = Components.classes["@mozilla.org/xre/app-info;1"]
                            .getService(Components.interfaces.nsIXULAppInfo);
                    return appInfo;
                    """)
        # appBuildID starts with YYYYMMDD; expose it as an ISO-style date
        (year, month, day) = (appinfo["appBuildID"][0:4], appinfo["appBuildID"][4:6], appinfo["appBuildID"][6:8])
        appinfo['date'] = "%s-%s-%s" % (year, month, day)
        return appinfo
|
||||
|
||||
class DeviceADB(DeviceManagerADB, B2GMixin):
    # B2G device driven over adb: DeviceManagerADB supplies the device
    # transport, B2GMixin the B2G profile/Marionette handling.
    def __init__(self, **kwargs):
        # cooperative super() is not used; initialize both bases
        # explicitly with the shared keyword arguments
        DeviceManagerADB.__init__(self, **kwargs)
        B2GMixin.__init__(self, **kwargs)
|
||||
|
||||
class DeviceSUT(DeviceManagerSUT, B2GMixin):
    # B2G device driven via the SUT agent: DeviceManagerSUT supplies the
    # device transport, B2GMixin the B2G profile/Marionette handling.
    def __init__(self, **kwargs):
        # cooperative super() is not used; initialize both bases
        # explicitly with the shared keyword arguments
        DeviceManagerSUT.__init__(self, **kwargs)
        B2GMixin.__init__(self, **kwargs)
|
25
testing/mozbase/mozb2g/setup.py
Normal file
25
testing/mozbase/mozb2g/setup.py
Normal file
@ -0,0 +1,25 @@
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
|
||||
# You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
from setuptools import setup
|
||||
|
||||
PACKAGE_VERSION = '0.3'
|
||||
|
||||
deps = ['mozdevice >= 0.16', 'marionette_client >= 0.5.2']
|
||||
|
||||
setup(name='mozb2g',
|
||||
version=PACKAGE_VERSION,
|
||||
description="B2G specific code for device automation",
|
||||
long_description="see http://mozbase.readthedocs.org/",
|
||||
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
|
||||
keywords='',
|
||||
author='Mozilla Automation and Testing Team',
|
||||
author_email='tools@lists.mozilla.org',
|
||||
url='https://wiki.mozilla.org/Auto-tools/Projects/Mozbase',
|
||||
license='MPL',
|
||||
packages=['mozb2g'],
|
||||
include_package_data=True,
|
||||
zip_safe=False,
|
||||
install_requires=deps
|
||||
)
|
@ -6,6 +6,7 @@ __all__ = ['check_for_crashes',
|
||||
'check_for_java_exception']
|
||||
|
||||
import glob
|
||||
import mozlog
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
@ -15,8 +16,8 @@ import tempfile
|
||||
import urllib2
|
||||
import zipfile
|
||||
|
||||
import mozfile
|
||||
import mozlog
|
||||
from mozfile import extract_zip
|
||||
from mozfile import is_url
|
||||
|
||||
|
||||
def check_for_crashes(dump_directory, symbols_path,
|
||||
@ -70,7 +71,7 @@ def check_for_crashes(dump_directory, symbols_path,
|
||||
remove_symbols = False
|
||||
# If our symbols are at a remote URL, download them now
|
||||
# We want to download URLs like http://... but not Windows paths like c:\...
|
||||
if symbols_path and mozfile.is_url(symbols_path):
|
||||
if symbols_path and is_url(symbols_path):
|
||||
log.info("Downloading symbols from: %s", symbols_path)
|
||||
remove_symbols = True
|
||||
# Get the symbols and write them to a temporary zipfile
|
||||
@ -81,7 +82,7 @@ def check_for_crashes(dump_directory, symbols_path,
|
||||
# processing all crashes)
|
||||
symbols_path = tempfile.mkdtemp()
|
||||
zfile = zipfile.ZipFile(symbols_file, 'r')
|
||||
mozfile.extract_zip(zfile, symbols_path)
|
||||
extract_zip(zfile, symbols_path)
|
||||
zfile.close()
|
||||
|
||||
for d in dumps:
|
||||
@ -144,12 +145,13 @@ def check_for_crashes(dump_directory, symbols_path,
|
||||
log.info("Saved dump as %s", os.path.join(dump_save_path,
|
||||
os.path.basename(d)))
|
||||
else:
|
||||
mozfile.remove(d)
|
||||
os.remove(d)
|
||||
extra = os.path.splitext(d)[0] + ".extra"
|
||||
mozfile.remove(extra)
|
||||
if os.path.exists(extra):
|
||||
os.remove(extra)
|
||||
finally:
|
||||
if remove_symbols:
|
||||
mozfile.remove(symbols_path)
|
||||
shutil.rmtree(symbols_path)
|
||||
|
||||
return True
|
||||
|
||||
|
@ -4,14 +4,13 @@
|
||||
|
||||
from setuptools import setup
|
||||
|
||||
PACKAGE_NAME = 'mozcrash'
|
||||
PACKAGE_VERSION = '0.11'
|
||||
PACKAGE_VERSION = '0.10'
|
||||
|
||||
# dependencies
|
||||
deps = ['mozfile >= 1.0',
|
||||
deps = ['mozfile >= 0.12',
|
||||
'mozlog']
|
||||
|
||||
setup(name=PACKAGE_NAME,
|
||||
setup(name='mozcrash',
|
||||
version=PACKAGE_VERSION,
|
||||
description="Library for printing stack traces from minidumps left behind by crashed processes",
|
||||
long_description="see http://mozbase.readthedocs.org/",
|
||||
|
@ -168,43 +168,5 @@ class TestCrash(unittest.TestCase):
|
||||
stackwalk_binary=self.stackwalk,
|
||||
quiet=True))
|
||||
|
||||
class TestJavaException(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.test_log = ["01-30 20:15:41.937 E/GeckoAppShell( 1703): >>> REPORTING UNCAUGHT EXCEPTION FROM THREAD 9 (\"GeckoBackgroundThread\")",
|
||||
"01-30 20:15:41.937 E/GeckoAppShell( 1703): java.lang.NullPointerException",
|
||||
"01-30 20:15:41.937 E/GeckoAppShell( 1703): at org.mozilla.gecko.GeckoApp$21.run(GeckoApp.java:1833)",
|
||||
"01-30 20:15:41.937 E/GeckoAppShell( 1703): at android.os.Handler.handleCallback(Handler.java:587)"]
|
||||
|
||||
def test_uncaught_exception(self):
|
||||
"""
|
||||
Test for an exception which should be caught
|
||||
"""
|
||||
self.assert_(mozcrash.check_for_java_exception(self.test_log))
|
||||
|
||||
def test_fatal_exception(self):
|
||||
"""
|
||||
Test for an exception which should be caught
|
||||
"""
|
||||
fatal_log = list(self.test_log)
|
||||
fatal_log[0] = "01-30 20:15:41.937 E/GeckoAppShell( 1703): >>> FATAL EXCEPTION FROM THREAD 9 (\"GeckoBackgroundThread\")"
|
||||
self.assert_(mozcrash.check_for_java_exception(fatal_log))
|
||||
|
||||
def test_truncated_exception(self):
|
||||
"""
|
||||
Test for an exception which should be caught which
|
||||
was truncated
|
||||
"""
|
||||
truncated_log = list(self.test_log)
|
||||
truncated_log[0], truncated_log[1] = truncated_log[1], truncated_log[0]
|
||||
self.assert_(mozcrash.check_for_java_exception(truncated_log))
|
||||
|
||||
def test_unchecked_exception(self):
|
||||
"""
|
||||
Test for an exception which should not be caught
|
||||
"""
|
||||
passable_log = list(self.test_log)
|
||||
passable_log[0] = "01-30 20:15:41.937 E/GeckoAppShell( 1703): >>> NOT-SO-BAD EXCEPTION FROM THREAD 9 (\"GeckoBackgroundThread\")"
|
||||
self.assert_(not mozcrash.check_for_java_exception(passable_log))
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
@ -282,24 +282,6 @@ class DeviceManager(object):
|
||||
Does a recursive delete of directory on the device: rm -Rf remoteDirname.
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def moveTree(self, source, destination):
|
||||
"""
|
||||
Does a move of the file or directory on the device.
|
||||
|
||||
:param source: Path to the original file or directory
|
||||
:param destination: Path to the destination file or directory
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def copyTree(self, source, destination):
|
||||
"""
|
||||
Does a copy of the file or directory on the device.
|
||||
|
||||
:param source: Path to the original file or directory
|
||||
:param destination: Path to the destination file or directory
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def chmodDir(self, remoteDirname, mask="777"):
|
||||
"""
|
||||
@ -368,7 +350,7 @@ class DeviceManager(object):
|
||||
"""
|
||||
Executes shell command on device and returns exit code.
|
||||
|
||||
:param cmd: Commandline list to execute
|
||||
:param cmd: Command string to execute
|
||||
:param outputfile: File to store output
|
||||
:param env: Environment to pass to exec command
|
||||
:param cwd: Directory to execute command from
|
||||
@ -380,7 +362,6 @@ class DeviceManager(object):
|
||||
"""
|
||||
Executes shell command on device and returns output as a string.
|
||||
|
||||
:param cmd: Commandline list to execute
|
||||
:param env: Environment to pass to exec command
|
||||
:param cwd: Directory to execute command from
|
||||
:param timeout: specified in seconds, defaults to 'default_timeout'
|
||||
@ -448,15 +429,13 @@ class DeviceManager(object):
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def reboot(self, wait=False, ipAddr=None):
|
||||
def reboot(self, ipAddr=None, port=30000):
|
||||
"""
|
||||
Reboots the device.
|
||||
|
||||
:param wait: block on device to come back up before returning
|
||||
:param ipAddr: if specified, try to make the device connect to this
|
||||
specific IP address after rebooting (only works with
|
||||
SUT; if None, we try to determine a reasonable address
|
||||
ourselves)
|
||||
Some implementations may optionally support waiting for a TCP callback from
|
||||
the device once it has restarted before returning, but this is not
|
||||
guaranteed.
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
@ -487,21 +466,21 @@ class DeviceManager(object):
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def updateApp(self, appBundlePath, processName=None, destPath=None,
|
||||
wait=False, ipAddr=None):
|
||||
def updateApp(self, appBundlePath, processName=None, destPath=None, ipAddr=None, port=30000):
|
||||
"""
|
||||
Updates the application on the device and reboots.
|
||||
Updates the application on the device.
|
||||
|
||||
:param appBundlePath: path to the application bundle on the device
|
||||
:param processName: used to end the process if the applicaiton is
|
||||
currently running (optional)
|
||||
:param destPath: Destination directory to where the application should
|
||||
be installed (optional)
|
||||
:param wait: block on device to come back up before returning
|
||||
:param ipAddr: if specified, try to make the device connect to this
|
||||
specific IP address after rebooting (only works with
|
||||
SUT; if None and wait is True, we try to determine a
|
||||
reasonable address ourselves)
|
||||
:param ipAddr: IP address to await a callback ping to let us know that
|
||||
the device has updated properly (defaults to current
|
||||
IP)
|
||||
:param port: port to await a callback ping to let us know that the
|
||||
device has updated properly defaults to 30000, and counts
|
||||
up from there if it finds a conflict
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
|
@ -2,18 +2,15 @@
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
|
||||
# You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
import mozlog
|
||||
import subprocess
|
||||
from devicemanager import DeviceManager, DMError, _pop_last_line
|
||||
import re
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
import time
|
||||
|
||||
from devicemanager import DeviceManager, DMError, _pop_last_line
|
||||
import mozfile
|
||||
import mozlog
|
||||
|
||||
|
||||
class DeviceManagerADB(DeviceManager):
|
||||
"""
|
||||
Implementation of DeviceManager interface that uses the Android "adb"
|
||||
@ -24,6 +21,7 @@ class DeviceManagerADB(DeviceManager):
|
||||
|
||||
_haveRootShell = False
|
||||
_haveSu = False
|
||||
_useRunAs = False
|
||||
_useZip = False
|
||||
_logcatNeedsRoot = False
|
||||
_pollingInterval = 0.01
|
||||
@ -74,6 +72,12 @@ class DeviceManagerADB(DeviceManager):
|
||||
# existence of an su binary
|
||||
self._checkForRoot()
|
||||
|
||||
# Can we use run-as? (not required)
|
||||
try:
|
||||
self._verifyRunAs()
|
||||
except DMError:
|
||||
pass
|
||||
|
||||
# can we use zip to speed up some file operations? (currently not
|
||||
# required)
|
||||
try:
|
||||
@ -155,7 +159,7 @@ class DeviceManagerADB(DeviceManager):
|
||||
def forward(self, local, remote):
|
||||
"""
|
||||
Forward socket connections.
|
||||
|
||||
|
||||
Forward specs are one of:
|
||||
tcp:<port>
|
||||
localabstract:<unix domain socket name>
|
||||
@ -199,15 +203,22 @@ class DeviceManagerADB(DeviceManager):
|
||||
if not os.access(localname, os.F_OK):
|
||||
raise DMError("File not found: %s" % localname)
|
||||
|
||||
self._checkCmd(["push", os.path.realpath(localname), destname],
|
||||
retryLimit=retryLimit)
|
||||
if self._useRunAs:
|
||||
remoteTmpFile = self.getTempDir() + "/" + os.path.basename(localname)
|
||||
self._checkCmd(["push", os.path.realpath(localname), remoteTmpFile],
|
||||
retryLimit=retryLimit)
|
||||
self.shellCheckOutput(["dd", "if=" + remoteTmpFile, "of=" + destname])
|
||||
self.shellCheckOutput(["rm", remoteTmpFile])
|
||||
else:
|
||||
self._checkCmd(["push", os.path.realpath(localname), destname],
|
||||
retryLimit=retryLimit)
|
||||
|
||||
def mkDir(self, name):
|
||||
result = self._runCmd(["shell", "mkdir", name]).stdout.read()
|
||||
result = self._runCmdAs(["shell", "mkdir", name]).stdout.read()
|
||||
if 'read-only file system' in result.lower():
|
||||
raise DMError("Error creating directory: read only file system")
|
||||
|
||||
def pushDir(self, localDir, remoteDir, retryLimit=None):
|
||||
def pushDir(self, localDir, remoteDir, retryLimit=None, timeout=None):
|
||||
# adb "push" accepts a directory as an argument, but if the directory
|
||||
# contains symbolic links, the links are pushed, rather than the linked
|
||||
# files; we either zip/unzip or re-copy the directory into a temporary
|
||||
@ -222,11 +233,10 @@ class DeviceManagerADB(DeviceManager):
|
||||
subprocess.Popen(["zip", "-r", localZip, '.'], cwd=localDir,
|
||||
stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
|
||||
self.pushFile(localZip, remoteZip, retryLimit=retryLimit, createDir=False)
|
||||
mozfile.remove(localZip)
|
||||
data = self._runCmd(["shell", "unzip", "-o", remoteZip,
|
||||
"-d", remoteDir]).stdout.read()
|
||||
self._checkCmd(["shell", "rm", remoteZip],
|
||||
retryLimit=retryLimit)
|
||||
os.remove(localZip)
|
||||
data = self._runCmdAs(["shell", "unzip", "-o", remoteZip,
|
||||
"-d", remoteDir]).stdout.read()
|
||||
self._checkCmdAs(["shell", "rm", remoteZip], retryLimit=retryLimit)
|
||||
if re.search("unzip: exiting", data) or re.search("Operation not permitted", data):
|
||||
raise Exception("unzip failed, or permissions error")
|
||||
except:
|
||||
@ -238,8 +248,8 @@ class DeviceManagerADB(DeviceManager):
|
||||
# copytree's target dir must not already exist, so create a subdir
|
||||
tmpDirTarget = os.path.join(tmpDir, "tmp")
|
||||
shutil.copytree(localDir, tmpDirTarget)
|
||||
self._checkCmd(["push", tmpDirTarget, remoteDir], retryLimit=retryLimit)
|
||||
mozfile.remove(tmpDir)
|
||||
self._checkCmd(["push", tmpDirTarget, remoteDir], retryLimit=retryLimit, timeout=timeout)
|
||||
shutil.rmtree(tmpDir)
|
||||
|
||||
def dirExists(self, remotePath):
|
||||
p = self._runCmd(["shell", "ls", "-a", remotePath + '/'])
|
||||
@ -262,20 +272,14 @@ class DeviceManagerADB(DeviceManager):
|
||||
|
||||
def removeFile(self, filename):
|
||||
if self.fileExists(filename):
|
||||
self._checkCmd(["shell", "rm", filename])
|
||||
self._runCmd(["shell", "rm", filename])
|
||||
|
||||
def removeDir(self, remoteDir):
|
||||
if self.dirExists(remoteDir):
|
||||
self._checkCmd(["shell", "rm", "-r", remoteDir])
|
||||
if (self.dirExists(remoteDir)):
|
||||
self._runCmd(["shell", "rm", "-r", remoteDir]).wait()
|
||||
else:
|
||||
self.removeFile(remoteDir.strip())
|
||||
|
||||
def moveTree(self, source, destination):
|
||||
self._checkCmd(["shell", "mv", source, destination])
|
||||
|
||||
def copyTree(self, source, destination):
|
||||
self._checkCmd(["shell", "dd", "if=%s" % source, "of=%s" % destination])
|
||||
|
||||
def listFiles(self, rootdir):
|
||||
p = self._runCmd(["shell", "ls", "-a", rootdir])
|
||||
data = p.stdout.readlines()
|
||||
@ -378,7 +382,7 @@ class DeviceManagerADB(DeviceManager):
|
||||
if sig:
|
||||
args.append("-%d" % sig)
|
||||
args.append(str(pid))
|
||||
p = self._runCmd(args)
|
||||
p = self._runCmdAs(args)
|
||||
p.communicate()
|
||||
if p.returncode != 0:
|
||||
raise DMError("Error killing process "
|
||||
@ -389,7 +393,25 @@ class DeviceManagerADB(DeviceManager):
|
||||
Pulls remoteFile from device to host
|
||||
"""
|
||||
try:
|
||||
self._runCmd(["pull", remoteFile, localFile]).communicate()
|
||||
# First attempt to pull file regularly
|
||||
outerr = self._runCmd(["pull", remoteFile, localFile]).communicate()
|
||||
|
||||
# Now check stderr for errors
|
||||
if outerr[1]:
|
||||
errl = outerr[1].splitlines()
|
||||
if (len(errl) == 1):
|
||||
if (((errl[0].find("Permission denied") != -1)
|
||||
or (errl[0].find("does not exist") != -1))
|
||||
and self._useRunAs):
|
||||
# If we lack permissions to read but have run-as, then we should try
|
||||
# to copy the file to a world-readable location first before attempting
|
||||
# to pull it again.
|
||||
remoteTmpFile = self.getTempDir() + "/" + os.path.basename(remoteFile)
|
||||
self._checkCmdAs(["shell", "dd", "if=" + remoteFile, "of=" + remoteTmpFile])
|
||||
self._checkCmdAs(["shell", "chmod", "777", remoteTmpFile])
|
||||
self._runCmd(["pull", remoteTmpFile, localFile]).stdout.read()
|
||||
# Clean up temporary file
|
||||
self._checkCmdAs(["shell", "rm", remoteTmpFile])
|
||||
except (OSError, ValueError):
|
||||
raise DMError("Error pulling remote file '%s' to '%s'" % (remoteFile, localFile))
|
||||
|
||||
@ -412,7 +434,7 @@ class DeviceManagerADB(DeviceManager):
|
||||
ret = f.read()
|
||||
|
||||
f.close()
|
||||
mozfile.remove(localFile)
|
||||
os.remove(localFile)
|
||||
return ret
|
||||
|
||||
def getFile(self, remoteFile, localFile):
|
||||
@ -439,7 +461,7 @@ class DeviceManagerADB(DeviceManager):
|
||||
return None
|
||||
|
||||
md5 = self._getLocalHash(localFile)
|
||||
mozfile.remove(localFile)
|
||||
os.remove(localFile)
|
||||
|
||||
return md5
|
||||
|
||||
@ -500,8 +522,9 @@ class DeviceManagerADB(DeviceManager):
|
||||
|
||||
def reboot(self, wait = False, **kwargs):
|
||||
self._checkCmd(["reboot"])
|
||||
if wait:
|
||||
self._checkCmd(["wait-for-device", "shell", "ls", "/sbin"])
|
||||
if not wait:
|
||||
return
|
||||
self._checkCmd(["wait-for-device", "shell", "ls", "/sbin"])
|
||||
|
||||
def updateApp(self, appBundlePath, **kwargs):
|
||||
return self._runCmd(["install", "-r", appBundlePath]).stdout.read()
|
||||
@ -545,6 +568,7 @@ class DeviceManagerADB(DeviceManager):
|
||||
def uninstallAppAndReboot(self, appName, installPath=None):
|
||||
self.uninstallApp(appName)
|
||||
self.reboot()
|
||||
return
|
||||
|
||||
def _runCmd(self, args):
|
||||
"""
|
||||
@ -555,9 +579,26 @@ class DeviceManagerADB(DeviceManager):
|
||||
finalArgs = [self._adbPath]
|
||||
if self._deviceSerial:
|
||||
finalArgs.extend(['-s', self._deviceSerial])
|
||||
# use run-as to execute commands as the package we're testing if
|
||||
# possible
|
||||
if not self._haveRootShell and self._useRunAs and \
|
||||
args[0] == "shell" and args[1] not in [ "run-as", "am" ]:
|
||||
args.insert(1, "run-as")
|
||||
args.insert(2, self._packageName)
|
||||
finalArgs.extend(args)
|
||||
return subprocess.Popen(finalArgs, stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT)
|
||||
return subprocess.Popen(finalArgs, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||
|
||||
def _runCmdAs(self, args):
|
||||
"""
|
||||
Runs a command using adb
|
||||
If self._useRunAs is True, the command is run-as user specified in self._packageName
|
||||
|
||||
returns: returncode from subprocess.Popen
|
||||
"""
|
||||
if self._useRunAs:
|
||||
args.insert(1, "run-as")
|
||||
args.insert(2, self._packageName)
|
||||
return self._runCmd(args)
|
||||
|
||||
# timeout is specified in seconds, and if no timeout is given,
|
||||
# we will run until we hit the default_timeout specified in the __init__
|
||||
@ -569,9 +610,15 @@ class DeviceManagerADB(DeviceManager):
|
||||
returns: returncode from subprocess.Popen
|
||||
"""
|
||||
retryLimit = retryLimit or self.retryLimit
|
||||
# use run-as to execute commands as the package we're testing if
|
||||
# possible
|
||||
finalArgs = [self._adbPath]
|
||||
if self._deviceSerial:
|
||||
finalArgs.extend(['-s', self._deviceSerial])
|
||||
if not self._haveRootShell and self._useRunAs and \
|
||||
args[0] == "shell" and args[1] not in [ "run-as", "am" ]:
|
||||
args.insert(1, "run-as")
|
||||
args.insert(2, self._packageName)
|
||||
finalArgs.extend(args)
|
||||
if not timeout:
|
||||
# We are asserting that all commands will complete in this time unless otherwise specified
|
||||
@ -579,21 +626,34 @@ class DeviceManagerADB(DeviceManager):
|
||||
|
||||
timeout = int(timeout)
|
||||
retries = 0
|
||||
with tempfile.SpooledTemporaryFile() as procOut:
|
||||
while retries < retryLimit:
|
||||
proc = subprocess.Popen(finalArgs, stdout=procOut, stderr=subprocess.STDOUT)
|
||||
start_time = time.time()
|
||||
while retries < retryLimit:
|
||||
proc = subprocess.Popen(finalArgs, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||
start_time = time.time()
|
||||
ret_code = proc.poll()
|
||||
while ((time.time() - start_time) <= timeout) and ret_code == None:
|
||||
time.sleep(self._pollingInterval)
|
||||
ret_code = proc.poll()
|
||||
while ((time.time() - start_time) <= timeout) and ret_code == None:
|
||||
time.sleep(self._pollingInterval)
|
||||
ret_code = proc.poll()
|
||||
if ret_code == None:
|
||||
proc.kill()
|
||||
retries += 1
|
||||
continue
|
||||
return ret_code
|
||||
if ret_code == None:
|
||||
proc.kill()
|
||||
retries += 1
|
||||
continue
|
||||
return ret_code
|
||||
raise DMError("Timeout exceeded for _checkCmd call after %d retries." % retries)
|
||||
|
||||
def _checkCmdAs(self, args, timeout=None, retryLimit=None):
|
||||
"""
|
||||
Runs a command using adb and waits for command to finish
|
||||
If self._useRunAs is True, the command is run-as user specified in self._packageName
|
||||
If timeout is specified, the process is killed after <timeout> seconds
|
||||
|
||||
returns: returncode from subprocess.Popen
|
||||
"""
|
||||
retryLimit = retryLimit or self.retryLimit
|
||||
if (self._useRunAs):
|
||||
args.insert(1, "run-as")
|
||||
args.insert(2, self._packageName)
|
||||
return self._checkCmd(args, timeout, retryLimit=retryLimit)
|
||||
|
||||
def chmodDir(self, remoteDir, mask="777"):
|
||||
if (self.dirExists(remoteDir)):
|
||||
files = self.listFiles(remoteDir.strip())
|
||||
@ -602,12 +662,12 @@ class DeviceManagerADB(DeviceManager):
|
||||
if (self.dirExists(remoteEntry)):
|
||||
self.chmodDir(remoteEntry)
|
||||
else:
|
||||
self._checkCmd(["shell", "chmod", mask, remoteEntry])
|
||||
self._checkCmdAs(["shell", "chmod", mask, remoteEntry])
|
||||
self._logger.info("chmod %s" % remoteEntry)
|
||||
self._checkCmd(["shell", "chmod", mask, remoteDir])
|
||||
self._checkCmdAs(["shell", "chmod", mask, remoteDir])
|
||||
self._logger.info("chmod %s" % remoteDir)
|
||||
else:
|
||||
self._checkCmd(["shell", "chmod", mask, remoteDir.strip()])
|
||||
self._checkCmdAs(["shell", "chmod", mask, remoteDir.strip()])
|
||||
self._logger.info("chmod %s" % remoteDir.strip())
|
||||
|
||||
def _verifyADB(self):
|
||||
@ -651,6 +711,34 @@ class DeviceManagerADB(DeviceManager):
|
||||
if ret:
|
||||
raise DMError("unable to connect to device")
|
||||
|
||||
def _verifyRunAs(self):
|
||||
# If a valid package name is available, and certain other
|
||||
# conditions are met, devicemanagerADB can execute file operations
|
||||
# via the "run-as" command, so that pushed files and directories
|
||||
# are created by the uid associated with the package, more closely
|
||||
# echoing conditions encountered by Fennec at run time.
|
||||
# Check to see if run-as can be used here, by verifying a
|
||||
# file copy via run-as.
|
||||
self._useRunAs = False
|
||||
devroot = self.getDeviceRoot()
|
||||
if self._packageName and devroot:
|
||||
tmpDir = self.getTempDir()
|
||||
|
||||
# The problem here is that run-as doesn't cause a non-zero exit code
|
||||
# when failing because of a non-existent or non-debuggable package :(
|
||||
runAsOut = self._runCmd(["shell", "run-as", self._packageName, "mkdir", devroot + "/sanity"]).communicate()[0]
|
||||
if runAsOut.startswith("run-as:") and ("not debuggable" in runAsOut or "is unknown" in runAsOut):
|
||||
raise DMError("run-as failed sanity check")
|
||||
|
||||
tmpfile = tempfile.NamedTemporaryFile()
|
||||
self._checkCmd(["push", tmpfile.name, tmpDir + "/tmpfile"])
|
||||
self._checkCmd(["shell", "run-as", self._packageName, "dd", "if=" + tmpDir + "/tmpfile", "of=" + devroot + "/sanity/tmpfile"])
|
||||
if (self.fileExists(devroot + "/sanity/tmpfile")):
|
||||
self._logger.info("will execute commands via run-as %s" % self._packageName)
|
||||
self._useRunAs = True
|
||||
self._checkCmd(["shell", "rm", devroot + "/tmp/tmpfile"])
|
||||
self._checkCmd(["shell", "run-as", self._packageName, "rm", "-r", devroot + "/sanity"])
|
||||
|
||||
def _checkForRoot(self):
|
||||
# Check whether we _are_ root by default (some development boards work
|
||||
# this way, this is also the result of some relatively rare rooting
|
||||
@ -680,7 +768,7 @@ class DeviceManagerADB(DeviceManager):
|
||||
self._haveSu = True
|
||||
|
||||
def _isUnzipAvailable(self):
|
||||
data = self._runCmd(["shell", "unzip"]).stdout.read()
|
||||
data = self._runCmdAs(["shell", "unzip"]).stdout.read()
|
||||
if (re.search('Usage', data)):
|
||||
return True
|
||||
else:
|
||||
|
@ -2,7 +2,6 @@
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
|
||||
# You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
import datetime
|
||||
import mozlog
|
||||
import select
|
||||
import socket
|
||||
@ -426,12 +425,6 @@ class DeviceManagerSUT(DeviceManager):
|
||||
if self.dirExists(remoteDir):
|
||||
self._runCmds([{ 'cmd': 'rmdr ' + remoteDir }])
|
||||
|
||||
def moveTree(self, source, destination):
|
||||
self._runCmds([{ 'cmd': 'mv %s %s' % (source, destination) }])
|
||||
|
||||
def copyTree(self, source, destination):
|
||||
self._runCmds([{ 'cmd': 'dd if=%s of=%s' % (source, destination) }])
|
||||
|
||||
def getProcessList(self):
|
||||
data = self._runCmds([{ 'cmd': 'ps' }])
|
||||
|
||||
@ -730,83 +723,61 @@ class DeviceManagerSUT(DeviceManager):
|
||||
|
||||
self._runCmds([{ 'cmd': 'unzp %s %s' % (filePath, destDir)}])
|
||||
|
||||
def _getRebootServerSocket(self, ipAddr):
|
||||
# FIXME: getLanIp() only works on linux -- someday would be nice to
|
||||
# replace this with moznetwork, but we probably don't want to add
|
||||
# more internal deps to mozdevice while it's still being used by
|
||||
# things like talos and sut_tools which pull us in statically
|
||||
serverSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
serverSocket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
serverSocket.settimeout(60.0)
|
||||
serverSocket.bind((ipAddr, 0))
|
||||
serverSocket.listen(1)
|
||||
self._logger.debug('Created reboot callback server at %s:%d' %
|
||||
serverSocket.getsockname())
|
||||
return serverSocket
|
||||
|
||||
def _waitForRebootPing(self, serverSocket):
|
||||
def _wait_for_reboot(self, host, port):
|
||||
self._logger.debug('Creating server with %s:%d' % (host, port))
|
||||
timeout_expires = time.time() + self.reboot_timeout
|
||||
conn = None
|
||||
data = None
|
||||
startTime = datetime.datetime.now()
|
||||
waitTime = datetime.timedelta(seconds=self.reboot_timeout)
|
||||
while not data and datetime.datetime.now() - startTime < waitTime:
|
||||
self._logger.info("Waiting for reboot callback ping from device...")
|
||||
data = ''
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
s.settimeout(60.0)
|
||||
s.bind((host, port))
|
||||
s.listen(1)
|
||||
while not data and time.time() < timeout_expires:
|
||||
try:
|
||||
if not conn:
|
||||
conn, _ = serverSocket.accept()
|
||||
conn, _ = s.accept()
|
||||
# Receiving any data is good enough.
|
||||
data = conn.recv(1024)
|
||||
if data:
|
||||
self._logger.info("Received reboot callback ping from device!")
|
||||
conn.sendall('OK')
|
||||
conn.close()
|
||||
except socket.timeout:
|
||||
pass
|
||||
print '.'
|
||||
except socket.error, e:
|
||||
if e.errno != errno.EAGAIN and e.errno != errno.EWOULDBLOCK:
|
||||
raise
|
||||
if data:
|
||||
# Sleep to ensure not only we are online, but all our services are
|
||||
# also up.
|
||||
time.sleep(self.reboot_settling_time)
|
||||
else:
|
||||
self._logger.error('Timed out waiting for reboot callback.')
|
||||
s.close()
|
||||
return data
|
||||
|
||||
if not data:
|
||||
raise DMError('Timed out waiting for reboot callback.')
|
||||
|
||||
self._logger.info("Sleeping for %s seconds to wait for device "
|
||||
"to 'settle'" % self.reboot_settling_time)
|
||||
time.sleep(self.reboot_settling_time)
|
||||
|
||||
|
||||
def reboot(self, ipAddr=None, port=30000, wait=False):
|
||||
# port ^^^ is here for backwards compatibility only, we now
|
||||
# determine a port automatically and safely
|
||||
wait = (wait or ipAddr)
|
||||
|
||||
def reboot(self, ipAddr=None, port=30000):
|
||||
cmd = 'rebt'
|
||||
|
||||
self._logger.info("Rebooting device")
|
||||
self._logger.info("sending rebt command")
|
||||
|
||||
# if we're waiting, create a listening server and pass information on
|
||||
# it to the device before rebooting (we do this instead of just polling
|
||||
# to make sure the device actually rebooted -- yes, there are probably
|
||||
# simpler ways of doing this like polling uptime, but this is what we're
|
||||
# doing for now)
|
||||
if wait:
|
||||
if not ipAddr:
|
||||
nettools = NetworkTools()
|
||||
ipAddr = nettools.getLanIp()
|
||||
serverSocket = self._getRebootServerSocket(ipAddr)
|
||||
if ipAddr is not None:
|
||||
# The update.info command tells the SUTAgent to send a TCP message
|
||||
# after restarting.
|
||||
destname = '/data/data/com.mozilla.SUTAgentAndroid/files/update.info'
|
||||
data = "%s,%s\rrebooting\r" % serverSocket.getsockname()
|
||||
data = "%s,%s\rrebooting\r" % (ipAddr, port)
|
||||
self._runCmds([{'cmd': 'push %s %s' % (destname, len(data)),
|
||||
'data': data}])
|
||||
cmd += " %s %s" % serverSocket.getsockname()
|
||||
|
||||
# actually reboot device
|
||||
self._runCmds([{'cmd': cmd}])
|
||||
# if we're waiting, wait for a callback ping from the agent before
|
||||
# continuing (and throw an exception if we don't receive said ping)
|
||||
if wait:
|
||||
self._waitForRebootPing(serverSocket)
|
||||
ip, port = self._getCallbackIpAndPort(ipAddr, port)
|
||||
cmd += " %s %s" % (ip, port)
|
||||
|
||||
status = self._runCmds([{'cmd': cmd}])
|
||||
|
||||
if ipAddr is not None:
|
||||
status = self._wait_for_reboot(ipAddr, port)
|
||||
|
||||
self._logger.info("rebt- got status back: %s" % status)
|
||||
|
||||
def getInfo(self, directive=None):
|
||||
data = None
|
||||
@ -847,8 +818,10 @@ class DeviceManagerSUT(DeviceManager):
|
||||
|
||||
data = self._runCmds([{ 'cmd': cmd }])
|
||||
|
||||
if 'installation complete [0]' not in data:
|
||||
raise DMError("Remove Device Error: Error installing app. Error message: %s" % data)
|
||||
f = re.compile('Failure')
|
||||
for line in data.split():
|
||||
if (f.match(line)):
|
||||
raise DMError("Remove Device Error: Error installing app. Error message: %s" % data)
|
||||
|
||||
def uninstallApp(self, appName, installPath=None):
|
||||
cmd = 'uninstall ' + appName
|
||||
@ -871,12 +844,8 @@ class DeviceManagerSUT(DeviceManager):
|
||||
self._logger.debug("uninstallAppAndReboot: %s" % data)
|
||||
return
|
||||
|
||||
def updateApp(self, appBundlePath, processName=None, destPath=None,
|
||||
ipAddr=None, port=30000, wait=False):
|
||||
# port ^^^ is here for backwards compatibility only, we now
|
||||
# determine a port automatically and safely
|
||||
wait = (wait or ipAddr)
|
||||
|
||||
def updateApp(self, appBundlePath, processName=None, destPath=None, ipAddr=None, port=30000):
|
||||
status = None
|
||||
cmd = 'updt '
|
||||
if processName is None:
|
||||
# Then we pass '' for processName
|
||||
@ -887,23 +856,39 @@ class DeviceManagerSUT(DeviceManager):
|
||||
if destPath:
|
||||
cmd += " " + destPath
|
||||
|
||||
if wait:
|
||||
if not ipAddr:
|
||||
nettools = NetworkTools()
|
||||
ipAddr = nettools.getLanIp()
|
||||
serverSocket = self._getRebootServerSocket(ipAddr)
|
||||
cmd += " %s %s" % serverSocket.getsockname()
|
||||
if ipAddr is not None:
|
||||
ip, port = self._getCallbackIpAndPort(ipAddr, port)
|
||||
cmd += " %s %s" % (ip, port)
|
||||
|
||||
self._logger.debug("updateApp using command: " % cmd)
|
||||
|
||||
self._runCmds([{'cmd': cmd}])
|
||||
status = self._runCmds([{'cmd': cmd}])
|
||||
|
||||
if wait:
|
||||
self._waitForRebootPing(serverSocket)
|
||||
if ipAddr is not None:
|
||||
status = self._wait_for_reboot(ip, port)
|
||||
|
||||
self._logger.debug("updateApp: got status back: %s" % status)
|
||||
|
||||
def getCurrentTime(self):
|
||||
return int(self._runCmds([{ 'cmd': 'clok' }]).strip())
|
||||
|
||||
def _getCallbackIpAndPort(self, aIp, aPort):
|
||||
"""
|
||||
Connect the ipaddress and port for a callback ping.
|
||||
|
||||
Defaults to current IP address and ports starting at 30000.
|
||||
NOTE: the detection for current IP address only works on Linux!
|
||||
"""
|
||||
ip = aIp
|
||||
nettools = NetworkTools()
|
||||
if (ip == None):
|
||||
ip = nettools.getLanIp()
|
||||
if (aPort != None):
|
||||
port = nettools.findOpenPort(ip, aPort)
|
||||
else:
|
||||
port = nettools.findOpenPort(ip, 30000)
|
||||
return ip, port
|
||||
|
||||
def _formatEnvString(self, env):
|
||||
"""
|
||||
Returns a properly formatted env string for the agent.
|
||||
|
@ -40,8 +40,6 @@ class DMCli(object):
|
||||
'help': 'Don\'t fail if application is already running' }
|
||||
],
|
||||
'help': 'launches application on device' },
|
||||
'listapps': { 'function': self.listapps,
|
||||
'help': 'list applications on device' },
|
||||
'push': { 'function': self.push,
|
||||
'args': [ { 'name': 'local_file' },
|
||||
{ 'name': 'remote_file' }
|
||||
@ -99,11 +97,7 @@ class DMCli(object):
|
||||
'help': 'clear the logcat'
|
||||
},
|
||||
'reboot': { 'function': self.reboot,
|
||||
'help': 'reboot the device',
|
||||
'args': [ { 'name': '--wait',
|
||||
'action': 'store_true',
|
||||
'help': 'Wait for device to come back up before exiting' } ]
|
||||
|
||||
'help': 'reboot the device'
|
||||
},
|
||||
'isfile': { 'function': self.isfile,
|
||||
'args': [ { 'name': 'remote_file' } ],
|
||||
@ -155,9 +149,7 @@ class DMCli(object):
|
||||
help="Verbose output from DeviceManager",
|
||||
default=False)
|
||||
parser.add_argument("--host", action="store",
|
||||
help="Device hostname (only if using TCP/IP, " \
|
||||
"defaults to TEST_DEVICE environment " \
|
||||
"variable if present)",
|
||||
help="Device hostname (only if using TCP/IP)",
|
||||
default=os.environ.get('TEST_DEVICE'))
|
||||
parser.add_argument("-p", "--port", action="store",
|
||||
type=int,
|
||||
@ -165,9 +157,8 @@ class DMCli(object):
|
||||
"adb-over-tcp)", default=None)
|
||||
parser.add_argument("-m", "--dmtype", action="store",
|
||||
help="DeviceManager type (adb or sut, defaults " \
|
||||
"to DM_TRANS environment variable, if " \
|
||||
"present, or adb)",
|
||||
default=os.environ.get('DM_TRANS', 'adb'))
|
||||
"to adb)", default=os.environ.get('DM_TRANS',
|
||||
'adb'))
|
||||
parser.add_argument("-d", "--hwid", action="store",
|
||||
help="HWID", default=None)
|
||||
parser.add_argument("--package-name", action="store",
|
||||
@ -257,10 +248,6 @@ class DMCli(object):
|
||||
args.intent, url=args.url,
|
||||
failIfRunning=(not args.no_fail_if_running))
|
||||
|
||||
def listapps(self, args):
|
||||
for app in self.dm.getInstalledApps():
|
||||
print app
|
||||
|
||||
def kill(self, args):
|
||||
for name in args.process_name:
|
||||
self.dm.killProcess(name)
|
||||
@ -290,7 +277,7 @@ class DMCli(object):
|
||||
self.dm.recordLogcat()
|
||||
|
||||
def reboot(self, args):
|
||||
self.dm.reboot(wait=args.wait)
|
||||
self.dm.reboot()
|
||||
|
||||
def processlist(self, args):
|
||||
pslist = self.dm.getProcessList()
|
||||
|
@ -100,20 +100,6 @@ class DroidMixin(object):
|
||||
self.launchApplication(appName, ".App", intent, url=url, extras=extras,
|
||||
wait=wait, failIfRunning=failIfRunning)
|
||||
|
||||
def getInstalledApps(self):
|
||||
"""
|
||||
Lists applications installed on this Android device
|
||||
|
||||
Returns a list of application names in the form [ 'org.mozilla.fennec', ... ]
|
||||
"""
|
||||
output = self.shellCheckOutput(["pm", "list", "packages", "-f"])
|
||||
apps = []
|
||||
for line in output.splitlines():
|
||||
# lines are of form: package:/system/app/qik-tmo.apk=com.qiktmobile.android
|
||||
apps.append(line.split('=')[1])
|
||||
|
||||
return apps
|
||||
|
||||
class DroidADB(DeviceManagerADB, DroidMixin):
|
||||
|
||||
def getTopActivity(self):
|
||||
|
@ -4,14 +4,9 @@
|
||||
|
||||
from setuptools import setup
|
||||
|
||||
PACKAGE_NAME = 'mozdevice'
|
||||
PACKAGE_VERSION = '0.33'
|
||||
PACKAGE_VERSION = '0.29'
|
||||
|
||||
deps = ['mozfile >= 1.0',
|
||||
'mozlog',
|
||||
]
|
||||
|
||||
setup(name=PACKAGE_NAME,
|
||||
setup(name='mozdevice',
|
||||
version=PACKAGE_VERSION,
|
||||
description="Mozilla-authored device management",
|
||||
long_description="see http://mozbase.readthedocs.org/",
|
||||
@ -24,7 +19,7 @@ setup(name=PACKAGE_NAME,
|
||||
packages=['mozdevice'],
|
||||
include_package_data=True,
|
||||
zip_safe=False,
|
||||
install_requires=deps,
|
||||
install_requires=['mozlog'],
|
||||
entry_points="""
|
||||
# -*- Entry points: -*-
|
||||
[console_scripts]
|
||||
|
@ -4,7 +4,6 @@ skip-if = os == 'win'
|
||||
[sut_app.py]
|
||||
[sut_basic.py]
|
||||
[sut_chmod.py]
|
||||
[sut_copytree.py]
|
||||
[sut_fileExists.py]
|
||||
[sut_fileMethods.py]
|
||||
[sut_info.py]
|
||||
@ -13,7 +12,6 @@ skip-if = os == 'win'
|
||||
[sut_list.py]
|
||||
[sut_logcat.py]
|
||||
[sut_mkdir.py]
|
||||
[sut_movetree.py]
|
||||
[sut_ps.py]
|
||||
[sut_push.py]
|
||||
[sut_pull.py]
|
||||
|
@ -1,65 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
|
||||
# You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
import mozdevice
|
||||
import mozlog
|
||||
import unittest
|
||||
from sut import MockAgent
|
||||
|
||||
class CopyTreeTest(unittest.TestCase):
|
||||
def test_copyFile(self):
|
||||
commands = [('dd if=/mnt/sdcard/tests/test.txt of=/mnt/sdcard/tests/test2.txt', ''),
|
||||
('isdir /mnt/sdcard/tests', 'TRUE'),
|
||||
('cd /mnt/sdcard/tests', ''),
|
||||
('ls', 'test.txt\ntest2.txt')]
|
||||
|
||||
m = MockAgent(self, commands=commands)
|
||||
d = mozdevice.DroidSUT("127.0.0.1", port=m.port, logLevel=mozlog.DEBUG)
|
||||
|
||||
self.assertEqual(None, d.copyTree('/mnt/sdcard/tests/test.txt',
|
||||
'/mnt/sdcard/tests/test2.txt'))
|
||||
expected = (commands[3][1].strip()).split('\n')
|
||||
self.assertEqual(expected, d.listFiles('/mnt/sdcard/tests'))
|
||||
|
||||
def test_copyDir(self):
|
||||
commands = [('dd if=/mnt/sdcard/tests/foo of=/mnt/sdcard/tests/bar', ''),
|
||||
('isdir /mnt/sdcard/tests', 'TRUE'),
|
||||
('cd /mnt/sdcard/tests', ''),
|
||||
('ls', 'foo\nbar')]
|
||||
|
||||
m = MockAgent(self, commands=commands)
|
||||
d = mozdevice.DroidSUT("127.0.0.1", port=m.port,
|
||||
logLevel=mozlog.DEBUG)
|
||||
|
||||
self.assertEqual(None, d.copyTree('/mnt/sdcard/tests/foo',
|
||||
'/mnt/sdcard/tests/bar'))
|
||||
expected = (commands[3][1].strip()).split('\n')
|
||||
self.assertEqual(expected, d.listFiles('/mnt/sdcard/tests'))
|
||||
|
||||
def test_copyNonEmptyDir(self):
|
||||
commands = [('isdir /mnt/sdcard/tests/foo/bar', 'TRUE'),
|
||||
('dd if=/mnt/sdcard/tests/foo of=/mnt/sdcard/tests/foo2', ''),
|
||||
('isdir /mnt/sdcard/tests', 'TRUE'),
|
||||
('cd /mnt/sdcard/tests', ''),
|
||||
('ls', 'foo\nfoo2'),
|
||||
('isdir /mnt/sdcard/tests/foo2', 'TRUE'),
|
||||
('cd /mnt/sdcard/tests/foo2', ''),
|
||||
('ls', 'bar')]
|
||||
|
||||
m = MockAgent(self, commands=commands)
|
||||
d = mozdevice.DroidSUT("127.0.0.1", port=m.port,
|
||||
logLevel=mozlog.DEBUG)
|
||||
|
||||
self.assertTrue(d.dirExists('/mnt/sdcard/tests/foo/bar'))
|
||||
self.assertEqual(None, d.copyTree('/mnt/sdcard/tests/foo',
|
||||
'/mnt/sdcard/tests/foo2'))
|
||||
expected = (commands[4][1].strip()).split('\n')
|
||||
self.assertEqual(expected, d.listFiles('/mnt/sdcard/tests'))
|
||||
self.assertTrue(d.fileExists('/mnt/sdcard/tests/foo2/bar'))
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
@ -1,63 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
|
||||
# You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
import mozdevice
|
||||
import mozlog
|
||||
import unittest
|
||||
from sut import MockAgent
|
||||
|
||||
class MoveTreeTest(unittest.TestCase):
|
||||
def test_moveFile(self):
|
||||
commands = [('mv /mnt/sdcard/tests/test.txt /mnt/sdcard/tests/test1.txt', ''),
|
||||
('isdir /mnt/sdcard/tests', 'TRUE'),
|
||||
('cd /mnt/sdcard/tests', ''),
|
||||
('ls', 'test1.txt'),
|
||||
('isdir /mnt/sdcard/tests', 'TRUE'),
|
||||
('cd /mnt/sdcard/tests', ''),
|
||||
('ls', 'test1.txt')]
|
||||
|
||||
m = MockAgent(self, commands=commands)
|
||||
d = mozdevice.DroidSUT("127.0.0.1", port=m.port, logLevel=mozlog.DEBUG)
|
||||
self.assertEqual(None, d.moveTree('/mnt/sdcard/tests/test.txt',
|
||||
'/mnt/sdcard/tests/test1.txt'))
|
||||
self.assertFalse(d.fileExists('/mnt/sdcard/tests/test.txt'))
|
||||
self.assertTrue(d.fileExists('/mnt/sdcard/tests/test1.txt'))
|
||||
|
||||
def test_moveDir(self):
|
||||
commands = [("mv /mnt/sdcard/tests/foo /mnt/sdcard/tests/bar", ""),
|
||||
('isdir /mnt/sdcard/tests', 'TRUE'),
|
||||
('cd /mnt/sdcard/tests', ''),
|
||||
('ls', 'bar')]
|
||||
|
||||
m = MockAgent(self, commands=commands)
|
||||
d = mozdevice.DroidSUT("127.0.0.1", port=m.port, logLevel=mozlog.DEBUG)
|
||||
self.assertEqual(None, d.moveTree('/mnt/sdcard/tests/foo',
|
||||
'/mnt/sdcard/tests/bar'))
|
||||
self.assertTrue(d.fileExists('/mnt/sdcard/tests/bar'))
|
||||
|
||||
def test_moveNonEmptyDir(self):
|
||||
commands = [('isdir /mnt/sdcard/tests/foo/bar', 'TRUE'),
|
||||
('mv /mnt/sdcard/tests/foo /mnt/sdcard/tests/foo2', ''),
|
||||
('isdir /mnt/sdcard/tests', 'TRUE'),
|
||||
('cd /mnt/sdcard/tests', ''),
|
||||
('ls', 'foo2'),
|
||||
('isdir /mnt/sdcard/tests/foo2', 'TRUE'),
|
||||
('cd /mnt/sdcard/tests/foo2', ''),
|
||||
('ls', 'bar')]
|
||||
|
||||
m = MockAgent(self, commands=commands)
|
||||
d = mozdevice.DroidSUT("127.0.0.1", port=m.port,
|
||||
logLevel=mozlog.DEBUG)
|
||||
|
||||
self.assertTrue(d.dirExists('/mnt/sdcard/tests/foo/bar'))
|
||||
self.assertEqual(None, d.moveTree('/mnt/sdcard/tests/foo',
|
||||
'/mnt/sdcard/tests/foo2'))
|
||||
self.assertTrue(d.fileExists('/mnt/sdcard/tests/foo2'))
|
||||
self.assertTrue(d.fileExists('/mnt/sdcard/tests/foo2/bar'))
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
@ -5,16 +5,12 @@
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
from contextlib import contextmanager
|
||||
import errno
|
||||
import os
|
||||
import shutil
|
||||
import stat
|
||||
import tarfile
|
||||
import tempfile
|
||||
import time
|
||||
import urlparse
|
||||
import urllib2
|
||||
import warnings
|
||||
import zipfile
|
||||
|
||||
__all__ = ['extract_tarball',
|
||||
@ -22,7 +18,6 @@ __all__ = ['extract_tarball',
|
||||
'extract',
|
||||
'is_url',
|
||||
'load',
|
||||
'remove',
|
||||
'rmtree',
|
||||
'tree',
|
||||
'NamedTemporaryFile',
|
||||
@ -115,88 +110,55 @@ def extract(src, dest=None):
|
||||
return top_level_files
|
||||
|
||||
|
||||
### utilities for removal of files and directories
|
||||
### utilities for directory trees
|
||||
|
||||
def rmtree(dir):
|
||||
"""Deprecated wrapper method to remove a directory tree.
|
||||
|
||||
Ensure to update your code to use mozfile.remove() directly
|
||||
|
||||
:param dir: directory to be removed
|
||||
"""
|
||||
|
||||
warnings.warn("mozfile.rmtree() is deprecated in favor of mozfile.remove()",
|
||||
PendingDeprecationWarning, stacklevel=2)
|
||||
return remove(dir)
|
||||
|
||||
|
||||
def remove(path):
|
||||
"""Removes the specified file, link, or directory tree
|
||||
"""Removes the specified directory tree
|
||||
|
||||
This is a replacement for shutil.rmtree that works better under
|
||||
windows.
|
||||
|
||||
:param path: path to be removed
|
||||
"""
|
||||
|
||||
def _call_with_windows_retry(func, args=(), retry_max=5, retry_delay=0.5):
|
||||
"""
|
||||
It's possible to see spurious errors on Windows due to various things
|
||||
keeping a handle to the directory open (explorer, virus scanners, etc)
|
||||
So we try a few times if it fails with a known error.
|
||||
"""
|
||||
retry_count = 0
|
||||
while True:
|
||||
try:
|
||||
func(*args)
|
||||
except OSError, e:
|
||||
# The file or directory to be removed doesn't exist anymore
|
||||
if e.errno == errno.ENOENT:
|
||||
break
|
||||
|
||||
# Error codes are defined in:
|
||||
# http://docs.python.org/2/library/errno.html#module-errno
|
||||
if e.errno not in [errno.EACCES, errno.ENOTEMPTY]:
|
||||
raise
|
||||
|
||||
if retry_count == retry_max:
|
||||
raise
|
||||
|
||||
retry_count += 1
|
||||
|
||||
print '%s() failed for "%s". Reason: %s (%s). Retrying...' % \
|
||||
(func.__name__, args, e.strerror, e.errno)
|
||||
time.sleep(retry_delay)
|
||||
else:
|
||||
# If no exception has been thrown it should be done
|
||||
break
|
||||
|
||||
def _update_permissions(path):
|
||||
"""Sets specified pemissions depending on filetype"""
|
||||
mode = dir_mode if os.path.isdir(path) else file_mode
|
||||
_call_with_windows_retry(os.chmod, (path, mode))
|
||||
|
||||
if not os.path.exists(path):
|
||||
windows."""
|
||||
# (Thanks to Bear at the OSAF for the code.)
|
||||
if not os.path.exists(dir):
|
||||
return
|
||||
if os.path.islink(dir):
|
||||
os.remove(dir)
|
||||
return
|
||||
|
||||
path_stats = os.stat(path)
|
||||
file_mode = path_stats.st_mode | stat.S_IRUSR | stat.S_IWUSR
|
||||
dir_mode = file_mode | stat.S_IXUSR
|
||||
# Verify the directory is read/write/execute for the current user
|
||||
os.chmod(dir, 0700)
|
||||
|
||||
if os.path.isfile(path) or os.path.islink(path):
|
||||
# Verify the file or link is read/write for the current user
|
||||
_update_permissions(path)
|
||||
_call_with_windows_retry(os.remove, (path,))
|
||||
# os.listdir below only returns a list of unicode filenames
|
||||
# if the parameter is unicode.
|
||||
# If a non-unicode-named dir contains a unicode filename,
|
||||
# that filename will get garbled.
|
||||
# So force dir to be unicode.
|
||||
if not isinstance(dir, unicode):
|
||||
try:
|
||||
dir = unicode(dir, "utf-8")
|
||||
except UnicodeDecodeError:
|
||||
if os.environ.get('DEBUG') == '1':
|
||||
print("rmtree: decoding from UTF-8 failed for directory: %s" %s)
|
||||
|
||||
elif os.path.isdir(path):
|
||||
# Verify the directory is read/write/execute for the current user
|
||||
_update_permissions(path)
|
||||
for name in os.listdir(dir):
|
||||
full_name = os.path.join(dir, name)
|
||||
# on Windows, if we don't have write permission we can't remove
|
||||
# the file/directory either, so turn that on
|
||||
if os.name == 'nt':
|
||||
if not os.access(full_name, os.W_OK):
|
||||
# I think this is now redundant, but I don't have an NT
|
||||
# machine to test on, so I'm going to leave it in place
|
||||
# -warner
|
||||
os.chmod(full_name, 0600)
|
||||
|
||||
# We're ensuring that every nested item has writable permission.
|
||||
for root, dirs, files in os.walk(path):
|
||||
for entry in dirs + files:
|
||||
_update_permissions(os.path.join(root, entry))
|
||||
_call_with_windows_retry(shutil.rmtree, (path,))
|
||||
if os.path.islink(full_name):
|
||||
os.remove(full_name)
|
||||
elif os.path.isdir(full_name):
|
||||
rmtree(full_name)
|
||||
else:
|
||||
if os.path.isfile(full_name):
|
||||
os.chmod(full_name, 0700)
|
||||
os.remove(full_name)
|
||||
os.rmdir(dir)
|
||||
|
||||
|
||||
def depth(directory):
|
||||
@ -211,7 +173,6 @@ def depth(directory):
|
||||
break
|
||||
return level
|
||||
|
||||
|
||||
# ASCII delimeters
|
||||
ascii_delimeters = {
|
||||
'vertical_line' : '|',
|
||||
|
@ -4,10 +4,9 @@
|
||||
|
||||
from setuptools import setup
|
||||
|
||||
PACKAGE_NAME = 'mozfile'
|
||||
PACKAGE_VERSION = '1.1'
|
||||
PACKAGE_VERSION = '0.12'
|
||||
|
||||
setup(name=PACKAGE_NAME,
|
||||
setup(name='mozfile',
|
||||
version=PACKAGE_VERSION,
|
||||
description="Library of file utilities for use in Mozilla testing",
|
||||
long_description="see http://mozbase.readthedocs.org/",
|
||||
|
@ -1,6 +1,6 @@
|
||||
[test_extract.py]
|
||||
[test_load.py]
|
||||
[test_remove.py]
|
||||
[test_rmtree.py]
|
||||
[test_tempdir.py]
|
||||
[test_tempfile.py]
|
||||
[test_url.py]
|
||||
|
@ -8,9 +8,7 @@ files = [('foo.txt',),
|
||||
('foo', 'bar.txt'),
|
||||
('foo', 'bar', 'fleem.txt'),
|
||||
('foobar', 'fleem.txt'),
|
||||
('bar.txt'),
|
||||
('nested_tree', 'bar', 'fleem.txt'),
|
||||
('readonly.txt')]
|
||||
('bar.txt')]
|
||||
|
||||
|
||||
def create_stub():
|
||||
|
@ -1,139 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import os
|
||||
import stat
|
||||
import shutil
|
||||
import tempfile
|
||||
import threading
|
||||
import time
|
||||
import unittest
|
||||
|
||||
import mozfile
|
||||
import mozinfo
|
||||
|
||||
import stubs
|
||||
|
||||
|
||||
def mark_readonly(path):
|
||||
"""Removes all write permissions from given file/directory.
|
||||
|
||||
:param path: path of directory/file of which modes must be changed
|
||||
"""
|
||||
mode = os.stat(path)[stat.ST_MODE]
|
||||
os.chmod(path, mode & ~stat.S_IWUSR & ~stat.S_IWGRP & ~stat.S_IWOTH)
|
||||
|
||||
|
||||
class FileOpenCloseThread(threading.Thread):
|
||||
"""Helper thread for asynchronous file handling"""
|
||||
def __init__(self, path, delay, delete=False):
|
||||
threading.Thread.__init__(self)
|
||||
self.delay = delay
|
||||
self.path = path
|
||||
self.delete = delete
|
||||
|
||||
def run(self):
|
||||
with open(self.path) as f:
|
||||
time.sleep(self.delay)
|
||||
if self.delete:
|
||||
try:
|
||||
os.remove(self.path)
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
class MozfileRemoveTestCase(unittest.TestCase):
|
||||
"""Test our ability to remove directories and files"""
|
||||
|
||||
def setUp(self):
|
||||
# Generate a stub
|
||||
self.tempdir = stubs.create_stub()
|
||||
|
||||
def tearDown(self):
|
||||
if os.path.isdir(self.tempdir):
|
||||
shutil.rmtree(self.tempdir)
|
||||
|
||||
def test_remove_directory(self):
|
||||
"""Test the removal of a directory"""
|
||||
self.assertTrue(os.path.isdir(self.tempdir))
|
||||
mozfile.remove(self.tempdir)
|
||||
self.assertFalse(os.path.exists(self.tempdir))
|
||||
|
||||
def test_remove_directory_with_open_file(self):
|
||||
"""Test removing a directory with an open file"""
|
||||
# Open a file in the generated stub
|
||||
filepath = os.path.join(self.tempdir, *stubs.files[1])
|
||||
f = file(filepath, 'w')
|
||||
f.write('foo-bar')
|
||||
|
||||
# keep file open and then try removing the dir-tree
|
||||
if mozinfo.isWin:
|
||||
# On the Windows family WindowsError should be raised.
|
||||
self.assertRaises(OSError, mozfile.remove, self.tempdir)
|
||||
self.assertTrue(os.path.exists(self.tempdir))
|
||||
else:
|
||||
# Folder should be deleted on all other platforms
|
||||
mozfile.remove(self.tempdir)
|
||||
self.assertFalse(os.path.exists(self.tempdir))
|
||||
|
||||
def test_remove_closed_file(self):
|
||||
"""Test removing a closed file"""
|
||||
# Open a file in the generated stub
|
||||
filepath = os.path.join(self.tempdir, *stubs.files[1])
|
||||
with open(filepath, 'w') as f:
|
||||
f.write('foo-bar')
|
||||
|
||||
# Folder should be deleted on all platforms
|
||||
mozfile.remove(self.tempdir)
|
||||
self.assertFalse(os.path.exists(self.tempdir))
|
||||
|
||||
def test_removing_open_file_with_retry(self):
|
||||
"""Test removing a file in use with retry"""
|
||||
filepath = os.path.join(self.tempdir, *stubs.files[1])
|
||||
|
||||
thread = FileOpenCloseThread(filepath, 1)
|
||||
thread.start()
|
||||
|
||||
# Wait a bit so we can be sure the file has been opened
|
||||
time.sleep(.5)
|
||||
mozfile.remove(filepath)
|
||||
thread.join()
|
||||
|
||||
# Check deletion was successful
|
||||
self.assertFalse(os.path.exists(filepath))
|
||||
|
||||
def test_removing_already_deleted_file_with_retry(self):
|
||||
"""Test removing a meanwhile removed file with retry"""
|
||||
filepath = os.path.join(self.tempdir, *stubs.files[1])
|
||||
|
||||
thread = FileOpenCloseThread(filepath, .8, True)
|
||||
thread.start()
|
||||
|
||||
# Wait a bit so we can be sure the file has been opened and gets deleted
|
||||
# while remove() waits for the next retry
|
||||
time.sleep(.5)
|
||||
mozfile.remove(filepath)
|
||||
thread.join()
|
||||
|
||||
# Check deletion was successful
|
||||
self.assertFalse(os.path.exists(filepath))
|
||||
|
||||
def test_remove_readonly_tree(self):
|
||||
"""Test removing a read-only directory"""
|
||||
|
||||
dirpath = os.path.join(self.tempdir, "nested_tree")
|
||||
mark_readonly(dirpath)
|
||||
|
||||
# However, mozfile should change write permissions and remove dir.
|
||||
mozfile.remove(dirpath)
|
||||
|
||||
self.assertFalse(os.path.exists(dirpath))
|
||||
|
||||
def test_remove_readonly_file(self):
|
||||
"""Test removing read-only files"""
|
||||
filepath = os.path.join(self.tempdir, *stubs.files[1])
|
||||
mark_readonly(filepath)
|
||||
|
||||
# However, mozfile should change write permission and then remove file.
|
||||
mozfile.remove(filepath)
|
||||
|
||||
self.assertFalse(os.path.exists(filepath))
|
59
testing/mozbase/mozfile/tests/test_rmtree.py
Normal file
59
testing/mozbase/mozfile/tests/test_rmtree.py
Normal file
@ -0,0 +1,59 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import mozfile
|
||||
import mozinfo
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
import unittest
|
||||
import stubs
|
||||
|
||||
|
||||
class TestRemoveTree(unittest.TestCase):
|
||||
"""test our ability to remove a directory tree"""
|
||||
|
||||
def setUp(self):
|
||||
# Generate a stub
|
||||
self.tempdir = stubs.create_stub()
|
||||
|
||||
def tearDown(self):
|
||||
# Cleanup the stub if it sill exists
|
||||
if os.path.isdir(self.tempdir):
|
||||
mozfile.rmtree(self.tempdir)
|
||||
|
||||
def test_remove_directory(self):
|
||||
self.assertTrue(os.path.isdir(self.tempdir))
|
||||
try:
|
||||
mozfile.rmtree(self.tempdir)
|
||||
except:
|
||||
shutil.rmtree(self.tempdir)
|
||||
raise
|
||||
self.assertFalse(os.path.exists(self.tempdir))
|
||||
|
||||
def test_remove_directory_with_open_file(self):
|
||||
""" Tests handling when removing a directory tree
|
||||
which has a file in it is still open """
|
||||
# Open a file in the generated stub
|
||||
filepath = os.path.join(self.tempdir, *stubs.files[1])
|
||||
f = file(filepath, 'w')
|
||||
f.write('foo-bar')
|
||||
# keep file open and then try removing the dir-tree
|
||||
if mozinfo.isWin:
|
||||
# On the Windows family WindowsError should be raised.
|
||||
self.assertRaises(WindowsError, mozfile.rmtree, self.tempdir)
|
||||
else:
|
||||
# Folder should be deleted on all other platforms
|
||||
mozfile.rmtree(self.tempdir)
|
||||
self.assertFalse(os.path.exists(self.tempdir))
|
||||
|
||||
def test_remove_directory_after_closing_file(self):
|
||||
""" Test that the call to mozfile.rmtree succeeds on
|
||||
all platforms after file is closed """
|
||||
|
||||
filepath = os.path.join(self.tempdir, *stubs.files[1])
|
||||
with open(filepath, 'w') as f:
|
||||
f.write('foo-bar')
|
||||
# Delete directory tree
|
||||
mozfile.rmtree(self.tempdir)
|
||||
# Check deletion is successful
|
||||
self.assertFalse(os.path.exists(self.tempdir))
|
@ -44,9 +44,9 @@ if system in ["Microsoft", "Windows"]:
|
||||
processor = os.environ.get("PROCESSOR_ARCHITEW6432", processor)
|
||||
else:
|
||||
processor = os.environ.get('PROCESSOR_ARCHITECTURE', processor)
|
||||
system = os.environ.get("OS", system).replace('_', ' ')
|
||||
service_pack = os.sys.getwindowsversion()[4]
|
||||
info['service_pack'] = service_pack
|
||||
system = os.environ.get("OS", system).replace('_', ' ')
|
||||
service_pack = os.sys.getwindowsversion()[4]
|
||||
info['service_pack'] = service_pack
|
||||
elif system == "Linux":
|
||||
if hasattr(platform, "linux_distribution"):
|
||||
(distro, version, codename) = platform.linux_distribution()
|
||||
|
@ -6,6 +6,8 @@
|
||||
applications across platforms.
|
||||
|
||||
"""
|
||||
import mozinfo
|
||||
import mozfile
|
||||
from optparse import OptionParser
|
||||
import os
|
||||
import shutil
|
||||
@ -15,15 +17,6 @@ import tarfile
|
||||
import time
|
||||
import zipfile
|
||||
|
||||
import mozfile
|
||||
import mozinfo
|
||||
|
||||
try:
|
||||
import pefile
|
||||
has_pefile = True
|
||||
except ImportError:
|
||||
has_pefile = False
|
||||
|
||||
if mozinfo.isMac:
|
||||
from plistlib import readPlist
|
||||
|
||||
@ -58,8 +51,10 @@ def get_binary(path, app_name):
|
||||
"""Find the binary in the specified path, and return its path. If binary is
|
||||
not found throw an InvalidBinary exception.
|
||||
|
||||
:param path: Path within to search for the binary
|
||||
:param app_name: Application binary without file extension to look for
|
||||
Arguments:
|
||||
path -- the path within to search for the binary
|
||||
app_name -- application binary without file extension to look for
|
||||
|
||||
"""
|
||||
binary = None
|
||||
|
||||
@ -88,7 +83,7 @@ def get_binary(path, app_name):
|
||||
if not binary:
|
||||
# The expected binary has not been found. Make sure we clean the
|
||||
# install folder to remove any traces
|
||||
mozfile.remove(path)
|
||||
shutil.rmtree(path)
|
||||
|
||||
raise InvalidBinary('"%s" does not contain a valid binary.' % path)
|
||||
|
||||
@ -99,15 +94,18 @@ def install(src, dest):
|
||||
"""Install a zip, exe, tar.gz, tar.bz2 or dmg file, and return the path of
|
||||
the installation folder.
|
||||
|
||||
:param src: Path to the install file
|
||||
:param dest: Path to install to (to ensure we do not overwrite any existent
|
||||
files the folder should not exist yet)
|
||||
Arguments:
|
||||
src -- the path to the install file
|
||||
dest -- the path to install to (to ensure we do not overwrite any existent
|
||||
files the folder should not exist yet)
|
||||
|
||||
"""
|
||||
src = os.path.realpath(src)
|
||||
dest = os.path.realpath(dest)
|
||||
|
||||
if not is_installer(src):
|
||||
raise InvalidSource(src + ' is not valid installer file.')
|
||||
raise InvalidSource(src + ' is not a recognized file type ' +
|
||||
'(zip, exe, tar.gz, tar.bz2 or dmg)')
|
||||
|
||||
if not os.path.exists(dest):
|
||||
os.makedirs(dest)
|
||||
@ -143,10 +141,9 @@ def is_installer(src):
|
||||
Mac: dmg
|
||||
Windows: zip, exe
|
||||
|
||||
On Windows pefile will be used to determine if the executable is the
|
||||
right type, if it is installed on the system.
|
||||
Arguments:
|
||||
src -- the path to the install file
|
||||
|
||||
:param src: Path to the install file.
|
||||
"""
|
||||
src = os.path.realpath(src)
|
||||
|
||||
@ -158,31 +155,15 @@ def is_installer(src):
|
||||
elif mozinfo.isMac:
|
||||
return src.lower().endswith('.dmg')
|
||||
elif mozinfo.isWin:
|
||||
if zipfile.is_zipfile(src):
|
||||
return True
|
||||
|
||||
if os.access(src, os.X_OK) and src.lower().endswith('.exe'):
|
||||
if has_pefile:
|
||||
# try to determine if binary is actually a gecko installer
|
||||
pe_data = pefile.PE(src)
|
||||
data = {}
|
||||
for info in getattr(pe_data, 'FileInfo', []):
|
||||
if info.Key == 'StringFileInfo':
|
||||
for string in info.StringTable:
|
||||
data.update(string.entries)
|
||||
return 'BuildID' not in data
|
||||
else:
|
||||
# pefile not available, just assume a proper binary was passed in
|
||||
return True
|
||||
|
||||
return False
|
||||
return src.lower().endswith('.exe') or zipfile.is_zipfile(src)
|
||||
|
||||
|
||||
def uninstall(install_folder):
|
||||
"""Uninstalls the application in the specified path. If it has been
|
||||
installed via an installer on Windows, use the uninstaller first.
|
||||
|
||||
:param install_folder: Path of the installation folder
|
||||
Arguments:
|
||||
install_folder -- the path of the installation folder
|
||||
|
||||
"""
|
||||
install_folder = os.path.realpath(install_folder)
|
||||
@ -225,13 +206,14 @@ def uninstall(install_folder):
|
||||
|
||||
# Ensure that we remove any trace of the installation. Even the uninstaller
|
||||
# on Windows leaves files behind we have to explicitely remove.
|
||||
mozfile.remove(install_folder)
|
||||
mozfile.rmtree(install_folder)
|
||||
|
||||
|
||||
def _install_dmg(src, dest):
|
||||
"""Extract a dmg file into the destination folder and return the
|
||||
application folder.
|
||||
|
||||
Arguments:
|
||||
src -- DMG image which has to be extracted
|
||||
dest -- the path to extract to
|
||||
|
||||
|
@ -11,10 +11,10 @@ try:
|
||||
except IOError:
|
||||
description = None
|
||||
|
||||
PACKAGE_VERSION = '0.10'
|
||||
PACKAGE_VERSION = '1.8'
|
||||
|
||||
deps = ['mozinfo >= 0.7',
|
||||
'mozfile >= 1.0',
|
||||
'mozfile'
|
||||
]
|
||||
|
||||
setup(name='mozInstall',
|
||||
@ -39,7 +39,6 @@ setup(name='mozInstall',
|
||||
include_package_data=True,
|
||||
zip_safe=False,
|
||||
install_requires=deps,
|
||||
tests_require=['mozprocess >= 0.15',],
|
||||
# we have to generate two more executables for those systems that cannot run as Administrator
|
||||
# and the filename containing "install" triggers the UAC
|
||||
entry_points="""
|
||||
|
@ -76,19 +76,9 @@ class TestMozInstall(unittest.TestCase):
|
||||
if mozinfo.isWin:
|
||||
# test zip installer
|
||||
self.assertTrue(mozinstall.is_installer(self.zipfile))
|
||||
|
||||
# test exe installer
|
||||
self.assertTrue(mozinstall.is_installer(self.exe))
|
||||
|
||||
try:
|
||||
# test stub browser file
|
||||
# without pefile on the system this test will fail
|
||||
import pefile
|
||||
stub_exe = os.path.join(here, 'build_stub', 'firefox.exe')
|
||||
self.assertFalse(mozinstall.is_installer(stub_exe))
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
if mozinfo.isMac:
|
||||
self.assertTrue(mozinstall.is_installer(self.dmg))
|
||||
|
||||
|
@ -2,23 +2,9 @@
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
|
||||
# You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
"""
|
||||
Mozlog aims to standardize log formatting within Mozilla.
|
||||
|
||||
It simply wraps Python's logging_ module and adds a few convenience methods
|
||||
for logging test results and events.
|
||||
|
||||
The structured submodule takes a different approach and implements a
|
||||
JSON-based logging protocol designed for recording test results.
|
||||
"""Mozlog aims to standardize log formatting within Mozilla.
|
||||
It simply wraps Python's logging_ module and adds a few convenience methods for logging test results and events.
|
||||
"""
|
||||
|
||||
from logger import *
|
||||
from loglistener import LogMessageServer
|
||||
from loggingmixin import LoggingMixin
|
||||
|
||||
try:
|
||||
import structured
|
||||
except ImportError:
|
||||
# Structured logging doesn't work on python 2.6 which is still used on some
|
||||
# legacy test machines; https://bugzilla.mozilla.org/show_bug.cgi?id=864866
|
||||
pass
|
||||
|
@ -8,7 +8,10 @@ from logging import *
|
||||
# 'from logging import *'
|
||||
# see https://bugzilla.mozilla.org/show_bug.cgi?id=700415#c35
|
||||
from logging import getLoggerClass, addLevelName, setLoggerClass, shutdown, debug, info, basicConfig
|
||||
import json
|
||||
try:
|
||||
import json
|
||||
except ImportError:
|
||||
import simplejson as json
|
||||
|
||||
_default_level = INFO
|
||||
_LoggerClass = getLoggerClass()
|
||||
@ -60,7 +63,7 @@ class MozLogger(_LoggerClass):
|
||||
|
||||
def log_structured(self, action, params=None):
|
||||
"""Logs a structured message object."""
|
||||
if params is None:
|
||||
if (params is None):
|
||||
params = {}
|
||||
|
||||
level = params.get('_level', _default_level)
|
||||
@ -77,36 +80,23 @@ class MozLogger(_LoggerClass):
|
||||
if not isinstance(level, int):
|
||||
level = _default_level
|
||||
|
||||
params['_namespace'] = self.name
|
||||
params['action'] = action
|
||||
|
||||
# The can message be None. This is expected, and shouldn't cause
|
||||
# unstructured formatters to fail.
|
||||
message = params.get('_message')
|
||||
|
||||
message = params.get('message', 'UNKNOWN')
|
||||
self.log(level, message, extra={'params': params})
|
||||
|
||||
class JSONFormatter(Formatter):
|
||||
"""Log formatter for emitting structured JSON entries."""
|
||||
|
||||
def format(self, record):
|
||||
# Default values determined by logger metadata
|
||||
output = {
|
||||
'_time': int(round(record.created * 1000, 0)),
|
||||
'_namespace': record.name,
|
||||
'_level': getLevelName(record.levelno),
|
||||
}
|
||||
params = getattr(record, 'params')
|
||||
params['_time'] = int(round(record.created * 1000, 0))
|
||||
|
||||
# If this message was created by a call to log_structured,
|
||||
# anything specified by the caller's params should act as
|
||||
# an override.
|
||||
output.update(getattr(record, 'params', {}))
|
||||
if params.get('indent') is not None:
|
||||
return json.dumps(params, indent=params['indent'])
|
||||
|
||||
if record.msg and output.get('_message') is None:
|
||||
# For compatibility with callers using the printf like
|
||||
# API exposed by python logging, call the default formatter.
|
||||
output['_message'] = Formatter.format(self, record)
|
||||
|
||||
return json.dumps(output, indent=output.get('indent'))
|
||||
return json.dumps(params)
|
||||
|
||||
class MozFormatter(Formatter):
|
||||
"""
|
||||
@ -117,13 +107,11 @@ class MozFormatter(Formatter):
|
||||
level_length = 0
|
||||
max_level_length = len('TEST-START')
|
||||
|
||||
def __init__(self, include_timestamp=False):
|
||||
def __init__(self):
|
||||
"""
|
||||
Formatter.__init__ has fmt and datefmt parameters that won't have
|
||||
any affect on a MozFormatter instance. Bypass it to avoid confusion.
|
||||
"""
|
||||
self.include_timestamp = include_timestamp
|
||||
self.datefmt = None
|
||||
|
||||
def format(self, record):
|
||||
record.message = record.getMessage()
|
||||
@ -137,8 +125,6 @@ class MozFormatter(Formatter):
|
||||
pad = self.level_length - len(record.levelname) + 1
|
||||
sep = '|'.rjust(pad)
|
||||
fmt = '%(name)s %(levelname)s ' + sep + ' %(message)s'
|
||||
if self.include_timestamp:
|
||||
fmt = self.formatTime(record, self.datefmt) + ' ' + fmt
|
||||
return fmt % record.__dict__
|
||||
|
||||
def getLogger(name, handler=None):
|
||||
@ -157,7 +143,7 @@ def getLogger(name, handler=None):
|
||||
setLoggerClass(MozLogger)
|
||||
|
||||
if name in Logger.manager.loggerDict:
|
||||
if handler:
|
||||
if (handler):
|
||||
raise ValueError('The handler parameter requires ' + \
|
||||
'that a logger by this name does ' + \
|
||||
'not already exist')
|
||||
|
@ -1,41 +0,0 @@
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
import mozlog
|
||||
|
||||
class LoggingMixin(object):
|
||||
"""Expose a subset of logging functions to an inheriting class."""
|
||||
|
||||
def set_logger(self, logger_instance=None, name=None):
|
||||
"""Method for setting the underlying logger instance to be used."""
|
||||
|
||||
if logger_instance and not isinstance(logger_instance, mozlog.Logger):
|
||||
raise ValueError("logger_instance must be an instance of" +
|
||||
"mozlog.Logger")
|
||||
|
||||
if name is None:
|
||||
name = ".".join([self.__module__, self.__class__.__name__])
|
||||
|
||||
self._logger = logger_instance or mozlog.getLogger(name)
|
||||
|
||||
def _log_msg(self, cmd, *args, **kwargs):
|
||||
if not hasattr(self, "_logger"):
|
||||
self._logger = mozlog.getLogger(".".join([self.__module__,
|
||||
self.__class__.__name__]))
|
||||
getattr(self._logger, cmd)(*args, **kwargs)
|
||||
|
||||
def log(self, *args, **kwargs):
|
||||
self._log_msg("log", *args, **kwargs)
|
||||
|
||||
def info(self, *args, **kwargs):
|
||||
self._log_msg("info", *args, **kwargs)
|
||||
|
||||
def error(self, *args, **kwargs):
|
||||
self._log_msg("error", *args, **kwargs)
|
||||
|
||||
def warn(self, *args, **kwargs):
|
||||
self._log_msg("warn", *args, **kwargs)
|
||||
|
||||
def log_structured(self, *args, **kwargs):
|
||||
self._log_msg("log_structured", *args, **kwargs)
|
@ -4,7 +4,10 @@
|
||||
|
||||
import SocketServer
|
||||
import socket
|
||||
import json
|
||||
try:
|
||||
import json
|
||||
except ImportError:
|
||||
import simplejson as json
|
||||
|
||||
class LogMessageServer(SocketServer.TCPServer):
|
||||
def __init__(self, server_address, logger, message_callback=None, timeout=3):
|
||||
|
@ -1,6 +0,0 @@
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
import commandline
|
||||
import structuredlog
|
@ -1,88 +0,0 @@
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
|
||||
from structuredlog import StructuredLogger
|
||||
import handlers
|
||||
import formatters
|
||||
|
||||
log_formatters = {
|
||||
'raw': (formatters.JSONFormatter, "Raw structured log messages"),
|
||||
'unittest': (formatters.UnittestFormatter, "Unittest style output"),
|
||||
'xunit': (formatters.XUnitFormatter, "xUnit compatible XML"),
|
||||
'html': (formatters.HTMLFormatter, "HTML report"),
|
||||
'mach': (formatters.MachFormatter, "Uncolored mach-like output"),
|
||||
}
|
||||
|
||||
|
||||
def log_file(name):
|
||||
if name == "-":
|
||||
return sys.stdout
|
||||
else:
|
||||
return open(name, "w")
|
||||
|
||||
|
||||
def add_logging_group(parser):
|
||||
"""
|
||||
Add logging options to an argparse ArgumentParser.
|
||||
|
||||
Each formatter has a corresponding option of the form --log-{name}
|
||||
where {name} is the name of the formatter. The option takes a value
|
||||
which is either a filename or "-" to indicate stdout.
|
||||
|
||||
:param parser: The ArgumentParser object that should have logging
|
||||
options added.
|
||||
"""
|
||||
group = parser.add_argument_group("Output Logging",
|
||||
description="Options for logging output.\n"
|
||||
"Each option represents a possible logging format "
|
||||
"and takes a filename to write that format to, "
|
||||
"or '-' to write to stdout.")
|
||||
for name, (cls, help_str) in log_formatters.iteritems():
|
||||
group.add_argument("--log-" + name, type=log_file,
|
||||
help=help_str)
|
||||
|
||||
|
||||
def setup_logging(suite, args, defaults):
|
||||
"""
|
||||
Configure a structuredlogger based on command line arguments.
|
||||
|
||||
:param suite: The name of the testsuite being run
|
||||
:param args: The Namespace object produced by argparse from parsing
|
||||
command line arguments from a parser with logging arguments.
|
||||
:param defaults: A dictionary of {formatter name: output stream} to apply
|
||||
when there is no logging supplied on the command line.
|
||||
|
||||
:rtype: StructuredLogger
|
||||
"""
|
||||
logger = StructuredLogger(suite)
|
||||
prefix = "log_"
|
||||
found = False
|
||||
found_stdout_logger = False
|
||||
for name, value in args.iteritems():
|
||||
if name.startswith(prefix) and value is not None:
|
||||
found = True
|
||||
if value == sys.stdout:
|
||||
found_stdout_logger = True
|
||||
formatter_cls = log_formatters[name[len(prefix):]][0]
|
||||
logger.add_handler(handlers.StreamHandler(stream=value,
|
||||
formatter=formatter_cls()))
|
||||
|
||||
#If there is no user-specified logging, go with the default options
|
||||
if not found:
|
||||
for name, value in defaults.iteritems():
|
||||
formatter_cls = log_formatters[name][0]
|
||||
logger.add_handler(handlers.StreamHandler(stream=value,
|
||||
formatter=formatter_cls()))
|
||||
|
||||
elif not found_stdout_logger and sys.stdout in defaults.values():
|
||||
for name, value in defaults.iteritems():
|
||||
if value == sys.stdout:
|
||||
formatter_cls = log_formatters[name][0]
|
||||
logger.add_handler(handlers.StreamHandler(stream=value,
|
||||
formatter=formatter_cls()))
|
||||
|
||||
return logger
|
@ -1,11 +0,0 @@
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
import json
|
||||
from unittest import UnittestFormatter
|
||||
from xunit import XUnitFormatter
|
||||
from html import HTMLFormatter
|
||||
from machformatter import MachFormatter, MachTerminalFormatter
|
||||
|
||||
JSONFormatter = lambda: json.dumps
|
@ -1,24 +0,0 @@
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
import json
|
||||
|
||||
|
||||
class BaseFormatter(object):
|
||||
def __call__(self, data):
|
||||
if hasattr(self, data["action"]):
|
||||
handler = getattr(self, data["action"])
|
||||
return handler(data)
|
||||
|
||||
|
||||
def format_file(input_file, handler):
|
||||
while True:
|
||||
line = input_file.readline()
|
||||
if not line:
|
||||
break
|
||||
try:
|
||||
data = json.loads(line.strip())
|
||||
formatter(data)
|
||||
except:
|
||||
pass
|
@ -1 +0,0 @@
|
||||
from html import HTMLFormatter
|
@ -1,165 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
import sys
|
||||
import datetime
|
||||
import os
|
||||
|
||||
from .. import base
|
||||
|
||||
from collections import defaultdict
|
||||
|
||||
html = None
|
||||
raw = None
|
||||
|
||||
base_path = os.path.split(__file__)[0]
|
||||
|
||||
def do_defered_imports():
|
||||
global html
|
||||
global raw
|
||||
|
||||
from py.xml import html, raw
|
||||
|
||||
|
||||
class HTMLFormatter(base.BaseFormatter):
|
||||
def __init__(self):
|
||||
do_defered_imports()
|
||||
self.suite_name = None
|
||||
self.result_rows = []
|
||||
self.test_count = defaultdict(int)
|
||||
self.start_times = {}
|
||||
self.suite_times = {"start": None,
|
||||
"end": None}
|
||||
self.head = None
|
||||
|
||||
def suite_start(self, data):
|
||||
self.suite_times["start"] = data["time"]
|
||||
self.suite_name = data["source"]
|
||||
with open(os.path.join(base_path, "style.css")) as f:
|
||||
self.head = html.head(
|
||||
html.meta(charset="utf-8"),
|
||||
html.title(data["source"]),
|
||||
html.style(raw(f.read())))
|
||||
|
||||
def suite_end(self, data):
|
||||
self.suite_times["end"] = data["time"]
|
||||
return self.generate_html()
|
||||
|
||||
def test_start(self, data):
|
||||
self.start_times[data["test"]] = data["time"]
|
||||
|
||||
def test_end(self, data):
|
||||
self.make_result_html(data)
|
||||
|
||||
def make_result_html(self, data):
|
||||
cls_name = ""
|
||||
tc_name = unicode(data["test"])
|
||||
tc_time = (data["time"] - self.start_times.pop(data["test"])) / 1000.
|
||||
additional_html = []
|
||||
debug = data.get("extra", {})
|
||||
links_html = []
|
||||
|
||||
status = data["status"]
|
||||
expected = data.get("expected", status)
|
||||
|
||||
if status != expected:
|
||||
if status == "PASS":
|
||||
status_name = "UNEXPECTED_" + status
|
||||
else:
|
||||
status_name = "EXPECTED_" + status
|
||||
else:
|
||||
status_name = status
|
||||
|
||||
self.test_count[status_name] += 1
|
||||
|
||||
if status in ['SKIP', 'FAIL', 'ERROR']:
|
||||
if debug.get('screenshot'):
|
||||
screenshot = 'data:image/png;base64,%s' % debug['screenshot']
|
||||
additional_html.append(html.div(
|
||||
html.a(html.img(src=screenshot), href="#"),
|
||||
class_='screenshot'))
|
||||
for name, content in debug.items():
|
||||
try:
|
||||
if 'screenshot' in name:
|
||||
href = '#'
|
||||
else:
|
||||
# use base64 to avoid that some browser (such as Firefox, Opera)
|
||||
# treats '#' as the start of another link if the data URL contains.
|
||||
# use 'charset=utf-8' to show special characters like Chinese.
|
||||
href = 'data:text/plain;charset=utf-8;base64,%s' % base64.b64encode(content)
|
||||
links_html.append(html.a(
|
||||
name.title(),
|
||||
class_=name,
|
||||
href=href,
|
||||
target='_blank'))
|
||||
links_html.append(' ')
|
||||
except:
|
||||
pass
|
||||
|
||||
log = html.div(class_='log')
|
||||
for line in debug.get("stdout", "").splitlines():
|
||||
separator = line.startswith(' ' * 10)
|
||||
if separator:
|
||||
log.append(line[:80])
|
||||
else:
|
||||
if line.lower().find("error") != -1 or line.lower().find("exception") != -1:
|
||||
log.append(html.span(raw(cgi.escape(line)), class_='error'))
|
||||
else:
|
||||
log.append(raw(cgi.escape(line)))
|
||||
log.append(html.br())
|
||||
additional_html.append(log)
|
||||
|
||||
self.result_rows.append(
|
||||
html.tr([html.td(status_name, class_='col-result'),
|
||||
html.td(cls_name, class_='col-class'),
|
||||
html.td(tc_name, class_='col-name'),
|
||||
html.td("%.2f" % tc_time, class_='col-duration'),
|
||||
html.td(links_html, class_='col-links'),
|
||||
html.td(additional_html, class_='debug')],
|
||||
class_=status_name.lower() + ' results-table-row'))
|
||||
|
||||
def generate_html(self):
|
||||
generated = datetime.datetime.now()
|
||||
with open(os.path.join(base_path, "main.js")) as main_f:
|
||||
doc = html.html(
|
||||
self.head,
|
||||
html.body(
|
||||
html.script(
|
||||
raw(main_f.read()),
|
||||
),
|
||||
html.p('Report generated on %s at %s' % (
|
||||
generated.strftime('%d-%b-%Y'),
|
||||
generated.strftime('%H:%M:%S')),
|
||||
html.h2('Summary'),
|
||||
html.p('%i tests ran in %.1f seconds.' % (sum(self.test_count.itervalues()),
|
||||
(self.suite_times["end"] -
|
||||
self.suite_times["start"]) / 1000.),
|
||||
html.br(),
|
||||
html.span('%i passed' % self.test_count["PASS"], class_='pass'), ', ',
|
||||
html.span('%i skipped' % self.test_count["SKIP"], class_='skip'), ', ',
|
||||
html.span('%i failed' % self.test_count["FAIL"], class_='fail'), ', ',
|
||||
html.span('%i errors' % self.test_count["ERROR"], class_='error'), '.',
|
||||
html.br(),
|
||||
html.span('%i expected failures' % self.test_count["EXPECTED_FAIL"],
|
||||
class_='expected_fail'), ', ',
|
||||
html.span('%i unexpected passes' % self.test_count["UNEXPECTED_PASS"],
|
||||
class_='unexpected_pass'), '.'),
|
||||
html.h2('Results'),
|
||||
html.table([html.thead(
|
||||
html.tr([
|
||||
html.th('Result', class_='sortable', col='result'),
|
||||
html.th('Class', class_='sortable', col='class'),
|
||||
html.th('Test Name', class_='sortable', col='name'),
|
||||
html.th('Duration', class_='sortable numeric', col='duration'),
|
||||
html.th('Links')]), id='results-table-head'),
|
||||
html.tbody(self.result_rows, id='results-table-body')], id='results-table'))))
|
||||
|
||||
return doc.unicode(indent=2)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
base.format_file(sys.stdin,
|
||||
handlers.StreamHandler(stream=sys.stdout,
|
||||
formatter=HTMLFormatter()))
|
@ -1,172 +0,0 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
|
||||
* You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
function toArray(iter) {
|
||||
if (iter === null) {
|
||||
return null;
|
||||
}
|
||||
return Array.prototype.slice.call(iter);
|
||||
}
|
||||
|
||||
function find(selector, elem) {
|
||||
if (!elem) {
|
||||
elem = document;
|
||||
}
|
||||
return toArray(elem.querySelector(selector));
|
||||
}
|
||||
|
||||
function find_all(selector, elem) {
|
||||
if (!elem) {
|
||||
elem = document;
|
||||
}
|
||||
return toArray(elem.querySelectorAll(selector));
|
||||
}
|
||||
|
||||
addEventListener("DOMContentLoaded", function() {
|
||||
reset_sort_headers();
|
||||
|
||||
split_debug_onto_two_rows();
|
||||
|
||||
find_all('.col-links a.screenshot').forEach(function(elem) {
|
||||
elem.addEventListener("click",
|
||||
function(event) {
|
||||
var node = elem;
|
||||
while (node && !node.classList.contains('.results-table-row')) {
|
||||
node = node.parentNode;
|
||||
}
|
||||
if (node != null) {
|
||||
if (node.nextSibling &&
|
||||
node.nextSibling.classList.contains("debug")) {
|
||||
var href = find('.screenshot img', node.nextSibling).src;
|
||||
window.open(href);
|
||||
}
|
||||
}
|
||||
event.preventDefault();
|
||||
}, false)
|
||||
});
|
||||
|
||||
find_all('.screenshot a').forEach(function(elem) {
|
||||
elem.addEventListener("click",
|
||||
function(event) {
|
||||
window.open(find('img', elem).getAttribute('src'));
|
||||
event.preventDefault();
|
||||
}, false)
|
||||
});
|
||||
|
||||
find_all('.sortable').forEach(function(elem) {
|
||||
elem.addEventListener("click",
|
||||
function(event) {
|
||||
toggle_sort_states(elem);
|
||||
var colIndex = toArray(elem.parentNode.childNodes).indexOf(elem);
|
||||
var key = elem.classList.contains('numeric') ? key_num : key_alpha;
|
||||
sort_table(elem, key(colIndex));
|
||||
}, false)
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
function sort_table(clicked, key_func) {
|
||||
one_row_for_data();
|
||||
var rows = find_all('.results-table-row');
|
||||
var reversed = !clicked.classList.contains('asc');
|
||||
|
||||
var sorted_rows = sort(rows, key_func, reversed);
|
||||
|
||||
var parent = document.getElementById('results-table-body');
|
||||
sorted_rows.forEach(function(elem) {
|
||||
parent.appendChild(elem);
|
||||
});
|
||||
|
||||
split_debug_onto_two_rows();
|
||||
}
|
||||
|
||||
function sort(items, key_func, reversed) {
|
||||
var sort_array = items.map(function(item, i) {
|
||||
return [key_func(item), i];
|
||||
});
|
||||
var multiplier = reversed ? -1 : 1;
|
||||
|
||||
sort_array.sort(function(a, b) {
|
||||
var key_a = a[0];
|
||||
var key_b = b[0];
|
||||
return multiplier * (key_a >= key_b ? 1 : -1);
|
||||
});
|
||||
|
||||
return sort_array.map(function(item) {
|
||||
var index = item[1];
|
||||
return items[index];
|
||||
});
|
||||
}
|
||||
|
||||
function key_alpha(col_index) {
|
||||
return function(elem) {
|
||||
return elem.childNodes[col_index].firstChild.data.toLowerCase();
|
||||
};
|
||||
}
|
||||
|
||||
function key_num(col_index) {
|
||||
return function(elem) {
|
||||
return parseFloat(elem.childNodes[col_index].firstChild.data);
|
||||
};
|
||||
}
|
||||
|
||||
function reset_sort_headers() {
|
||||
find_all('.sort-icon').forEach(function(elem) {
|
||||
elem.parentNode.removeChild(elem);
|
||||
});
|
||||
find_all('.sortable').forEach(function(elem) {
|
||||
var icon = document.createElement("div");
|
||||
icon.className = "sort-icon";
|
||||
icon.textContent = "vvv";
|
||||
elem.insertBefore(icon, elem.firstChild);
|
||||
elem.classList.remove("desc", "active");
|
||||
elem.classList.add("asc", "inactive");
|
||||
});
|
||||
}
|
||||
|
||||
function toggle_sort_states(elem) {
|
||||
//if active, toggle between asc and desc
|
||||
if (elem.classList.contains('active')) {
|
||||
elem.classList.toggle('asc');
|
||||
elem.classList.toggle('desc');
|
||||
}
|
||||
|
||||
//if inactive, reset all other functions and add ascending active
|
||||
if (elem.classList.contains('inactive')) {
|
||||
reset_sort_headers();
|
||||
elem.classList.remove('inactive');
|
||||
elem.classList.add('active');
|
||||
}
|
||||
}
|
||||
|
||||
function split_debug_onto_two_rows() {
|
||||
find_all('tr.results-table-row').forEach(function(elem) {
|
||||
var new_row = document.createElement("tr")
|
||||
new_row.className = "debug";
|
||||
elem.parentNode.insertBefore(new_row, elem.nextSibling);
|
||||
find_all(".debug", elem).forEach(function (td_elem) {
|
||||
if (find(".log", td_elem)) {
|
||||
new_row.appendChild(td_elem);
|
||||
td_elem.colSpan=5;
|
||||
} else {
|
||||
td_elem.parentNode.removeChild(td_elem);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function one_row_for_data() {
|
||||
find_all('tr.results-table-row').forEach(function(elem) {
|
||||
if (elem.nextSibling.classList.contains('debug')) {
|
||||
toArray(elem.nextSibling.childNodes).forEach(
|
||||
function (td_elem) {
|
||||
elem.appendChild(td_elem);
|
||||
})
|
||||
} else {
|
||||
var new_td = document.createElement("td");
|
||||
new_td.className = "debug";
|
||||
elem.appendChild(new_td);
|
||||
}
|
||||
});
|
||||
}
|
@ -1,158 +0,0 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
|
||||
* You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
body {
|
||||
font-family: Helvetica, Arial, sans-serif;
|
||||
font-size: 12px;
|
||||
min-width: 1200px;
|
||||
color: #999;
|
||||
}
|
||||
h2 {
|
||||
font-size: 16px;
|
||||
color: black;
|
||||
}
|
||||
|
||||
p {
|
||||
color: black;
|
||||
}
|
||||
|
||||
a {
|
||||
color: #999;
|
||||
}
|
||||
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
}
|
||||
|
||||
/******************************
|
||||
* SUMMARY INFORMATION
|
||||
******************************/
|
||||
|
||||
#configuration td {
|
||||
padding: 5px;
|
||||
border: 1px solid #E6E6E6;
|
||||
}
|
||||
|
||||
#configuration tr:nth-child(odd) {
|
||||
background-color: #f6f6f6;
|
||||
}
|
||||
|
||||
/******************************
|
||||
* TEST RESULT COLORS
|
||||
******************************/
|
||||
span.passed, .passed .col-result {
|
||||
color: green;
|
||||
}
|
||||
span.expected.failure, .expected.failure .col-result {
|
||||
color: orange;
|
||||
}
|
||||
span.skipped, .skipped .col-result {
|
||||
color: orange;
|
||||
}
|
||||
span.unexpected.pass, .unexpected.pass .col-result {
|
||||
color: red;
|
||||
}
|
||||
span.failed, .failure .col-result {
|
||||
color: red;
|
||||
}
|
||||
span.error,.error .col-result {
|
||||
color: red;
|
||||
}
|
||||
|
||||
|
||||
/******************************
|
||||
* RESULTS TABLE
|
||||
*
|
||||
* 1. Table Layout
|
||||
* 2. Debug
|
||||
* 3. Sorting items
|
||||
*
|
||||
******************************/
|
||||
|
||||
/*------------------
|
||||
* 1. Table Layout
|
||||
*------------------*/
|
||||
|
||||
#results-table {
|
||||
border: 1px solid #e6e6e6;
|
||||
color: #999;
|
||||
font-size: 12px;
|
||||
width: 100%
|
||||
}
|
||||
|
||||
#results-table th, #results-table td {
|
||||
padding: 5px;
|
||||
border: 1px solid #E6E6E6;
|
||||
text-align: left
|
||||
}
|
||||
#results-table th {
|
||||
font-weight: bold
|
||||
}
|
||||
|
||||
/*------------------
|
||||
* 2. Debug
|
||||
*------------------*/
|
||||
|
||||
.log:only-child {
|
||||
height: inherit
|
||||
}
|
||||
.log {
|
||||
background-color: #e6e6e6;
|
||||
border: 1px solid #e6e6e6;
|
||||
color: black;
|
||||
display: block;
|
||||
font-family: "Courier New", Courier, monospace;
|
||||
height: 230px;
|
||||
overflow-y: scroll;
|
||||
padding: 5px;
|
||||
white-space: pre-wrap
|
||||
}
|
||||
div.screenshot {
|
||||
border: 1px solid #e6e6e6;
|
||||
float: right;
|
||||
margin-left: 5px;
|
||||
height: 240px
|
||||
}
|
||||
div.screenshot img {
|
||||
height: 240px
|
||||
}
|
||||
|
||||
/*if the result is passed or xpassed don't show debug row*/
|
||||
.passed + .debug, .unexpected.pass + .debug {
|
||||
display: none;
|
||||
}
|
||||
|
||||
/*------------------
|
||||
* 3. Sorting items
|
||||
*------------------*/
|
||||
.sortable {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.sort-icon {
|
||||
font-size: 0px;
|
||||
float: left;
|
||||
margin-right: 5px;
|
||||
margin-top: 5px;
|
||||
/*triangle*/
|
||||
width: 0;
|
||||
height: 0;
|
||||
border-left: 8px solid transparent;
|
||||
border-right: 8px solid transparent;
|
||||
}
|
||||
|
||||
.inactive .sort-icon {
|
||||
/*finish triangle*/
|
||||
border-top: 8px solid #E6E6E6;
|
||||
}
|
||||
|
||||
.asc.active .sort-icon {
|
||||
/*finish triangle*/
|
||||
border-bottom: 8px solid #999;
|
||||
}
|
||||
|
||||
.desc.active .sort-icon {
|
||||
/*finish triangle*/
|
||||
border-top: 8px solid #999;
|
||||
}
|
@ -1,143 +0,0 @@
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
import time
|
||||
|
||||
import base
|
||||
|
||||
|
||||
def format_seconds(total):
|
||||
"""Format number of seconds to MM:SS.DD form."""
|
||||
minutes, seconds = divmod(total, 60)
|
||||
return '%2d:%05.2f' % (minutes, seconds)
|
||||
|
||||
|
||||
class BaseMachFormatter(base.BaseFormatter):
|
||||
def __init__(self, start_time=None, write_interval=False, write_times=True):
|
||||
if start_time is None:
|
||||
start_time = time.time()
|
||||
self.start_time = start_time
|
||||
self.write_interval = write_interval
|
||||
self.write_times = write_times
|
||||
self.status_buffer = {}
|
||||
self.last_time = None
|
||||
|
||||
def __call__(self, data):
|
||||
s = base.BaseFormatter.__call__(self, data)
|
||||
if s is not None:
|
||||
return "%s %s\n" % (self.generic_formatter(data), s)
|
||||
|
||||
def _get_test_id(self, data):
|
||||
test_id = data["test"]
|
||||
if isinstance(test_id, list):
|
||||
test_id = tuple(test_id)
|
||||
return test_id
|
||||
|
||||
def generic_formatter(self, data):
|
||||
return "%s: %s" % (data["action"].upper(), data["thread"])
|
||||
|
||||
def suite_start(self, data):
|
||||
return "%i" % len(data["tests"])
|
||||
|
||||
def suite_end(self, data):
|
||||
return ""
|
||||
|
||||
def test_start(self, data):
|
||||
return "%s" % (self._get_test_id(data),)
|
||||
|
||||
def test_end(self, data):
|
||||
if "expected" in data:
|
||||
expected_str = ", expected %s" % data["expected"]
|
||||
else:
|
||||
expected_str = ""
|
||||
|
||||
subtests = self._get_subtest_data(data)
|
||||
unexpected = subtests["unexpected"] + (1 if "expected" in data else 0)
|
||||
|
||||
return "Harness status %s%s. Subtests passed %i/%i. Unexpected %i" % (
|
||||
data["status"], expected_str, subtests["pass"],
|
||||
subtests["count"], unexpected)
|
||||
|
||||
def test_status(self, data):
|
||||
test = self._get_test_id(data)
|
||||
if test not in self.status_buffer:
|
||||
self.buffer[test] = {"count": 0, "unexpected": 0, "pass": 0}
|
||||
self.buffer[test]["count"] += 1
|
||||
if "expected" in data:
|
||||
self.buffer[test]["unexpected"] += 1
|
||||
if data["status"] == "PASS":
|
||||
self.buffer[test]["pass"] += 1
|
||||
|
||||
def process_output(self, data):
|
||||
return '"%s" (pid:%s command:%s)' % (data["data"],
|
||||
data["process"],
|
||||
data["command"])
|
||||
|
||||
def log(self, data):
|
||||
return "%s %s" % (data["level"], data["message"])
|
||||
|
||||
def _get_subtest_data(self, data):
|
||||
test = self._get_test_id(data)
|
||||
return self.status_buffer.get(test, {"count": 0, "unexpected": 0, "pass": 0})
|
||||
|
||||
def _time(self, data):
|
||||
entry_time = (data["time"] / 1000)
|
||||
if self.write_interval and self.last_time is not None:
|
||||
t = entry_time - self.last_time
|
||||
self.last_time = entry_time
|
||||
else:
|
||||
t = entry_time - self.start_time
|
||||
|
||||
return t
|
||||
|
||||
|
||||
class MachFormatter(BaseMachFormatter):
|
||||
def __call__(self, data):
|
||||
s = BaseMachFormatter.__call__(self, data)
|
||||
if s is not None:
|
||||
return "%s %s" % (format_seconds(self._time(data)), s)
|
||||
|
||||
|
||||
class MachTerminalFormatter(BaseMachFormatter):
|
||||
def __init__(self, start_time=None, write_interval=False, write_times=True,
|
||||
terminal=None):
|
||||
self.terminal = terminal
|
||||
BaseMachFormatter.__init__(self,
|
||||
start_time=start_time,
|
||||
write_interval=write_interval,
|
||||
write_times=write_times)
|
||||
|
||||
def __call__(self, data):
|
||||
s = BaseMachFormatter.__call__(self, data)
|
||||
if s is not None:
|
||||
t = self.terminal.blue(format_seconds(self._time(entry)))
|
||||
|
||||
return '%s %s' % (t, self._colorize(entry, s))
|
||||
|
||||
def _colorize(self, data, s):
|
||||
if self.terminal is None:
|
||||
return s
|
||||
|
||||
subtests = self._get_subtest_data(data)
|
||||
|
||||
color = None
|
||||
len_action = len(data["action"])
|
||||
|
||||
if data["action"] == "test_end":
|
||||
if "expected" not in data and subtests["unexpected"] == 0:
|
||||
color = self.terminal.green
|
||||
else:
|
||||
color = self.terminal.red
|
||||
elif data["action"] in ("suite_start", "suite_end", "test_start"):
|
||||
color = self.terminal.yellow
|
||||
|
||||
if color is not None:
|
||||
result = color(s[:len_action]) + s[len_action:]
|
||||
|
||||
return result
|
||||
|
||||
if __name__ == "__main__":
|
||||
base.format_file(sys.stdin,
|
||||
handlers.StreamHandler(stream=sys.stdout,
|
||||
formatter=MachFormatter()))
|
@ -1,65 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
import sys
|
||||
|
||||
import base
|
||||
from .. import handlers
|
||||
|
||||
|
||||
class UnittestFormatter(base.BaseFormatter):
|
||||
def __init__(self):
|
||||
self.fails = []
|
||||
self.errors = []
|
||||
self.tests_run = 0
|
||||
self.start_time = None
|
||||
self.end_time = None
|
||||
|
||||
def suite_start(self, data):
|
||||
self.start_time = data["time"]
|
||||
|
||||
def test_start(self, data):
|
||||
self.tests_run += 1
|
||||
|
||||
def test_end(self, data):
|
||||
char = "."
|
||||
if "expected" in data:
|
||||
status = data["status"]
|
||||
char = {"FAIL": "F",
|
||||
"ERROR": "E",
|
||||
"PASS": "X"}[status]
|
||||
|
||||
if status == "FAIL":
|
||||
self.fails.append(data)
|
||||
elif status == "ERROR":
|
||||
self.errors.append(data)
|
||||
|
||||
elif data["status"] == "SKIP":
|
||||
char = "S"
|
||||
return char
|
||||
|
||||
def suite_end(self, data):
|
||||
self.end_time = data["time"]
|
||||
summary = "\n".join([self.output_fails(),
|
||||
self.output_errors(),
|
||||
self.output_summary()])
|
||||
return "\n%s\n" % summary
|
||||
|
||||
def output_fails(self):
|
||||
return "\n".join("FAIL %(test)s\n%(message)s\n" % data
|
||||
for data in self.fails)
|
||||
|
||||
def output_errors(self):
|
||||
return "\n".join("ERROR %(test)s\n%(message)s" % data
|
||||
for data in self.errors)
|
||||
|
||||
def output_summary(self):
|
||||
return ("Ran %i tests in %.1fs" % (self.tests_run,
|
||||
(self.end_time - self.start_time) / 1000))
|
||||
|
||||
if __name__ == "__main__":
|
||||
base.format_file(sys.stdin,
|
||||
handlers.StreamHandler(stream=sys.stdout,
|
||||
formatter=UnittestFormatter()))
|
@ -1,93 +0,0 @@
|
||||
import types
|
||||
from xml.etree import ElementTree
|
||||
|
||||
import base
|
||||
from .. import handlers
|
||||
|
||||
def format_test_id(test_id):
|
||||
"""Take a test id and return something that looks a bit like
|
||||
a class path"""
|
||||
if type(test_id) not in types.StringTypes:
|
||||
#Not sure how to deal with reftests yet
|
||||
raise NotImplementedError
|
||||
|
||||
#Turn a path into something like a class heirachy
|
||||
return test_id.replace('.', '_').replace('/', ".")
|
||||
|
||||
|
||||
class XUnitFormatter(base.BaseFormatter):
|
||||
"""The data model here isn't a great match. This implementation creates
|
||||
one <testcase> element for each subtest and one more, with no @name
|
||||
for each test"""
|
||||
|
||||
def __init__(self):
|
||||
self.tree = ElementTree.ElementTree()
|
||||
self.root = None
|
||||
self.suite_start_time = None
|
||||
self.test_start_time = None
|
||||
|
||||
self.tests_run = 0
|
||||
self.errors = 0
|
||||
self.failures = 0
|
||||
self.skips = 0
|
||||
|
||||
def suite_start(self, data):
|
||||
self.root = ElementTree.Element("testsuite")
|
||||
self.tree.root = self.root
|
||||
self.suite_start_time = data["time"]
|
||||
|
||||
def test_start(self, data):
|
||||
self.tests_run += 1
|
||||
self.test_start_time = data["time"]
|
||||
|
||||
def _create_result(self, data):
|
||||
test = ElementTree.SubElement(self.root, "testcase")
|
||||
name = format_test_id(data["test"])
|
||||
test.attrib["classname"] = name
|
||||
|
||||
if "subtest" in data:
|
||||
test.attrib["name"] = data["subtest"]
|
||||
# We generally don't know how long subtests take
|
||||
test.attrib["time"] = "0"
|
||||
else:
|
||||
if "." in name:
|
||||
test_name = name.rsplit(".", 1)[1]
|
||||
else:
|
||||
test_name = name
|
||||
test.attrib["name"] = test_name
|
||||
test.attrib["time"] = "%.2f" % ((data["time"] - self.test_start_time) / 1000)
|
||||
|
||||
if ("expected" in data and data["expected"] != data["status"]):
|
||||
if data["status"] in ("NOTRUN", "ASSERT", "ERROR"):
|
||||
result = ElementTree.SubElement(test, "error")
|
||||
self.errors += 1
|
||||
else:
|
||||
result = ElementTree.SubElement(test, "failure")
|
||||
self.failures += 1
|
||||
|
||||
result.attrib["message"] = "Expected %s, got %s" % (data["status"], data["message"])
|
||||
result.text = data["message"]
|
||||
|
||||
elif data["status"] == "SKIP":
|
||||
result = ElementTree.SubElement(test, "skipped")
|
||||
self.skips += 1
|
||||
|
||||
def test_status(self, data):
|
||||
self._create_result(data)
|
||||
|
||||
def test_end(self, data):
|
||||
self._create_result(data)
|
||||
|
||||
def suite_end(self, data):
|
||||
self.root.attrib.update({"tests": str(self.tests_run),
|
||||
"errors": str(self.errors),
|
||||
"failures": str(self.failures),
|
||||
"skiped": str(self.skips),
|
||||
"time": "%.2f" % (
|
||||
(data["time"] - self.suite_start_time) / 1000)})
|
||||
return ElementTree.tostring(self.root, encoding="utf8")
|
||||
|
||||
if __name__ == "__main__":
|
||||
base.format_file(sys.stdin,
|
||||
handlers.StreamHandler(stream=sys.stdout,
|
||||
formatter=XUnitFormatter()))
|
@ -1,52 +0,0 @@
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
from threading import Lock
|
||||
|
||||
|
||||
class BaseHandler(object):
|
||||
def __init__(self, formatter=str):
|
||||
self.formatter = formatter
|
||||
self.filters = []
|
||||
|
||||
def add_filter(self, filter_func):
|
||||
self.filters.append(filter_func)
|
||||
|
||||
def remove_filter(self, filter_func):
|
||||
self.filters.remove(filter_func)
|
||||
|
||||
def filter(self, data):
|
||||
return all(item(data) for item in self.filters)
|
||||
|
||||
|
||||
class LogLevelFilter(object):
|
||||
def __init__(self, inner, level):
|
||||
self.inner = inner
|
||||
self.level = log_levels[level.upper()]
|
||||
|
||||
def __call__(self, item):
|
||||
if (item["action"] != "log" or
|
||||
log_levels[item["level"]] <= self.level):
|
||||
return self.inner(item)
|
||||
|
||||
|
||||
class StreamHandler(BaseHandler):
|
||||
_lock = Lock()
|
||||
|
||||
def __init__(self, stream, formatter):
|
||||
assert stream is not None
|
||||
self.stream = stream
|
||||
BaseHandler.__init__(self, formatter)
|
||||
|
||||
def __call__(self, data):
|
||||
formatted = self.formatter(data)
|
||||
if not formatted:
|
||||
return
|
||||
with self._lock:
|
||||
#XXX Should encoding be the formatter's responsibility?
|
||||
try:
|
||||
self.stream.write(formatted.encode("utf8"))
|
||||
except:
|
||||
raise
|
||||
self.stream.flush()
|
@ -1,30 +0,0 @@
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
import json
|
||||
|
||||
def read(log_f, raise_on_error=False):
|
||||
"""Return a generator that will return the entries in a structured log file
|
||||
|
||||
:param log_f: file-like object containing the log enteries, one per line
|
||||
:param raise_on_error: boolean indicating whether ValueError should be raised
|
||||
for lines that cannot be decoded."""
|
||||
for line in log_f:
|
||||
try:
|
||||
yield json.loads(line)
|
||||
except ValueError:
|
||||
if raise_on_error:
|
||||
raise
|
||||
|
||||
|
||||
def map_action(log_iter, action_map):
|
||||
"""Call a callback per action for each item in a iterable containing structured
|
||||
log entries
|
||||
|
||||
:param log_iter: Iterator returning structured log entries
|
||||
:param action_map: Dictionary mapping action name to callback function. Log items
|
||||
with actions not in this dictionary will be skipped.
|
||||
"""
|
||||
for item in log_iter:
|
||||
if item["action"] in action_map:
|
||||
yield action_map[item["action"]](item)
|
@ -1,280 +0,0 @@
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import sys
|
||||
from collections import defaultdict
|
||||
from multiprocessing import current_process
|
||||
from threading import current_thread, Lock
|
||||
import time
|
||||
|
||||
"""Structured Logging for recording test results.
|
||||
|
||||
Allowed actions, and subfields:
|
||||
suite_start
|
||||
tests - List of test names
|
||||
|
||||
suite_end
|
||||
|
||||
test_start
|
||||
test - ID for the test
|
||||
|
||||
test_end
|
||||
test - ID for the test
|
||||
status [PASS | FAIL | OK | ERROR |
|
||||
TIMEOUT | CRASH | ASSERT | SKIP] - test status
|
||||
expected [As for status] - Status that the test was expected to get,
|
||||
or absent if the test got the expected status
|
||||
extra - Dictionary of harness-specific extra information e.g. debug info
|
||||
|
||||
test_status
|
||||
test - ID for the test
|
||||
subtest - Name of the subtest
|
||||
status [PASS | FAIL | TIMEOUT | NOTRUN] - test status
|
||||
expected [As for status] - Status that the subtest was expected to get,
|
||||
or absent if the subtest got the expected status
|
||||
|
||||
process_output
|
||||
process - PID of the process
|
||||
command - Command line of the process
|
||||
data - Output data from the process
|
||||
|
||||
log
|
||||
level [CRITICAL | ERROR | WARNING |
|
||||
INFO | DEBUG] - level of the logging message
|
||||
message - Message to log
|
||||
|
||||
Subfields for all messages:
|
||||
action - the action type of the current message
|
||||
time - the timestamp in ms since the epoch of the log message
|
||||
thread - name for the thread emitting the message
|
||||
pid - id of the python process in which the logger is running
|
||||
source - name for the source emitting the message
|
||||
"""
|
||||
|
||||
|
||||
log_levels = dict((k.upper(), v) for v, k in
|
||||
enumerate(["critical", "error", "warning", "info", "debug"]))
|
||||
|
||||
|
||||
class StructuredLogger(object):
|
||||
_lock = Lock()
|
||||
_handlers = defaultdict(list)
|
||||
|
||||
def __init__(self, name):
|
||||
"""
|
||||
Create a structured logger with the given name
|
||||
|
||||
:param name: The name of the logger.
|
||||
"""
|
||||
self.name = name
|
||||
|
||||
def add_handler(self, handler):
|
||||
self._handlers[self.name].append(handler)
|
||||
|
||||
def remove_handler(self, handler):
|
||||
for i, candidate_handler in enumerate(self._handlers[self.name][:]):
|
||||
if candidate_handler == handler:
|
||||
del self._handlers[self.name][i]
|
||||
break
|
||||
|
||||
@property
|
||||
def handlers(self):
|
||||
"""Get a list of handlers that will be called when a
|
||||
message is logged from this logger"""
|
||||
return self._handlers[self.name]
|
||||
|
||||
def _log_data(self, action, data=None):
|
||||
if data is None:
|
||||
data = {}
|
||||
with self._lock:
|
||||
log_data = self._make_log_data(action, data)
|
||||
for handler in self.handlers:
|
||||
handler(log_data)
|
||||
|
||||
def _make_log_data(self, action, data):
|
||||
all_data = {"action": action,
|
||||
"time": int(time.time() * 1000),
|
||||
"thread": current_thread().name,
|
||||
"pid": current_process().pid,
|
||||
"source": self.name}
|
||||
all_data.update(data)
|
||||
return all_data
|
||||
|
||||
def suite_start(self, tests):
|
||||
"""
|
||||
Log a suite_start message
|
||||
|
||||
:param tests: List of test identifiers that will be run in the suite.
|
||||
"""
|
||||
self._log_data("suite_start", {"tests": tests})
|
||||
|
||||
def suite_end(self):
|
||||
"""Log a suite_end message"""
|
||||
self._log_data("suite_end")
|
||||
|
||||
def test_start(self, test):
|
||||
"""
|
||||
"Log a test_start message
|
||||
|
||||
:param test: Identifier of the test that will run.
|
||||
"""
|
||||
self._log_data("test_start", {"test": test})
|
||||
|
||||
def test_status(self, test, subtest, status, expected="PASS", message=None):
|
||||
"""
|
||||
Log a test_status message indicating a subtest result. Tests that
|
||||
do not have subtests are not expected to produce test_status messages.
|
||||
|
||||
:param test: Identifier of the test that produced the result.
|
||||
:param subtest: Name of the subtest.
|
||||
:param status: Status string indicating the subtest result
|
||||
:param expected: Status string indicating the expected subtest result.
|
||||
:param message: String containing a message associated with the result.
|
||||
"""
|
||||
if status.upper() not in ["PASS", "FAIL", "TIMEOUT", "NOTRUN", "ASSERT"]:
|
||||
raise ValueError("Unrecognised status %s" % status)
|
||||
data = {"test": test,
|
||||
"subtest": subtest,
|
||||
"status": status.upper()}
|
||||
if message is not None:
|
||||
data["message"] = message
|
||||
if expected != data["status"]:
|
||||
data["expected"] = expected
|
||||
self._log_data("test_status", data)
|
||||
|
||||
def test_end(self, test, status, expected="OK", message=None, extra=None):
|
||||
"""
|
||||
Log a test_end message indicating that a test completed. For tests
|
||||
with subtests this indicates whether the overall test completed without
|
||||
errors. For tests without subtests this indicates the test result
|
||||
directly.
|
||||
|
||||
:param test: Identifier of the test that produced the result.
|
||||
:param status: Status string indicating the test result
|
||||
:param expected: Status string indicating the expected test result.
|
||||
:param message: String containing a message associated with the result.
|
||||
:param extra: suite-specific data associated with the test result.
|
||||
"""
|
||||
if status.upper() not in ["PASS", "FAIL", "OK", "ERROR", "TIMEOUT",
|
||||
"CRASH", "ASSERT", "SKIP"]:
|
||||
raise ValueError("Unrecognised status %s" % status)
|
||||
data = {"test": test,
|
||||
"status": status.upper()}
|
||||
if message is not None:
|
||||
data["message"] = message
|
||||
if expected != data["status"]:
|
||||
data["expected"] = expected
|
||||
if extra is not None:
|
||||
data["extra"] = extra
|
||||
self._log_data("test_end", data)
|
||||
|
||||
def process_output(self, process, data, command=None):
|
||||
"""
|
||||
Log output from a managed process.
|
||||
|
||||
:param process: A unique identifier for the process producing the output
|
||||
(typically the pid)
|
||||
:param data: The output to log
|
||||
:param command: A string representing the full command line used to start
|
||||
the process.
|
||||
"""
|
||||
data = {"process": process, "data": data}
|
||||
if command is not None:
|
||||
data["command"] = command
|
||||
self._log_data("process_output", data)
|
||||
|
||||
|
||||
def _log_func(level_name):
|
||||
def log(self, message, params=None):
|
||||
if params is None:
|
||||
params = {}
|
||||
data = {"level": level_name, "message": message}
|
||||
data.update(params)
|
||||
self._log_data("log", data)
|
||||
return log
|
||||
|
||||
|
||||
# Create all the methods on StructuredLog for debug levels
|
||||
for level_name in log_levels:
|
||||
setattr(StructuredLogger, level_name.lower(), _log_func(level_name))
|
||||
|
||||
|
||||
class StructuredLogFileLike(object):
|
||||
"""
|
||||
Wrapper for file like objects to redirect output to logger
|
||||
instead.
|
||||
|
||||
When using this it is important that the callees i.e. the logging
|
||||
handlers do not themselves try to write to the wrapped file as this
|
||||
will cause infinite recursion.
|
||||
"""
|
||||
def __init__(self, logger, level="info", prefix=None):
|
||||
self.logger = logger
|
||||
self.log_func = getattr(self.logger, level)
|
||||
self.prefix = prefix
|
||||
|
||||
def write(self, data):
|
||||
if data.endswith("\n"):
|
||||
data = data[:-1]
|
||||
if data.endswith("\r"):
|
||||
data = data[:-1]
|
||||
if self.prefix is not None:
|
||||
data = "%s: %s" % (self.prefix, data)
|
||||
self.log_func(data)
|
||||
|
||||
def flush(self):
|
||||
pass
|
||||
|
||||
|
||||
_wrapper_cls = None
|
||||
|
||||
|
||||
def std_logging_adapter(logger):
|
||||
"""
|
||||
Adapter for stdlib logging so that it produces structured
|
||||
messages rather than standard logging messages
|
||||
|
||||
:param logger: logging.Logger to wrap
|
||||
"""
|
||||
global _wrapper_cls
|
||||
import logging
|
||||
|
||||
if _wrapper_cls is not None:
|
||||
return _wrapper_cls(logger)
|
||||
|
||||
class UnstructuredHandler(logging.Handler):
|
||||
def __init__(self, name=None, level=logging.NOTSET):
|
||||
self.structured = StructuredLogger(name)
|
||||
logging.Handler.__init__(self, level=level)
|
||||
|
||||
def emit(self, record):
|
||||
if record.levelname in log_levels:
|
||||
log_func = getattr(self.structured, record.levelname.lower())
|
||||
else:
|
||||
log_func = self.logger.debug
|
||||
log_func(record.msg)
|
||||
|
||||
def handle(self, record):
|
||||
self.emit(record)
|
||||
|
||||
class LoggingWrapper(object):
|
||||
def __init__(self, wrapped):
|
||||
self.wrapped = wrapped
|
||||
self.wrapped.addHandler(UnstructuredHandler(self.wrapped.name,
|
||||
logging.getLevelName(self.wrapped.level)))
|
||||
|
||||
def add_handler(self, handler):
|
||||
self.addHandler(handler)
|
||||
|
||||
def remove_handler(self, handler):
|
||||
self.removeHandler(handler)
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self.wrapped, name)
|
||||
|
||||
_wrapper_cls = LoggingWrapper
|
||||
|
||||
return LoggingWrapper(logger)
|
@ -2,10 +2,10 @@
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
|
||||
# You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
from setuptools import setup, find_packages
|
||||
from setuptools import setup
|
||||
|
||||
PACKAGE_NAME = 'mozlog'
|
||||
PACKAGE_VERSION = '1.5'
|
||||
PACKAGE_NAME = "mozlog"
|
||||
PACKAGE_VERSION = '1.3'
|
||||
|
||||
setup(name=PACKAGE_NAME,
|
||||
version=PACKAGE_VERSION,
|
||||
@ -15,7 +15,7 @@ setup(name=PACKAGE_NAME,
|
||||
author_email='tools@lists.mozilla.org',
|
||||
url='https://wiki.mozilla.org/Auto-tools/Projects/Mozbase',
|
||||
license='MPL 1.1/GPL 2.0/LGPL 2.1',
|
||||
packages=find_packages(),
|
||||
packages=['mozlog'],
|
||||
zip_safe=False,
|
||||
tests_require=['mozfile'],
|
||||
platforms =['Any'],
|
||||
|
@ -1,2 +1 @@
|
||||
[test_logger.py]
|
||||
[test_structured.py]
|
||||
|
@ -2,16 +2,13 @@
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
|
||||
# You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
import datetime
|
||||
import json
|
||||
import socket
|
||||
import threading
|
||||
import time
|
||||
import unittest
|
||||
|
||||
import mozfile
|
||||
|
||||
import mozlog
|
||||
import mozfile
|
||||
import unittest
|
||||
import socket
|
||||
import time
|
||||
import threading
|
||||
import json
|
||||
|
||||
class ListHandler(mozlog.Handler):
|
||||
"""Mock handler appends messages to a list for later inspection."""
|
||||
@ -46,20 +43,6 @@ class TestLogging(unittest.TestCase):
|
||||
self.assertRaises(ValueError, mozlog.getLogger,
|
||||
'file.logger', handler=ListHandler())
|
||||
|
||||
def test_timestamps(self):
|
||||
"""Verifies that timestamps are included when asked for."""
|
||||
log_name = 'test'
|
||||
handler = ListHandler()
|
||||
handler.setFormatter(mozlog.MozFormatter())
|
||||
log = mozlog.getLogger(log_name, handler=handler)
|
||||
log.info('no timestamp')
|
||||
self.assertTrue(handler.messages[-1].startswith('%s ' % log_name))
|
||||
handler.setFormatter(mozlog.MozFormatter(include_timestamp=True))
|
||||
log.info('timestamp')
|
||||
# Just verify that this raises no exceptions.
|
||||
datetime.datetime.strptime(handler.messages[-1][:23],
|
||||
'%Y-%m-%d %H:%M:%S,%f')
|
||||
|
||||
class TestStructuredLogging(unittest.TestCase):
|
||||
"""Tests structured output in mozlog."""
|
||||
|
||||
@ -89,76 +72,39 @@ class TestStructuredLogging(unittest.TestCase):
|
||||
def test_structured_output(self):
|
||||
self.logger.log_structured('test_message',
|
||||
{'_level': mozlog.INFO,
|
||||
'_message': 'message one'})
|
||||
'message': 'message one'})
|
||||
self.logger.log_structured('test_message',
|
||||
{'_level': mozlog.INFO,
|
||||
'_message': 'message two'})
|
||||
self.logger.log_structured('error_message',
|
||||
{'_level': mozlog.ERROR,
|
||||
'diagnostic': 'unexpected error'})
|
||||
'message': 'message two'})
|
||||
|
||||
message_one_expected = {'_namespace': 'test.Logger',
|
||||
'_level': 'INFO',
|
||||
'_message': 'message one',
|
||||
'message': 'message one',
|
||||
'action': 'test_message'}
|
||||
message_two_expected = {'_namespace': 'test.Logger',
|
||||
'_level': 'INFO',
|
||||
'_message': 'message two',
|
||||
'message': 'message two',
|
||||
'action': 'test_message'}
|
||||
message_three_expected = {'_namespace': 'test.Logger',
|
||||
'_level': 'ERROR',
|
||||
'diagnostic': 'unexpected error',
|
||||
'action': 'error_message'}
|
||||
|
||||
message_one_actual = json.loads(self.handler.messages[0])
|
||||
message_two_actual = json.loads(self.handler.messages[1])
|
||||
message_three_actual = json.loads(self.handler.messages[2])
|
||||
|
||||
self.check_messages(message_one_expected, message_one_actual)
|
||||
self.check_messages(message_two_expected, message_two_actual)
|
||||
self.check_messages(message_three_expected, message_three_actual)
|
||||
|
||||
def test_unstructured_conversion(self):
|
||||
""" Tests that logging to a logger with a structured formatter
|
||||
via the traditional logging interface works as expected. """
|
||||
self.logger.info('%s %s %d', 'Message', 'number', 1)
|
||||
self.logger.error('Message number 2')
|
||||
self.logger.debug('Message with %s', 'some extras',
|
||||
extra={'params': {'action': 'mozlog_test_output',
|
||||
'is_failure': False}})
|
||||
message_one_expected = {'_namespace': 'test.Logger',
|
||||
'_level': 'INFO',
|
||||
'_message': 'Message number 1'}
|
||||
message_two_expected = {'_namespace': 'test.Logger',
|
||||
'_level': 'ERROR',
|
||||
'_message': 'Message number 2'}
|
||||
message_three_expected = {'_namespace': 'test.Logger',
|
||||
'_level': 'DEBUG',
|
||||
'_message': 'Message with some extras',
|
||||
'action': 'mozlog_test_output',
|
||||
'is_failure': False}
|
||||
|
||||
message_one_actual = json.loads(self.handler.messages[0])
|
||||
message_two_actual = json.loads(self.handler.messages[1])
|
||||
message_three_actual = json.loads(self.handler.messages[2])
|
||||
|
||||
self.check_messages(message_one_expected, message_one_actual)
|
||||
self.check_messages(message_two_expected, message_two_actual)
|
||||
self.check_messages(message_three_expected, message_three_actual)
|
||||
|
||||
def message_callback(self):
|
||||
if len(self.handler.messages) == 3:
|
||||
message_one_expected = {'_namespace': 'test.Logger',
|
||||
'_level': 'DEBUG',
|
||||
'_message': 'socket message one',
|
||||
'message': 'socket message one',
|
||||
'action': 'test_message'}
|
||||
message_two_expected = {'_namespace': 'test.Logger',
|
||||
'_level': 'DEBUG',
|
||||
'_message': 'socket message two',
|
||||
'message': 'socket message two',
|
||||
'action': 'test_message'}
|
||||
message_three_expected = {'_namespace': 'test.Logger',
|
||||
'_level': 'DEBUG',
|
||||
'_message': 'socket message three',
|
||||
'message': 'socket message three',
|
||||
'action': 'test_message'}
|
||||
|
||||
message_one_actual = json.loads(self.handler.messages[0])
|
||||
@ -178,16 +124,21 @@ class TestStructuredLogging(unittest.TestCase):
|
||||
message_callback=self.message_callback,
|
||||
timeout=0.5)
|
||||
|
||||
message_string_one = json.dumps({'_message': 'socket message one',
|
||||
# The namespace fields of these messages will be overwritten.
|
||||
message_string_one = json.dumps({'message': 'socket message one',
|
||||
'action': 'test_message',
|
||||
'_level': 'DEBUG'})
|
||||
message_string_two = json.dumps({'_message': 'socket message two',
|
||||
'action': 'test_message',
|
||||
'_level': 'DEBUG'})
|
||||
'_level': 'DEBUG',
|
||||
'_namespace': 'foo.logger'})
|
||||
|
||||
message_string_three = json.dumps({'_message': 'socket message three',
|
||||
message_string_two = json.dumps({'message': 'socket message two',
|
||||
'action': 'test_message',
|
||||
'_level': 'DEBUG',
|
||||
'_namespace': 'foo.logger'})
|
||||
|
||||
message_string_three = json.dumps({'message': 'socket message three',
|
||||
'action': 'test_message',
|
||||
'_level': 'DEBUG'})
|
||||
'_level': 'DEBUG',
|
||||
'_namespace': 'foo.logger'})
|
||||
|
||||
message_string = message_string_one + '\n' + \
|
||||
message_string_two + '\n' + \
|
||||
@ -215,45 +166,5 @@ class TestStructuredLogging(unittest.TestCase):
|
||||
|
||||
server_thread.join()
|
||||
|
||||
class Loggable(mozlog.LoggingMixin):
|
||||
"""Trivial class inheriting from LoggingMixin"""
|
||||
pass
|
||||
|
||||
class TestLoggingMixin(unittest.TestCase):
|
||||
"""Tests basic use of LoggingMixin"""
|
||||
|
||||
def test_mixin(self):
|
||||
loggable = Loggable()
|
||||
self.assertTrue(not hasattr(loggable, "_logger"))
|
||||
loggable.log(mozlog.INFO, "This will instantiate the logger")
|
||||
self.assertTrue(hasattr(loggable, "_logger"))
|
||||
self.assertEqual(loggable._logger.name, "test_logger.Loggable")
|
||||
|
||||
self.assertRaises(ValueError, loggable.set_logger,
|
||||
"not a logger")
|
||||
|
||||
logger = mozlog.MozLogger('test.mixin')
|
||||
handler = ListHandler()
|
||||
logger.addHandler(handler)
|
||||
loggable.set_logger(logger)
|
||||
self.assertTrue(isinstance(loggable._logger.handlers[0],
|
||||
ListHandler))
|
||||
self.assertEqual(loggable._logger.name, "test.mixin")
|
||||
|
||||
loggable.log(mozlog.WARN, 'message for "log" method')
|
||||
loggable.info('message for "info" method')
|
||||
loggable.error('message for "error" method')
|
||||
loggable.log_structured('test_message',
|
||||
params={'_message': 'message for ' + \
|
||||
'"log_structured" method'})
|
||||
|
||||
expected_messages = ['message for "log" method',
|
||||
'message for "info" method',
|
||||
'message for "error" method',
|
||||
'message for "log_structured" method']
|
||||
|
||||
actual_messages = loggable._logger.handlers[0].messages
|
||||
self.assertEqual(expected_messages, actual_messages)
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
@ -1,185 +0,0 @@
|
||||
import os
|
||||
import time
|
||||
import unittest
|
||||
import StringIO
|
||||
|
||||
from mozlog.structured import structuredlog
|
||||
|
||||
|
||||
class TestHandler(object):
|
||||
def __init__(self):
|
||||
self.last_item = None
|
||||
|
||||
def __call__(self, data):
|
||||
self.last_item = data
|
||||
|
||||
|
||||
class BaseStructuredTest(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.logger = structuredlog.StructuredLogger("test")
|
||||
self.handler = TestHandler()
|
||||
self.logger.add_handler(self.handler)
|
||||
|
||||
@property
|
||||
def last_item(self):
|
||||
return self.handler.last_item
|
||||
|
||||
def assert_log_equals(self, expected, actual=None):
|
||||
if actual is None:
|
||||
actual = self.last_item
|
||||
|
||||
all_expected = {"pid": os.getpid(),
|
||||
"thread": "MainThread",
|
||||
"source": "test"}
|
||||
specials = set(["time"])
|
||||
|
||||
all_expected.update(expected)
|
||||
for key, value in all_expected.iteritems():
|
||||
self.assertEqual(actual[key], value)
|
||||
|
||||
self.assertAlmostEqual(actual["time"], time.time()*1000, delta=100)
|
||||
self.assertEquals(set(all_expected.keys()) | specials, set(actual.keys()))
|
||||
|
||||
|
||||
class TestStructuredLog(BaseStructuredTest):
|
||||
def test_suite_start(self):
|
||||
self.logger.suite_start(["test"])
|
||||
self.assert_log_equals({"action": "suite_start",
|
||||
"tests":["test"]})
|
||||
|
||||
def test_suite_end(self):
|
||||
self.logger.suite_end()
|
||||
self.assert_log_equals({"action": "suite_end"})
|
||||
|
||||
def test_start(self):
|
||||
self.logger.test_start("test1")
|
||||
self.assert_log_equals({"action": "test_start",
|
||||
"test":"test1"})
|
||||
|
||||
self.logger.test_start(("test1", "==", "test1-ref"))
|
||||
self.assert_log_equals({"action": "test_start",
|
||||
"test":("test1", "==", "test1-ref")})
|
||||
|
||||
def test_status(self):
|
||||
self.logger.test_status("test1", "subtest name", "fail", expected="FAIL", message="Test message")
|
||||
self.assert_log_equals({"action": "test_status",
|
||||
"subtest": "subtest name",
|
||||
"status": "FAIL",
|
||||
"message": "Test message",
|
||||
"test":"test1"})
|
||||
|
||||
def test_status_1(self):
|
||||
self.logger.test_status("test1", "subtest name", "fail")
|
||||
self.assert_log_equals({"action": "test_status",
|
||||
"subtest": "subtest name",
|
||||
"status": "FAIL",
|
||||
"expected": "PASS",
|
||||
"test":"test1"})
|
||||
|
||||
def test_status_2(self):
|
||||
self.assertRaises(ValueError, self.logger.test_status, "test1", "subtest name", "XXXUNKNOWNXXX")
|
||||
|
||||
def test_end(self):
|
||||
self.logger.test_end("test1", "fail", message="Test message")
|
||||
self.assert_log_equals({"action": "test_end",
|
||||
"status": "FAIL",
|
||||
"expected": "OK",
|
||||
"message": "Test message",
|
||||
"test":"test1"})
|
||||
|
||||
def test_end_1(self):
|
||||
self.logger.test_end("test1", "PASS", expected="PASS", extra={"data":123})
|
||||
self.assert_log_equals({"action": "test_end",
|
||||
"status": "PASS",
|
||||
"extra": {"data": 123},
|
||||
"test":"test1"})
|
||||
|
||||
def test_end_2(self):
|
||||
self.assertRaises(ValueError, self.logger.test_end, "test1", "XXXUNKNOWNXXX")
|
||||
|
||||
def test_process(self):
|
||||
self.logger.process_output(1234, "test output")
|
||||
self.assert_log_equals({"action": "process_output",
|
||||
"process": 1234,
|
||||
"data": "test output"})
|
||||
|
||||
def test_log(self):
|
||||
for level in ["critical", "error", "warning", "info", "debug"]:
|
||||
getattr(self.logger, level)("message")
|
||||
self.assert_log_equals({"action": "log",
|
||||
"level": level.upper(),
|
||||
"message": "message"})
|
||||
|
||||
def test_logging_adapter(self):
|
||||
import logging
|
||||
logging.basicConfig(level="DEBUG")
|
||||
old_level = logging.root.getEffectiveLevel()
|
||||
logging.root.setLevel("DEBUG")
|
||||
|
||||
std_logger = logging.getLogger("test")
|
||||
std_logger.setLevel("DEBUG")
|
||||
|
||||
logger = structuredlog.std_logging_adapter(std_logger)
|
||||
|
||||
try:
|
||||
for level in ["critical", "error", "warning", "info", "debug"]:
|
||||
getattr(logger, level)("message")
|
||||
self.assert_log_equals({"action": "log",
|
||||
"level": level.upper(),
|
||||
"message": "message"})
|
||||
finally:
|
||||
logging.root.setLevel(old_level)
|
||||
|
||||
def test_add_remove_handlers(self):
|
||||
handler = TestHandler()
|
||||
self.logger.add_handler(handler)
|
||||
self.logger.info("test1")
|
||||
|
||||
self.assert_log_equals({"action": "log",
|
||||
"level": "INFO",
|
||||
"message": "test1"})
|
||||
|
||||
self.assert_log_equals({"action": "log",
|
||||
"level": "INFO",
|
||||
"message": "test1"}, actual=handler.last_item)
|
||||
|
||||
self.logger.remove_handler(handler)
|
||||
self.logger.info("test2")
|
||||
|
||||
self.assert_log_equals({"action": "log",
|
||||
"level": "INFO",
|
||||
"message": "test2"})
|
||||
|
||||
self.assert_log_equals({"action": "log",
|
||||
"level": "INFO",
|
||||
"message": "test1"}, actual=handler.last_item)
|
||||
|
||||
def test_wrapper(self):
|
||||
file_like = structuredlog.StructuredLogFileLike(self.logger)
|
||||
|
||||
file_like.write("line 1")
|
||||
|
||||
self.assert_log_equals({"action": "log",
|
||||
"level": "INFO",
|
||||
"message": "line 1"})
|
||||
|
||||
file_like.write("line 2\n")
|
||||
|
||||
self.assert_log_equals({"action": "log",
|
||||
"level": "INFO",
|
||||
"message": "line 2"})
|
||||
|
||||
file_like.write("line 3\r")
|
||||
|
||||
self.assert_log_equals({"action": "log",
|
||||
"level": "INFO",
|
||||
"message": "line 3"})
|
||||
|
||||
file_like.write("line 4\r\n")
|
||||
|
||||
self.assert_log_equals({"action": "log",
|
||||
"level": "INFO",
|
||||
"message": "line 4"})
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
@ -39,7 +39,7 @@ class ProcessHandlerMixin(object):
|
||||
:param env: is the environment to use for the process (defaults to os.environ).
|
||||
:param ignore_children: causes system to ignore child processes when True, defaults to False (which tracks child processes).
|
||||
:param kill_on_timeout: when True, the process will be killed when a timeout is reached. When False, the caller is responsible for killing the process. Failure to do so could cause a call to wait() to hang indefinitely. (Defaults to True.)
|
||||
:param processOutputLine: function or list of functions to be called for each line of output produced by the process (defaults to None).
|
||||
:param processOutputLine: function to be called for each line of output produced by the process (defaults to None).
|
||||
:param onTimeout: function to be called when the process times out.
|
||||
:param onFinish: function to be called when the process terminates normally without timing out.
|
||||
:param kwargs: additional keyword args to pass directly into Popen.
|
||||
@ -100,18 +100,18 @@ class ProcessHandlerMixin(object):
|
||||
|
||||
def __del__(self, _maxint=sys.maxint):
|
||||
if isWin:
|
||||
handle = getattr(self, '_handle', None)
|
||||
if handle:
|
||||
if self._handle:
|
||||
if hasattr(self, '_internal_poll'):
|
||||
self._internal_poll(_deadstate=_maxint)
|
||||
else:
|
||||
self.poll(_deadstate=sys.maxint)
|
||||
if handle or self._job or self._io_port:
|
||||
if self._handle or self._job or self._io_port:
|
||||
self._cleanup()
|
||||
else:
|
||||
subprocess.Popen.__del__(self)
|
||||
|
||||
def kill(self, sig=None):
|
||||
self.returncode = 0
|
||||
if isWin:
|
||||
if not self._ignore_children and self._handle and self._job:
|
||||
winprocess.TerminateJobObject(self._job, winprocess.ERROR_CONTROL_C_EXIT)
|
||||
@ -122,7 +122,7 @@ class ProcessHandlerMixin(object):
|
||||
winprocess.TerminateProcess(self._handle, winprocess.ERROR_CONTROL_C_EXIT)
|
||||
except:
|
||||
err = "Could not terminate process"
|
||||
winprocess.GetExitCodeProcess(self._handle)
|
||||
self.returncode = winprocess.GetExitCodeProcess(self._handle)
|
||||
self._cleanup()
|
||||
if err is not None:
|
||||
raise OSError(err)
|
||||
@ -137,21 +137,11 @@ class ProcessHandlerMixin(object):
|
||||
print >> sys.stdout, "Could not kill process, could not find pid: %s, assuming it's already dead" % self.pid
|
||||
else:
|
||||
os.kill(self.pid, sig)
|
||||
self.returncode = -sig
|
||||
|
||||
self.returncode = self.wait()
|
||||
self._cleanup()
|
||||
return self.returncode
|
||||
|
||||
def poll(self):
|
||||
""" Popen.poll
|
||||
Check if child process has terminated. Set and return returncode attribute.
|
||||
"""
|
||||
# If we have a handle, the process is alive
|
||||
if isWin and getattr(self, '_handle', None):
|
||||
return None
|
||||
|
||||
return subprocess.Popen.poll(self)
|
||||
|
||||
def wait(self):
|
||||
""" Popen.wait
|
||||
Called to wait for a running process to shut down and return
|
||||
@ -167,23 +157,12 @@ class ProcessHandlerMixin(object):
|
||||
|
||||
if isWin:
|
||||
# Redefine the execute child so that we can track process groups
|
||||
def _execute_child(self, *args_tuple):
|
||||
# workaround for bug 950894
|
||||
if sys.hexversion < 0x02070600: # prior to 2.7.6
|
||||
(args, executable, preexec_fn, close_fds,
|
||||
cwd, env, universal_newlines, startupinfo,
|
||||
creationflags, shell,
|
||||
p2cread, p2cwrite,
|
||||
c2pread, c2pwrite,
|
||||
errread, errwrite) = args_tuple
|
||||
to_close = set()
|
||||
else: # 2.7.6 and later
|
||||
(args, executable, preexec_fn, close_fds,
|
||||
cwd, env, universal_newlines, startupinfo,
|
||||
creationflags, shell, to_close,
|
||||
p2cread, p2cwrite,
|
||||
c2pread, c2pwrite,
|
||||
errread, errwrite) = args_tuple
|
||||
def _execute_child(self, args, executable, preexec_fn, close_fds,
|
||||
cwd, env, universal_newlines, startupinfo,
|
||||
creationflags, shell,
|
||||
p2cread, p2cwrite,
|
||||
c2pread, c2pwrite,
|
||||
errread, errwrite):
|
||||
if not isinstance(args, basestring):
|
||||
args = subprocess.list2cmdline(args)
|
||||
|
||||
@ -562,8 +541,7 @@ falling back to not using job objects for managing child processes"""
|
||||
# close
|
||||
print >> sys.stderr, "Encountered error waiting for pid to close: %s" % e
|
||||
raise
|
||||
|
||||
return self.returncode
|
||||
return 0
|
||||
|
||||
else:
|
||||
# For non-group wait, call base class
|
||||
@ -610,8 +588,6 @@ falling back to not using job objects for managing child processes"""
|
||||
self.env = env
|
||||
|
||||
# handlers
|
||||
if callable(processOutputLine):
|
||||
processOutputLine = [processOutputLine]
|
||||
self.processOutputLineHandlers = list(processOutputLine)
|
||||
self.onTimeoutHandlers = list(onTimeout)
|
||||
self.onFinishHandlers = list(onFinish)
|
||||
@ -681,12 +657,7 @@ falling back to not using job objects for managing child processes"""
|
||||
(has no effect on Windows)
|
||||
"""
|
||||
try:
|
||||
self.proc.kill(sig=sig)
|
||||
|
||||
# When we kill the the managed process we also have to wait for the
|
||||
# outThread to be finished. Otherwise consumers would have to assume
|
||||
# that it still has not completely shutdown.
|
||||
return self.wait()
|
||||
return self.proc.kill(sig=sig)
|
||||
except AttributeError:
|
||||
# Try to print a relevant error message.
|
||||
if not self.proc:
|
||||
@ -724,25 +695,6 @@ falling back to not using job objects for managing child processes"""
|
||||
for handler in self.onFinishHandlers:
|
||||
handler()
|
||||
|
||||
def poll(self):
|
||||
"""Check if child process has terminated
|
||||
|
||||
Returns the current returncode value:
|
||||
- None if the process hasn't terminated yet
|
||||
- A negative number if the process was killed by signal N (Unix only)
|
||||
- '0' if the process ended without failures
|
||||
|
||||
"""
|
||||
# Ensure that we first check for the outputThread status. Otherwise
|
||||
# we might mark the process as finished while output is still getting
|
||||
# processed.
|
||||
if self.outThread and self.outThread.isAlive():
|
||||
return None
|
||||
elif hasattr(self.proc, "returncode"):
|
||||
return self.proc.returncode
|
||||
else:
|
||||
return self.proc.poll()
|
||||
|
||||
def processOutput(self, timeout=None, outputTimeout=None):
|
||||
"""
|
||||
Handle process output until the process terminates or times out.
|
||||
@ -801,11 +753,9 @@ falling back to not using job objects for managing child processes"""
|
||||
This timeout only causes the wait function to return and
|
||||
does not kill the process.
|
||||
|
||||
Returns the process exit code value:
|
||||
- None if the process hasn't terminated yet
|
||||
- A negative number if the process was killed by signal N (Unix only)
|
||||
- '0' if the process ended without failures
|
||||
|
||||
Returns the process' exit code. A None value indicates the
|
||||
process hasn't terminated yet. A negative value -N indicates
|
||||
the process was killed by signal N (Unix only).
|
||||
"""
|
||||
if self.outThread:
|
||||
# Thread.join() blocks the main thread until outThread is finished
|
||||
@ -929,9 +879,9 @@ class ProcessHandler(ProcessHandlerMixin):
|
||||
Convenience class for handling processes with default output handlers.
|
||||
|
||||
If no processOutputLine keyword argument is specified, write all
|
||||
output to stdout. Otherwise, the function or the list of functions
|
||||
specified by this argument will be called for each line of output;
|
||||
the output will not be written to stdout automatically.
|
||||
output to stdout. Otherwise, the function specified by this argument
|
||||
will be called for each line of output; the output will not be written
|
||||
to stdout automatically.
|
||||
|
||||
If storeOutput==True, the output produced by the process will be saved
|
||||
as self.output.
|
||||
@ -942,8 +892,6 @@ class ProcessHandler(ProcessHandlerMixin):
|
||||
|
||||
def __init__(self, cmd, logfile=None, storeOutput=True, **kwargs):
|
||||
kwargs.setdefault('processOutputLine', [])
|
||||
if callable(kwargs['processOutputLine']):
|
||||
kwargs['processOutputLine'] = [kwargs['processOutputLine']]
|
||||
|
||||
# Print to standard output only if no outputline provided
|
||||
if not kwargs['processOutputLine']:
|
||||
|
@ -4,7 +4,7 @@
|
||||
|
||||
from setuptools import setup
|
||||
|
||||
PACKAGE_VERSION = '0.18'
|
||||
PACKAGE_VERSION = '0.14'
|
||||
|
||||
setup(name='mozprocess',
|
||||
version=PACKAGE_VERSION,
|
||||
|
@ -11,6 +11,5 @@ disabled = bug 877864
|
||||
[test_mozprocess_kill_broad_wait.py]
|
||||
disabled = bug 921632
|
||||
[test_mozprocess_misc.py]
|
||||
[test_mozprocess_poll.py]
|
||||
[test_mozprocess_wait.py]
|
||||
[test_mozprocess_nonewline.py]
|
||||
|
@ -1,2 +0,0 @@
|
||||
[main]
|
||||
maxtime=10
|
@ -1,3 +0,0 @@
|
||||
import sys
|
||||
print "this is a newline"
|
||||
sys.stdout.write("this has NO newline")
|
@ -70,10 +70,8 @@ class ProcTest(unittest.TestCase):
|
||||
"""
|
||||
if 'returncode' in expectedfail:
|
||||
self.assertTrue(returncode, "Detected an unexpected return code of: %s" % returncode)
|
||||
elif isalive:
|
||||
self.assertEqual(returncode, None, "Detected not None return code of: %s" % returncode)
|
||||
else:
|
||||
self.assertNotEqual(returncode, None, "Detected unexpected None return code of")
|
||||
elif not isalive:
|
||||
self.assertTrue(returncode == 0, "Detected non-zero return code of: %d" % returncode)
|
||||
|
||||
if 'didtimeout' in expectedfail:
|
||||
self.assertTrue(didtimeout, "Detected that process didn't time out")
|
||||
|
@ -1,33 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import os
|
||||
import unittest
|
||||
import proctest
|
||||
from mozprocess import processhandler
|
||||
|
||||
here = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
class ProcTestMisc(proctest.ProcTest):
|
||||
""" Class to test misc operations """
|
||||
|
||||
def test_process_output_nonewline(self):
|
||||
"""
|
||||
Process is started, outputs data with no newline
|
||||
"""
|
||||
p = processhandler.ProcessHandler([self.python, "procnonewline.py"],
|
||||
cwd=here)
|
||||
|
||||
p.run()
|
||||
p.processOutput(timeout=5)
|
||||
p.wait()
|
||||
|
||||
detected, output = proctest.check_for_process("procnonewline.py")
|
||||
self.determine_status(detected,
|
||||
output,
|
||||
p.proc.returncode,
|
||||
p.didTimeout,
|
||||
False,
|
||||
())
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
@ -1,127 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import os
|
||||
import signal
|
||||
import unittest
|
||||
|
||||
import mozinfo
|
||||
from mozprocess import processhandler
|
||||
|
||||
import proctest
|
||||
|
||||
|
||||
here = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
||||
class ProcTestPoll(proctest.ProcTest):
|
||||
""" Class to test process poll """
|
||||
|
||||
def test_poll_before_run(self):
|
||||
"""Process is not started, and poll() is called"""
|
||||
|
||||
p = processhandler.ProcessHandler([self.python, self.proclaunch,
|
||||
"process_normal_finish_python.ini"],
|
||||
cwd=here)
|
||||
self.assertRaises(AttributeError, p.poll)
|
||||
|
||||
def test_poll_while_running(self):
|
||||
"""Process is started, and poll() is called"""
|
||||
|
||||
p = processhandler.ProcessHandler([self.python, self.proclaunch,
|
||||
"process_normal_finish_python.ini"],
|
||||
cwd=here)
|
||||
p.run()
|
||||
returncode = p.poll()
|
||||
|
||||
self.assertEqual(returncode, None)
|
||||
|
||||
detected, output = proctest.check_for_process(self.proclaunch)
|
||||
self.determine_status(detected,
|
||||
output,
|
||||
returncode,
|
||||
p.didTimeout,
|
||||
True)
|
||||
p.kill()
|
||||
|
||||
def test_poll_after_kill(self):
|
||||
"""Process is killed, and poll() is called"""
|
||||
|
||||
p = processhandler.ProcessHandler([self.python, self.proclaunch,
|
||||
"process_normal_finish_python.ini"],
|
||||
cwd=here)
|
||||
p.run()
|
||||
returncode = p.kill()
|
||||
|
||||
# We killed the process, so the returncode should be < 0
|
||||
self.assertLess(returncode, 0)
|
||||
self.assertEqual(returncode, p.poll())
|
||||
|
||||
detected, output = proctest.check_for_process(self.proclaunch)
|
||||
self.determine_status(detected,
|
||||
output,
|
||||
returncode,
|
||||
p.didTimeout)
|
||||
|
||||
def test_poll_after_kill_no_process_group(self):
|
||||
"""Process (no group) is killed, and poll() is called"""
|
||||
|
||||
p = processhandler.ProcessHandler([self.python, self.proclaunch,
|
||||
"process_normal_finish_no_process_group.ini"],
|
||||
cwd=here,
|
||||
ignore_children=True
|
||||
)
|
||||
p.run()
|
||||
returncode = p.kill()
|
||||
|
||||
# We killed the process, so the returncode should be < 0
|
||||
self.assertLess(returncode, 0)
|
||||
self.assertEqual(returncode, p.poll())
|
||||
|
||||
detected, output = proctest.check_for_process(self.proclaunch)
|
||||
self.determine_status(detected,
|
||||
output,
|
||||
returncode,
|
||||
p.didTimeout)
|
||||
|
||||
def test_poll_after_double_kill(self):
|
||||
"""Process is killed twice, and poll() is called"""
|
||||
|
||||
p = processhandler.ProcessHandler([self.python, self.proclaunch,
|
||||
"process_normal_finish_python.ini"],
|
||||
cwd=here)
|
||||
p.run()
|
||||
p.kill()
|
||||
returncode = p.kill()
|
||||
|
||||
# We killed the process, so the returncode should be < 0
|
||||
self.assertLess(returncode, 0)
|
||||
self.assertEqual(returncode, p.poll())
|
||||
|
||||
detected, output = proctest.check_for_process(self.proclaunch)
|
||||
self.determine_status(detected,
|
||||
output,
|
||||
returncode,
|
||||
p.didTimeout)
|
||||
|
||||
def test_poll_after_external_kill(self):
|
||||
"""Process is killed externally, and poll() is called"""
|
||||
|
||||
p = processhandler.ProcessHandler([self.python, self.proclaunch,
|
||||
"process_normal_finish_python.ini"],
|
||||
cwd=here)
|
||||
p.run()
|
||||
os.kill(p.pid, signal.SIGTERM)
|
||||
returncode = p.wait()
|
||||
|
||||
# We killed the process, so the returncode should be < 0
|
||||
self.assertEqual(returncode, -signal.SIGTERM)
|
||||
self.assertEqual(returncode, p.poll())
|
||||
|
||||
detected, output = proctest.check_for_process(self.proclaunch)
|
||||
self.determine_status(detected,
|
||||
output,
|
||||
returncode,
|
||||
p.didTimeout)
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
@ -11,7 +11,7 @@ here = os.path.dirname(os.path.abspath(__file__))
|
||||
class ProcTestWait(proctest.ProcTest):
|
||||
""" Class to test process waits and timeouts """
|
||||
|
||||
def test_normal_finish(self):
|
||||
def test_process_normal_finish(self):
|
||||
"""Process is started, runs to completion while we wait for it"""
|
||||
|
||||
p = processhandler.ProcessHandler([self.python, self.proclaunch, "process_normal_finish_python.ini"],
|
||||
@ -25,7 +25,7 @@ class ProcTestWait(proctest.ProcTest):
|
||||
p.proc.returncode,
|
||||
p.didTimeout)
|
||||
|
||||
def test_wait(self):
|
||||
def test_process_wait(self):
|
||||
"""Process is started runs to completion while we wait indefinitely"""
|
||||
|
||||
p = processhandler.ProcessHandler([self.python, self.proclaunch,
|
||||
@ -41,7 +41,7 @@ class ProcTestWait(proctest.ProcTest):
|
||||
p.didTimeout)
|
||||
|
||||
|
||||
def test_timeout(self):
|
||||
def test_process_timeout(self):
|
||||
""" Process is started, runs but we time out waiting on it
|
||||
to complete
|
||||
"""
|
||||
@ -63,7 +63,7 @@ class ProcTestWait(proctest.ProcTest):
|
||||
False,
|
||||
['returncode', 'didtimeout'])
|
||||
|
||||
def test_waittimeout(self):
|
||||
def test_process_waittimeout(self):
|
||||
"""
|
||||
Process is started, then wait is called and times out.
|
||||
Process is still running and didn't timeout
|
||||
@ -83,7 +83,7 @@ class ProcTestWait(proctest.ProcTest):
|
||||
True,
|
||||
())
|
||||
|
||||
def test_waitnotimeout(self):
|
||||
def test_process_waitnotimeout(self):
|
||||
""" Process is started, runs to completion before our wait times out
|
||||
"""
|
||||
p = processhandler.ProcessHandler([self.python, self.proclaunch,
|
||||
@ -98,26 +98,5 @@ class ProcTestWait(proctest.ProcTest):
|
||||
p.proc.returncode,
|
||||
p.didTimeout)
|
||||
|
||||
def test_wait_twice_after_kill(self):
|
||||
"""Bug 968718: Process is started and stopped. wait() twice afterward."""
|
||||
p = processhandler.ProcessHandler([self.python, self.proclaunch,
|
||||
"process_waittimeout_python.ini"],
|
||||
cwd=here)
|
||||
p.run()
|
||||
p.kill()
|
||||
returncode1 = p.wait()
|
||||
returncode2 = p.wait()
|
||||
|
||||
detected, output = proctest.check_for_process(self.proclaunch)
|
||||
self.determine_status(detected,
|
||||
output,
|
||||
returncode2,
|
||||
p.didTimeout)
|
||||
|
||||
self.assertLess(returncode2, 0,
|
||||
'Negative returncode expected, got "%s"' % returncode2)
|
||||
self.assertEqual(returncode1, returncode2,
|
||||
'Expected both returncodes of wait() to be equal')
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
@ -4,27 +4,16 @@
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
import urllib2
|
||||
import zipfile
|
||||
from xml.dom import minidom
|
||||
|
||||
from distutils import dir_util
|
||||
from manifestparser import ManifestParser
|
||||
import mozfile
|
||||
import mozlog
|
||||
from xml.dom import minidom
|
||||
|
||||
# Needed for the AMO's rest API - https://developer.mozilla.org/en/addons.mozilla.org_%28AMO%29_API_Developers%27_Guide/The_generic_AMO_API
|
||||
AMO_API_VERSION = "1.5"
|
||||
|
||||
# Logger for 'mozprofile.addons' module
|
||||
module_logger = mozlog.getLogger(__name__)
|
||||
|
||||
|
||||
class AddonFormatError(Exception):
|
||||
"""Exception for not well-formed add-on manifest files"""
|
||||
|
||||
|
||||
class AddonManager(object):
|
||||
"""
|
||||
Handles all operations regarding addons in a profile including:
|
||||
@ -39,124 +28,16 @@ class AddonManager(object):
|
||||
self.profile = profile
|
||||
self.restore = restore
|
||||
|
||||
# Initialize all class members
|
||||
self._internal_init()
|
||||
|
||||
def _internal_init(self):
|
||||
"""Internal: Initialize all class members to their default value"""
|
||||
|
||||
# Add-ons installed; needed for cleanup
|
||||
self._addons = []
|
||||
|
||||
# Backup folder for already existing addons
|
||||
self.backup_dir = None
|
||||
|
||||
# Add-ons downloaded and which have to be removed from the file system
|
||||
self.downloaded_addons = []
|
||||
|
||||
# Information needed for profile reset (see http://bit.ly/17JesUf)
|
||||
# information needed for profile reset:
|
||||
# https://github.com/mozilla/mozbase/blob/270a857328b130860d1b1b512e23899557a3c8f7/mozprofile/mozprofile/profile.py#L93
|
||||
self.installed_addons = []
|
||||
self.installed_manifests = []
|
||||
|
||||
def __del__(self):
|
||||
# reset to pre-instance state
|
||||
if self.restore:
|
||||
self.clean()
|
||||
# addons that we've installed; needed for cleanup
|
||||
self._addons = []
|
||||
|
||||
def clean(self):
|
||||
"""Clean up addons in the profile."""
|
||||
|
||||
# Remove all add-ons installed
|
||||
for addon in self._addons:
|
||||
# TODO (bug 934642)
|
||||
# Once we have a proper handling of add-ons we should kill the id
|
||||
# from self._addons once the add-on is removed. For now lets forget
|
||||
# about the exception
|
||||
try:
|
||||
self.remove_addon(addon)
|
||||
except IOError, e:
|
||||
pass
|
||||
|
||||
# Remove all downloaded add-ons
|
||||
for addon in self.downloaded_addons:
|
||||
mozfile.remove(addon)
|
||||
|
||||
# restore backups
|
||||
if self.backup_dir and os.path.isdir(self.backup_dir):
|
||||
extensions_path = os.path.join(self.profile, 'extensions', 'staged')
|
||||
|
||||
for backup in os.listdir(self.backup_dir):
|
||||
backup_path = os.path.join(self.backup_dir, backup)
|
||||
shutil.move(backup_path, extensions_path)
|
||||
|
||||
if not os.listdir(self.backup_dir):
|
||||
mozfile.remove(self.backup_dir)
|
||||
|
||||
# reset instance variables to defaults
|
||||
self._internal_init()
|
||||
|
||||
@classmethod
|
||||
def download(self, url, target_folder=None):
|
||||
"""
|
||||
Downloads an add-on from the specified URL to the target folder
|
||||
|
||||
:param url: URL of the add-on (XPI file)
|
||||
:param target_folder: Folder to store the XPI file in
|
||||
|
||||
"""
|
||||
response = urllib2.urlopen(url)
|
||||
fd, path = tempfile.mkstemp(suffix='.xpi')
|
||||
os.write(fd, response.read())
|
||||
os.close(fd)
|
||||
|
||||
if not self.is_addon(path):
|
||||
mozfile.remove(path)
|
||||
raise AddonFormatError('Not a valid add-on: %s' % url)
|
||||
|
||||
# Give the downloaded file a better name by using the add-on id
|
||||
details = self.addon_details(path)
|
||||
new_path = path.replace('.xpi', '_%s.xpi' % details.get('id'))
|
||||
|
||||
# Move the add-on to the target folder if requested
|
||||
if target_folder:
|
||||
new_path = os.path.join(target_folder, os.path.basename(new_path))
|
||||
|
||||
os.rename(path, new_path)
|
||||
|
||||
return new_path
|
||||
|
||||
def get_addon_path(self, addon_id):
|
||||
"""Returns the path to the installed add-on
|
||||
|
||||
:param addon_id: id of the add-on to retrieve the path from
|
||||
"""
|
||||
# By default we should expect add-ons being located under the
|
||||
# extensions folder. Only if the application hasn't been run and
|
||||
# installed the add-ons yet, it will be located under 'staged'.
|
||||
# Also add-ons could have been unpacked by the application.
|
||||
extensions_path = os.path.join(self.profile, 'extensions')
|
||||
paths = [os.path.join(extensions_path, addon_id),
|
||||
os.path.join(extensions_path, addon_id + '.xpi'),
|
||||
os.path.join(extensions_path, 'staged', addon_id),
|
||||
os.path.join(extensions_path, 'staged', addon_id + '.xpi')]
|
||||
for path in paths:
|
||||
if os.path.exists(path):
|
||||
return path
|
||||
|
||||
raise IOError('Add-on not found: %s' % addon_id)
|
||||
|
||||
@classmethod
|
||||
def is_addon(self, addon_path):
|
||||
"""
|
||||
Checks if the given path is a valid addon
|
||||
|
||||
:param addon_path: path to the add-on directory or XPI
|
||||
"""
|
||||
try:
|
||||
details = self.addon_details(addon_path)
|
||||
return True
|
||||
except AddonFormatError, e:
|
||||
return False
|
||||
# backup dir for already existing addons
|
||||
self.backup_dir = None
|
||||
|
||||
def install_addons(self, addons=None, manifests=None):
|
||||
"""
|
||||
@ -165,14 +46,12 @@ class AddonManager(object):
|
||||
:param addons: a list of addon paths to install
|
||||
:param manifest: a list of addon manifests to install
|
||||
"""
|
||||
|
||||
# install addon paths
|
||||
if addons:
|
||||
if isinstance(addons, basestring):
|
||||
addons = [addons]
|
||||
for addon in set(addons):
|
||||
for addon in addons:
|
||||
self.install_from_path(addon)
|
||||
|
||||
# install addon manifests
|
||||
if manifests:
|
||||
if isinstance(manifests, basestring):
|
||||
@ -262,50 +141,31 @@ class AddonManager(object):
|
||||
rc.append(node.data)
|
||||
return ''.join(rc).strip()
|
||||
|
||||
if not os.path.exists(addon_path):
|
||||
raise IOError('Add-on path does not exist: %s' % addon_path)
|
||||
if zipfile.is_zipfile(addon_path):
|
||||
compressed_file = zipfile.ZipFile(addon_path, 'r')
|
||||
try:
|
||||
parseable = compressed_file.read('install.rdf')
|
||||
doc = minidom.parseString(parseable)
|
||||
finally:
|
||||
compressed_file.close()
|
||||
else:
|
||||
doc = minidom.parse(os.path.join(addon_path, 'install.rdf'))
|
||||
|
||||
try:
|
||||
if zipfile.is_zipfile(addon_path):
|
||||
# Bug 944361 - We cannot use 'with' together with zipFile because
|
||||
# it will cause an exception thrown in Python 2.6.
|
||||
try:
|
||||
compressed_file = zipfile.ZipFile(addon_path, 'r')
|
||||
manifest = compressed_file.read('install.rdf')
|
||||
finally:
|
||||
compressed_file.close()
|
||||
elif os.path.isdir(addon_path):
|
||||
with open(os.path.join(addon_path, 'install.rdf'), 'r') as f:
|
||||
manifest = f.read()
|
||||
else:
|
||||
raise IOError('Add-on path is neither an XPI nor a directory: %s' % addon_path)
|
||||
except (IOError, KeyError), e:
|
||||
raise AddonFormatError, str(e), sys.exc_info()[2]
|
||||
# Get the namespaces abbreviations
|
||||
em = get_namespace_id(doc, "http://www.mozilla.org/2004/em-rdf#")
|
||||
rdf = get_namespace_id(doc, "http://www.w3.org/1999/02/22-rdf-syntax-ns#")
|
||||
|
||||
try:
|
||||
doc = minidom.parseString(manifest)
|
||||
|
||||
# Get the namespaces abbreviations
|
||||
em = get_namespace_id(doc, 'http://www.mozilla.org/2004/em-rdf#')
|
||||
rdf = get_namespace_id(doc, 'http://www.w3.org/1999/02/22-rdf-syntax-ns#')
|
||||
|
||||
description = doc.getElementsByTagName(rdf + 'Description').item(0)
|
||||
for node in description.childNodes:
|
||||
# Remove the namespace prefix from the tag for comparison
|
||||
entry = node.nodeName.replace(em, "")
|
||||
if entry in details.keys():
|
||||
details.update({entry: get_text(node)})
|
||||
except Exception, e:
|
||||
raise AddonFormatError, str(e), sys.exc_info()[2]
|
||||
description = doc.getElementsByTagName(rdf + "Description").item(0)
|
||||
for node in description.childNodes:
|
||||
# Remove the namespace prefix from the tag for comparison
|
||||
entry = node.nodeName.replace(em, "")
|
||||
if entry in details.keys():
|
||||
details.update({ entry: get_text(node) })
|
||||
|
||||
# turn unpack into a true/false value
|
||||
if isinstance(details['unpack'], basestring):
|
||||
details['unpack'] = details['unpack'].lower() == 'true'
|
||||
|
||||
# If no ID is set, the add-on is invalid
|
||||
if details.get('id') is None:
|
||||
raise AddonFormatError('Add-on id could not be found.')
|
||||
|
||||
return details
|
||||
|
||||
def install_from_path(self, path, unpack=False):
|
||||
@ -316,79 +176,104 @@ class AddonManager(object):
|
||||
:param unpack: whether to unpack unless specified otherwise in the install.rdf
|
||||
"""
|
||||
|
||||
# if the addon is a URL, download it
|
||||
# if the addon is a url, download it
|
||||
# note that this won't work with protocols urllib2 doesn't support
|
||||
if mozfile.is_url(path):
|
||||
path = self.download(path)
|
||||
self.downloaded_addons.append(path)
|
||||
if '://' in path:
|
||||
response = urllib2.urlopen(path)
|
||||
fd, path = tempfile.mkstemp(suffix='.xpi')
|
||||
os.write(fd, response.read())
|
||||
os.close(fd)
|
||||
tmpfile = path
|
||||
else:
|
||||
tmpfile = None
|
||||
|
||||
# if the addon is a directory, install all addons in it
|
||||
addons = [path]
|
||||
|
||||
# if path is not an add-on, try to install all contained add-ons
|
||||
try:
|
||||
self.addon_details(path)
|
||||
except AddonFormatError, e:
|
||||
module_logger.warning('Could not install %s: %s' % (path, str(e)))
|
||||
|
||||
if not path.endswith('.xpi') and not os.path.exists(os.path.join(path, 'install.rdf')):
|
||||
# If the path doesn't exist, then we don't really care, just return
|
||||
if not os.path.isdir(path):
|
||||
return
|
||||
|
||||
addons = [os.path.join(path, x) for x in os.listdir(path) if
|
||||
self.is_addon(os.path.join(path, x))]
|
||||
addons.sort()
|
||||
os.path.isdir(os.path.join(path, x))]
|
||||
|
||||
# install each addon
|
||||
for addon in addons:
|
||||
# determine the addon id
|
||||
addon_details = self.addon_details(addon)
|
||||
addon_id = addon_details.get('id')
|
||||
tmpdir = None
|
||||
xpifile = None
|
||||
if addon.endswith('.xpi'):
|
||||
tmpdir = tempfile.mkdtemp(suffix = '.' + os.path.split(addon)[-1])
|
||||
compressed_file = zipfile.ZipFile(addon, 'r')
|
||||
for name in compressed_file.namelist():
|
||||
if name.endswith('/'):
|
||||
os.makedirs(os.path.join(tmpdir, name))
|
||||
else:
|
||||
if not os.path.isdir(os.path.dirname(os.path.join(tmpdir, name))):
|
||||
os.makedirs(os.path.dirname(os.path.join(tmpdir, name)))
|
||||
data = compressed_file.read(name)
|
||||
f = open(os.path.join(tmpdir, name), 'wb')
|
||||
f.write(data)
|
||||
f.close()
|
||||
xpifile = addon
|
||||
addon = tmpdir
|
||||
|
||||
# if the add-on has to be unpacked force it now
|
||||
# note: we might want to let Firefox do it in case of addon details
|
||||
orig_path = None
|
||||
if os.path.isfile(addon) and (unpack or addon_details['unpack']):
|
||||
orig_path = addon
|
||||
addon = tempfile.mkdtemp()
|
||||
mozfile.extract(orig_path, addon)
|
||||
# determine the addon id
|
||||
addon_details = AddonManager.addon_details(addon)
|
||||
addon_id = addon_details.get('id')
|
||||
assert addon_id, 'The addon id could not be found: %s' % addon
|
||||
|
||||
# copy the addon to the profile
|
||||
extensions_path = os.path.join(self.profile, 'extensions', 'staged')
|
||||
addon_path = os.path.join(extensions_path, addon_id)
|
||||
|
||||
if os.path.isfile(addon):
|
||||
addon_path += '.xpi'
|
||||
|
||||
# move existing xpi file to backup location to restore later
|
||||
if os.path.exists(addon_path):
|
||||
self.backup_dir = self.backup_dir or tempfile.mkdtemp()
|
||||
shutil.move(addon_path, self.backup_dir)
|
||||
|
||||
# copy new add-on to the extension folder
|
||||
if not unpack and not addon_details['unpack'] and xpifile:
|
||||
if not os.path.exists(extensions_path):
|
||||
os.makedirs(extensions_path)
|
||||
shutil.copy(addon, addon_path)
|
||||
else:
|
||||
# move existing folder to backup location to restore later
|
||||
# save existing xpi file to restore later
|
||||
addon_path += '.xpi'
|
||||
if os.path.exists(addon_path):
|
||||
self.backup_dir = self.backup_dir or tempfile.mkdtemp()
|
||||
shutil.move(addon_path, self.backup_dir)
|
||||
shutil.copy(addon_path, self.backup_dir)
|
||||
shutil.copy(xpifile, addon_path)
|
||||
else:
|
||||
# save existing dir to restore later
|
||||
if os.path.exists(addon_path):
|
||||
self.backup_dir = self.backup_dir or tempfile.mkdtemp()
|
||||
dir_util.copy_tree(addon_path, self.backup_dir, preserve_symlinks=1)
|
||||
dir_util.copy_tree(addon, addon_path, preserve_symlinks=1)
|
||||
self._addons.append(addon_path)
|
||||
|
||||
# copy new add-on to the extension folder
|
||||
shutil.copytree(addon, addon_path, symlinks=True)
|
||||
# remove the temporary directory, if any
|
||||
if tmpdir:
|
||||
dir_util.remove_tree(tmpdir)
|
||||
|
||||
# if we had to extract the addon, remove the temporary directory
|
||||
if orig_path:
|
||||
mozfile.remove(addon)
|
||||
addon = orig_path
|
||||
|
||||
self._addons.append(addon_id)
|
||||
self.installed_addons.append(addon)
|
||||
|
||||
def remove_addon(self, addon_id):
|
||||
"""Remove the add-on as specified by the id
|
||||
# remove temporary file, if any
|
||||
if tmpfile:
|
||||
os.remove(tmpfile)
|
||||
|
||||
:param addon_id: id of the add-on to be removed
|
||||
"""
|
||||
path = self.get_addon_path(addon_id)
|
||||
mozfile.remove(path)
|
||||
def clean_addons(self):
|
||||
"""Cleans up addons in the profile."""
|
||||
|
||||
# remove addons installed by this instance
|
||||
for addon in self._addons:
|
||||
if os.path.isdir(addon):
|
||||
dir_util.remove_tree(addon)
|
||||
elif os.path.isfile(addon):
|
||||
os.remove(addon)
|
||||
|
||||
# restore backups
|
||||
if self.backup_dir and os.path.isdir(self.backup_dir):
|
||||
extensions_path = os.path.join(self.profile, 'extensions', 'staged')
|
||||
for backup in os.listdir(self.backup_dir):
|
||||
backup_path = os.path.join(self.backup_dir, backup)
|
||||
addon_path = os.path.join(extensions_path, backup)
|
||||
shutil.move(backup_path, addon_path)
|
||||
if not os.listdir(self.backup_dir):
|
||||
shutil.rmtree(self.backup_dir, ignore_errors=True)
|
||||
|
||||
# reset instance variables to defaults via __init__
|
||||
self.__init__(self.profile, restore=self.restore)
|
||||
|
||||
def __del__(self):
|
||||
if self.restore:
|
||||
self.clean_addons() # reset to pre-instance state
|
||||
|
@ -15,136 +15,102 @@ import types
|
||||
import uuid
|
||||
|
||||
from addons import AddonManager
|
||||
import mozfile
|
||||
from mozfile import tree
|
||||
from permissions import Permissions
|
||||
from prefs import Preferences
|
||||
from shutil import copytree
|
||||
from shutil import copytree, rmtree
|
||||
from webapps import WebappCollection
|
||||
|
||||
|
||||
class Profile(object):
|
||||
"""Handles all operations regarding profile.
|
||||
"""Handles all operations regarding profile. Created new profiles, installs extensions,
|
||||
sets preferences and handles cleanup.
|
||||
|
||||
Creating new profiles, installing add-ons, setting preferences and
|
||||
handling cleanup.
|
||||
:param profile: Path to the profile
|
||||
:param addons: String of one or list of addons to install
|
||||
:param addon_manifests: Manifest for addons, see http://ahal.ca/blog/2011/bulk-installing-fx-addons/
|
||||
:param apps: Dictionary or class of webapps to install
|
||||
:param preferences: Dictionary or class of preferences
|
||||
:param locations: ServerLocations object
|
||||
:param proxy: setup a proxy
|
||||
:param restore: If true remove all added addons and preferences when cleaning up
|
||||
"""
|
||||
|
||||
def __init__(self, profile=None, addons=None, addon_manifests=None, apps=None,
|
||||
preferences=None, locations=None, proxy=None, restore=True):
|
||||
"""
|
||||
:param profile: Path to the profile
|
||||
:param addons: String of one or list of addons to install
|
||||
:param addon_manifests: Manifest for addons (see http://bit.ly/17jQ7i6)
|
||||
:param apps: Dictionary or class of webapps to install
|
||||
:param preferences: Dictionary or class of preferences
|
||||
:param locations: ServerLocations object
|
||||
:param proxy: Setup a proxy
|
||||
:param restore: Flag for removing all custom settings during cleanup
|
||||
"""
|
||||
self._addons = addons
|
||||
self._addon_manifests = addon_manifests
|
||||
self._apps = apps
|
||||
self._locations = locations
|
||||
self._proxy = proxy
|
||||
|
||||
# Prepare additional preferences
|
||||
if preferences:
|
||||
if isinstance(preferences, dict):
|
||||
# unordered
|
||||
preferences = preferences.items()
|
||||
# if true, remove installed addons/prefs afterwards
|
||||
self.restore = restore
|
||||
|
||||
# sanity check
|
||||
assert not [i for i in preferences if len(i) != 2]
|
||||
else:
|
||||
preferences = []
|
||||
self._preferences = preferences
|
||||
# prefs files written to
|
||||
self.written_prefs = set()
|
||||
|
||||
# our magic markers
|
||||
nonce = '%s %s' % (str(time.time()), uuid.uuid4())
|
||||
self.delimeters = ('#MozRunner Prefs Start %s' % nonce,'#MozRunner Prefs End %s' % nonce)
|
||||
|
||||
# Handle profile creation
|
||||
self.create_new = not profile
|
||||
if profile:
|
||||
# Ensure we have a full path to the profile
|
||||
self.profile = os.path.abspath(os.path.expanduser(profile))
|
||||
if not os.path.exists(self.profile):
|
||||
os.makedirs(self.profile)
|
||||
else:
|
||||
self.profile = tempfile.mkdtemp(suffix='.mozrunner')
|
||||
self.profile = self.create_new_profile()
|
||||
|
||||
self.restore = restore
|
||||
|
||||
# Initialize all class members
|
||||
self._internal_init()
|
||||
|
||||
def _internal_init(self):
|
||||
"""Internal: Initialize all class members to their default value"""
|
||||
|
||||
if not os.path.exists(self.profile):
|
||||
os.makedirs(self.profile)
|
||||
|
||||
# Preferences files written to
|
||||
self.written_prefs = set()
|
||||
|
||||
# Our magic markers
|
||||
nonce = '%s %s' % (str(time.time()), uuid.uuid4())
|
||||
self.delimeters = ('#MozRunner Prefs Start %s' % nonce,
|
||||
'#MozRunner Prefs End %s' % nonce)
|
||||
|
||||
# If sub-classes want to set default preferences
|
||||
# set preferences
|
||||
if hasattr(self.__class__, 'preferences'):
|
||||
# class preferences
|
||||
self.set_preferences(self.__class__.preferences)
|
||||
# Set additional preferences
|
||||
self.set_preferences(self._preferences)
|
||||
self._preferences = preferences
|
||||
if preferences:
|
||||
# supplied preferences
|
||||
if isinstance(preferences, dict):
|
||||
# unordered
|
||||
preferences = preferences.items()
|
||||
# sanity check
|
||||
assert not [i for i in preferences
|
||||
if len(i) != 2]
|
||||
else:
|
||||
preferences = []
|
||||
self.set_preferences(preferences)
|
||||
|
||||
self.permissions = Permissions(self.profile, self._locations)
|
||||
prefs_js, user_js = self.permissions.network_prefs(self._proxy)
|
||||
# set permissions
|
||||
self._locations = locations # store this for reconstruction
|
||||
self._proxy = proxy
|
||||
self.permissions = Permissions(self.profile, locations)
|
||||
prefs_js, user_js = self.permissions.network_prefs(proxy)
|
||||
self.set_preferences(prefs_js, 'prefs.js')
|
||||
self.set_preferences(user_js)
|
||||
|
||||
# handle add-on installation
|
||||
# handle addon installation
|
||||
self.addon_manager = AddonManager(self.profile, restore=self.restore)
|
||||
self.addon_manager.install_addons(self._addons, self._addon_manifests)
|
||||
self.addon_manager.install_addons(addons, addon_manifests)
|
||||
|
||||
# handle webapps
|
||||
self.webapps = WebappCollection(profile=self.profile, apps=self._apps)
|
||||
self.webapps = WebappCollection(profile=self.profile, apps=apps)
|
||||
self.webapps.update_manifests()
|
||||
|
||||
def __del__(self):
|
||||
self.cleanup()
|
||||
|
||||
### cleanup
|
||||
|
||||
def cleanup(self):
|
||||
"""Cleanup operations for the profile."""
|
||||
|
||||
if self.restore:
|
||||
# If copies of those class instances exist ensure we correctly
|
||||
# reset them all (see bug 934484)
|
||||
self.clean_preferences()
|
||||
if getattr(self, 'addon_manager', None) is not None:
|
||||
self.addon_manager.clean()
|
||||
if getattr(self, 'permissions', None) is not None:
|
||||
self.permissions.clean_db()
|
||||
if getattr(self, 'webapps', None) is not None:
|
||||
self.webapps.clean()
|
||||
|
||||
# If it's a temporary profile we have to remove it
|
||||
if self.create_new:
|
||||
mozfile.remove(self.profile)
|
||||
def exists(self):
|
||||
"""returns whether the profile exists or not"""
|
||||
return os.path.exists(self.profile)
|
||||
|
||||
def reset(self):
|
||||
"""
|
||||
reset the profile to the beginning state
|
||||
"""
|
||||
self.cleanup()
|
||||
|
||||
self._internal_init()
|
||||
|
||||
def clean_preferences(self):
|
||||
"""Removed preferences added by mozrunner."""
|
||||
for filename in self.written_prefs:
|
||||
if not os.path.exists(os.path.join(self.profile, filename)):
|
||||
# file has been deleted
|
||||
break
|
||||
while True:
|
||||
if not self.pop_preferences(filename):
|
||||
break
|
||||
if self.create_new:
|
||||
profile = None
|
||||
else:
|
||||
profile = self.profile
|
||||
self.__init__(profile=profile,
|
||||
addons=self.addon_manager.installed_addons,
|
||||
addon_manifests=self.addon_manager.installed_manifests,
|
||||
preferences=self._preferences,
|
||||
locations=self._locations,
|
||||
proxy = self._proxy)
|
||||
|
||||
@classmethod
|
||||
def clone(cls, path_from, path_to=None, **kwargs):
|
||||
@ -154,7 +120,7 @@ class Profile(object):
|
||||
"""
|
||||
if not path_to:
|
||||
tempdir = tempfile.mkdtemp() # need an unused temp dir name
|
||||
mozfile.remove(tempdir) # copytree requires that dest does not exist
|
||||
rmtree(tempdir) # copytree requires that dest does not exist
|
||||
path_to = tempdir
|
||||
copytree(path_from, path_to)
|
||||
|
||||
@ -163,16 +129,17 @@ class Profile(object):
|
||||
def wrapped(self):
|
||||
fn(self)
|
||||
if self.restore and os.path.exists(self.profile):
|
||||
mozfile.remove(self.profile)
|
||||
rmtree(self.profile, onerror=self._cleanup_error)
|
||||
return wrapped
|
||||
|
||||
c = cls(path_to, **kwargs)
|
||||
c.__del__ = c.cleanup = types.MethodType(cleanup_clone(cls.cleanup), c)
|
||||
return c
|
||||
|
||||
def exists(self):
|
||||
"""returns whether the profile exists or not"""
|
||||
return os.path.exists(self.profile)
|
||||
def create_new_profile(self):
|
||||
"""Create a new clean temporary profile which is a simple empty folder"""
|
||||
return tempfile.mkdtemp(suffix='.mozrunner')
|
||||
|
||||
|
||||
### methods for preferences
|
||||
|
||||
@ -235,6 +202,59 @@ class Profile(object):
|
||||
f.write(cleaned_prefs)
|
||||
return True
|
||||
|
||||
def clean_preferences(self):
|
||||
"""Removed preferences added by mozrunner."""
|
||||
for filename in self.written_prefs:
|
||||
if not os.path.exists(os.path.join(self.profile, filename)):
|
||||
# file has been deleted
|
||||
break
|
||||
while True:
|
||||
if not self.pop_preferences(filename):
|
||||
break
|
||||
|
||||
### cleanup
|
||||
|
||||
def _cleanup_error(self, function, path, excinfo):
|
||||
""" Specifically for windows we need to handle the case where the windows
|
||||
process has not yet relinquished handles on files, so we do a wait/try
|
||||
construct and timeout if we can't get a clear road to deletion
|
||||
"""
|
||||
|
||||
try:
|
||||
from exceptions import WindowsError
|
||||
from time import sleep
|
||||
def is_file_locked():
|
||||
return excinfo[0] is WindowsError and excinfo[1].winerror == 32
|
||||
|
||||
if excinfo[0] is WindowsError and excinfo[1].winerror == 32:
|
||||
# Then we're on windows, wait to see if the file gets unlocked
|
||||
# we wait 10s
|
||||
count = 0
|
||||
while count < 10:
|
||||
sleep(1)
|
||||
try:
|
||||
function(path)
|
||||
break
|
||||
except:
|
||||
count += 1
|
||||
except ImportError:
|
||||
# We can't re-raise an error, so we'll hope the stuff above us will throw
|
||||
pass
|
||||
|
||||
def cleanup(self):
|
||||
"""Cleanup operations for the profile."""
|
||||
if self.restore:
|
||||
if self.create_new:
|
||||
if os.path.exists(self.profile):
|
||||
rmtree(self.profile, onerror=self._cleanup_error)
|
||||
else:
|
||||
self.clean_preferences()
|
||||
self.addon_manager.clean_addons()
|
||||
self.permissions.clean_db()
|
||||
self.webapps.clean()
|
||||
|
||||
__del__ = cleanup
|
||||
|
||||
### methods for introspection
|
||||
|
||||
def summary(self, return_parts=False):
|
||||
@ -247,7 +267,7 @@ class Profile(object):
|
||||
parts = [('Path', self.profile)] # profile path
|
||||
|
||||
# directory tree
|
||||
parts.append(('Files', '\n%s' % mozfile.tree(self.profile)))
|
||||
parts.append(('Files', '\n%s' % tree(self.profile)))
|
||||
|
||||
# preferences
|
||||
for prefs_file in ('user.js', 'prefs.js'):
|
||||
@ -329,8 +349,6 @@ class FirefoxProfile(Profile):
|
||||
# see: https://developer.mozilla.org/en/Installing_extensions
|
||||
'extensions.enabledScopes' : 5,
|
||||
'extensions.autoDisableScopes' : 10,
|
||||
# Don't send the list of installed addons to AMO
|
||||
'extensions.getAddons.cache.enabled' : False,
|
||||
# Don't install distribution add-ons from the app folder
|
||||
'extensions.installDistroAddons' : False,
|
||||
# Dont' run the add-on compatibility check during start-up
|
||||
@ -364,15 +382,11 @@ class MetroFirefoxProfile(Profile):
|
||||
'browser.shell.checkDefaultBrowser' : False,
|
||||
# Don't send Firefox health reports to the production server
|
||||
'datareporting.healthreport.documentServerURI' : 'http://%(server)s/healthreport/',
|
||||
# Enable extensions
|
||||
'extensions.defaultProviders.enabled' : True,
|
||||
# Only install add-ons from the profile and the application scope
|
||||
# Also ensure that those are not getting disabled.
|
||||
# see: https://developer.mozilla.org/en/Installing_extensions
|
||||
'extensions.enabledScopes' : 5,
|
||||
'extensions.autoDisableScopes' : 10,
|
||||
# Don't send the list of installed addons to AMO
|
||||
'extensions.getAddons.cache.enabled' : False,
|
||||
# Don't install distribution add-ons from the app folder
|
||||
'extensions.installDistroAddons' : False,
|
||||
# Dont' run the add-on compatibility check during start-up
|
||||
|
@ -20,20 +20,15 @@ import json
|
||||
import os
|
||||
import shutil
|
||||
|
||||
import mozfile
|
||||
|
||||
|
||||
# from http://hg.mozilla.org/mozilla-central/file/add0b94c2c0b/caps/idl/nsIPrincipal.idl#l163
|
||||
APP_STATUS_NOT_INSTALLED = 0
|
||||
APP_STATUS_INSTALLED = 1
|
||||
APP_STATUS_PRIVILEGED = 2
|
||||
APP_STATUS_CERTIFIED = 3
|
||||
|
||||
|
||||
class WebappFormatException(Exception):
|
||||
"""thrown for invalid webapp objects"""
|
||||
|
||||
|
||||
class Webapp(dict):
|
||||
"""A webapp definition"""
|
||||
|
||||
@ -183,7 +178,8 @@ class WebappCollection(object):
|
||||
for app in remove_apps:
|
||||
self._installed_apps.remove(app)
|
||||
manifest_dir = os.path.join(self.webapps_dir, app['name'])
|
||||
mozfile.remove(manifest_dir)
|
||||
if os.path.isdir(manifest_dir):
|
||||
shutil.rmtree(manifest_dir)
|
||||
|
||||
def update_manifests(self):
|
||||
"""Updates the webapp manifests with the webapps represented in this collection
|
||||
@ -237,12 +233,12 @@ class WebappCollection(object):
|
||||
|
||||
def clean(self):
|
||||
"""Remove all webapps that were installed and restore profile to previous state"""
|
||||
if self._installed_apps:
|
||||
mozfile.remove(self.webapps_dir)
|
||||
if self._installed_apps and os.path.isdir(self.webapps_dir):
|
||||
shutil.rmtree(self.webapps_dir)
|
||||
|
||||
if os.path.isdir(self.backup_dir):
|
||||
shutil.copytree(self.backup_dir, self.webapps_dir)
|
||||
mozfile.remove(self.backup_dir)
|
||||
shutil.rmtree(self.backup_dir)
|
||||
|
||||
self._apps = []
|
||||
self._installed_apps = []
|
||||
|
@ -5,17 +5,15 @@
|
||||
import sys
|
||||
from setuptools import setup
|
||||
|
||||
PACKAGE_NAME = 'mozprofile'
|
||||
PACKAGE_VERSION = '0.20'
|
||||
PACKAGE_VERSION = '0.16'
|
||||
|
||||
# we only support python 2 right now
|
||||
assert sys.version_info[0] == 2
|
||||
|
||||
deps = ['ManifestDestiny >= 0.5.4',
|
||||
'mozfile >= 1.0',
|
||||
'mozlog']
|
||||
deps = ["ManifestDestiny >= 0.5.4",
|
||||
"mozfile >= 0.12"]
|
||||
|
||||
setup(name=PACKAGE_NAME,
|
||||
setup(name='mozprofile',
|
||||
version=PACKAGE_VERSION,
|
||||
description="Library to create and modify Mozilla application profiles",
|
||||
long_description="see http://mozbase.readthedocs.org/",
|
||||
@ -36,7 +34,7 @@ setup(name=PACKAGE_NAME,
|
||||
include_package_data=True,
|
||||
zip_safe=False,
|
||||
install_requires=deps,
|
||||
tests_require=['mozhttpd'],
|
||||
tests_require=['mozhttpd', 'mozfile'],
|
||||
entry_points="""
|
||||
# -*- Entry points: -*-
|
||||
[console_scripts]
|
||||
|
@ -1,79 +1,84 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import os
|
||||
import tempfile
|
||||
import zipfile
|
||||
|
||||
import mozfile
|
||||
import mozhttpd
|
||||
import os
|
||||
import zipfile
|
||||
|
||||
|
||||
here = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
# stubs is a dict of the form {'addon id': 'install manifest content'}
|
||||
# stubs is a dict of the form {'addon name': 'install manifest content'}
|
||||
stubs = {
|
||||
'test-addon-1@mozilla.org': 'test_addon_1.rdf',
|
||||
'test-addon-2@mozilla.org': 'test_addon_2.rdf',
|
||||
'test-addon-3@mozilla.org': 'test_addon_3.rdf',
|
||||
'test-addon-4@mozilla.org': 'test_addon_4.rdf',
|
||||
'test-addon-invalid-no-id@mozilla.org': 'test_addon_invalid_no_id.rdf',
|
||||
'test-addon-invalid-version@mozilla.org': 'test_addon_invalid_version.rdf',
|
||||
'test-addon-invalid-no-manifest@mozilla.org': None,
|
||||
'test-addon-invalid-not-wellformed@mozilla.org': 'test_addon_invalid_not_wellformed.rdf',
|
||||
'test-addon-unpack@mozilla.org': 'test_addon_unpack.rdf'}
|
||||
'empty-0-1.xpi':
|
||||
open(os.path.join(here, "install_manifests", "empty-0-1.rdf"), 'r').read(),
|
||||
'empty-0-2.xpi':
|
||||
open(os.path.join(here, "install_manifests", "empty-0-2.rdf"), 'r').read(),
|
||||
'another-empty-0-1.xpi':
|
||||
open(os.path.join(here, "install_manifests", "another-empty-0-1.rdf"), 'r').read(),
|
||||
'empty-invalid.xpi':
|
||||
open(os.path.join(here, "install_manifests", "empty-invalid.rdf"), 'r').read()}
|
||||
|
||||
|
||||
def generate_addon(addon_id, path=None, name=None, xpi=True):
|
||||
def generate_addon(name, path=None):
|
||||
"""
|
||||
Method to generate a single addon.
|
||||
|
||||
:param addon_id: id of an addon to generate from the stubs dictionary
|
||||
:param name: name of an addon to generate from the stubs dictionary
|
||||
:param path: path where addon and .xpi should be generated
|
||||
:param name: name for the addon folder or .xpi file
|
||||
:param xpi: Flag if an XPI or folder should be generated
|
||||
|
||||
Returns the file-path of the addon's .xpi file
|
||||
"""
|
||||
|
||||
if not addon_id in stubs.keys():
|
||||
raise IOError('Requested addon stub "%s" does not exist' % addon_id)
|
||||
if name in stubs.keys():
|
||||
addon = name
|
||||
else:
|
||||
# If `name` is not in listed stubs, raise exception
|
||||
raise IOError('Requested addon stub does not exist')
|
||||
|
||||
# Generate directory structure for addon
|
||||
try:
|
||||
tmpdir = path or tempfile.mkdtemp()
|
||||
addon_dir = os.path.join(tmpdir, name or addon_id)
|
||||
if path:
|
||||
tmpdir = path
|
||||
else:
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
addon_dir = os.path.join(tmpdir, addon[:-4])
|
||||
os.mkdir(addon_dir)
|
||||
install_rdf = os.path.join(addon_dir, 'install.rdf')
|
||||
xpi = os.path.join(tmpdir, addon)
|
||||
except IOError:
|
||||
raise IOError('Could not generate directory structure for addon stub.')
|
||||
|
||||
# Write install.rdf for addon
|
||||
if stubs[addon_id]:
|
||||
install_rdf = os.path.join(addon_dir, 'install.rdf')
|
||||
with open(install_rdf, 'w') as f:
|
||||
manifest = os.path.join(here, 'install_manifests', stubs[addon_id])
|
||||
f.write(open(manifest, 'r').read())
|
||||
|
||||
if not xpi:
|
||||
return addon_dir
|
||||
|
||||
with open(install_rdf, 'w') as f:
|
||||
f.write(stubs[addon])
|
||||
# Generate the .xpi for the addon
|
||||
xpi_file = os.path.join(tmpdir, (name or addon_id) + '.xpi')
|
||||
with zipfile.ZipFile(xpi_file, 'w') as x:
|
||||
with zipfile.ZipFile(xpi, 'w') as x:
|
||||
x.write(install_rdf, install_rdf[len(addon_dir):])
|
||||
|
||||
# Ensure we remove the temporary folder to not install the addon twice
|
||||
mozfile.rmtree(addon_dir)
|
||||
return xpi
|
||||
|
||||
return xpi_file
|
||||
def generate_invalid_addon(path=None):
|
||||
"""
|
||||
Method to create an invalid addon
|
||||
|
||||
Returns the file-path to the .xpi of an invalid addon
|
||||
"""
|
||||
return generate_addon(name='empty-invalid.xpi', path=path)
|
||||
|
||||
def generate_manifest(addon_list, path=None):
|
||||
tmpdir = path or tempfile.mkdtemp()
|
||||
addons = [generate_addon(addon, path=tmpdir) for addon in addon_list]
|
||||
def generate_manifest(path=None):
|
||||
|
||||
if path:
|
||||
tmpdir = path
|
||||
else:
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
|
||||
addon_list = ['empty-0-1.xpi', 'another-empty-0-1.xpi']
|
||||
for a in addon_list:
|
||||
generate_addon(a, tmpdir)
|
||||
|
||||
manifest = os.path.join(tmpdir, 'manifest.ini')
|
||||
with open(manifest, 'w') as f:
|
||||
for addon in addons:
|
||||
f.write('[' + addon + ']\n')
|
||||
for a in addon_list:
|
||||
f.write('[' + a + ']\n')
|
||||
|
||||
return manifest
|
||||
|
Binary file not shown.
@ -16,38 +16,33 @@ class Bug758250(unittest.TestCase):
|
||||
https://bugzilla.mozilla.org/show_bug.cgi?id=758250
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
self.tmpdir = tempfile.mkdtemp()
|
||||
self.addon = os.path.join(here, 'addons', 'empty')
|
||||
|
||||
def tearDown(self):
|
||||
# remove vestiges
|
||||
shutil.rmtree(self.tmpdir)
|
||||
|
||||
def test_profile_addon_cleanup(self):
|
||||
|
||||
# sanity check: the empty addon should be here
|
||||
self.assertTrue(os.path.exists(self.addon))
|
||||
self.assertTrue(os.path.isdir(self.addon))
|
||||
self.assertTrue(os.path.exists(os.path.join(self.addon, 'install.rdf')))
|
||||
empty = os.path.join(here, 'addons', 'empty')
|
||||
self.assertTrue(os.path.exists(empty))
|
||||
self.assertTrue(os.path.isdir(empty))
|
||||
self.assertTrue(os.path.exists(os.path.join(empty, 'install.rdf')))
|
||||
|
||||
# because we are testing data loss, let's make sure we make a copy
|
||||
shutil.rmtree(self.tmpdir)
|
||||
shutil.copytree(self.addon, self.tmpdir)
|
||||
self.assertTrue(os.path.exists(os.path.join(self.tmpdir, 'install.rdf')))
|
||||
tmpdir = tempfile.mktemp()
|
||||
shutil.copytree(empty, tmpdir)
|
||||
self.assertTrue(os.path.exists(os.path.join(tmpdir, 'install.rdf')))
|
||||
|
||||
# make a starter profile
|
||||
profile = mozprofile.FirefoxProfile()
|
||||
path = profile.profile
|
||||
|
||||
# make a new profile based on the old
|
||||
newprofile = mozprofile.FirefoxProfile(profile=path, addons=[self.tmpdir])
|
||||
newprofile = mozprofile.FirefoxProfile(profile=path, addons=[tmpdir])
|
||||
newprofile.cleanup()
|
||||
|
||||
# the source addon *should* still exist
|
||||
self.assertTrue(os.path.exists(self.tmpdir))
|
||||
self.assertTrue(os.path.exists(os.path.join(self.tmpdir, 'install.rdf')))
|
||||
self.assertTrue(os.path.exists(tmpdir))
|
||||
self.assertTrue(os.path.exists(os.path.join(tmpdir, 'install.rdf')))
|
||||
|
||||
# remove vestiges
|
||||
shutil.rmtree(tmpdir)
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
@ -2,11 +2,11 @@
|
||||
<RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:em="http://www.mozilla.org/2004/em-rdf#">
|
||||
<Description about="urn:mozilla:install-manifest">
|
||||
<em:id>test-addon-4@mozilla.org</em:id>
|
||||
<em:id>another-test-empty@quality.mozilla.org</em:id>
|
||||
<em:version>0.1</em:version>
|
||||
<em:name>Test Add-on 4</em:name>
|
||||
<em:creator>Mozilla</em:creator>
|
||||
<em:homepageURL>http://mozilla.org</em:homepageURL>
|
||||
<em:name>Another Test Extension (empty)</em:name>
|
||||
<em:creator>Mozilla QA</em:creator>
|
||||
<em:homepageURL>http://quality.mozilla.org</em:homepageURL>
|
||||
<em:type>2</em:type>
|
||||
|
||||
<!-- Firefox -->
|
@ -2,11 +2,11 @@
|
||||
<RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:em="http://www.mozilla.org/2004/em-rdf#">
|
||||
<Description about="urn:mozilla:install-manifest">
|
||||
<em:id>test-addon-3@mozilla.org</em:id>
|
||||
<em:id>test-empty@quality.mozilla.org</em:id>
|
||||
<em:version>0.1</em:version>
|
||||
<em:name>Test Add-on 3</em:name>
|
||||
<em:creator>Mozilla</em:creator>
|
||||
<em:homepageURL>http://mozilla.org</em:homepageURL>
|
||||
<em:name>Test Extension (empty)</em:name>
|
||||
<em:creator>Mozilla QA</em:creator>
|
||||
<em:homepageURL>http://quality.mozilla.org</em:homepageURL>
|
||||
<em:type>2</em:type>
|
||||
|
||||
<!-- Firefox -->
|
||||
@ -19,4 +19,3 @@
|
||||
</em:targetApplication>
|
||||
</Description>
|
||||
</RDF>
|
||||
|
@ -2,11 +2,11 @@
|
||||
<RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:em="http://www.mozilla.org/2004/em-rdf#">
|
||||
<Description about="urn:mozilla:install-manifest">
|
||||
<em:id>test-addon-2@mozilla.org</em:id>
|
||||
<em:id>test-empty@quality.mozilla.org</em:id>
|
||||
<em:version>0.2</em:version>
|
||||
<em:name>Test Add-on 2</em:name>
|
||||
<em:creator>Mozilla</em:creator>
|
||||
<em:homepageURL>http://mozilla.org</em:homepageURL>
|
||||
<em:name>Test Extension (empty)</em:name>
|
||||
<em:creator>Mozilla QA</em:creator>
|
||||
<em:homepageURL>http://quality.mozilla.org</em:homepageURL>
|
||||
<em:type>2</em:type>
|
||||
|
||||
<!-- Firefox -->
|
@ -2,12 +2,12 @@
|
||||
<RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:em="http://www.mozilla.org/2004/em-rdf#">
|
||||
<Description about="urn:mozilla:install-manifest">
|
||||
<em:id>test-addon-invalid-version@mozilla.org</em:id>
|
||||
<!-- Invalid addon version -->
|
||||
<em:id>test-empty@quality.mozilla.org</em:id>
|
||||
<!-- Invalid plugin version -->
|
||||
<em:version>0.NOPE</em:version>
|
||||
<em:name>Test Invalid Extension (invalid version)</em:name>
|
||||
<em:creator>Mozilla</em:creator>
|
||||
<em:homepageURL>http://mozilla.org</em:homepageURL>
|
||||
<em:name>Test Extension (empty)</em:name>
|
||||
<em:creator>Mozilla QA</em:creator>
|
||||
<em:homepageURL>http://quality.mozilla.org</em:homepageURL>
|
||||
<em:type>2</em:type>
|
||||
|
||||
<!-- Firefox -->
|
@ -1,21 +0,0 @@
|
||||
<?xml version="1.0"?>
|
||||
<RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:em="http://www.mozilla.org/2004/em-rdf#">
|
||||
<Description about="urn:mozilla:install-manifest">
|
||||
<em:id>test-addon-1@mozilla.org</em:id>
|
||||
<em:version>0.1</em:version>
|
||||
<em:name>Test Add-on 1</em:name>
|
||||
<em:creator>Mozilla</em:creator>
|
||||
<em:homepageURL>http://mozilla.org</em:homepageURL>
|
||||
<em:type>2</em:type>
|
||||
|
||||
<!-- Firefox -->
|
||||
<em:targetApplication>
|
||||
<Description>
|
||||
<em:id>{ec8030f7-c20a-464f-9b0e-13a3a9e97384}</em:id>
|
||||
<em:minVersion>3.5.*</em:minVersion>
|
||||
<em:maxVersion>*</em:maxVersion>
|
||||
</Description>
|
||||
</em:targetApplication>
|
||||
</Description>
|
||||
</RDF>
|
@ -1,22 +0,0 @@
|
||||
<?xml version="1.0"?>
|
||||
<RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:em="http://www.mozilla.org/2004/em-rdf#">
|
||||
<Description about="urn:mozilla:install-manifest">
|
||||
<!-- Invalid because of a missing add-on id -->
|
||||
<em:version>0.1</em:version>
|
||||
<em:name>Test Invalid Extension (no id)</em:name>
|
||||
<em:creator>Mozilla</em:creator>
|
||||
<em:homepageURL>http://mozilla.org</em:homepageURL>
|
||||
<em:type>2</em:type>
|
||||
|
||||
<!-- Firefox -->
|
||||
<em:targetApplication>
|
||||
<Description>
|
||||
<!-- Invalid target application string -->
|
||||
<em:id>{ec8030f7-c20a-464f-9b0e-13a3a9e97384}</em:id>
|
||||
<em:minVersion>3.5.*</em:minVersion>
|
||||
<em:maxVersion>*</em:maxVersion>
|
||||
</Description>
|
||||
</em:targetApplication>
|
||||
</Description>
|
||||
</RDF>
|
@ -1,23 +0,0 @@
|
||||
<?xml version="1.0"?>
|
||||
<RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:em="http://www.mozilla.org/2004/em-rdf#">
|
||||
<Description about="urn:mozilla:install-manifest">
|
||||
<!-- Invalid because it's not well-formed -->
|
||||
<em:id>test-addon-invalid-not-wellformed@mozilla.org</em:id
|
||||
<em:version>0.1</em:version>
|
||||
<em:name>Test Invalid Extension (no id)</em:name>
|
||||
<em:creator>Mozilla</em:creator>
|
||||
<em:homepageURL>http://mozilla.org</em:homepageURL>
|
||||
<em:type>2</em:type>
|
||||
|
||||
<!-- Firefox -->
|
||||
<em:targetApplication>
|
||||
<Description>
|
||||
<!-- Invalid target application string -->
|
||||
<em:id>{ec8030f7-c20a-464f-9b0e-13a3a9e97384}</em:id>
|
||||
<em:minVersion>3.5.*</em:minVersion>
|
||||
<em:maxVersion>*</em:maxVersion>
|
||||
</Description>
|
||||
</em:targetApplication>
|
||||
</Description>
|
||||
</RDF>
|
@ -1,22 +0,0 @@
|
||||
<?xml version="1.0"?>
|
||||
<RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:em="http://www.mozilla.org/2004/em-rdf#">
|
||||
<Description about="urn:mozilla:install-manifest">
|
||||
<em:id>test-addon-unpack@mozilla.org</em:id>
|
||||
<em:version>0.1</em:version>
|
||||
<em:name>Test Add-on (unpack)</em:name>
|
||||
<em:creator>Mozilla</em:creator>
|
||||
<em:homepageURL>http://mozilla.org</em:homepageURL>
|
||||
<em:type>2</em:type>
|
||||
<em:unpack>true</em:unpack>
|
||||
|
||||
<!-- Firefox -->
|
||||
<em:targetApplication>
|
||||
<Description>
|
||||
<em:id>{ec8030f7-c20a-464f-9b0e-13a3a9e97384}</em:id>
|
||||
<em:minVersion>3.5.*</em:minVersion>
|
||||
<em:maxVersion>*</em:maxVersion>
|
||||
</Description>
|
||||
</em:targetApplication>
|
||||
</Description>
|
||||
</RDF>
|
@ -4,337 +4,70 @@
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
|
||||
# You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
import unittest
|
||||
import urllib2
|
||||
|
||||
from manifestparser import ManifestParser
|
||||
import mozfile
|
||||
import mozhttpd
|
||||
import mozlog
|
||||
import addon_stubs
|
||||
import mozprofile
|
||||
|
||||
from addon_stubs import generate_addon, generate_manifest
|
||||
|
||||
|
||||
here = os.path.dirname(os.path.abspath(__file__))
|
||||
import mozfile
|
||||
import tempfile
|
||||
import os
|
||||
import unittest
|
||||
from manifestparser import ManifestParser
|
||||
|
||||
|
||||
class TestAddonsManager(unittest.TestCase):
|
||||
""" Class to test mozprofile.addons.AddonManager """
|
||||
|
||||
def setUp(self):
|
||||
self.logger = mozlog.getLogger('mozprofile.addons')
|
||||
self.logger.setLevel(mozlog.ERROR)
|
||||
|
||||
self.profile = mozprofile.profile.Profile()
|
||||
self.am = self.profile.addon_manager
|
||||
self.am = mozprofile.addons.AddonManager(profile=self.profile.profile)
|
||||
|
||||
self.profile_path = self.profile.profile
|
||||
self.tmpdir = tempfile.mkdtemp()
|
||||
def test_install_from_path(self):
|
||||
|
||||
def tearDown(self):
|
||||
mozfile.rmtree(self.tmpdir)
|
||||
|
||||
self.am = None
|
||||
self.profile = None
|
||||
|
||||
# Bug 934484
|
||||
# Sometimes the profile folder gets recreated at the end and will be left
|
||||
# behind. So we should ensure that we clean it up correctly.
|
||||
mozfile.rmtree(self.profile_path)
|
||||
|
||||
def test_install_addons_multiple_same_source(self):
|
||||
# Generate installer stubs for all possible types of addons
|
||||
addon_xpi = generate_addon('test-addon-1@mozilla.org',
|
||||
path=self.tmpdir)
|
||||
addon_folder = generate_addon('test-addon-1@mozilla.org',
|
||||
path=self.tmpdir,
|
||||
xpi=False)
|
||||
|
||||
# The same folder should not be installed twice
|
||||
self.am.install_addons([addon_folder, addon_folder])
|
||||
self.assertEqual(self.am.installed_addons, [addon_folder])
|
||||
self.am.clean()
|
||||
|
||||
# The same XPI file should not be installed twice
|
||||
self.am.install_addons([addon_xpi, addon_xpi])
|
||||
self.assertEqual(self.am.installed_addons, [addon_xpi])
|
||||
self.am.clean()
|
||||
|
||||
# Even if it is the same id the add-on should be installed twice, if
|
||||
# specified via XPI and folder
|
||||
self.am.install_addons([addon_folder, addon_xpi])
|
||||
self.assertEqual(len(self.am.installed_addons), 2)
|
||||
self.assertIn(addon_folder, self.am.installed_addons)
|
||||
self.assertIn(addon_xpi, self.am.installed_addons)
|
||||
self.am.clean()
|
||||
|
||||
def test_download(self):
|
||||
server = mozhttpd.MozHttpd(docroot=os.path.join(here, 'addons'))
|
||||
server.start()
|
||||
|
||||
# Download a valid add-on without a class instance to the general
|
||||
# tmp folder and clean-up
|
||||
try:
|
||||
addon = server.get_url() + 'empty.xpi'
|
||||
xpi_file = mozprofile.addons.AddonManager.download(addon)
|
||||
self.assertTrue(os.path.isfile(xpi_file))
|
||||
self.assertIn('test-empty@quality.mozilla.org.xpi',
|
||||
os.path.basename(xpi_file))
|
||||
self.assertNotIn(self.tmpdir, os.path.dirname(xpi_file))
|
||||
finally:
|
||||
# Given that the file is stored outside of the created tmp dir
|
||||
# we have to ensure to explicitely remove it
|
||||
if os.path.isfile(xpi_file):
|
||||
os.remove(xpi_file)
|
||||
|
||||
# Download an valid add-on to a special folder
|
||||
addon = server.get_url() + 'empty.xpi'
|
||||
xpi_file = self.am.download(addon, self.tmpdir)
|
||||
self.assertTrue(os.path.isfile(xpi_file))
|
||||
self.assertIn('test-empty@quality.mozilla.org.xpi',
|
||||
os.path.basename(xpi_file))
|
||||
self.assertIn(self.tmpdir, os.path.dirname(xpi_file))
|
||||
self.assertEqual(self.am.downloaded_addons, [])
|
||||
os.remove(xpi_file)
|
||||
|
||||
# Download an invalid add-on to a special folder
|
||||
addon = server.get_url() + 'invalid.xpi'
|
||||
self.assertRaises(mozprofile.addons.AddonFormatError,
|
||||
self.am.download, addon, self.tmpdir)
|
||||
self.assertEqual(os.listdir(self.tmpdir), [])
|
||||
|
||||
# Download from an invalid URL
|
||||
addon = server.get_url() + 'not_existent.xpi'
|
||||
self.assertRaises(urllib2.HTTPError,
|
||||
self.am.download, addon, self.tmpdir)
|
||||
self.assertEqual(os.listdir(self.tmpdir), [])
|
||||
|
||||
# Download from an invalid URL
|
||||
addon = 'not_existent.xpi'
|
||||
self.assertRaises(ValueError,
|
||||
self.am.download, addon, self.tmpdir)
|
||||
self.assertEqual(os.listdir(self.tmpdir), [])
|
||||
|
||||
server.stop()
|
||||
|
||||
def test_install_from_path_xpi(self):
|
||||
addons_to_install = []
|
||||
addons_installed = []
|
||||
|
||||
# Generate installer stubs and install them
|
||||
for ext in ['test-addon-1@mozilla.org', 'test-addon-2@mozilla.org']:
|
||||
temp_addon = generate_addon(ext, path=self.tmpdir)
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
for t in ['empty-0-1.xpi', 'another-empty-0-1.xpi']:
|
||||
temp_addon = addon_stubs.generate_addon(name=t, path=tmpdir)
|
||||
addons_to_install.append(self.am.addon_details(temp_addon)['id'])
|
||||
self.am.install_from_path(temp_addon)
|
||||
|
||||
# Generate a list of addons installed in the profile
|
||||
addons_installed = [unicode(x[:-len('.xpi')]) for x in os.listdir(os.path.join(
|
||||
self.profile.profile, 'extensions', 'staged'))]
|
||||
self.assertEqual(addons_to_install.sort(), addons_installed.sort())
|
||||
|
||||
def test_install_from_path_folder(self):
|
||||
# Generate installer stubs for all possible types of addons
|
||||
addons = []
|
||||
addons.append(generate_addon('test-addon-1@mozilla.org',
|
||||
path=self.tmpdir))
|
||||
addons.append(generate_addon('test-addon-2@mozilla.org',
|
||||
path=self.tmpdir,
|
||||
xpi=False))
|
||||
addons.append(generate_addon('test-addon-3@mozilla.org',
|
||||
path=self.tmpdir,
|
||||
name='addon-3'))
|
||||
addons.append(generate_addon('test-addon-4@mozilla.org',
|
||||
path=self.tmpdir,
|
||||
name='addon-4',
|
||||
xpi=False))
|
||||
addons.sort()
|
||||
|
||||
self.am.install_from_path(self.tmpdir)
|
||||
|
||||
self.assertEqual(self.am.installed_addons, addons)
|
||||
|
||||
def test_install_from_path_unpack(self):
|
||||
# Generate installer stubs for all possible types of addons
|
||||
addon_xpi = generate_addon('test-addon-unpack@mozilla.org',
|
||||
path=self.tmpdir)
|
||||
addon_folder = generate_addon('test-addon-unpack@mozilla.org',
|
||||
path=self.tmpdir,
|
||||
xpi=False)
|
||||
addon_no_unpack = generate_addon('test-addon-1@mozilla.org',
|
||||
path=self.tmpdir)
|
||||
|
||||
# Test unpack flag for add-on as XPI
|
||||
self.am.install_from_path(addon_xpi)
|
||||
self.assertEqual(self.am.installed_addons, [addon_xpi])
|
||||
self.am.clean()
|
||||
|
||||
# Test unpack flag for add-on as folder
|
||||
self.am.install_from_path(addon_folder)
|
||||
self.assertEqual(self.am.installed_addons, [addon_folder])
|
||||
self.am.clean()
|
||||
|
||||
# Test forcing unpack an add-on
|
||||
self.am.install_from_path(addon_no_unpack, unpack=True)
|
||||
self.assertEqual(self.am.installed_addons, [addon_no_unpack])
|
||||
self.am.clean()
|
||||
|
||||
def test_install_from_path_url(self):
|
||||
server = mozhttpd.MozHttpd(docroot=os.path.join(here, 'addons'))
|
||||
server.start()
|
||||
|
||||
addon = server.get_url() + 'empty.xpi'
|
||||
self.am.install_from_path(addon)
|
||||
|
||||
server.stop()
|
||||
|
||||
self.assertEqual(len(self.am.downloaded_addons), 1)
|
||||
self.assertTrue(os.path.isfile(self.am.downloaded_addons[0]))
|
||||
self.assertIn('test-empty@quality.mozilla.org.xpi',
|
||||
os.path.basename(self.am.downloaded_addons[0]))
|
||||
|
||||
def test_install_from_path_after_reset(self):
|
||||
# Installing the same add-on after a reset should not cause a failure
|
||||
addon = generate_addon('test-addon-1@mozilla.org',
|
||||
path=self.tmpdir, xpi=False)
|
||||
|
||||
# We cannot use self.am because profile.reset() creates a new instance
|
||||
self.profile.addon_manager.install_from_path(addon)
|
||||
|
||||
self.profile.reset()
|
||||
|
||||
self.profile.addon_manager.install_from_path(addon)
|
||||
self.assertEqual(self.profile.addon_manager.installed_addons, [addon])
|
||||
|
||||
def test_install_from_path_backup(self):
|
||||
staged_path = os.path.join(self.profile_path, 'extensions', 'staged')
|
||||
|
||||
# Generate installer stubs for all possible types of addons
|
||||
addon_xpi = generate_addon('test-addon-1@mozilla.org',
|
||||
path=self.tmpdir)
|
||||
addon_folder = generate_addon('test-addon-1@mozilla.org',
|
||||
path=self.tmpdir,
|
||||
xpi=False)
|
||||
addon_name = generate_addon('test-addon-1@mozilla.org',
|
||||
path=self.tmpdir,
|
||||
name='test-addon-1-dupe@mozilla.org')
|
||||
|
||||
# Test backup of xpi files
|
||||
self.am.install_from_path(addon_xpi)
|
||||
self.assertIsNone(self.am.backup_dir)
|
||||
|
||||
self.am.install_from_path(addon_xpi)
|
||||
self.assertIsNotNone(self.am.backup_dir)
|
||||
self.assertEqual(os.listdir(self.am.backup_dir),
|
||||
['test-addon-1@mozilla.org.xpi'])
|
||||
|
||||
self.am.clean()
|
||||
self.assertEqual(os.listdir(staged_path),
|
||||
['test-addon-1@mozilla.org.xpi'])
|
||||
self.am.clean()
|
||||
|
||||
# Test backup of folders
|
||||
self.am.install_from_path(addon_folder)
|
||||
self.assertIsNone(self.am.backup_dir)
|
||||
|
||||
self.am.install_from_path(addon_folder)
|
||||
self.assertIsNotNone(self.am.backup_dir)
|
||||
self.assertEqual(os.listdir(self.am.backup_dir),
|
||||
['test-addon-1@mozilla.org'])
|
||||
|
||||
self.am.clean()
|
||||
self.assertEqual(os.listdir(staged_path),
|
||||
['test-addon-1@mozilla.org'])
|
||||
self.am.clean()
|
||||
|
||||
# Test backup of xpi files with another file name
|
||||
self.am.install_from_path(addon_name)
|
||||
self.assertIsNone(self.am.backup_dir)
|
||||
|
||||
self.am.install_from_path(addon_xpi)
|
||||
self.assertIsNotNone(self.am.backup_dir)
|
||||
self.assertEqual(os.listdir(self.am.backup_dir),
|
||||
['test-addon-1@mozilla.org.xpi'])
|
||||
|
||||
self.am.clean()
|
||||
self.assertEqual(os.listdir(staged_path),
|
||||
['test-addon-1@mozilla.org.xpi'])
|
||||
self.am.clean()
|
||||
|
||||
def test_install_from_path_invalid_addons(self):
|
||||
# Generate installer stubs for all possible types of addons
|
||||
addons = []
|
||||
addons.append(generate_addon('test-addon-invalid-no-manifest@mozilla.org',
|
||||
path=self.tmpdir,
|
||||
xpi=False))
|
||||
addons.append(generate_addon('test-addon-invalid-no-id@mozilla.org',
|
||||
path=self.tmpdir))
|
||||
|
||||
self.am.install_from_path(self.tmpdir)
|
||||
|
||||
self.assertEqual(self.am.installed_addons, [])
|
||||
# Cleanup the temporary addon directories
|
||||
mozfile.rmtree(tmpdir)
|
||||
|
||||
@unittest.skip("Feature not implemented as part of AddonManger")
|
||||
def test_install_from_path_error(self):
|
||||
""" Check install_from_path raises an error with an invalid addon"""
|
||||
|
||||
temp_addon = generate_addon('test-addon-invalid-version@mozilla.org')
|
||||
temp_addon = addon_stubs.generate_invalid_addon()
|
||||
# This should raise an error here
|
||||
self.am.install_from_path(temp_addon)
|
||||
|
||||
def test_install_from_manifest(self):
|
||||
temp_manifest = generate_manifest(['test-addon-1@mozilla.org',
|
||||
'test-addon-2@mozilla.org'])
|
||||
|
||||
temp_manifest = addon_stubs.generate_manifest()
|
||||
m = ManifestParser()
|
||||
m.read(temp_manifest)
|
||||
addons = m.get()
|
||||
|
||||
# Obtain details of addons to install from the manifest
|
||||
addons_to_install = [self.am.addon_details(x['path']).get('id') for x in addons]
|
||||
addons_to_install = [self.am.addon_details(x['path'])['id'] for x in addons]
|
||||
|
||||
self.am.install_from_manifest(temp_manifest)
|
||||
# Generate a list of addons installed in the profile
|
||||
addons_installed = [unicode(x[:-len('.xpi')]) for x in os.listdir(os.path.join(
|
||||
self.profile.profile, 'extensions', 'staged'))]
|
||||
self.assertEqual(addons_installed.sort(), addons_to_install.sort())
|
||||
|
||||
# Cleanup the temporary addon and manifest directories
|
||||
mozfile.rmtree(os.path.dirname(temp_manifest))
|
||||
|
||||
def test_addon_details(self):
|
||||
# Generate installer stubs for a valid and invalid add-on manifest
|
||||
valid_addon = generate_addon('test-addon-1@mozilla.org',
|
||||
path=self.tmpdir)
|
||||
invalid_addon = generate_addon('test-addon-invalid-not-wellformed@mozilla.org',
|
||||
path=self.tmpdir)
|
||||
|
||||
# Check valid add-on
|
||||
details = self.am.addon_details(valid_addon)
|
||||
self.assertEqual(details['id'], 'test-addon-1@mozilla.org')
|
||||
self.assertEqual(details['name'], 'Test Add-on 1')
|
||||
self.assertEqual(details['unpack'], False)
|
||||
self.assertEqual(details['version'], '0.1')
|
||||
|
||||
# Check invalid add-on
|
||||
self.assertRaises(mozprofile.addons.AddonFormatError,
|
||||
self.am.addon_details, invalid_addon)
|
||||
|
||||
# Check invalid path
|
||||
self.assertRaises(IOError,
|
||||
self.am.addon_details, '')
|
||||
|
||||
# Check invalid add-on format
|
||||
addon_path = os.path.join(os.path.join(here, 'files'), 'not_an_addon.txt')
|
||||
self.assertRaises(mozprofile.addons.AddonFormatError,
|
||||
self.am.addon_details, addon_path)
|
||||
|
||||
@unittest.skip("Bug 900154")
|
||||
def test_clean_addons(self):
|
||||
addon_one = generate_addon('test-addon-1@mozilla.org')
|
||||
addon_two = generate_addon('test-addon-2@mozilla.org')
|
||||
|
||||
addon_one = addon_stubs.generate_addon('empty-0-1.xpi')
|
||||
addon_two = addon_stubs.generate_addon('another-empty-0-1.xpi')
|
||||
|
||||
self.am.install_addons(addon_one)
|
||||
installed_addons = [unicode(x[:-len('.xpi')]) for x in os.listdir(os.path.join(
|
||||
@ -345,7 +78,7 @@ class TestAddonsManager(unittest.TestCase):
|
||||
# Cleanup addons
|
||||
duplicate_profile = mozprofile.profile.Profile(profile=self.profile.profile,
|
||||
addons=addon_two)
|
||||
duplicate_profile.addon_manager.clean()
|
||||
duplicate_profile.addon_manager.clean_addons()
|
||||
|
||||
addons_after_cleanup = [unicode(x[:-len('.xpi')]) for x in os.listdir(os.path.join(
|
||||
duplicate_profile.profile, 'extensions', 'staged'))]
|
||||
@ -355,71 +88,39 @@ class TestAddonsManager(unittest.TestCase):
|
||||
def test_noclean(self):
|
||||
"""test `restore=True/False` functionality"""
|
||||
|
||||
server = mozhttpd.MozHttpd(docroot=os.path.join(here, 'addons'))
|
||||
server.start()
|
||||
|
||||
profile = tempfile.mkdtemp()
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
|
||||
try:
|
||||
|
||||
# empty initially
|
||||
self.assertFalse(bool(os.listdir(profile)))
|
||||
|
||||
# make an addon
|
||||
addons = []
|
||||
addons.append(generate_addon('test-addon-1@mozilla.org',
|
||||
path=tmpdir))
|
||||
addons.append(server.get_url() + 'empty.xpi')
|
||||
stub = addon_stubs.generate_addon(name='empty-0-1.xpi',
|
||||
path=tmpdir)
|
||||
|
||||
# install it with a restore=True AddonManager
|
||||
am = mozprofile.addons.AddonManager(profile, restore=True)
|
||||
|
||||
for addon in addons:
|
||||
am.install_from_path(addon)
|
||||
addons = mozprofile.addons.AddonManager(profile, restore=True)
|
||||
addons.install_from_path(stub)
|
||||
|
||||
# now its there
|
||||
self.assertEqual(os.listdir(profile), ['extensions'])
|
||||
staging_folder = os.path.join(profile, 'extensions', 'staged')
|
||||
self.assertTrue(os.path.exists(staging_folder))
|
||||
self.assertEqual(len(os.listdir(staging_folder)), 2)
|
||||
extensions = os.path.join(profile, 'extensions', 'staged')
|
||||
self.assertTrue(os.path.exists(extensions))
|
||||
contents = os.listdir(extensions)
|
||||
self.assertEqual(len(contents), 1)
|
||||
|
||||
# del addons; now its gone though the directory tree exists
|
||||
downloaded_addons = am.downloaded_addons
|
||||
del am
|
||||
|
||||
del addons
|
||||
self.assertEqual(os.listdir(profile), ['extensions'])
|
||||
self.assertTrue(os.path.exists(staging_folder))
|
||||
self.assertEqual(os.listdir(staging_folder), [])
|
||||
|
||||
for addon in downloaded_addons:
|
||||
self.assertFalse(os.path.isfile(addon))
|
||||
self.assertTrue(os.path.exists(extensions))
|
||||
contents = os.listdir(extensions)
|
||||
self.assertEqual(len(contents), 0)
|
||||
|
||||
finally:
|
||||
mozfile.rmtree(tmpdir)
|
||||
mozfile.rmtree(profile)
|
||||
|
||||
def test_remove_addon(self):
|
||||
addons = []
|
||||
addons.append(generate_addon('test-addon-1@mozilla.org',
|
||||
path=self.tmpdir))
|
||||
addons.append(generate_addon('test-addon-2@mozilla.org',
|
||||
path=self.tmpdir))
|
||||
|
||||
self.am.install_from_path(self.tmpdir)
|
||||
|
||||
extensions_path = os.path.join(self.profile_path, 'extensions')
|
||||
staging_path = os.path.join(extensions_path, 'staged')
|
||||
|
||||
# Fake a run by virtually installing one of the staged add-ons
|
||||
shutil.move(os.path.join(staging_path, 'test-addon-1@mozilla.org.xpi'),
|
||||
extensions_path)
|
||||
|
||||
for addon in self.am._addons:
|
||||
self.am.remove_addon(addon)
|
||||
|
||||
self.assertEqual(os.listdir(staging_path), [])
|
||||
self.assertEqual(os.listdir(extensions_path), ['staged'])
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user