Merge m-c to inbound.

This commit is contained in:
Ryan VanderMeulen 2013-01-30 08:06:05 -05:00
commit d999b4efd2
48 changed files with 1062 additions and 471 deletions

View File

@ -6,15 +6,13 @@
# drop-in replacement for autoconf 2.13's config.status, with features
# borrowed from autoconf > 2.5, and additional features.
from __future__ import with_statement
from optparse import OptionParser
import sys, re, os, posixpath, ntpath
import errno
from StringIO import StringIO
from os.path import relpath
import os
import sys
from optparse import OptionParser
from mozbuild.backend.configenvironment import ConfigEnvironment
# Standalone js doesn't have virtualenv.
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'config'))
from Preprocessor import Preprocessor
# Basic logging facility
@ -24,200 +22,6 @@ def log(string):
print >>sys.stderr, string
def ensureParentDir(file):
    '''Ensures the directory parent to the given file exists'''
    # NOTE: 'file' and 'dir' shadow builtins; names kept for compatibility.
    dir = os.path.dirname(file)
    if dir and not os.path.exists(dir):
        try:
            os.makedirs(dir)
        except OSError, error:
            # A concurrent creator may win the race; EEXIST is benign,
            # anything else is re-raised.
            if error.errno != errno.EEXIST:
                raise
class FileAvoidWrite(StringIO):
    '''file-like object that buffers its output and only writes it to disk
    if the new contents are different from what the file may already contain.
    '''
    def __init__(self, filename):
        # Target path; nothing touches the filesystem until close().
        self.filename = filename
        StringIO.__init__(self)

    def close(self):
        '''Compare the buffered content with the on-disk file and only
        rewrite the file when the content differs (or the file is missing).'''
        buf = self.getvalue()
        # Release the in-memory buffer before any disk I/O.
        StringIO.close(self)
        try:
            file = open(self.filename, 'rU')
        except IOError:
            # File missing or unreadable: fall through and write it.
            pass
        else:
            try:
                if file.read() == buf:
                    # Identical content: leave the file (and its mtime) alone.
                    log("%s is unchanged" % relpath(self.filename, os.curdir))
                    return
            except IOError:
                pass
            finally:
                file.close()
        log("creating %s" % relpath(self.filename, os.curdir))
        ensureParentDir(self.filename)
        with open(self.filename, 'w') as file:
            file.write(buf)

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        # Leaving the 'with' block triggers the conditional write.
        self.close()
def shell_escape(s):
    '''Escape some characters with a backslash, and double dollar signs.
    '''
    # Backslash-escape shell metacharacters first, then double every '$'
    # so make does not treat it as a variable reference.
    escaped = re.sub('''([ \t`#$^&*(){}\\|;'"<>?\[\]])''', r'\\\1', str(s))
    return escaped.replace('$', '$$')
class ConfigEnvironment(object):
    '''A ConfigEnvironment is defined by a source directory and a build
    directory. It preprocesses files from the source directory and stores
    the result in the object directory.
    There are two types of files: config files and config headers,
    each treated through a different member function.
    Creating a ConfigEnvironment requires a few arguments:
      - topsrcdir and topobjdir are, respectively, the top source and
        the top object directory.
      - defines is a list of (name, value) tuples. In autoconf, these are
        set with AC_DEFINE and AC_DEFINE_UNQUOTED
      - non_global_defines is a list of names appearing in defines above
        that are not meant to be exported in ACDEFINES and ALLDEFINES (see
        below)
      - substs is a list of (name, value) tuples. In autoconf, these are
        set with AC_SUBST.
    ConfigEnvironment automatically defines two additional substs variables
    from all the defines not appearing in non_global_defines:
      - ACDEFINES contains the defines in the form -DNAME=VALUE, for use on
        preprocessor command lines. The order in which defines were given
        when creating the ConfigEnvironment is preserved.
      - ALLDEFINES contains the defines in the form #define NAME VALUE, in
        sorted order, for use in config files, for an automatic listing of
        defines.
    and another additional subst variable from all the other substs:
      - ALLSUBSTS contains the substs in the form NAME = VALUE, in sorted
        order, for use in autoconf.mk. It includes ACDEFINES, but doesn't
        include ALLDEFINES.
    ConfigEnvironment expects a "top_srcdir" subst to be set with the top
    source directory, in msys format on windows. It is used to derive a
    "srcdir" subst when treating config files. It can either be an absolute
    path or a path relative to the topobjdir.
    '''

    # NOTE(review): the mutable default arguments ([]) are shared across
    # calls; they are only read here, so this is benign.
    def __init__(self, topobjdir = '.', topsrcdir = '.',
            defines = [], non_global_defines = [], substs = []):
        self.defines = dict(defines)
        self.substs = dict(substs)
        self.topsrcdir = topsrcdir
        self.topobjdir = topobjdir
        # Defines not excluded via non_global_defines feed ACDEFINES/ALLDEFINES.
        global_defines = [name for name, value in defines if not name in non_global_defines]
        # -DNAME=VALUE flags, preserving the order defines were given in.
        self.substs['ACDEFINES'] = ' '.join(["-D%s=%s" % (name, shell_escape(self.defines[name])) for name in global_defines])
        # NAME = VALUE lines (sorted) for autoconf.mk; includes ACDEFINES itself.
        self.substs['ALLSUBSTS'] = '\n'.join(sorted(["%s = %s" % (name, self.substs[name]) for name in self.substs]))
        # "#define NAME VALUE" lines, sorted, for config-file listings.
        self.substs['ALLDEFINES'] = '\n'.join(sorted(["#define %s %s" % (name, self.defines[name]) for name in global_defines]))

    def get_relative_srcdir(self, file):
        '''Returns the relative source directory for the given file, always
        using / as a path separator.
        '''
        assert(isinstance(file, basestring))
        # Normalize to forward slashes so results are stable across platforms.
        dir = posixpath.dirname(relpath(file, self.topobjdir).replace(os.sep, '/'))
        if dir:
            return dir
        return '.'

    def get_top_srcdir(self, file):
        '''Returns a normalized top_srcdir for the given file: if
        substs['top_srcdir'] is a relative path, it is relative to the
        topobjdir. Adjust it to be relative to the file path.'''
        top_srcdir = self.substs['top_srcdir']
        # Accept either posix- or windows-style absolute paths, since the
        # subst is in msys format on windows.
        if posixpath.isabs(top_srcdir) or ntpath.isabs(top_srcdir):
            return top_srcdir
        return posixpath.normpath(posixpath.join(self.get_depth(file), top_srcdir))

    def get_file_srcdir(self, file):
        '''Returns the srcdir for the given file, where srcdir is in msys
        format on windows, thus derived from top_srcdir.
        '''
        dir = self.get_relative_srcdir(file)
        top_srcdir = self.get_top_srcdir(file)
        return posixpath.normpath(posixpath.join(top_srcdir, dir))

    def get_depth(self, file):
        '''Returns the DEPTH for the given file, that is, the path to the
        object directory relative to the directory containing the given file.
        Always uses / as a path separator.
        '''
        return relpath(self.topobjdir, os.path.dirname(file)).replace(os.sep, '/')

    def get_input(self, file):
        '''Returns the input file path in the source tree that can be used
        to create the given config file or header.
        '''
        assert(isinstance(file, basestring))
        return os.path.normpath(os.path.join(self.topsrcdir, "%s.in" % relpath(file, self.topobjdir)))

    def create_config_file(self, path):
        '''Creates the given config file. A config file is generated by
        taking the corresponding source file and replacing occurrences of
        "@VAR@" by the value corresponding to "VAR" in the substs dict.
        Additional substs are defined according to the file being treated:
            "srcdir" for the path to its source directory
            "relativesrcdir" for its source directory relative to the top
            "DEPTH" for the path to the top object directory
        '''
        input = self.get_input(path)
        pp = Preprocessor()
        pp.context.update(self.substs)
        pp.context.update(top_srcdir = self.get_top_srcdir(path))
        pp.context.update(srcdir = self.get_file_srcdir(path))
        pp.context.update(relativesrcdir = self.get_relative_srcdir(path))
        pp.context.update(DEPTH = self.get_depth(path))
        # Only @VAR@ substitution is wanted; disable directive processing.
        pp.do_filter('attemptSubstitution')
        pp.setMarker(None)
        # FileAvoidWrite only touches the output if the content changed.
        with FileAvoidWrite(path) as pp.out:
            pp.do_include(input)

    def create_config_header(self, path):
        '''Creates the given config header. A config header is generated by
        taking the corresponding source file and replacing some #define/#undef
        occurrences:
            "#undef NAME" is turned into "#define NAME VALUE"
            "#define NAME" is unchanged
            "#define NAME ORIGINAL_VALUE" is turned into "#define NAME VALUE"
            "#undef UNKNOWN_NAME" is turned into "/* #undef UNKNOWN_NAME */"
        Whitespaces are preserved.
        '''
        with open(self.get_input(path), 'rU') as input:
            ensureParentDir(path)
            output = FileAvoidWrite(path)
            # Matches a preprocessor line: command, optional name and value.
            r = re.compile('^\s*#\s*(?P<cmd>[a-z]+)(?:\s+(?P<name>\S+)(?:\s+(?P<value>\S+))?)?', re.U)
            for l in input:
                m = r.match(l)
                if m:
                    cmd = m.group('cmd')
                    name = m.group('name')
                    value = m.group('value')
                    if name:
                        if name in self.defines:
                            if cmd == 'define' and value:
                                # Replace only the value, keeping whitespace.
                                l = l[:m.start('value')] + str(self.defines[name]) + l[m.end('value'):]
                            elif cmd == 'undef':
                                # "#undef NAME" -> "#define NAME VALUE".
                                l = l[:m.start('cmd')] + 'define' + l[m.end('cmd'):m.end('name')] + ' ' + str(self.defines[name]) + l[m.end('name'):]
                        elif cmd == 'undef':
                            # Name not defined: comment the #undef out.
                            l = '/* ' + l[:m.end('name')] + ' */' + l[m.end('name'):]
                output.write(l)
            output.close()
def config_status(topobjdir = '.', topsrcdir = '.',
defines = [], non_global_defines = [], substs = [],
files = [], headers = []):
@ -276,9 +80,8 @@ def config_status(topobjdir = '.', topsrcdir = '.',
if not options.not_topobjdir:
topobjdir = '.'
env = ConfigEnvironment(topobjdir = topobjdir, topsrcdir = topsrcdir,
defines = defines, non_global_defines = non_global_defines,
substs = substs)
env = ConfigEnvironment(topsrcdir, topobjdir, defines=defines,
non_global_defines=non_global_defines, substs=substs)
if options.recheck:
# Execute configure from the top object directory

View File

@ -103,7 +103,6 @@ GDBINIT_DEST = $(FINAL_TARGET)
INSTALL_TARGETS += GDBINIT
PYTHON_UNIT_TESTS := \
tests/unit-ConfigStatus.py \
tests/test.py \
$(NULL)

View File

@ -6,15 +6,13 @@
# drop-in replacement for autoconf 2.13's config.status, with features
# borrowed from autoconf > 2.5, and additional features.
from __future__ import with_statement
from optparse import OptionParser
import sys, re, os, posixpath, ntpath
import errno
from StringIO import StringIO
from os.path import relpath
import os
import sys
from optparse import OptionParser
from mozbuild.backend.configenvironment import ConfigEnvironment
# Standalone js doesn't have virtualenv.
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'config'))
from Preprocessor import Preprocessor
# Basic logging facility
@ -24,200 +22,6 @@ def log(string):
print >>sys.stderr, string
def ensureParentDir(file):
    '''Ensures the directory parent to the given file exists'''
    # NOTE: 'file' and 'dir' shadow builtins; names kept for compatibility.
    dir = os.path.dirname(file)
    if dir and not os.path.exists(dir):
        try:
            os.makedirs(dir)
        except OSError, error:
            # A concurrent creator may win the race; EEXIST is benign,
            # anything else is re-raised.
            if error.errno != errno.EEXIST:
                raise
class FileAvoidWrite(StringIO):
    '''file-like object that buffers its output and only writes it to disk
    if the new contents are different from what the file may already contain.
    '''
    def __init__(self, filename):
        # Target path; nothing touches the filesystem until close().
        self.filename = filename
        StringIO.__init__(self)

    def close(self):
        '''Compare the buffered content with the on-disk file and only
        rewrite the file when the content differs (or the file is missing).'''
        buf = self.getvalue()
        # Release the in-memory buffer before any disk I/O.
        StringIO.close(self)
        try:
            file = open(self.filename, 'rU')
        except IOError:
            # File missing or unreadable: fall through and write it.
            pass
        else:
            try:
                if file.read() == buf:
                    # Identical content: leave the file (and its mtime) alone.
                    log("%s is unchanged" % relpath(self.filename, os.curdir))
                    return
            except IOError:
                pass
            finally:
                file.close()
        log("creating %s" % relpath(self.filename, os.curdir))
        ensureParentDir(self.filename)
        with open(self.filename, 'w') as file:
            file.write(buf)

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        # Leaving the 'with' block triggers the conditional write.
        self.close()
def shell_escape(s):
    '''Escape some characters with a backslash, and double dollar signs.
    '''
    # Backslash-escape shell metacharacters first, then double every '$'
    # so make does not treat it as a variable reference.
    escaped = re.sub('''([ \t`#$^&*(){}\\|;'"<>?\[\]])''', r'\\\1', str(s))
    return escaped.replace('$', '$$')
class ConfigEnvironment(object):
    '''A ConfigEnvironment is defined by a source directory and a build
    directory. It preprocesses files from the source directory and stores
    the result in the object directory.
    There are two types of files: config files and config headers,
    each treated through a different member function.
    Creating a ConfigEnvironment requires a few arguments:
      - topsrcdir and topobjdir are, respectively, the top source and
        the top object directory.
      - defines is a list of (name, value) tuples. In autoconf, these are
        set with AC_DEFINE and AC_DEFINE_UNQUOTED
      - non_global_defines is a list of names appearing in defines above
        that are not meant to be exported in ACDEFINES and ALLDEFINES (see
        below)
      - substs is a list of (name, value) tuples. In autoconf, these are
        set with AC_SUBST.
    ConfigEnvironment automatically defines two additional substs variables
    from all the defines not appearing in non_global_defines:
      - ACDEFINES contains the defines in the form -DNAME=VALUE, for use on
        preprocessor command lines. The order in which defines were given
        when creating the ConfigEnvironment is preserved.
      - ALLDEFINES contains the defines in the form #define NAME VALUE, in
        sorted order, for use in config files, for an automatic listing of
        defines.
    and another additional subst variable from all the other substs:
      - ALLSUBSTS contains the substs in the form NAME = VALUE, in sorted
        order, for use in autoconf.mk. It includes ACDEFINES, but doesn't
        include ALLDEFINES.
    ConfigEnvironment expects a "top_srcdir" subst to be set with the top
    source directory, in msys format on windows. It is used to derive a
    "srcdir" subst when treating config files. It can either be an absolute
    path or a path relative to the topobjdir.
    '''

    # NOTE(review): the mutable default arguments ([]) are shared across
    # calls; they are only read here, so this is benign.
    def __init__(self, topobjdir = '.', topsrcdir = '.',
            defines = [], non_global_defines = [], substs = []):
        self.defines = dict(defines)
        self.substs = dict(substs)
        self.topsrcdir = topsrcdir
        self.topobjdir = topobjdir
        # Defines not excluded via non_global_defines feed ACDEFINES/ALLDEFINES.
        global_defines = [name for name, value in defines if not name in non_global_defines]
        # -DNAME=VALUE flags, preserving the order defines were given in.
        self.substs['ACDEFINES'] = ' '.join(["-D%s=%s" % (name, shell_escape(self.defines[name])) for name in global_defines])
        # NAME = VALUE lines (sorted) for autoconf.mk; includes ACDEFINES itself.
        self.substs['ALLSUBSTS'] = '\n'.join(sorted(["%s = %s" % (name, self.substs[name]) for name in self.substs]))
        # "#define NAME VALUE" lines, sorted, for config-file listings.
        self.substs['ALLDEFINES'] = '\n'.join(sorted(["#define %s %s" % (name, self.defines[name]) for name in global_defines]))

    def get_relative_srcdir(self, file):
        '''Returns the relative source directory for the given file, always
        using / as a path separator.
        '''
        assert(isinstance(file, basestring))
        # Normalize to forward slashes so results are stable across platforms.
        dir = posixpath.dirname(relpath(file, self.topobjdir).replace(os.sep, '/'))
        if dir:
            return dir
        return '.'

    def get_top_srcdir(self, file):
        '''Returns a normalized top_srcdir for the given file: if
        substs['top_srcdir'] is a relative path, it is relative to the
        topobjdir. Adjust it to be relative to the file path.'''
        top_srcdir = self.substs['top_srcdir']
        # Accept either posix- or windows-style absolute paths, since the
        # subst is in msys format on windows.
        if posixpath.isabs(top_srcdir) or ntpath.isabs(top_srcdir):
            return top_srcdir
        return posixpath.normpath(posixpath.join(self.get_depth(file), top_srcdir))

    def get_file_srcdir(self, file):
        '''Returns the srcdir for the given file, where srcdir is in msys
        format on windows, thus derived from top_srcdir.
        '''
        dir = self.get_relative_srcdir(file)
        top_srcdir = self.get_top_srcdir(file)
        return posixpath.normpath(posixpath.join(top_srcdir, dir))

    def get_depth(self, file):
        '''Returns the DEPTH for the given file, that is, the path to the
        object directory relative to the directory containing the given file.
        Always uses / as a path separator.
        '''
        return relpath(self.topobjdir, os.path.dirname(file)).replace(os.sep, '/')

    def get_input(self, file):
        '''Returns the input file path in the source tree that can be used
        to create the given config file or header.
        '''
        assert(isinstance(file, basestring))
        return os.path.normpath(os.path.join(self.topsrcdir, "%s.in" % relpath(file, self.topobjdir)))

    def create_config_file(self, path):
        '''Creates the given config file. A config file is generated by
        taking the corresponding source file and replacing occurrences of
        "@VAR@" by the value corresponding to "VAR" in the substs dict.
        Additional substs are defined according to the file being treated:
            "srcdir" for the path to its source directory
            "relativesrcdir" for its source directory relative to the top
            "DEPTH" for the path to the top object directory
        '''
        input = self.get_input(path)
        pp = Preprocessor()
        pp.context.update(self.substs)
        pp.context.update(top_srcdir = self.get_top_srcdir(path))
        pp.context.update(srcdir = self.get_file_srcdir(path))
        pp.context.update(relativesrcdir = self.get_relative_srcdir(path))
        pp.context.update(DEPTH = self.get_depth(path))
        # Only @VAR@ substitution is wanted; disable directive processing.
        pp.do_filter('attemptSubstitution')
        pp.setMarker(None)
        # FileAvoidWrite only touches the output if the content changed.
        with FileAvoidWrite(path) as pp.out:
            pp.do_include(input)

    def create_config_header(self, path):
        '''Creates the given config header. A config header is generated by
        taking the corresponding source file and replacing some #define/#undef
        occurrences:
            "#undef NAME" is turned into "#define NAME VALUE"
            "#define NAME" is unchanged
            "#define NAME ORIGINAL_VALUE" is turned into "#define NAME VALUE"
            "#undef UNKNOWN_NAME" is turned into "/* #undef UNKNOWN_NAME */"
        Whitespaces are preserved.
        '''
        with open(self.get_input(path), 'rU') as input:
            ensureParentDir(path)
            output = FileAvoidWrite(path)
            # Matches a preprocessor line: command, optional name and value.
            r = re.compile('^\s*#\s*(?P<cmd>[a-z]+)(?:\s+(?P<name>\S+)(?:\s+(?P<value>\S+))?)?', re.U)
            for l in input:
                m = r.match(l)
                if m:
                    cmd = m.group('cmd')
                    name = m.group('name')
                    value = m.group('value')
                    if name:
                        if name in self.defines:
                            if cmd == 'define' and value:
                                # Replace only the value, keeping whitespace.
                                l = l[:m.start('value')] + str(self.defines[name]) + l[m.end('value'):]
                            elif cmd == 'undef':
                                # "#undef NAME" -> "#define NAME VALUE".
                                l = l[:m.start('cmd')] + 'define' + l[m.end('cmd'):m.end('name')] + ' ' + str(self.defines[name]) + l[m.end('name'):]
                        elif cmd == 'undef':
                            # Name not defined: comment the #undef out.
                            l = '/* ' + l[:m.end('name')] + ' */' + l[m.end('name'):]
                output.write(l)
            output.close()
def config_status(topobjdir = '.', topsrcdir = '.',
defines = [], non_global_defines = [], substs = [],
files = [], headers = []):
@ -276,9 +80,8 @@ def config_status(topobjdir = '.', topsrcdir = '.',
if not options.not_topobjdir:
topobjdir = '.'
env = ConfigEnvironment(topobjdir = topobjdir, topsrcdir = topsrcdir,
defines = defines, non_global_defines = non_global_defines,
substs = substs)
env = ConfigEnvironment(topsrcdir, topobjdir, defines=defines,
non_global_defines=non_global_defines, substs=substs)
if options.recheck:
# Execute configure from the top object directory

View File

@ -11,6 +11,7 @@ include $(DEPTH)/config/autoconf.mk
test_dirs := \
mozbuild/mozbuild/test \
mozbuild/mozbuild/test/backend \
mozbuild/mozbuild/test/compilation \
mozbuild/mozbuild/test/frontend \
mozbuild/mozpack/test \

View File

@ -8,6 +8,10 @@ build system.
Modules Overview
================
* mozbuild.backend -- Functionality for producing and interacting with build
backends. A build backend is an entity that consumes build system metadata
(from mozbuild.frontend) and does something useful with it (typically writing
out files that can be used by a build tool to build the tree).
* mozbuild.compilation -- Functionality related to compiling. This
includes managing compiler warnings.
* mozbuild.frontend -- Functionality for reading build frontend files
@ -31,9 +35,6 @@ backends consume the build configuration and do something with it. They
typically produce tool-specific files such as make files which can be used
to build the tree.
Builders are entities that build the tree. They typically have high
cohesion with a specific build backend.
Piecing it all together, we have frontend files that are parsed into data
structures. These data structures are fed into a build backend. The output
from build backends is used by builders to build the tree.

View File

@ -0,0 +1,78 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import unicode_literals
from abc import (
ABCMeta,
abstractmethod,
)
import os
import sys
from mach.mixin.logging import LoggingMixin
from ..frontend.data import SandboxDerived
from .configenvironment import ConfigEnvironment
class BuildBackend(LoggingMixin):
    """Abstract base class for build backends.

    A build backend is merely a consumer of the build configuration (the output
    of the frontend processing). It does something with said data. What exactly
    is the discretion of the specific implementation.
    """

    __metaclass__ = ABCMeta

    def __init__(self, environment):
        assert isinstance(environment, ConfigEnvironment)

        self.populate_logger()

        self.environment = environment

        self._init()

    # FIX: was "def _init():" -- missing 'self', so the self._init() call in
    # __init__ raised TypeError (0 args expected, 1 given).
    def _init(self):
        """Hook point for child classes to perform actions during __init__.

        This exists so child classes don't need to implement __init__.
        """

    def consume(self, objs):
        """Consume a stream of TreeMetadata instances.

        This is the main method of the interface. This is what takes the
        frontend output and does something with it.

        Child classes are not expected to implement this method. Instead, the
        base class consumes objects and calls methods (possibly) implemented by
        child classes.
        """
        for obj in objs:
            self.consume_object(obj)

        # Write out a file indicating when this backend was last generated.
        # Opening in append mode creates it if needed; utime bumps the mtime.
        age_file = os.path.join(self.environment.topobjdir,
            'backend.%s.built' % self.__class__.__name__)
        with open(age_file, 'a'):
            os.utime(age_file, None)

        self.consume_finished()

    @abstractmethod
    def consume_object(self, obj):
        """Consumes an individual TreeMetadata instance.

        This is the main method used by child classes to react to build
        metadata.
        """

    def consume_finished(self):
        """Called when consume() has completed handling all objects."""

View File

@ -0,0 +1,179 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import unicode_literals
import ntpath
import os
import posixpath
import re
from os.path import relpath
from Preprocessor import Preprocessor
from ..util import (
ensureParentDir,
FileAvoidWrite,
)
RE_SHELL_ESCAPE = re.compile('''([ \t`#$^&*(){}\\|;'"<>?\[\]])''')


def shell_escape(s):
    """Escape some characters with a backslash, and double dollar signs."""
    # Backslash-escape shell metacharacters first, then double every '$'
    # so make does not treat it as a variable reference.
    escaped = RE_SHELL_ESCAPE.sub(r'\\\1', str(s))
    return escaped.replace('$', '$$')
class ConfigEnvironment(object):
    """Perform actions associated with a configured but bare objdir.

    The purpose of this class is to preprocess files from the source directory
    and output results in the object directory.

    There are two types of files: config files and config headers,
    each treated through a different member function.

    Creating a ConfigEnvironment requires a few arguments:
      - topsrcdir and topobjdir are, respectively, the top source and
        the top object directory.
      - defines is a list of (name, value) tuples. In autoconf, these are
        set with AC_DEFINE and AC_DEFINE_UNQUOTED
      - non_global_defines is a list of names appearing in defines above
        that are not meant to be exported in ACDEFINES and ALLDEFINES (see
        below)
      - substs is a list of (name, value) tuples. In autoconf, these are
        set with AC_SUBST.

    ConfigEnvironment automatically defines two additional substs variables
    from all the defines not appearing in non_global_defines:
      - ACDEFINES contains the defines in the form -DNAME=VALUE, for use on
        preprocessor command lines. The order in which defines were given
        when creating the ConfigEnvironment is preserved.
      - ALLDEFINES contains the defines in the form #define NAME VALUE, in
        sorted order, for use in config files, for an automatic listing of
        defines.
    and another additional subst variable from all the other substs:
      - ALLSUBSTS contains the substs in the form NAME = VALUE, in sorted
        order, for use in autoconf.mk. It includes ACDEFINES, but doesn't
        include ALLDEFINES.

    ConfigEnvironment expects a "top_srcdir" subst to be set with the top
    source directory, in msys format on windows. It is used to derive a
    "srcdir" subst when treating config files. It can either be an absolute
    path or a path relative to the topobjdir.
    """

    # NOTE(review): the mutable default arguments ([]) are shared across
    # calls; they are only read here, so this is benign.
    def __init__(self, topsrcdir, topobjdir, defines=[], non_global_defines=[],
            substs=[]):
        self.defines = dict(defines)
        self.substs = dict(substs)
        self.topsrcdir = topsrcdir
        self.topobjdir = topobjdir
        # Defines not excluded via non_global_defines feed ACDEFINES/ALLDEFINES.
        global_defines = [name for name, value in defines
            if not name in non_global_defines]
        # -DNAME=VALUE flags, preserving the order defines were given in.
        self.substs['ACDEFINES'] = ' '.join(['-D%s=%s' % (name,
            shell_escape(self.defines[name])) for name in global_defines])
        # NAME = VALUE lines (sorted) for autoconf.mk; includes ACDEFINES itself.
        self.substs['ALLSUBSTS'] = '\n'.join(sorted(['%s = %s' % (name,
            self.substs[name]) for name in self.substs]))
        # "#define NAME VALUE" lines, sorted, for config-file listings.
        self.substs['ALLDEFINES'] = '\n'.join(sorted(['#define %s %s' % (name,
            self.defines[name]) for name in global_defines]))

    def get_relative_srcdir(self, file):
        '''Returns the relative source directory for the given file, always
        using / as a path separator.
        '''
        assert(isinstance(file, basestring))
        # Normalize to forward slashes so results are stable across platforms.
        dir = posixpath.dirname(relpath(file, self.topobjdir).replace(os.sep, '/'))
        if dir:
            return dir
        return '.'

    def get_top_srcdir(self, file):
        '''Returns a normalized top_srcdir for the given file: if
        substs['top_srcdir'] is a relative path, it is relative to the
        topobjdir. Adjust it to be relative to the file path.'''
        top_srcdir = self.substs['top_srcdir']
        # Accept either posix- or windows-style absolute paths, since the
        # subst is in msys format on windows.
        if posixpath.isabs(top_srcdir) or ntpath.isabs(top_srcdir):
            return top_srcdir
        return posixpath.normpath(posixpath.join(self.get_depth(file), top_srcdir))

    def get_file_srcdir(self, file):
        '''Returns the srcdir for the given file, where srcdir is in msys
        format on windows, thus derived from top_srcdir.
        '''
        dir = self.get_relative_srcdir(file)
        top_srcdir = self.get_top_srcdir(file)
        return posixpath.normpath(posixpath.join(top_srcdir, dir))

    def get_depth(self, file):
        '''Returns the DEPTH for the given file, that is, the path to the
        object directory relative to the directory containing the given file.
        Always uses / as a path separator.
        '''
        return relpath(self.topobjdir, os.path.dirname(file)).replace(os.sep, '/')

    def get_input(self, file):
        '''Returns the input file path in the source tree that can be used
        to create the given config file or header.
        '''
        assert(isinstance(file, basestring))
        return os.path.normpath(os.path.join(self.topsrcdir, "%s.in" % relpath(file, self.topobjdir)))

    def create_config_file(self, path):
        '''Creates the given config file. A config file is generated by
        taking the corresponding source file and replacing occurrences of
        "@VAR@" by the value corresponding to "VAR" in the substs dict.
        Additional substs are defined according to the file being treated:
            "srcdir" for the path to its source directory
            "relativesrcdir" for its source directory relative to the top
            "DEPTH" for the path to the top object directory
        '''
        input = self.get_input(path)
        pp = Preprocessor()
        pp.context.update(self.substs)
        pp.context.update(top_srcdir = self.get_top_srcdir(path))
        pp.context.update(srcdir = self.get_file_srcdir(path))
        pp.context.update(relativesrcdir = self.get_relative_srcdir(path))
        pp.context.update(DEPTH = self.get_depth(path))
        # Only @VAR@ substitution is wanted; disable directive processing.
        pp.do_filter('attemptSubstitution')
        pp.setMarker(None)
        # FileAvoidWrite only touches the output if the content changed.
        with FileAvoidWrite(path) as pp.out:
            pp.do_include(input)

    def create_config_header(self, path):
        '''Creates the given config header. A config header is generated by
        taking the corresponding source file and replacing some #define/#undef
        occurrences:
            "#undef NAME" is turned into "#define NAME VALUE"
            "#define NAME" is unchanged
            "#define NAME ORIGINAL_VALUE" is turned into "#define NAME VALUE"
            "#undef UNKNOWN_NAME" is turned into "/* #undef UNKNOWN_NAME */"
        Whitespaces are preserved.
        '''
        with open(self.get_input(path), 'rU') as input:
            ensureParentDir(path)
            output = FileAvoidWrite(path)
            # Matches a preprocessor line: command, optional name and value.
            r = re.compile('^\s*#\s*(?P<cmd>[a-z]+)(?:\s+(?P<name>\S+)(?:\s+(?P<value>\S+))?)?', re.U)
            for l in input:
                m = r.match(l)
                if m:
                    cmd = m.group('cmd')
                    name = m.group('name')
                    value = m.group('value')
                    if name:
                        if name in self.defines:
                            if cmd == 'define' and value:
                                # Replace only the value, keeping whitespace.
                                l = l[:m.start('value')] + str(self.defines[name]) + l[m.end('value'):]
                            elif cmd == 'undef':
                                # "#undef NAME" -> "#define NAME VALUE".
                                l = l[:m.start('cmd')] + 'define' + l[m.end('cmd'):m.end('name')] + ' ' + str(self.defines[name]) + l[m.end('name'):]
                        elif cmd == 'undef':
                            # Name not defined: comment the #undef out.
                            l = '/* ' + l[:m.end('name')] + ' */' + l[m.end('name'):]
                output.write(l)
            output.close()

View File

@ -0,0 +1,144 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import unicode_literals
import logging
import os
from .base import BuildBackend
from ..frontend.data import (
ConfigFileSubstitution,
DirectoryTraversal,
)
from ..util import FileAvoidWrite
class BackendMakeFile(object):
    """Represents a generated backend.mk file.

    This is both a wrapper around FileAvoidWrite as well as a container that
    holds accumulated state.
    """

    def __init__(self, srcdir, objdir):
        self.srcdir = srcdir
        self.objdir = objdir

        # Filenames that influenced the content of this file.
        self.inputs = set()

        # Filenames that are automatically generated by the build backend.
        self.outputs = set()

        # Content is buffered in memory; FileAvoidWrite only rewrites
        # backend.mk on close() if the content actually changed.
        self.fh = FileAvoidWrite(os.path.join(objdir, 'backend.mk'))
        self.fh.write('# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT EDIT.\n')

    def write(self, buf):
        # Delegate to the underlying FileAvoidWrite buffer.
        self.fh.write(buf)

    def close(self):
        # Append dependency bookkeeping before flushing the buffer.
        # NOTE(review): sets are unordered, so the joined file lists may vary
        # between runs, defeating FileAvoidWrite -- consider sorted().
        if len(self.inputs):
            self.fh.write('BACKEND_INPUT_FILES += %s\n' % ' '.join(self.inputs))

        if len(self.outputs):
            self.fh.write('BACKEND_OUTPUT_FILES += %s\n' % ' '.join(self.outputs))

        self.fh.close()
class RecursiveMakeBackend(BuildBackend):
    """Backend that integrates with the existing recursive make build system.

    This backend facilitates the transition from Makefile.in to moz.build
    files.

    This backend performs Makefile.in -> Makefile conversion. It also writes
    out .mk files containing content derived from moz.build files. Both are
    consumed by the recursive make builder.

    This backend may eventually evolve to write out non-recursive make files.
    However, as long as there are Makefile.in files in the tree, we are tied to
    recursive make and thus will need this backend.
    """

    def _init(self):
        # Maps srcdir -> BackendMakeFile accumulating state for that directory.
        self._backend_files = {}

    def consume_object(self, obj):
        """Write out build files necessary to build with recursive make."""
        # FIX: dict.get(key, default) evaluates the default eagerly, so the
        # original constructed (and discarded) a fresh BackendMakeFile --
        # header write included -- on every call, even on a cache hit. Only
        # construct one when none exists for this srcdir yet.
        backend_file = self._backend_files.get(obj.srcdir)
        if backend_file is None:
            backend_file = BackendMakeFile(obj.srcdir, obj.objdir)
            self._backend_files[obj.srcdir] = backend_file

        # Every file that was evaluated to produce this object is an input.
        backend_file.inputs |= obj.sandbox_all_paths

        if isinstance(obj, DirectoryTraversal):
            self._process_directory_traversal(obj, backend_file)
        elif isinstance(obj, ConfigFileSubstitution):
            backend_file.inputs.add(obj.input_path)
            backend_file.outputs.add(obj.output_path)
            self.environment.create_config_file(obj.output_path)

    def consume_finished(self):
        # Flush every accumulated backend.mk and convert each Makefile.in,
        # in deterministic (sorted) directory order.
        for srcdir in sorted(self._backend_files.keys()):
            bf = self._backend_files[srcdir]

            if not os.path.exists(bf.objdir):
                os.makedirs(bf.objdir)

            makefile_in = os.path.join(srcdir, 'Makefile.in')

            if not os.path.exists(makefile_in):
                raise Exception('Could not find Makefile.in: %s' % makefile_in)

            out_path = os.path.join(bf.objdir, 'Makefile')
            self.log(logging.DEBUG, 'create_makefile', {'path': out_path},
                'Generating makefile: {path}')
            self.environment.create_config_file(out_path)
            bf.outputs.add(out_path)

            bf.close()

    def _process_directory_traversal(self, obj, backend_file):
        """Process a data.DirectoryTraversal instance."""
        fh = backend_file.fh

        for tier, dirs in obj.tier_dirs.iteritems():
            fh.write('TIERS += %s\n' % tier)

            if dirs:
                fh.write('tier_%s_dirs += %s\n' % (tier, ' '.join(dirs)))

            # tier_static_dirs should have the same keys as tier_dirs.
            if obj.tier_static_dirs[tier]:
                fh.write('tier_%s_staticdirs += %s\n' % (
                    tier, ' '.join(obj.tier_static_dirs[tier])))

        if obj.dirs:
            fh.write('DIRS := %s\n' % ' '.join(obj.dirs))

        if obj.parallel_dirs:
            fh.write('PARALLEL_DIRS := %s\n' % ' '.join(obj.parallel_dirs))

        if obj.tool_dirs:
            fh.write('TOOL_DIRS := %s\n' % ' '.join(obj.tool_dirs))

        if obj.test_dirs:
            fh.write('TEST_DIRS := %s\n' % ' '.join(obj.test_dirs))

        if obj.test_tool_dirs:
            # Test tool dirs are only traversed when tests are enabled.
            fh.write('ifdef ENABLE_TESTS\n')
            fh.write('TOOL_DIRS += %s\n' % ' '.join(obj.test_tool_dirs))
            fh.write('endif\n')

        if len(obj.external_make_dirs):
            fh.write('DIRS += %s\n' % ' '.join(obj.external_make_dirs))

        if len(obj.parallel_external_make_dirs):
            fh.write('PARALLEL_DIRS += %s\n' %
                ' '.join(obj.parallel_external_make_dirs))

View File

@ -34,12 +34,19 @@ class SandboxDerived(TreeMetadata):
__slots__ = (
'objdir',
'relativedir',
'sandbox_all_paths',
'sandbox_path',
'srcdir',
'topobjdir',
'topsrcdir',
)
def __init__(self, sandbox):
# Capture the files that were evaluated to build this sandbox.
self.sandbox_main_path = sandbox.main_path
self.sandbox_all_paths = sandbox.all_paths
# Basic directory state.
self.topsrcdir = sandbox['TOPSRCDIR']
self.topobjdir = sandbox['TOPOBJDIR']
@ -68,6 +75,8 @@ class DirectoryTraversal(SandboxDerived):
'test_tool_dirs',
'tier_dirs',
'tier_static_dirs',
'external_make_dirs',
'parallel_external_make_dirs',
)
def __init__(self, sandbox):
@ -80,3 +89,24 @@ class DirectoryTraversal(SandboxDerived):
self.test_tool_dirs = []
self.tier_dirs = OrderedDict()
self.tier_static_dirs = OrderedDict()
self.external_make_dirs = []
self.parallel_external_make_dirs = []
class ConfigFileSubstitution(SandboxDerived):
    """Describes a config file that will be generated using substitutions.

    The output_path attribute defines the relative path from topsrcdir of the
    output file to generate.
    """

    __slots__ = (
        'input_path',
        'output_path',
    )

    def __init__(self, sandbox):
        SandboxDerived.__init__(self, sandbox)

        # Path to the input (.in) template; populated by the emitter.
        self.input_path = None

        # Path of the file to generate; populated by the emitter.
        self.output_path = None

View File

@ -4,7 +4,13 @@
from __future__ import unicode_literals
from .data import DirectoryTraversal
import os
from .data import (
DirectoryTraversal,
ConfigFileSubstitution,
)
from .reader import MozbuildSandbox
@ -42,6 +48,15 @@ class TreeMetadataEmitter(object):
# the recursive make backend.
for o in self._emit_directory_traversal_from_sandbox(sandbox): yield o
for path in sandbox['CONFIGURE_SUBST_FILES']:
if os.path.isabs(path):
path = path[1:]
sub = ConfigFileSubstitution(sandbox)
sub.input_path = os.path.join(sandbox['SRCDIR'], '%s.in' % path)
sub.output_path = os.path.join(sandbox['OBJDIR'], path)
yield sub
def _emit_directory_traversal_from_sandbox(self, sandbox):
o = DirectoryTraversal(sandbox)
o.dirs = sandbox.get('DIRS', [])
@ -49,6 +64,8 @@ class TreeMetadataEmitter(object):
o.tool_dirs = sandbox.get('TOOL_DIRS', [])
o.test_dirs = sandbox.get('TEST_DIRS', [])
o.test_tool_dirs = sandbox.get('TEST_TOOL_DIRS', [])
o.external_make_dirs = sandbox.get('EXTERNAL_MAKE_DIRS', [])
o.parallel_external_make_dirs = sandbox.get('PARALLEL_EXTERNAL_MAKE_DIRS', [])
if 'TIERS' in sandbox:
for tier in sandbox['TIERS']:

View File

@ -37,6 +37,7 @@ from mozbuild.util import (
)
from .sandbox import (
SandboxError,
SandboxExecutionError,
SandboxLoadError,
Sandbox,
@ -59,6 +60,14 @@ def log(logger, level, action, params, formatter):
logger.log(level, formatter, extra={'action': action, 'params': params})
class SandboxCalledError(SandboxError):
    """Raised when a sandboxed moz.build file invokes the error() function."""

    def __init__(self, file_stack, message):
        # Record the user-supplied message before delegating stack capture.
        self.message = message
        SandboxError.__init__(self, file_stack)
class MozbuildSandbox(Sandbox):
"""Implementation of a Sandbox tailored for mozbuild files.
@ -165,6 +174,13 @@ class MozbuildSandbox(Sandbox):
# exec_file() handles normalization and verification of the path.
self.exec_file(path)
def _warning(self, message):
# FUTURE consider capturing warnings in a variable instead of printing.
print('WARNING: %s' % message, file=sys.stderr)
def _error(self, message):
raise SandboxCalledError(self._execution_stack, message)
class SandboxValidationError(Exception):
"""Represents an error encountered when validating sandbox results."""
@ -187,10 +203,12 @@ class BuildReaderError(Exception):
which affect error messages, of course).
"""
def __init__(self, file_stack, trace, sandbox_exec_error=None,
sandbox_load_error=None, validation_error=None, other_error=None):
sandbox_load_error=None, validation_error=None, other_error=None,
sandbox_called_error=None):
self.file_stack = file_stack
self.trace = trace
self.sandbox_called_error = sandbox_called_error
self.sandbox_exec = sandbox_exec_error
self.sandbox_load = sandbox_load_error
self.validation_error = validation_error
@ -218,7 +236,8 @@ class BuildReaderError(Exception):
@property
def sandbox_error(self):
return self.sandbox_exec or self.sandbox_load
return self.sandbox_exec or self.sandbox_load or \
self.sandbox_called_error
def __str__(self):
s = StringIO()
@ -260,7 +279,15 @@ class BuildReaderError(Exception):
def _print_sandbox_error(self, s):
# Try to find the frame of the executed code.
script_frame = None
for frame in traceback.extract_tb(self.sandbox_error.trace):
# We don't currently capture the trace for SandboxCalledError.
# Therefore, we don't get line numbers from the moz.build file.
# FUTURE capture this.
trace = getattr(self.sandbox_error, 'trace', None)
frames = []
if trace:
frames = traceback.extract_tb(trace)
for frame in frames:
if frame[0] == self.actual_file:
script_frame = frame
@ -276,12 +303,27 @@ class BuildReaderError(Exception):
s.write(' %s\n' % script_frame[3])
s.write('\n')
if self.sandbox_called_error is not None:
self._print_sandbox_called_error(s)
return
if self.sandbox_load is not None:
self._print_sandbox_load_error(s)
return
self._print_sandbox_exec_error(s)
def _print_sandbox_called_error(self, s):
    """Write the user-facing explanation for an error() call to *s*."""
    assert self.sandbox_called_error is not None

    for line in (
        'A moz.build file called the error() function.\n',
        '\n',
        'The error it encountered is:\n',
        '\n',
        ' %s\n' % self.sandbox_called_error.message,
        '\n',
        'Correct the error condition and try again.\n',
    ):
        s.write(line)
def _print_sandbox_load_error(self, s):
assert self.sandbox_load is not None
@ -479,6 +521,10 @@ class BuildReader(object):
except BuildReaderError as bre:
raise bre
except SandboxCalledError as sce:
raise BuildReaderError(list(self._execution_stack),
sys.exc_info()[2], sandbox_called_error=sce)
except SandboxExecutionError as se:
raise BuildReaderError(list(self._execution_stack),
sys.exc_info()[2], sandbox_exec_error=se)

View File

@ -259,6 +259,8 @@ class Sandbox(object):
builtins=builtins)
self._locals = LocalNamespace(self._globals)
self._execution_stack = []
self.main_path = None
self.all_paths = set()
def exec_file(self, path):
"""Execute code at a path in the sandbox.
@ -290,6 +292,11 @@ class Sandbox(object):
"""
self._execution_stack.append(path)
if self.main_path is None:
self.main_path = path
self.all_paths.add(path)
# We don't have to worry about bytecode generation here because we are
# too low-level for that. However, we could add bytecode generation via
# the marshall module if parsing performance were ever an issue.

View File

@ -118,6 +118,28 @@ VARIABLES = {
This variable is typically not populated directly. Instead, it is
populated by calling add_tier_dir().
"""),
'EXTERNAL_MAKE_DIRS': (list, [],
"""Directories that build with make but don't use moz.build files.
This is like DIRS except it implies that |make| is used to build the
directory and that the directory does not define itself with moz.build
files.
"""),
'PARALLEL_EXTERNAL_MAKE_DIRS': (list, [],
"""Parallel version of EXTERNAL_MAKE_DIRS.
"""),
'CONFIGURE_SUBST_FILES': (list, [],
"""Output files that will be generated using configure-like substitution.
This is a substitute for AC_OUTPUT in autoconf. For each path in this
list, we will search for a file in the srcdir having the name
{path}.in. The contents of this file will be read and variable patterns
like @foo@ will be substituted with the values of the AC_SUBST
variables declared during configure.
"""),
}
# The set of functions exposed to the sandbox.
@ -184,6 +206,19 @@ FUNCTIONS = {
add_tier_dir('base', 'foo', static=True)
"""),
'warning': ('_warning', (str,),
"""Issue a warning.
Warnings are string messages that are printed during execution.
Warnings are ignored during execution.
"""),
'error': ('_error', (str,),
"""Issue a fatal error.
If this function is called, processing is aborted immediately.
"""),
}
# Special variables. These complement VARIABLES.

View File

@ -0,0 +1,102 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import unicode_literals
import os
import unittest
from shutil import rmtree
from tempfile import mkdtemp
from mach.logging import LoggingManager
from mozbuild.backend.configenvironment import ConfigEnvironment
from mozbuild.frontend.emitter import TreeMetadataEmitter
from mozbuild.frontend.reader import BuildReader
log_manager = LoggingManager()
log_manager.add_terminal_logging()
test_data_path = os.path.abspath(os.path.dirname(__file__))
test_data_path = os.path.join(test_data_path, 'data')
CONFIGS = {
'stub0': {
'defines': [
('MOZ_TRUE_1', '1'),
('MOZ_TRUE_2', '1'),
],
'non_global_defines': [
('MOZ_NONGLOBAL_1', '1'),
('MOZ_NONGLOBAL_2', '1'),
],
'substs': [
('MOZ_FOO', 'foo'),
('MOZ_BAR', 'bar'),
],
},
'external_make_dirs': {
'defines': [],
'non_global_defines': [],
'substs': [],
},
'substitute_config_files': {
'defines': [],
'non_global_defines': [],
'substs': [
('MOZ_FOO', 'foo'),
('MOZ_BAR', 'bar'),
],
},
}
class BackendTester(unittest.TestCase):
    """Shared helpers for build-backend tests.

    Provides fixture environments (from the module-level CONFIGS table and
    the data/ directory) and convenience wrappers that run the frontend
    emitter and feed its output into a backend under test.
    """

    def _get_environment(self, name):
        """Obtain a new instance of a ConfigEnvironment for a known profile.

        A new temporary object directory is created for the environment. The
        environment is cleaned up automatically when the test finishes.
        """
        config = CONFIGS[name]

        objdir = mkdtemp()
        self.addCleanup(rmtree, objdir)

        srcdir = os.path.join(test_data_path, name)
        # NOTE(review): this appends to the shared CONFIGS entry, so repeated
        # calls for the same profile accumulate top_srcdir tuples — presumably
        # harmless for these tests, but verify if profiles get reused.
        config['substs'].append(('top_srcdir', srcdir))

        return ConfigEnvironment(srcdir, objdir, **config)

    def _emit(self, name):
        """Return (environment, object stream) for the named fixture tree."""
        env = self._get_environment(name)

        reader = BuildReader(env)
        emitter = TreeMetadataEmitter(env)

        return env, emitter.emit(reader.read_topsrcdir())

    def _consume(self, name, cls):
        """Emit the fixture tree and feed it into a backend of type cls.

        Returns the ConfigEnvironment so tests can inspect the objdir.
        """
        env, objs = self._emit(name)

        backend = cls(env)
        backend.consume(objs)

        return env

    def _tree_paths(self, topdir, filename):
        """Yield relative paths of every file named *filename* under topdir."""
        for dirpath, dirnames, filenames in os.walk(topdir):
            for f in filenames:
                if f == filename:
                    yield os.path.relpath(os.path.join(dirpath, f), topdir)

    def _mozbuild_paths(self, env):
        """Yield relative paths of all moz.build files in the source tree."""
        return self._tree_paths(env.topsrcdir, 'moz.build')

    def _makefile_in_paths(self, env):
        """Yield relative paths of all Makefile.in files in the source tree."""
        return self._tree_paths(env.topsrcdir, 'Makefile.in')
__all__ = ['BackendTester']

View File

@ -0,0 +1,7 @@
# Any copyright is dedicated to the Public Domain.
# http://creativecommons.org/publicdomain/zero/1.0/
DIRS = ['dir']
PARALLEL_DIRS = ['p_dir']
EXTERNAL_MAKE_DIRS = ['external']
PARALLEL_EXTERNAL_MAKE_DIRS = ['p_external']

View File

@ -0,0 +1,11 @@
# Any copyright is dedicated to the Public Domain.
# http://creativecommons.org/publicdomain/zero/1.0/
DEPTH := @DEPTH@
topsrcdir := @top_srcdir@
srcdir := @srcdir@
VPATH = @srcdir@
include $(DEPTH)/config/autoconf.mk
include $(topsrcdir)/config/rules.mk

View File

@ -0,0 +1,12 @@
# Any copyright is dedicated to the Public Domain.
# http://creativecommons.org/publicdomain/zero/1.0/
DEPTH := @DEPTH@
topsrcdir := @top_srcdir@
srcdir := @srcdir@
VPATH = @srcdir@
include $(DEPTH)/config/autoconf.mk
include $(topsrcdir)/config/rules.mk

View File

@ -0,0 +1,4 @@
# Any copyright is dedicated to the Public Domain.
# http://creativecommons.org/publicdomain/zero/1.0/

View File

@ -0,0 +1,12 @@
# Any copyright is dedicated to the Public Domain.
# http://creativecommons.org/publicdomain/zero/1.0/
DEPTH := @DEPTH@
topsrcdir := @top_srcdir@
srcdir := @srcdir@
VPATH = @srcdir@
include $(DEPTH)/config/autoconf.mk
include $(topsrcdir)/config/rules.mk

View File

@ -0,0 +1,3 @@
# Any copyright is dedicated to the Public Domain.
# http://creativecommons.org/publicdomain/zero/1.0/

View File

@ -0,0 +1,12 @@
# Any copyright is dedicated to the Public Domain.
# http://creativecommons.org/publicdomain/zero/1.0/
DEPTH := @DEPTH@
topsrcdir := @top_srcdir@
srcdir := @srcdir@
VPATH = @srcdir@
include $(DEPTH)/config/autoconf.mk
include $(topsrcdir)/config/rules.mk

View File

@ -0,0 +1,3 @@
# Any copyright is dedicated to the Public Domain.
# http://creativecommons.org/publicdomain/zero/1.0/

View File

@ -0,0 +1,6 @@
# Any copyright is dedicated to the Public Domain.
# http://creativecommons.org/publicdomain/zero/1.0/
DIRS += ['dir1']
PARALLEL_DIRS += ['dir2']
TEST_DIRS += ['dir3']

View File

@ -0,0 +1 @@
TEST = @MOZ_FOO@

View File

@ -0,0 +1,4 @@
# Any copyright is dedicated to the Public Domain.
# http://creativecommons.org/publicdomain/zero/1.0/
CONFIGURE_SUBST_FILES = ['foo']

View File

@ -1,55 +1,23 @@
from __future__ import with_statement
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os, posixpath
from StringIO import StringIO
import unittest
from mozunit import main, MockedOpen
import ConfigStatus
from ConfigStatus import FileAvoidWrite
import mozbuild.backend.configenvironment as ConfigStatus
class ConfigEnvironment(ConfigStatus.ConfigEnvironment):
def __init__(self, **args):
ConfigStatus.ConfigEnvironment.__init__(self, **args)
def __init__(self, *args, **kwargs):
ConfigStatus.ConfigEnvironment.__init__(self, *args, **kwargs)
# Be helpful to unit tests
if not 'top_srcdir' in self.substs:
if os.path.isabs(self.topsrcdir):
self.substs['top_srcdir'] = self.topsrcdir.replace(os.sep, '/')
else:
self.substs['top_srcdir'] = ConfigStatus.relpath(self.topsrcdir, self.topobjdir).replace(os.sep, '/')
class TestFileAvoidWrite(unittest.TestCase):
def test_file_avoid_write(self):
'''Test the FileAvoidWrite class
'''
with MockedOpen({'file': 'content'}):
# Overwriting an existing file replaces its content
with FileAvoidWrite('file') as file:
file.write('bazqux')
self.assertEqual(open('file', 'r').read(), 'bazqux')
# Creating a new file (obviously) stores its content
with FileAvoidWrite('file2') as file:
file.write('content')
self.assertEqual(open('file2').read(), 'content')
class MyMockedOpen(MockedOpen):
'''MockedOpen extension to raise an exception if something
attempts to write in an opened file.
'''
def __call__(self, name, mode):
if 'w' in mode:
raise Exception, 'Unexpected open with write mode'
return MockedOpen.__call__(self, name, mode)
with MyMockedOpen({'file': 'content'}):
# Validate that MyMockedOpen works as intended
file = FileAvoidWrite('file')
file.write('foobar')
self.assertRaises(Exception, file.close)
# Check that no write actually happens when writing the
# same content as what already is in the file
with FileAvoidWrite('file') as file:
file.write('content')
self.substs['top_srcdir'] = os.path.relpath(self.topsrcdir, self.topobjdir).replace(os.sep, '/')
class TestEnvironment(unittest.TestCase):
@ -57,7 +25,7 @@ class TestEnvironment(unittest.TestCase):
'''Test the automatically set values of ACDEFINES, ALLDEFINES
and ALLSUBSTS.
'''
env = ConfigEnvironment(
env = ConfigEnvironment('.', '.',
defines = [ ('foo', 'bar'), ('baz', 'qux 42'),
('abc', 'def'), ('extra', 'foobar') ],
non_global_defines = ['extra', 'ignore'],
@ -86,7 +54,7 @@ zzz = "abc def"''')
@foo@
@bar@
'''}):
env = ConfigEnvironment(substs = [ ('foo', 'bar baz') ])
env = ConfigEnvironment('.', '.', substs = [ ('foo', 'bar baz') ])
env.create_config_file('file')
self.assertEqual(open('file', 'r').read(), '''#ifdef foo
bar baz
@ -113,7 +81,7 @@ bar baz
# define foo 42
#endif
'''}):
env = ConfigEnvironment(defines = [ ('foo', 'baz qux'), ('baz', 1) ])
env = ConfigEnvironment('.', '.', defines = [ ('foo', 'baz qux'), ('baz', 1) ])
env.create_config_header('file')
self.assertEqual(open('file','r').read(), '''
/* Comment */

View File

@ -0,0 +1,117 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import unicode_literals
import os
from mozunit import main
from mozbuild.backend.configenvironment import ConfigEnvironment
from mozbuild.backend.recursivemake import RecursiveMakeBackend
from mozbuild.frontend.emitter import TreeMetadataEmitter
from mozbuild.frontend.reader import BuildReader
from mozbuild.test.backend.common import BackendTester
class TestRecursiveMakeBackend(BackendTester):
    """Integration tests for the recursive-make build backend.

    Each test consumes one of the fixture source trees and inspects the
    files the backend writes into the temporary object directory.
    """

    def test_basic(self):
        """Ensure the RecursiveMakeBackend works without error."""
        env = self._consume('stub0', RecursiveMakeBackend)
        # The backend drops a marker file in the objdir when it completes.
        self.assertTrue(os.path.exists(os.path.join(env.topobjdir,
            'backend.RecursiveMakeBackend.built')))

    def test_output_files(self):
        """Ensure proper files are generated."""
        env = self._consume('stub0', RecursiveMakeBackend)

        # Every directory with a Makefile.in should get a generated Makefile
        # and a backend.mk in the corresponding objdir location.
        expected = ['', 'dir1', 'dir2']

        for d in expected:
            in_path = os.path.join(env.topsrcdir, d, 'Makefile.in')
            out_makefile = os.path.join(env.topobjdir, d, 'Makefile')
            out_backend = os.path.join(env.topobjdir, d, 'backend.mk')

            self.assertTrue(os.path.exists(in_path))
            self.assertTrue(os.path.exists(out_makefile))
            self.assertTrue(os.path.exists(out_backend))

    def test_makefile_conversion(self):
        """Ensure Makefile.in is converted properly."""
        env = self._consume('stub0', RecursiveMakeBackend)

        p = os.path.join(env.topobjdir, 'Makefile')

        # Skip the first 3 lines (generated header) before comparing.
        lines = [l.strip() for l in open(p, 'rt').readlines()[3:]]
        self.assertEqual(lines, [
            'DEPTH := .',
            'topsrcdir := %s' % env.topsrcdir,
            'srcdir := %s' % env.topsrcdir,
            'VPATH = %s' % env.topsrcdir,
            '',
            'include $(DEPTH)/config/autoconf.mk',
            '',
            'include $(topsrcdir)/config/rules.mk'
        ])

    def test_backend_mk(self):
        """Ensure backend.mk file is written out properly."""
        env = self._consume('stub0', RecursiveMakeBackend)

        p = os.path.join(env.topobjdir, 'backend.mk')

        # Drop the leading header line and the trailing 2 lines, keeping
        # only the traversal variable assignments.
        lines = [l.strip() for l in open(p, 'rt').readlines()[1:-2]]
        self.assertEqual(lines, [
            'DIRS := dir1',
            'PARALLEL_DIRS := dir2',
            'TEST_DIRS := dir3',
        ])

    def test_no_mtime_bump(self):
        """Ensure mtime is not updated if file content does not change."""
        env = self._consume('stub0', RecursiveMakeBackend)

        makefile_path = os.path.join(env.topobjdir, 'Makefile')
        backend_path = os.path.join(env.topobjdir, 'backend.mk')
        makefile_mtime = os.path.getmtime(makefile_path)
        backend_mtime = os.path.getmtime(backend_path)

        # Run the backend a second time over the same tree; unchanged
        # outputs must not be rewritten.
        reader = BuildReader(env)
        emitter = TreeMetadataEmitter(env)
        backend = RecursiveMakeBackend(env)
        backend.consume(emitter.emit(reader.read_topsrcdir()))

        self.assertEqual(os.path.getmtime(makefile_path), makefile_mtime)
        self.assertEqual(os.path.getmtime(backend_path), backend_mtime)

    def test_external_make_dirs(self):
        """Ensure we have make recursion into external make directories."""
        env = self._consume('external_make_dirs', RecursiveMakeBackend)

        backend_path = os.path.join(env.topobjdir, 'backend.mk')
        lines = [l.strip() for l in open(backend_path, 'rt').readlines()[1:-2]]
        # External make dirs are appended (+=) after the regular DIRS
        # assignments.
        self.assertEqual(lines, [
            'DIRS := dir',
            'PARALLEL_DIRS := p_dir',
            'DIRS += external',
            'PARALLEL_DIRS += p_external',
        ])

    def test_substitute_config_files(self):
        """Ensure substituted config files are produced."""
        env = self._consume('substitute_config_files', RecursiveMakeBackend)

        # The fixture declares CONFIGURE_SUBST_FILES = ['foo'] with
        # @MOZ_FOO@ substituted from the profile's substs.
        p = os.path.join(env.topobjdir, 'foo')
        self.assertTrue(os.path.exists(p))
        lines = [l.strip() for l in open(p, 'rt').readlines()]
        self.assertEqual(lines, [
            'TEST = foo',
        ])
if __name__ == '__main__':
main()

View File

@ -0,0 +1,5 @@
# Any copyright is dedicated to the Public Domain.
# http://creativecommons.org/publicdomain/zero/1.0/
CONFIGURE_SUBST_FILES += ['foo']
CONFIGURE_SUBST_FILES += ['bar']

View File

@ -0,0 +1,5 @@
# Any copyright is dedicated to the Public Domain.
# http://creativecommons.org/publicdomain/zero/1.0/
error('Some error.')

View File

@ -6,3 +6,6 @@ PARALLEL_DIRS = ['parallel']
TEST_DIRS = ['test']
TEST_TOOL_DIRS = ['test_tool']
TOOL_DIRS = ['tool']
EXTERNAL_MAKE_DIRS = ['external_make']
PARALLEL_EXTERNAL_MAKE_DIRS = ['parallel_external_make']

View File

@ -7,7 +7,12 @@ from __future__ import unicode_literals
import os
import unittest
from mozbuild.frontend.data import DirectoryTraversal
from mozunit import main
from mozbuild.frontend.data import (
ConfigFileSubstitution,
DirectoryTraversal,
)
from mozbuild.frontend.emitter import TreeMetadataEmitter
from mozbuild.frontend.reader import BuildReader
@ -41,6 +46,8 @@ class TestEmitterBasic(unittest.TestCase):
self.assertEqual(o.test_tool_dirs, [])
self.assertEqual(len(o.tier_dirs), 0)
self.assertEqual(len(o.tier_static_dirs), 0)
self.assertTrue(os.path.isabs(o.sandbox_main_path))
self.assertEqual(len(o.sandbox_all_paths), 1)
reldirs = [o.relativedir for o in objs]
self.assertEqual(reldirs, ['', 'foo', 'foo/biz', 'bar'])
@ -71,6 +78,9 @@ class TestEmitterBasic(unittest.TestCase):
self.assertEqual(o.test_dirs, ['test'])
self.assertEqual(o.test_tool_dirs, ['test_tool'])
self.assertEqual(o.tool_dirs, ['tool'])
self.assertEqual(o.external_make_dirs, ['external_make'])
self.assertEqual(o.parallel_external_make_dirs,
['parallel_external_make'])
def test_tier_simple(self):
reader = self.reader('traversal-tier-simple')
@ -83,3 +93,23 @@ class TestEmitterBasic(unittest.TestCase):
self.assertEqual(reldirs, ['', 'foo', 'foo/biz', 'foo_static', 'bar',
'baz'])
def test_config_file_substitution(self):
reader = self.reader('config-file-substitution')
emitter = TreeMetadataEmitter(reader.config)
objs = list(emitter.emit(reader.read_topsrcdir()))
self.assertEqual(len(objs), 3)
self.assertIsInstance(objs[0], DirectoryTraversal)
self.assertIsInstance(objs[1], ConfigFileSubstitution)
self.assertIsInstance(objs[2], ConfigFileSubstitution)
topobjdir = os.path.abspath(reader.config.topobjdir)
self.assertEqual(os.path.normpath(objs[1].output_path),
os.path.normpath(os.path.join(topobjdir, 'foo')))
self.assertEqual(os.path.normpath(objs[2].output_path),
os.path.normpath(os.path.join(topobjdir, 'bar')))
if __name__ == '__main__':
main()

View File

@ -227,6 +227,16 @@ class TestBuildReader(unittest.TestCase):
self.assertIn('Directory (foo) registered multiple times in DIRS',
str(e))
def test_error_error_func(self):
    """An error() call in a moz.build file surfaces as a BuildReaderError."""
    reader = self.reader('reader-error-error-func')

    with self.assertRaises(BuildReaderError) as raised:
        list(reader.read_topsrcdir())

    message = str(raised.exception)
    self.assertIn('A moz.build file called the error() function.', message)
    self.assertIn(' Some error.', message)
if __name__ == '__main__':
main()

View File

@ -10,7 +10,10 @@ import unittest
from mozunit import main
from mozbuild.frontend.reader import MozbuildSandbox
from mozbuild.frontend.reader import (
MozbuildSandbox,
SandboxCalledError,
)
from mozbuild.frontend.sandbox import (
SandboxExecutionError,
@ -117,6 +120,8 @@ class TestSandbox(unittest.TestCase):
sandbox.exec_source('foo = True', 'foo.py')
self.assertNotIn('foo', sandbox)
self.assertEqual(sandbox.main_path, 'foo.py')
self.assertEqual(sandbox.all_paths, set(['foo.py']))
def test_exec_compile_error(self):
sandbox = self.sandbox()
@ -126,6 +131,7 @@ class TestSandbox(unittest.TestCase):
self.assertEqual(se.exception.file_stack, ['foo.py'])
self.assertIsInstance(se.exception.exc_value, SyntaxError)
self.assertEqual(sandbox.main_path, 'foo.py')
def test_exec_import_denied(self):
sandbox = self.sandbox()
@ -210,6 +216,9 @@ add_tier_dir('t1', 'bat', static=True)
sandbox.exec_file('moz.build')
self.assertEqual(sandbox['DIRS'], ['foo', 'bar'])
self.assertEqual(sandbox.main_path,
os.path.join(sandbox['TOPSRCDIR'], 'moz.build'))
self.assertEqual(len(sandbox.all_paths), 2)
def test_include_outside_topsrcdir(self):
sandbox = self.sandbox(data_path='include-outside-topsrcdir')
@ -267,6 +276,30 @@ add_tier_dir('t1', 'bat', static=True)
self.assertEqual(sandbox['DIRS'], ['foo'])
def test_external_make_dirs(self):
sandbox = self.sandbox()
sandbox.exec_source('EXTERNAL_MAKE_DIRS += ["foo"]', 'test.py')
sandbox.exec_source('PARALLEL_EXTERNAL_MAKE_DIRS += ["bar"]', 'test.py')
self.assertEqual(sandbox['EXTERNAL_MAKE_DIRS'], ['foo'])
self.assertEqual(sandbox['PARALLEL_EXTERNAL_MAKE_DIRS'], ['bar'])
def test_error(self):
sandbox = self.sandbox()
with self.assertRaises(SandboxCalledError) as sce:
sandbox.exec_source('error("This is an error.")', 'test.py')
e = sce.exception
self.assertEqual(e.message, 'This is an error.')
def test_substitute_config_files(self):
sandbox = self.sandbox()
sandbox.exec_source('CONFIGURE_SUBST_FILES += ["foo", "bar"]',
'test.py')
self.assertEqual(sandbox['CONFIGURE_SUBST_FILES'], ['foo', 'bar'])
if __name__ == '__main__':
main()

View File

@ -1,6 +1,6 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import unicode_literals
@ -8,9 +8,15 @@ import hashlib
import unittest
from mozfile.mozfile import NamedTemporaryFile
from mozunit import main
from mozunit import (
main,
MockedOpen,
)
from mozbuild.util import hash_file
from mozbuild.util import (
FileAvoidWrite,
hash_file,
)
class TestHashing(unittest.TestCase):
@ -44,5 +50,39 @@ class TestHashing(unittest.TestCase):
self.assertEqual(actual, expected)
class TestFileAvoidWrite(unittest.TestCase):
def test_file_avoid_write(self):
with MockedOpen({'file': 'content'}):
# Overwriting an existing file replaces its content
with FileAvoidWrite('file') as file:
file.write('bazqux')
self.assertEqual(open('file', 'r').read(), 'bazqux')
# Creating a new file (obviously) stores its content
with FileAvoidWrite('file2') as file:
file.write('content')
self.assertEqual(open('file2').read(), 'content')
class MyMockedOpen(MockedOpen):
'''MockedOpen extension to raise an exception if something
attempts to write in an opened file.
'''
def __call__(self, name, mode):
if 'w' in mode:
raise Exception, 'Unexpected open with write mode'
return MockedOpen.__call__(self, name, mode)
with MyMockedOpen({'file': 'content'}):
# Validate that MyMockedOpen works as intended
file = FileAvoidWrite('file')
file.write('foobar')
self.assertRaises(Exception, file.close)
# Check that no write actually happens when writing the
# same content as what already is in the file
with FileAvoidWrite('file') as file:
file.write('content')
if __name__ == '__main__':
main()

View File

@ -8,7 +8,11 @@
from __future__ import unicode_literals
import copy
import errno
import hashlib
import os
from StringIO import StringIO
def hash_file(path):
@ -84,3 +88,52 @@ class ReadOnlyDefaultDict(DefaultOnReadDict, ReadOnlyDict):
def __init__(self, d, defaults=None, global_default=undefined):
DefaultOnReadDict.__init__(self, d, defaults, global_default)
def ensureParentDir(path):
    """Ensures the directory parent to the given file exists.

    Intermediate directories are created as needed. Does nothing when the
    path has no directory component or the parent already exists.
    """
    d = os.path.dirname(path)
    # Check the parent directory itself, not the file path: the file
    # typically does not exist yet, so testing `path` (as before) forced a
    # makedirs() attempt -- and EEXIST suppression -- on every call.
    if d and not os.path.exists(d):
        try:
            os.makedirs(d)
        except OSError as error:
            # A concurrent creator may win the race; that is fine.
            if error.errno != errno.EEXIST:
                raise
class FileAvoidWrite(StringIO):
    """A StringIO that only touches its target file when content changes.

    Output is buffered in memory. On close(), the buffer is compared with
    the current on-disk content and the file is rewritten only when the two
    differ (or the existing file cannot be read), leaving the mtime of
    unchanged files intact.
    """
    def __init__(self, filename):
        StringIO.__init__(self)
        self.filename = filename

    def close(self):
        new_content = self.getvalue()
        StringIO.close(self)

        unchanged = False
        try:
            fh = open(self.filename, 'rU')
        except IOError:
            # No readable existing file; fall through and write.
            pass
        else:
            try:
                try:
                    unchanged = fh.read() == new_content
                except IOError:
                    # Treat an unreadable file as changed content.
                    pass
            finally:
                fh.close()

        if unchanged:
            return

        ensureParentDir(self.filename)
        with open(self.filename, 'w') as fh:
            fh.write(new_content)

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()

View File

@ -366,15 +366,17 @@ HwcComposer2D::PrepareLayerList(Layer* aLayer,
hwcLayer.compositionType = HWC_USE_COPYBIT;
if (!fillColor) {
if (transform.xx == 0) {
if (transform.xy < 0) {
gfxMatrix rotation = transform * aGLWorldTransform;
// Compute fuzzy equal like PreservesAxisAlignedRectangles()
if (fabs(rotation.xx) < 1e-6) {
if (rotation.xy < 0) {
hwcLayer.transform = HWC_TRANSFORM_ROT_90;
LOGD("Layer buffer rotated 90 degrees");
} else {
hwcLayer.transform = HWC_TRANSFORM_ROT_270;
LOGD("Layer buffer rotated 270 degrees");
}
} else if (transform.xx < 0) {
} else if (rotation.xx < 0) {
hwcLayer.transform = HWC_TRANSFORM_ROT_180;
LOGD("Layer buffer rotated 180 degrees");
} else {
@ -400,6 +402,11 @@ bool
HwcComposer2D::TryRender(Layer* aRoot,
const gfxMatrix& aGLWorldTransform)
{
if (!aGLWorldTransform.PreservesAxisAlignedRectangles()) {
LOGD("Render aborted. World transform has non-square angle rotation");
return false;
}
MOZ_ASSERT(Initialized());
if (mList) {
mList->numHwLayers = 0;