Mirror of https://gitlab.winehq.org/wine/wine-gecko.git (synced 2024-09-13 09:24:08 -07:00)
Merge the last PGO-green inbound changeset to m-c
commit 522364278a
@@ -151,7 +151,7 @@ class VersionFlag(object):
         elif len(value) > 1 and value[0] in ['<', '>']:
             if value[1] == '=':
                 if len(value) < 3:
-                    return errors.fatal('Malformed flag: %s' % definition)
+                    return errors.fatal('Malformed flag: %s' % definition)
                 self.values.append((value[0:2], LooseVersion(value[2:])))
             else:
                 self.values.append((value[0], LooseVersion(value[1:])))
@@ -324,6 +324,7 @@ MANIFESTS_TYPES = dict([(c.type, c) for c in globals().values()
 
 MANIFEST_RE = re.compile(r'\s*#.*$')
 
+
 def parse_manifest_line(base, line):
     '''
     Parse a line from a manifest file with the given base directory and
@@ -130,7 +130,7 @@ class FileCopier(FileRegistry):
     FileRegistry with the ability to copy the registered files to a separate
     directory.
     '''
-    def copy(self, destination):
+    def copy(self, destination, skip_if_older=True):
         '''
         Copy all registered files to the given destination path. The given
         destination can be an existing directory, or not exist at all. It
@@ -148,7 +148,7 @@ class FileCopier(FileRegistry):
             destfile = os.path.normpath(os.path.join(destination, path))
             dest_files.add(destfile)
             ensure_parent_dir(destfile)
-            file.copy(destfile)
+            file.copy(destfile, skip_if_older)
 
         actual_dest_files = set()
         for root, dirs, files in os.walk(destination):
@@ -176,7 +176,7 @@ class Jarrer(FileRegistry, BaseFile):
         self._preload = []
         FileRegistry.__init__(self)
 
-    def copy(self, dest):
+    def copy(self, dest, skip_if_older=True):
         '''
         Pack all registered files in the given destination jar. The given
         destination jar may be a path to jar file, or a Dest instance for
@@ -234,7 +234,7 @@ class Jarrer(FileRegistry, BaseFile):
                 deflater = DeflaterDest(old_contents[path], self.compress)
             else:
                 deflater = DeflaterDest(compress=self.compress)
-            file.copy(deflater)
+            file.copy(deflater, skip_if_older)
             jar.add(path, deflater.deflater)
         if self._preload:
             jar.preload(self._preload)
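The FileCopier and Jarrer hunks above only thread the new argument through to BaseFile.copy. A minimal usage sketch of what the new keyword is meant for (the staging/dist paths are made-up placeholders; the calls are the ones shown in this diff):

from mozpack.copier import FileCopier
from mozpack.files import File

copier = FileCopier()
copier.add('bin/app.ini', File('staging/bin/app.ini'))

# Default behaviour: a destination file that is newer than its source is
# left alone.
copier.copy('dist')

# Force the copy regardless of timestamps, as unpack() and l10n repacking
# now do with skip_if_older=False.
copier.copy('dist', skip_if_older=False)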
@@ -128,5 +128,10 @@ class ErrorCollector(object):
         if count:
             raise AccumulatedErrors()
 
+    @property
+    def count(self):
+        # _count can be None.
+        return self._count if self._count else 0
+
 
 errors = ErrorCollector()
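The new count property lets callers check whether a block of code reported errors without aborting immediately, which is how UnifiedFinder uses it further down in this commit. A rough sketch, assuming the accumulator is used as in the tests:

from mozpack.errors import errors, AccumulatedErrors

try:
    with errors.accumulate():
        before = errors.count
        # ... code that may call errors.error() without raising ...
        if errors.count == before:
            pass  # no new errors were reported in this block
except AccumulatedErrors:
    pass  # raised when the accumulate() block ends with errors reported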
@@ -15,7 +15,9 @@ from mozpack.executables import (
 from mozpack.chrome.manifest import ManifestEntry
 from io import BytesIO
 from mozpack.errors import ErrorMessage
+from mozpack.mozjar import JarReader
 import mozpack.path
+from collections import OrderedDict
 
 
 class Dest(object):
@@ -59,12 +61,13 @@ class BaseFile(object):
     their own copy function, or rely on BaseFile.copy using the open() member
     function and/or the path property.
     '''
-    def copy(self, dest):
+    def copy(self, dest, skip_if_older=True):
         '''
         Copy the BaseFile content to the destination given as a string or a
         Dest instance. Avoids replacing existing files if the BaseFile content
         matches that of the destination, or in case of plain files, if the
-        destination is newer than the original file.
+        destination is newer than the original file. This latter behaviour is
+        disabled when skip_if_older is False.
         Returns whether a copy was actually performed (True) or not (False).
         '''
         if isinstance(dest, basestring):
@@ -80,7 +83,7 @@ class BaseFile(object):
             # the microsecond. But microsecond is too precise because
             # shutil.copystat only copies milliseconds, and seconds is not
             # enough precision.
-            if int(os.path.getmtime(self.path) * 1000) \
+            if skip_if_older and int(os.path.getmtime(self.path) * 1000) \
                     <= int(os.path.getmtime(dest.path) * 1000):
                 return False
             elif os.path.getsize(self.path) != os.path.getsize(dest.path):
@@ -137,9 +140,14 @@ class ExecutableFile(File):
     File class for executable and library files on OS/2, OS/X and ELF systems.
     (see mozpack.executables.is_executable documentation).
     '''
-    def copy(self, dest):
+    def copy(self, dest, skip_if_older=True):
         assert isinstance(dest, basestring)
-        File.copy(self, dest)
+        # If File.copy didn't actually copy because dest is newer, check the
+        # file sizes. If dest is smaller, it means it is already stripped and
+        # elfhacked, so we can skip.
+        if not File.copy(self, dest, skip_if_older) and \
+                os.path.getsize(self.path) > os.path.getsize(dest):
+            return False
         try:
             if may_strip(dest):
                 strip(dest)
@@ -202,12 +210,13 @@ class XPTFile(GeneratedFile):
         assert isinstance(xpt, BaseFile)
         self._files.remove(xpt)
 
-    def copy(self, dest):
+    def copy(self, dest, skip_if_older=True):
         '''
         Link the registered XPTs and place the resulting linked XPT at the
         destination given as a string or a Dest instance. Avoids an expensive
         XPT linking if the interfaces in an existing destination match those of
         the individual XPTs to link.
+        skip_if_older is ignored.
         '''
         if isinstance(dest, basestring):
             dest = Dest(dest)
@@ -288,7 +297,7 @@ class ManifestFile(BaseFile):
         the manifest.
         '''
         return BytesIO(''.join('%s\n' % e.rebase(self._base)
-                               for e in self._entries))
+                               for e in self._entries))
 
     def __iter__(self):
         '''
@@ -318,16 +327,13 @@ class MinifiedProperties(BaseFile):
         the properties file.
         '''
         return BytesIO(''.join(l for l in self._file.open().readlines()
-                               if not l.startswith('#')))
+                               if not l.startswith('#')))
 
 
-class FileFinder(object):
-    '''
-    Helper to get appropriate BaseFile instances from the file system.
-    '''
+class BaseFinder(object):
     def __init__(self, base, minify=False):
         '''
-        Create a FileFinder for files under the given base directory. The
+        Initializes the instance with a reference base directory. The
         optional minify argument specifies whether file types supporting
         minification (currently only "*.properties") should be minified.
         '''
@@ -339,18 +345,65 @@ class FileFinder(object):
         Yield path, BaseFile_instance pairs for all files under the base
         directory and its subdirectories that match the given pattern. See the
         mozpack.path.match documentation for a description of the handled
-        patterns. Note all files with a name starting with a '.' are ignored
-        when scanning directories, but are not ignored when explicitely
-        requested.
+        patterns.
         '''
         while pattern.startswith('/'):
             pattern = pattern[1:]
-        return self._find(pattern)
+        for p, f in self._find(pattern):
+            yield p, self._minify_file(p, f)
+
+    def __iter__(self):
+        '''
+        Iterates over all files under the base directory (excluding files
+        starting with a '.' and files at any level under a directory starting
+        with a '.').
+            for path, file in finder:
+                ...
+        '''
+        return self.find('')
+
+    def __contains__(self, pattern):
+        raise RuntimeError("'in' operator forbidden for %s. Use contains()." %
+                           self.__class__.__name__)
+
+    def contains(self, pattern):
+        '''
+        Return whether some files under the base directory match the given
+        pattern. See the mozpack.path.match documentation for a description of
+        the handled patterns.
+        '''
+        return any(self.find(pattern))
+
+    def _minify_file(self, path, file):
+        '''
+        Return an appropriate MinifiedSomething wrapper for the given BaseFile
+        instance (file), according to the file type (determined by the given
+        path), if the FileFinder was created with minification enabled.
+        Otherwise, just return the given BaseFile instance.
+        Currently, only "*.properties" files are handled.
+        '''
+        if self._minify and not isinstance(file, ExecutableFile):
+            if path.endswith('.properties'):
+                return MinifiedProperties(file)
+        return file
+
+
+class FileFinder(BaseFinder):
+    '''
+    Helper to get appropriate BaseFile instances from the file system.
+    '''
+    def __init__(self, base, **kargs):
+        '''
+        Create a FileFinder for files under the given base directory.
+        '''
+        BaseFinder.__init__(self, base, **kargs)
 
     def _find(self, pattern):
         '''
         Actual implementation of FileFinder.find(), dispatching to specialized
         member functions depending on what kind of pattern was given.
+        Note all files with a name starting with a '.' are ignored when
+        scanning directories, but are not ignored when explicitely requested.
         '''
         if '*' in pattern:
             return self._find_glob('', mozpack.path.split(pattern))
@@ -384,7 +437,7 @@ class FileFinder(object):
         if is_executable(srcpath):
             yield path, ExecutableFile(srcpath)
         else:
-            yield path, self._minify_file(srcpath, File(srcpath))
+            yield path, File(srcpath)
 
     def _find_glob(self, base, pattern):
         '''
@@ -418,37 +471,35 @@ class FileFinder(object):
                                         pattern[1:]):
                 yield p, f
 
-    def __iter__(self):
-        '''
-        Iterates over all files under the base directory (excluding files
-        starting with a '.' and files at any level under a directory starting
-        with a '.').
-            for path, file in finder:
-                ...
-        '''
-        return self.find('')
-
-    def __contains__(self, pattern):
-        raise RuntimeError("'in' operator forbidden for %s. Use contains()." %
-                           self.__class__.__name__)
+
+class JarFinder(BaseFinder):
+    '''
+    Helper to get appropriate DeflatedFile instances from a JarReader.
+    '''
+    def __init__(self, base, reader, **kargs):
+        '''
+        Create a JarFinder for files in the given JarReader. The base argument
+        is used as an indication of the Jar file location.
+        '''
+        assert isinstance(reader, JarReader)
+        BaseFinder.__init__(self, base, **kargs)
+        self._files = OrderedDict((f.filename, f) for f in reader)
 
-    def contains(self, pattern):
+    def _find(self, pattern):
         '''
-        Return whether some files under the base directory match the given
-        pattern. See the mozpack.path.match documentation for a description of
-        the handled patterns.
+        Actual implementation of JarFinder.find(), dispatching to specialized
+        member functions depending on what kind of pattern was given.
         '''
-        return any(self.find(pattern))
-
-    def _minify_file(self, path, file):
-        '''
-        Return an appropriate MinifiedSomething wrapper for the given BaseFile
-        instance (file), according to the file type (determined by the given
-        path), if the FileFinder was created with minification enabled.
-        Otherwise, just return the given BaseFile instance.
-        Currently, only "*.properties" files are handled.
-        '''
-        if self._minify:
-            if path.endswith('.properties'):
-                return MinifiedProperties(file)
-        return file
+        if '*' in pattern:
+            for p in self._files:
+                if mozpack.path.match(p, pattern):
+                    yield p, DeflatedFile(self._files[p])
+        elif pattern == '':
+            for p in self._files:
+                yield p, DeflatedFile(self._files[p])
+        elif pattern in self._files:
+            yield pattern, DeflatedFile(self._files[pattern])
+        else:
+            for p in self._files:
+                if mozpack.path.basedir(p, [pattern]) == pattern:
+                    yield p, DeflatedFile(self._files[p])
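With the split into BaseFinder, FileFinder and JarFinder, both finder flavours expose the same find()/contains() interface and the same optional minification. A small sketch, assuming a 'dist' directory and an 'omni.ja' archive (both placeholder names):

from mozpack.files import FileFinder, JarFinder
from mozpack.mozjar import JarReader

finder = FileFinder('dist', minify=True)
for path, file in finder.find('**/*.properties'):
    # file is a MinifiedProperties wrapper because minify=True
    print(path)

jar_finder = JarFinder('omni.ja', JarReader(file='omni.ja'))
if jar_finder.contains('chrome'):
    for path, file in jar_finder.find('chrome/*'):
        print(path)  # file is a DeflatedFile backed by the jar member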
@@ -65,7 +65,7 @@ class JarStruct(object):
         '''
         assert self.MAGIC and isinstance(self.STRUCT, OrderedDict)
         self.size_fields = set(t for t in self.STRUCT.itervalues()
-                               if not t in JarStruct.TYPE_MAPPING)
+                               if not t in JarStruct.TYPE_MAPPING)
         self._values = {}
         if data:
             self._init_data(data)
@@ -277,7 +277,7 @@ class JarFileReader(object):
         Return a list containing all the lines of data in the uncompressed
         data.
         '''
-        return self.read().splitlines()
+        return self.read().splitlines(True)
 
     def seek(self, pos, whence=os.SEEK_SET):
         '''
@@ -375,7 +375,7 @@ class JarReader(object):
             xattr = entry['external_attr']
             # Skip directories
             if (host == 0 and xattr & 0x10) or (host == 3 and
-                                                xattr & (040000 << 16)):
+                                                xattr & (040000 << 16)):
                 continue
             entries[entry['filename']] = entry
             if entry['offset'] < preload:
@@ -416,7 +416,8 @@ class JarReader(object):
             if key in header and header[key] != value:
                 raise JarReaderError('Central directory and file header ' +
                                      'mismatch. Corrupted archive?')
-        return JarFileReader(header, self._data[entry['offset'] + header.size:])
+        return JarFileReader(header,
+                             self._data[entry['offset'] + header.size:])
 
     def __iter__(self):
         '''
@@ -529,7 +530,7 @@ class JarWriter(object):
         end['disk_entries'] = len(self._contents)
         end['cdir_entries'] = end['disk_entries']
         end['cdir_size'] = reduce(lambda x, y: x + y[0].size,
-                                  self._contents.values(), 0)
+                                  self._contents.values(), 0)
         # On optimized archives, store the preloaded size and the central
         # directory entries, followed by the first end of central directory.
         if self._optimize:
@@ -584,7 +585,8 @@ class JarWriter(object):
             data.seek(0)
             deflater.write(data.read())
         else:
-            raise JarWriterError("Don't know how to handle %s" % type(data))
+            raise JarWriterError("Don't know how to handle %s" %
+                                 type(data))
         # Fill a central directory entry for this new member.
         entry = JarCdirEntry()
         # Not storing as created on unix, which avoids having to deal with
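The readlines() change above switches to splitlines(True) so the returned lines keep their trailing newlines, matching file.readlines() — which matters when lines are compared or re-joined, as the unification code does. In plain Python terms:

data = 'a\nb\nc\n'
data.splitlines()      # ['a', 'b', 'c']
data.splitlines(True)  # ['a\n', 'b\n', 'c\n'], what readlines() now returns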
@@ -19,7 +19,6 @@ from mozpack.copier import (
     FileRegistry,
     Jarrer,
 )
-from mozpack.errors import errors
 
 STARTUP_CACHE_PATHS = [
     'jsloader',
@@ -61,6 +60,7 @@ Formatters all take a FileCopier instance they will fill with the packaged
 data.
 '''
 
+
 class FlatFormatter(object):
     '''
     Formatter for the flat package format.
@@ -192,8 +192,8 @@ class JarFormatter(FlatFormatter):
             return self.copier.contains(path)
         if not self.copier.contains(chrome + '.jar'):
             return False
-        return self.copier[chrome + '.jar'].contains(mozpack.path.relpath(path,
-                                                                          chrome))
+        return self.copier[chrome + '.jar']. \
+            contains(mozpack.path.relpath(path, chrome))
 
 
 class OmniJarFormatter(FlatFormatter):
@@ -25,7 +25,6 @@ from mozpack.packager.formats import (
     STARTUP_CACHE_PATHS,
 )
 from urlparse import urlparse
-from collections import OrderedDict
 
 
 class UnpackFinder(FileFinder):
@@ -172,4 +171,4 @@ def unpack(source):
         if mozpack.path.split(p)[0] not in STARTUP_CACHE_PATHS:
             packager.add(p, f)
     packager.close()
-    copier.copy(source)
+    copier.copy(source, skip_if_older=False)
@@ -59,6 +59,7 @@ class TestFileRegistry(MatchTestTemplate, unittest.TestCase):
         self.registry.remove('bar')
         self.assertEqual(self.registry.paths(), [])
 
+        self.prepare_match_test()
         self.do_match_test()
         self.assertTrue(self.checked)
         self.assertEqual(self.registry.paths(), [
@@ -11,6 +11,7 @@ from mozpack.files import (
     XPTFile,
     MinifiedProperties,
     FileFinder,
+    JarFinder,
 )
 from mozpack.mozjar import (
     JarReader,
@@ -209,6 +210,10 @@ class TestFile(TestWithTmpDir):
         os.utime(dest, (time, time))
         self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))
 
+        # skip_if_older=False is expected to force a copy in this situation.
+        f.copy(dest, skip_if_older=False)
+        self.assertEqual('fooo', open(dest, 'rb').read())
+
 
 class TestGeneratedFile(TestWithTmpDir):
     def test_generated_file(self):
@@ -486,7 +491,7 @@ class TestMinifiedProperties(TestWithTmpDir):
 
 
 class MatchTestTemplate(object):
-    def do_match_test(self):
+    def prepare_match_test(self, with_dotfiles=False):
         self.add('bar')
         self.add('foo/bar')
         self.add('foo/baz')
@@ -494,7 +499,11 @@ class MatchTestTemplate(object):
         self.add('foo/qux/bar')
         self.add('foo/qux/2/test')
         self.add('foo/qux/2/test2')
+        if with_dotfiles:
+            self.add('foo/.foo')
+            self.add('foo/.bar/foo')
 
+    def do_match_test(self):
         self.do_check('', [
             'bar', 'foo/bar', 'foo/baz', 'foo/qux/1', 'foo/qux/bar',
             'foo/qux/2/test', 'foo/qux/2/test2'
@@ -533,6 +542,33 @@ class MatchTestTemplate(object):
         self.do_check('**/barbaz', [])
         self.do_check('f**/bar', ['foo/bar'])
 
+    def do_finder_test(self, finder):
+        self.assertTrue(finder.contains('foo/.foo'))
+        self.assertTrue(finder.contains('foo/.bar'))
+        self.assertTrue('foo/.foo' in [f for f, c in
+                                       finder.find('foo/.foo')])
+        self.assertTrue('foo/.bar/foo' in [f for f, c in
+                                           finder.find('foo/.bar')])
+        self.assertEqual(sorted([f for f, c in finder.find('foo/.*')]),
+                         ['foo/.bar/foo', 'foo/.foo'])
+        for pattern in ['foo', '**', '**/*', '**/foo', 'foo/*']:
+            self.assertFalse('foo/.foo' in [f for f, c in
+                                            finder.find(pattern)])
+            self.assertFalse('foo/.bar/foo' in [f for f, c in
+                                                finder.find(pattern)])
+            self.assertEqual(sorted([f for f, c in finder.find(pattern)]),
+                             sorted([f for f, c in finder
+                                     if mozpack.path.match(f, pattern)]))
+
+
+def do_check(test, finder, pattern, result):
+    if result:
+        test.assertTrue(finder.contains(pattern))
+    else:
+        test.assertFalse(finder.contains(pattern))
+    test.assertEqual(sorted(list(f for f, c in finder.find(pattern))),
+                     sorted(result))
+
 
 class TestFileFinder(MatchTestTemplate, TestWithTmpDir):
     def add(self, path):
@@ -540,34 +576,30 @@ class TestFileFinder(MatchTestTemplate, TestWithTmpDir):
         open(self.tmppath(path), 'wb').write(path)
 
     def do_check(self, pattern, result):
-        if result:
-            self.assertTrue(self.finder.contains(pattern))
-        else:
-            self.assertFalse(self.finder.contains(pattern))
-        self.assertEqual(sorted(list(f for f, c in self.finder.find(pattern))),
-                         sorted(result))
+        do_check(self, self.finder, pattern, result)
 
     def test_file_finder(self):
+        self.prepare_match_test(with_dotfiles=True)
         self.finder = FileFinder(self.tmpdir)
         self.do_match_test()
-        self.add('foo/.foo')
-        self.add('foo/.bar/foo')
-        self.assertTrue(self.finder.contains('foo/.foo'))
-        self.assertTrue(self.finder.contains('foo/.bar'))
-        self.assertTrue('foo/.foo' in [f for f, c in
-                                       self.finder.find('foo/.foo')])
-        self.assertTrue('foo/.bar/foo' in [f for f, c in
-                                           self.finder.find('foo/.bar')])
-        self.assertEqual(sorted([f for f, c in self.finder.find('foo/.*')]),
-                         ['foo/.bar/foo', 'foo/.foo'])
-        for pattern in ['foo', '**', '**/*', '**/foo', 'foo/*']:
-            self.assertFalse('foo/.foo' in [f for f, c in
-                                            self.finder.find(pattern)])
-            self.assertFalse('foo/.bar/foo' in [f for f, c in
-                                                self.finder.find(pattern)])
-            self.assertEqual(sorted([f for f, c in self.finder.find(pattern)]),
-                             sorted([f for f, c in self.finder
-                                     if mozpack.path.match(f, pattern)]))
+        self.do_finder_test(self.finder)
+
+
+class TestJarFinder(MatchTestTemplate, TestWithTmpDir):
+    def add(self, path):
+        self.jar.add(path, path, compress=True)
+
+    def do_check(self, pattern, result):
+        do_check(self, self.finder, pattern, result)
+
+    def test_jar_finder(self):
+        self.jar = JarWriter(file=self.tmppath('test.jar'))
+        self.prepare_match_test()
+        self.jar.finish()
+        reader = JarReader(file=self.tmppath('test.jar'))
+        self.finder = JarFinder(self.tmppath('test.jar'), reader)
+        self.do_match_test()
 
 
 if __name__ == '__main__':
     mozunit.main()
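The new TestJarFinder drives the same match tests through a jar archive. Outside the test harness, the round trip it exercises looks roughly like this (test.jar is a throwaway path):

from mozpack.files import JarFinder
from mozpack.mozjar import JarWriter, JarReader

jar = JarWriter(file='test.jar')
jar.add('foo/bar', 'contents of foo/bar', compress=True)
jar.finish()

finder = JarFinder('test.jar', JarReader(file='test.jar'))
assert finder.contains('foo')
assert [p for p, f in finder.find('foo/*')] == ['foo/bar']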
@@ -233,7 +233,6 @@ class TestOmniJarFormatter(TestWithTmpDir):
             'app/components/foo.so', 'app/foo'
         ])
 
-
     def test_omnijar_is_resource(self):
         registry = FileRegistry()
         f = OmniJarFormatter(registry, 'omni.foo', non_resources=[
@@ -250,12 +249,13 @@ class TestOmniJarFormatter(TestWithTmpDir):
         self.assertTrue(f.is_resource(base + 'components/foo.js'))
         self.assertFalse(f.is_resource(base + 'components/foo.so'))
         self.assertTrue(f.is_resource(base + 'res/foo.css'))
-        self.assertFalse( f.is_resource(base + 'res/cursors/foo.png'))
+        self.assertFalse(f.is_resource(base + 'res/cursors/foo.png'))
         self.assertFalse(f.is_resource(base + 'res/MainMenu.nib/'))
         self.assertTrue(f.is_resource(base + 'defaults/pref/foo.js'))
         self.assertFalse(
             f.is_resource(base + 'defaults/pref/channel-prefs.js'))
-        self.assertTrue(f.is_resource(base + 'defaults/preferences/foo.js'))
+        self.assertTrue(
+            f.is_resource(base + 'defaults/preferences/foo.js'))
         self.assertFalse(
             f.is_resource(base + 'defaults/preferences/channel-prefs.js'))
         self.assertTrue(f.is_resource(base + 'modules/foo.jsm'))
@@ -9,8 +9,17 @@ from mozpack.unify import (
 import mozunit
 from mozpack.test.test_files import TestWithTmpDir
 from mozpack.copier import ensure_parent_dir
+from mozpack.files import FileFinder
+from mozpack.mozjar import JarWriter
+from mozpack.test.test_files import MockDest
+from cStringIO import StringIO
 import os
-from mozpack.errors import ErrorMessage
+import sys
+from mozpack.errors import (
+    ErrorMessage,
+    AccumulatedErrors,
+    errors,
+)
 
 
 class TestUnified(TestWithTmpDir):
@@ -36,7 +45,8 @@ class TestUnifiedFinder(TestUnified):
         self.create_one('b', 'test/foo', 'b\nc\na\n')
         self.create_both('test/bar', 'a\nb\nc\n')
 
-        finder = UnifiedFinder(self.tmppath('a'), self.tmppath('b'),
+        finder = UnifiedFinder(FileFinder(self.tmppath('a')),
+                               FileFinder(self.tmppath('b')),
                                sorted=['test'])
         self.assertEqual(sorted([(f, c.open().read())
                                  for f, c in finder.find('foo')]),
@@ -73,7 +83,8 @@ class TestUnifiedBuildFinder(TestUnified):
             '</body>',
             '</html>',
         ]))
-        finder = UnifiedBuildFinder(self.tmppath('a'), self.tmppath('b'))
+        finder = UnifiedBuildFinder(FileFinder(self.tmppath('a')),
+                                    FileFinder(self.tmppath('b')))
         self.assertEqual(sorted([(f, c.open().read()) for f, c in
                                  finder.find('**/chrome.manifest')]),
                          [('chrome.manifest', 'a\nb\nc\n'),
@@ -92,6 +103,25 @@ class TestUnifiedBuildFinder(TestUnified):
                              '</html>',
                          ]))])
 
+        xpi = MockDest()
+        with JarWriter(fileobj=xpi, compress=True) as jar:
+            jar.add('foo', 'foo')
+            jar.add('bar', 'bar')
+        foo_xpi = xpi.read()
+        self.create_both('foo.xpi', foo_xpi)
+
+        with JarWriter(fileobj=xpi, compress=True) as jar:
+            jar.add('foo', 'bar')
+        self.create_one('a', 'bar.xpi', foo_xpi)
+        self.create_one('b', 'bar.xpi', xpi.read())
+
+        errors.out = StringIO()
+        with self.assertRaises(AccumulatedErrors), errors.accumulate():
+            self.assertEqual([(f, c.open().read()) for f, c in
+                              finder.find('*.xpi')],
+                             [('foo.xpi', foo_xpi)])
+        errors.out = sys.stderr
+
 
 if __name__ == '__main__':
     mozunit.main()
@@ -3,20 +3,19 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from mozpack.files import (
-    FileFinder,
+    BaseFinder,
+    JarFinder,
     ExecutableFile,
     BaseFile,
     GeneratedFile,
 )
 from mozpack.executables import (
     MACHO_SIGNATURES,
-    may_strip,
-    strip,
 )
+from mozpack.mozjar import JarReader
 from mozpack.errors import errors
 from tempfile import mkstemp
 import mozpack.path
-import shutil
 import struct
 import os
 import subprocess
@@ -42,92 +41,99 @@ class UnifiedExecutableFile(BaseFile):
     '''
     File class for executable and library files that to be unified with 'lipo'.
     '''
-    def __init__(self, path1, path2):
+    def __init__(self, executable1, executable2):
         '''
-        Initialize a UnifiedExecutableFile with the path to both non-fat Mach-O
-        executables to be unified.
+        Initialize a UnifiedExecutableFile with a pair of ExecutableFiles to
+        be unified. They are expected to be non-fat Mach-O executables.
         '''
-        self.path1 = path1
-        self.path2 = path2
+        assert isinstance(executable1, ExecutableFile)
+        assert isinstance(executable2, ExecutableFile)
+        self._executables = (executable1, executable2)
 
-    def copy(self, dest):
+    def copy(self, dest, skip_if_older=True):
+        '''
+        Create a fat executable from the two Mach-O executable given when
+        creating the instance.
+        skip_if_older is ignored.
+        '''
         assert isinstance(dest, basestring)
         tmpfiles = []
         try:
-            for p in [self.path1, self.path2]:
+            for e in self._executables:
                 fd, f = mkstemp()
                 os.close(fd)
                 tmpfiles.append(f)
-                shutil.copy2(p, f)
-                if may_strip(f):
-                    strip(f)
+                e.copy(f, skip_if_older=False)
             subprocess.call(['lipo', '-create'] + tmpfiles + ['-output', dest])
         finally:
             for f in tmpfiles:
                 os.unlink(f)
 
 
-class UnifiedFinder(FileFinder):
+class UnifiedFinder(BaseFinder):
     '''
     Helper to get unified BaseFile instances from two distinct trees on the
     file system.
     '''
-    def __init__(self, base1, base2, sorted=[], **kargs):
+    def __init__(self, finder1, finder2, sorted=[], **kargs):
         '''
-        Initialize a UnifiedFinder. base1 and base2 are the base directories
-        for the two trees from which files are picked. UnifiedFinder.find()
-        will act as FileFinder.find() but will error out when matches can only
-        be found in one of the two trees and not the other. It will also error
-        out if matches can be found on both ends but their contents are not
-        identical.
+        Initialize a UnifiedFinder. finder1 and finder2 are BaseFinder
+        instances from which files are picked. UnifiedFinder.find() will act as
+        FileFinder.find() but will error out when matches can only be found in
+        one of the two trees and not the other. It will also error out if
+        matches can be found on both ends but their contents are not identical.
 
         The sorted argument gives a list of mozpack.path.match patterns. File
         paths matching one of these patterns will have their contents compared
         with their lines sorted.
         '''
-        self._base1 = FileFinder(base1, **kargs)
-        self._base2 = FileFinder(base2, **kargs)
+        assert isinstance(finder1, BaseFinder)
+        assert isinstance(finder2, BaseFinder)
+        self._finder1 = finder1
+        self._finder2 = finder2
         self._sorted = sorted
+        BaseFinder.__init__(self, finder1.base, **kargs)
 
     def _find(self, path):
         '''
         UnifiedFinder.find() implementation.
         '''
         files1 = OrderedDict()
-        for p, f in self._base1.find(path):
+        for p, f in self._finder1.find(path):
             files1[p] = f
         files2 = set()
-        for p, f in self._base2.find(path):
+        for p, f in self._finder2.find(path):
             files2.add(p)
             if p in files1:
                 if may_unify_binary(files1[p]) and \
                         may_unify_binary(f):
-                    yield p, UnifiedExecutableFile(files1[p].path, f.path)
+                    yield p, UnifiedExecutableFile(files1[p], f)
                 else:
+                    err = errors.count
                     unified = self.unify_file(p, files1[p], f)
                     if unified:
                         yield p, unified
-                    else:
+                    elif err == errors.count:
                         self._report_difference(p, files1[p], f)
             else:
-                errors.error('File missing in %s: %s' % (self._base1.base, p))
+                errors.error('File missing in %s: %s' %
+                             (self._finder1.base, p))
         for p in [p for p in files1 if not p in files2]:
-            errors.error('File missing in %s: %s' % (self._base2.base, p))
+            errors.error('File missing in %s: %s' % (self._finder2.base, p))
 
     def _report_difference(self, path, file1, file2):
         '''
         Report differences between files in both trees.
         '''
         errors.error("Can't unify %s: file differs between %s and %s" %
-                     (path, self._base1.base, self._base2.base))
+                     (path, self._finder1.base, self._finder2.base))
         if not isinstance(file1, ExecutableFile) and \
                 not isinstance(file2, ExecutableFile):
             from difflib import unified_diff
             import sys
             for line in unified_diff(file1.open().readlines(),
                                      file2.open().readlines(),
-                                     os.path.join(self._base1.base, path),
-                                     os.path.join(self._base2.base, path)):
+                                     os.path.join(self._finder1.base, path),
+                                     os.path.join(self._finder2.base, path)):
                 errors.out.write(line)
 
     def unify_file(self, path, file1, file2):
@@ -153,8 +159,8 @@ class UnifiedBuildFinder(UnifiedFinder):
     "*.manifest" files to differ in their order, and unifies "buildconfig.html"
     files by merging their content.
     '''
-    def __init__(self, base1, base2, **kargs):
-        UnifiedFinder.__init__(self, base1, base2,
+    def __init__(self, finder1, finder2, **kargs):
+        UnifiedFinder.__init__(self, finder1, finder2,
                                sorted=['**/*.manifest'], **kargs)
 
     def unify_file(self, path, file1, file2):
@@ -172,4 +178,15 @@ class UnifiedBuildFinder(UnifiedFinder):
                 ['<hr> </hr>\n'] +
                 content2[content2.index('<h1>about:buildconfig</h1>\n') + 1:]
             ))
+        if path.endswith('.xpi'):
+            finder1 = JarFinder(os.path.join(self._finder1.base, path),
+                                JarReader(fileobj=file1.open()))
+            finder2 = JarFinder(os.path.join(self._finder2.base, path),
+                                JarReader(fileobj=file2.open()))
+            unifier = UnifiedFinder(finder1, finder2, sorted=self._sorted)
+            err = errors.count
+            all(unifier.find(''))
+            if err == errors.count:
+                return file1
+            return None
         return UnifiedFinder.unify_file(self, path, file1, file2)
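After this refactoring UnifiedFinder takes two finder objects instead of two base directories, so anything deriving from BaseFinder (including a JarFinder, which is what the new .xpi handling relies on) can be unified. A sketch of the new call shape, with placeholder directory names:

from mozpack.files import FileFinder
from mozpack.unify import UnifiedFinder, UnifiedBuildFinder

finder = UnifiedFinder(FileFinder('dist-x86'), FileFinder('dist-x86_64'),
                       sorted=['**/*.manifest'])
for path, file in finder.find('bin'):
    pass  # identical files, unified Mach-O executables, or errors.error() calls

build_finder = UnifiedBuildFinder(FileFinder('dist-x86'),
                                  FileFinder('dist-x86_64'))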
@@ -5,10 +5,7 @@
 import sys
 import hashlib
 from mozpack.packager.unpack import UnpackFinder
-try:
-    from collections import OrderedDict
-except ImportError:
-    from simplejson import OrderedDict
+from collections import OrderedDict
 
 '''
 Find files duplicated in a given packaged directory, independently of its
@@ -7,7 +7,6 @@ Replace localized parts of a packaged directory with data from a langpack
 directory.
 '''
 
-import sys
 import os
 import mozpack.path
 from mozpack.packager.formats import (
@@ -181,7 +180,7 @@ def repack(source, l10n, non_resources=[]):
         assert isinstance(copier[path], Jarrer)
         copier[path].preload([l.replace(locale, l10n_locale) for l in log])
 
-    copier.copy(source)
+    copier.copy(source, skip_if_older=False)
     generate_precomplete(source)
 
 
@@ -28,8 +28,6 @@ import buildconfig
 from argparse import ArgumentParser
 from createprecomplete import generate_precomplete
 import os
-import re
 import sys
-from StringIO import StringIO
 import subprocess
 import platform
@@ -94,12 +92,12 @@ class LibSignFile(File):
     '''
     File class for shlibsign signatures.
     '''
-    def copy(self, dest):
+    def copy(self, dest, skip_if_older=True):
         assert isinstance(dest, basestring)
         # os.path.getmtime returns a result in seconds with precision up to the
         # microsecond. But microsecond is too precise because shutil.copystat
         # only copies milliseconds, and seconds is not enough precision.
-        if os.path.exists(dest) and \
+        if os.path.exists(dest) and skip_if_older and \
                 int(os.path.getmtime(self.path) * 1000) <= \
                 int(os.path.getmtime(dest) * 1000):
             return False
@@ -296,7 +294,8 @@ def main():
 
     with errors.accumulate():
         if args.unify:
-            finder = UnifiedBuildFinder(args.source, args.unify,
+            finder = UnifiedBuildFinder(FileFinder(args.source),
+                                        FileFinder(args.unify),
                                         minify=args.minify)
         else:
             finder = FileFinder(args.source, minify=args.minify)
@@ -328,7 +327,8 @@ def main():
         libname = '%s%s' % (libbase, buildconfig.substs['DLL_SUFFIX'])
         if copier.contains(libname):
             copier.add(libbase + '.chk',
-                       LibSignFile(os.path.join(args.destination, libname)))
+                       LibSignFile(os.path.join(args.destination,
+                                                libname)))
 
     # Setup preloading
     if args.jarlogs:
@@ -7,6 +7,7 @@ import os
 from mozpack.packager.unpack import unpack
 import buildconfig
 
+
 def main():
     if len(sys.argv) != 2:
         print >>sys.stderr, "Usage: %s directory" % \
@@ -847,19 +847,6 @@ nsXREDirProvider::DoShutdown()
       obsSvc->NotifyObservers(nullptr, "profile-change-net-teardown", kShutdownPersist);
       obsSvc->NotifyObservers(nullptr, "profile-change-teardown", kShutdownPersist);
 
-      // Phase 2c: Now that things are torn down, force JS GC so that things which depend on
-      // resources which are about to go away in "profile-before-change" are destroyed first.
-
-      nsCOMPtr<nsIJSRuntimeService> rtsvc
-        (do_GetService("@mozilla.org/js/xpc/RuntimeService;1"));
-      if (rtsvc)
-      {
-        JSRuntime *rt = nullptr;
-        rtsvc->GetRuntime(&rt);
-        if (rt)
-          ::JS_GC(rt);
-      }
-
       // Phase 3: Notify observers of a profile change
       obsSvc->NotifyObservers(nullptr, "profile-before-change", kShutdownPersist);
     }