Merge m-c to inbound.

This commit is contained in:
Ryan VanderMeulen 2013-01-24 07:09:10 -05:00
commit bd08b980a7
59 changed files with 6128 additions and 2065 deletions

View File

@ -28,12 +28,7 @@ DEFINES += \
-DPREF_DIR=$(PREF_DIR) \
$(NULL)
ifeq ($(MOZ_CHROME_FILE_FORMAT),jar)
JAREXT=.jar
else
JAREXT=
endif
DEFINES += -DJAREXT=$(JAREXT)
DEFINES += -DJAREXT=
include $(topsrcdir)/ipc/app/defs.mk
DEFINES += -DMOZ_CHILD_PROCESS_NAME=$(MOZ_CHILD_PROCESS_NAME)
@ -51,7 +46,7 @@ ifdef MOZ_PKG_MANIFEST_P
MOZ_PKG_MANIFEST = package-manifest
endif
MOZ_POST_STAGING_CMD = find chrome -type f -name *.properties -exec $(PERL) -n -i -e 'print unless /^\#/' {} \;
MOZ_PACKAGER_MINIFY=1
include $(topsrcdir)/toolkit/mozapps/installer/packager.mk

View File

@ -60,11 +60,7 @@ ifdef _MSC_VER
DEFINES += -D_MSC_VER=$(_MSC_VER)
endif
ifeq ($(MOZ_CHROME_FILE_FORMAT),jar)
DEFINES += -DJAREXT=.jar
else
DEFINES += -DJAREXT=
endif
ifdef MOZ_ANGLE_RENDERER
DEFINES += -DMOZ_ANGLE_RENDERER=$(MOZ_ANGLE_RENDERER)
@ -152,7 +148,7 @@ ifdef MOZ_PKG_MANIFEST_P
rm -f $(DIST)/pack-list.txt $(DIST)/bin-list.txt
endif
installer:: removed-files
installer::
ifdef INSTALLER_DIR
$(MAKE) -C $(INSTALLER_DIR)
endif

View File

@ -564,7 +564,6 @@
; shell icons
#ifdef XP_UNIX
#ifndef XP_MACOSX
@BINPATH@/icons/*.xpm
@BINPATH@/icons/*.png
#endif
#endif

View File

@ -884,11 +884,11 @@ xpicleanup@BIN_SUFFIX@
components/addonManager.js
components/amContentHandler.js
components/amWebInstallListener.js
components/binary.manifest
components/browser.xpt
components/BrowserElementParent.js
components/BrowserElementParent.manifest
components/BrowserElementPromptService.jsm
components/components.manifest
components/contentAreaDropListener.js
components/contentSecurityPolicy.js
components/crypto-SDR.js

View File

@ -248,5 +248,5 @@ l10n-check::
$(NSINSTALL) -D x-test/toolkit
echo "#define MOZ_LANG_TITLE Just testing" > x-test/toolkit/defines.inc
$(MAKE) installers-x-test L10NBASEDIR="$(PWD)" LOCALE_MERGEDIR="$(PWD)/mergedir"
cd $(DIST)/l10n-stage && $(UNMAKE_PACKAGE)
$(PYTHON) $(topsrcdir)/toolkit/mozapps/installer/unpack.py $(DIST)/l10n-stage/$(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH)
cd $(DIST)/l10n-stage && test $$(cat $(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH)/update.locale) = x-test

View File

@ -178,86 +178,6 @@ libs:: $(topsrcdir)/tools/rb/fix_stack_using_bpsyms.py
ifeq ($(OS_ARCH),Darwin)
libs:: $(topsrcdir)/tools/rb/fix_macosx_stack.py
$(INSTALL) $< $(DIST)/bin
# Basic unit tests for some stuff in the unify script
check::
# build x64/i386 binaries, and unify them
rm -f unify-test-x64 unify-test-i386 unify-test-universal
$(HOST_CC) -arch x86_64 $(srcdir)/unify-test.c -o unify-test-x64
$(HOST_CC) -arch i386 $(srcdir)/unify-test.c -o unify-test-i386
@if ! $(srcdir)/macosx/universal/unify ./unify-test-x64 ./unify-test-i386 \
./unify-test-universal; then \
echo "TEST-UNEXPECTED-FAIL | build/ | unify failed to produce a universal binary!"; \
false; \
fi
@if test ! -f ./unify-test-universal; then \
echo "TEST-UNEXPECTED-FAIL | build/ | unify failed to produce a universal binary!"; \
false; \
fi
@if ! file -b ./unify-test-universal | head -n1 | grep -q "^Mach-O universal binary"; then \
echo "TEST-UNEXPECTED-FAIL | build/ | unify failed to produce a universal binary!"; \
false; \
else \
echo "TEST-PASS | build/ | unify produced a universal binary!"; \
fi
# try building an x86-64 binary. if that succeeds, try unifying it
# with an i386 binary
rm -f unify-test-x86_64 unify-test-universal-64
-$(HOST_CC) -arch x86_64 $(srcdir)/unify-test.c -o unify-test-x86_64
@if test -f ./unify-test-x86_64; then \
if ! $(srcdir)/macosx/universal/unify ./unify-test-x86_64 ./unify-test-i386 \
./unify-test-universal-64; then \
echo "TEST-UNEXPECTED-FAIL | build/ | unify failed to produce a universal binary with a 64-bit input!"; \
false; \
fi; \
if test ! -f ./unify-test-universal-64; then \
echo "TEST-UNEXPECTED-FAIL | build/ | unify failed to produce a universal binary with a 64-bit input!"; \
false; \
fi; \
if ! file -b ./unify-test-universal-64 | head -n1 | grep -q "^Mach-O universal binary"; then \
echo "TEST-UNEXPECTED-FAIL | build/ | unify failed to produce a universal binary with a 64-bit input!"; \
false; \
else \
echo "TEST-PASS | build/ | unify produced a universal binary with a 64-bit input!"; \
fi \
fi
# try unifying two identical Java class files
rm -f unifytesta.class unifytestb.class unifytestc.class
cp $(srcdir)/unifytest.class ./unifytesta.class
cp $(srcdir)/unifytest.class ./unifytestb.class
@if ! $(srcdir)/macosx/universal/unify ./unifytesta.class ./unifytestb.class \
./unifytestc.class; then \
echo "TEST-UNEXPECTED-FAIL | build/ | unify failed to unify a Java class file!"; \
false; \
fi
@if test ! -f ./unifytestc.class; then \
echo "TEST-UNEXPECTED-FAIL | build/ | unify failed to unify a Java class file!"; \
false; \
fi
@if ! diff -q ./unifytesta.class ./unifytestc.class; then \
echo "TEST-UNEXPECTED-FAIL | build/ | unify failed to unify a Java class file!"; \
false; \
else \
echo "TEST-PASS | build/ | unify unified a Java class file!"; \
fi
# try unifying some files that differ only in line ordering
rm -rf unify-sort-test
mkdir unify-sort-test unify-sort-test/a unify-sort-test/b
printf "lmn\nabc\nxyz\n" > unify-sort-test/a/file.foo
printf "xyz\nlmn\nabc" > unify-sort-test/b/file.foo
printf "lmn\nabc\nxyz\n" > unify-sort-test/expected-result
@if ! $(srcdir)/macosx/universal/unify --unify-with-sort "\.foo$$" \
./unify-sort-test/a ./unify-sort-test/b \
./unify-sort-test/c; then \
echo "TEST-UNEXPECTED-FAIL | build/ | unify failed to unify files with differing line ordering!"; \
false; \
fi
@if ! diff -q ./unify-sort-test/expected-result ./unify-sort-test/c/file.foo; then \
echo "TEST-UNEXPECTED-FAIL | build/ | unify failed to unify files with differing line ordering!"; \
false; \
else \
echo "TEST-PASS | build/ | unify unified files with differing line ordering!"; \
fi
endif
ifeq ($(OS_ARCH),Linux)

View File

@ -16,12 +16,15 @@ while not os.path.exists(os.path.join(path, 'config.status')):
path = os.path.join(path, 'config.status')
config = imp.load_module('_buildconfig', open(path), path, ('', 'r', imp.PY_SOURCE))
for var in os.environ:
if var in config.substs:
config.substs[var] = os.environ[var]
# Copy values from the config.status namespace into this module namespace.
# This effectively imports topsrcdir, topobjdir, defines, substs, files,
# headers and non_global_defines
for var in config.__all__:
value = getattr(config, var)
if isinstance(value, list) and value and isinstance(value[0], tuple):
value = dict(value)
setattr(sys.modules[__name__], var, value)
for var in os.environ:
if var != 'SHELL' and var in substs:
substs[var] = os.environ[var]

View File

@ -1,144 +0,0 @@
#!/usr/bin/perl
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

# fix-buildconfig: merges the about:buildconfig documents of two
# single-architecture builds so a universal package documents both.
use strict;
use warnings;
use Archive::Zip(':ERROR_CODES');

my ($BUILDCONFIG);

# Forward declaration; the sub is defined below.
sub fixBuildconfig($$$);

# Location of the buildconfig document inside a package or jar.
$BUILDCONFIG = 'content/global/buildconfig.html';

# Usage: fix-buildconfig <jar|file> <file1> <file2>
if (scalar(@ARGV) != 3) {
  print STDERR ("usage: fix-buildconfig <jar|file> <file1> <file2>\n");
  exit(1);
}

# Non-zero exit when the merge fails for any reason.
if (!fixBuildconfig($ARGV[0], $ARGV[1], $ARGV[2])) {
  exit(1);
}

exit(0);
# fixBuildconfig($mode, $path1, $path2)
#
# Merge the about:buildconfig documents found at $path1 and $path2 so that
# each copy ends up containing both builds' configuration sections,
# separated by an <hr>.  $mode is 'jar' (paths are zip archives containing
# $BUILDCONFIG) or 'file' (paths are prefixes to which $BUILDCONFIG is
# appended).  Both inputs are rewritten in place.
# Returns 1 on success, 0 on any error (message printed to STDERR).
sub fixBuildconfig($$$) {
  my ($mode, $path1, $path2);
  ($mode, $path1, $path2) = @_;

  if ($mode ne 'jar' && $mode ne 'file') {
    # FIX: this message used a single-quoted string ('...\n'), which printed
    # a literal backslash-n instead of a newline.
    print STDERR ($0.": must specify jar or file\n");
    return 0;
  }

  my ($contents1, $contents2);
  my ($ze, $zip1, $zip2);

  if ($mode eq 'jar') {
    # Pull $BUILDCONFIG out of both zip archives.
    $zip1 = Archive::Zip->new();
    if (($ze = $zip1->read($path1)) != AZ_OK) {
      print STDERR ($0.': could not read "'.$path1.'": error '.$ze."\n");
      return 0;
    }
    $zip2 = Archive::Zip->new();
    if (($ze = $zip2->read($path2)) != AZ_OK) {
      print STDERR ($0.': could not read "'.$path2.'": error '.$ze."\n");
      return 0;
    }
    $contents1 = $zip1->contents($BUILDCONFIG);
    $contents2 = $zip2->contents($BUILDCONFIG);
  } elsif ($mode eq 'file') {
    # Slurp both files whole (record separator localized to undef).
    local($/);
    my ($file1, $file2);
    open($file1, '<'.$path1.$BUILDCONFIG) or return 0;
    open($file2, '<'.$path2.$BUILDCONFIG) or return 0;
    $contents1 = <$file1>;
    $contents2 = <$file2>;
    close($file1);
    close($file2);
  }

  if (!defined($contents1)) {
    print STDERR ($0.': could not get "'.$BUILDCONFIG.'" from "'.$path1.'"'.
                  "\n");
    return 0;
  }
  if (!defined($contents2)) {
    print STDERR ($0.': could not get "'.$BUILDCONFIG.'" from "'.$path2.'"'.
                  "\n");
    return 0;
  }

  my (@lines1, @lines2);
  @lines1 = split(/\n/, $contents1);
  @lines2 = split(/\n/, $contents2);

  my ($line, @linesNew);
  @linesNew = ();

  # Copy everything from the first file up to (excluding) its </body>.
  # NOTE(review): shift() in boolean context also stops early on an empty
  # line or a line containing just "0"; harmless for this generated
  # document, but a latent gotcha — left as-is to preserve behavior.
  while ($line = shift(@lines1)) {
    if ($line eq '</body>') {
      last;
    }
    push(@linesNew, $line);
  }

  # Insert a <hr> between the two files.
  push (@linesNew, '<hr> </hr>');

  # Skip the second file's content up to and including its leading <h1>.
  while ($line = shift(@lines2)) {
    if ($line eq '<h1>about:buildconfig</h1>') {
      last;
    }
  }
  # Copy the remainder of the second file.
  while ($line = shift(@lines2)) {
    push(@linesNew, $line);
  }

  my ($contentsNew);
  $contentsNew = join("\n", @linesNew);

  if ($mode eq 'jar') {
    # Write the merged document back into both archives.
    if (!defined($zip1->contents($BUILDCONFIG, $contentsNew))) {
      print STDERR ($0.': could not set "'.$BUILDCONFIG.'" to "'.$path1.'"'.
                    "\n");
      return 0;
    }
    if (!defined($zip2->contents($BUILDCONFIG, $contentsNew))) {
      print STDERR ($0.': could not set "'.$BUILDCONFIG.'" to "'.$path2.'"'.
                    "\n");
      return 0;
    }
    if (($ze = $zip1->overwrite()) != AZ_OK) {
      print STDERR ($0.': could not write "'.$path1.'": error '.$ze."\n");
      return 0;
    }
    if (($ze = $zip2->overwrite()) != AZ_OK) {
      print STDERR ($0.': could not write "'.$path2.'": error '.$ze."\n");
      return 0;
    }
  } elsif ($mode eq 'file') {
    my ($file1, $file2);
    open($file1, '>'.$path1.$BUILDCONFIG) or return 0;
    open($file2, '>'.$path2.$BUILDCONFIG) or return 0;
    print $file1 ($contentsNew);
    print $file2 ($contentsNew);
    close($file1);
    close($file2);
  }

  return 1;
}

View File

@ -22,69 +22,11 @@ core_abspath = $(if $(filter /%,$(1)),$(1),$(CURDIR)/$(1))
DIST = $(OBJDIR)/dist
ifdef LIBXUL_SDK # {
APP_CONTENTS = Contents/Frameworks/XUL.framework
else # } {
APP_CONTENTS = Contents/MacOS
endif # } LIBXUL_SDK
ifeq ($(MOZ_BUILD_APP),camino) # {
INSTALLER_DIR = camino/installer
MOZ_PKG_APPNAME = camino
APPNAME = Camino.app
BUILDCONFIG_BASE = Contents/MacOS/chrome
else # } {
MOZ_PKG_APPNAME = $(MOZ_APP_NAME)
APPNAME = $(MOZ_MACBUNDLE_NAME)
INSTALLER_DIR = $(MOZ_BUILD_APP)/installer
ifeq ($(MOZ_BUILD_APP),xulrunner) # {
APPNAME = XUL.framework
APP_CONTENTS = Versions/Current
endif # } xulrunner
BUILDCONFIG_BASE = $(APP_CONTENTS)/chrome
endif # } !camino
ifeq ($(MOZ_CHROME_FILE_FORMAT),jar)
BUILDCONFIG = $(BUILDCONFIG_BASE)/toolkit.jar
FIX_MODE = jar
else
BUILDCONFIG = $(BUILDCONFIG_BASE)/toolkit/
FIX_MODE = file
endif
postflight_all:
# Build the universal package out of only the bits that would be released.
# Call the packager to set this up. Set UNIVERSAL_BINARY= to avoid producing
# a universal binary too early, before the unified bits have been staged.
# Set SIGN_NSS= to skip shlibsign.
$(MAKE) -C $(OBJDIR_ARCH_1)/$(INSTALLER_DIR) \
UNIVERSAL_BINARY= SIGN_NSS= PKG_SKIP_STRIP=1 stage-package
$(MAKE) -C $(OBJDIR_ARCH_2)/$(INSTALLER_DIR) \
UNIVERSAL_BINARY= SIGN_NSS= PKG_SKIP_STRIP=1 stage-package
# Remove .chk files that may have been copied from the NSS build. These will
# cause unify to warn or fail if present. New .chk files that are
# appropriate for the merged libraries will be generated when the universal
# dmg is built.
rm -f $(DIST_ARCH_1)/$(MOZ_PKG_APPNAME)/$(APPNAME)/$(APP_CONTENTS)/*.chk \
$(DIST_ARCH_2)/$(MOZ_PKG_APPNAME)/$(APPNAME)/$(APP_CONTENTS)/*.chk
# The only difference between the two trees now should be the
# about:buildconfig page. Fix it up.
$(TOPSRCDIR)/build/macosx/universal/fix-buildconfig $(FIX_MODE) \
$(DIST_ARCH_1)/$(MOZ_PKG_APPNAME)/$(APPNAME)/$(BUILDCONFIG) \
$(DIST_ARCH_2)/$(MOZ_PKG_APPNAME)/$(APPNAME)/$(BUILDCONFIG)
ifdef ENABLE_TESTS
mkdir -p $(DIST_UNI)/$(MOZ_PKG_APPNAME)
rm -f $(DIST_ARCH_2)/universal
ln -s $(call core_abspath,$(DIST_UNI)) $(DIST_ARCH_2)/universal
rm -rf $(DIST_UNI)/$(MOZ_PKG_APPNAME)/$(APPNAME)
$(TOPSRCDIR)/build/macosx/universal/unify \
--unify-with-sort "\.manifest$$" \
--unify-with-sort "components\.list$$" \
$(DIST_ARCH_1)/$(MOZ_PKG_APPNAME)/$(APPNAME) \
$(DIST_ARCH_2)/$(MOZ_PKG_APPNAME)/$(APPNAME) \
$(DIST_UNI)/$(MOZ_PKG_APPNAME)/$(APPNAME)
# A universal .dmg can now be produced by making in either architecture's
# INSTALLER_DIR.
ifdef ENABLE_TESTS
# Now, repeat the process for the test package.
$(MAKE) -C $(OBJDIR_ARCH_1) UNIVERSAL_BINARY= CHROME_JAR= package-tests
$(MAKE) -C $(OBJDIR_ARCH_2) UNIVERSAL_BINARY= CHROME_JAR= package-tests

View File

@ -9,6 +9,8 @@ mk_add_options MOZ_POSTFLIGHT_ALL+=build/macosx/universal/flight.mk
DARWIN_VERSION=`uname -r`
ac_add_app_options i386 --target=i386-apple-darwin$DARWIN_VERSION
ac_add_app_options x86_64 --target=x86_64-apple-darwin$DARWIN_VERSION
ac_add_app_options i386 --with-unify-dist=../x86_64/dist
ac_add_app_options x86_64 --with-unify-dist=../i386/dist
ac_add_options --with-macos-sdk=/Developer/SDKs/MacOSX10.6.sdk

View File

@ -21,4 +21,5 @@ which.pth:python/which
mock.pth:python/mock-1.0.0
mozilla.pth:build
mozilla.pth:config
mozilla.pth:xpcom/typelib/xpt/tools
copy:build/buildconfig.py

View File

@ -727,8 +727,6 @@ DIRS += $(foreach tier,$(TIERS),$(tier_$(tier)_dirs))
STATIC_DIRS += $(foreach tier,$(TIERS),$(tier_$(tier)_staticdirs))
endif
OPTIMIZE_JARS_CMD = $(PYTHON) $(call core_abspath,$(topsrcdir)/config/optimizejars.py)
CREATE_PRECOMPLETE_CMD = $(PYTHON) $(call core_abspath,$(topsrcdir)/config/createprecomplete.py)
# MDDEPDIR is the subdirectory where dependency files are stored

View File

@ -1,341 +0,0 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

# optimizejars: rewrites ZIP/jar files so that members named in a startup
# log are stored first with the central directory at the front of the file
# (or reverses that transformation).  The layouts below follow the ZIP
# format (PKWARE APPNOTE).
# NOTE(review): `subprocess` appears unused in the visible code — confirm
# before removing.
import sys, os, subprocess, struct, re

# Field tables: (field name, field type).  "uint32"/"uint16" are fixed-size
# little-endian integers; any other type string names the sibling field
# whose value gives this variable-length field's size (resolved by
# format_struct()).

# ZIP local file header — precedes each member's compressed data.
local_file_header = [
    ("signature", "uint32"),
    ("min_version", "uint16"),
    ("general_flag", "uint16"),
    ("compression", "uint16"),
    ("lastmod_time", "uint16"),
    ("lastmod_date", "uint16"),
    ("crc32", "uint32"),
    ("compressed_size", "uint32"),
    ("uncompressed_size", "uint32"),
    ("filename_size", "uint16"),
    ("extra_field_size", "uint16"),
    ("filename", "filename_size"),
    ("extra_field", "extra_field_size"),
    ("data", "compressed_size")
]

# ZIP central directory entry — one per member.
cdir_entry = [
    ("signature", "uint32"),
    ("creator_version", "uint16"),
    ("min_version", "uint16"),
    ("general_flag", "uint16"),
    ("compression", "uint16"),
    ("lastmod_time", "uint16"),
    ("lastmod_date", "uint16"),
    ("crc32", "uint32"),
    ("compressed_size", "uint32"),
    ("uncompressed_size", "uint32"),
    ("filename_size", "uint16"),
    ("extrafield_size", "uint16"),
    ("filecomment_size", "uint16"),
    ("disknum", "uint16"),
    ("internal_attr", "uint16"),
    ("external_attr", "uint32"),
    ("offset", "uint32"),
    ("filename", "filename_size"),
    ("extrafield", "extrafield_size"),
    ("filecomment", "filecomment_size"),
]

# ZIP end-of-central-directory record — found at the end of the archive.
cdir_end = [
    ("signature", "uint32"),
    ("disk_num", "uint16"),
    ("cdir_disk", "uint16"),
    ("disk_entries", "uint16"),
    ("cdir_entries", "uint16"),
    ("cdir_size", "uint32"),
    ("cdir_offset", "uint32"),
    ("comment_size", "uint16"),
]
# Maps a field type name to its struct-module format character.
type_mapping = { "uint32":"I", "uint16":"H"}

def format_struct (format):
    # Split a (name, type) field table into a little-endian struct format
    # string for the fixed-size fields and a mapping of variable-length
    # fields to the name of the field that holds their size.
    deferred = {}
    fmt = "<"
    for field_name, field_type in format:
        if field_type in type_mapping:
            fmt += type_mapping[field_type]
        else:
            # Not a primitive: length is given by another field.
            deferred[field_name] = field_type
    return (fmt, deferred)

def size_of(format):
    # Byte size of the fixed (non-variable-length) portion of the table.
    fmt, _ = format_struct(format)
    return struct.calcsize(fmt)
class MyStruct:
    # Generic mutable record for one parsed ZIP structure.  Members live in
    # a plain dict (self.struct_members) instead of instance attributes so
    # __getattr__/__setattr__ can police access; __dict__ is written
    # directly in __init__ to avoid recursing into __setattr__.
    def __init__(self, format, string_fields):
        # format: the (name, type) field table this record was parsed from.
        # string_fields: name -> name-of-length-field, for variable fields.
        self.__dict__["struct_members"] = {}
        self.__dict__["format"] = format
        self.__dict__["string_fields"] = string_fields
    def addMember(self, name, value):
        # Register a field; only registered names may later be assigned.
        self.__dict__["struct_members"][name] = value
    def __getattr__(self, item):
        try:
            return self.__dict__["struct_members"][item]
        except:
            pass
        # Unknown member: dump state to aid debugging, then fail.
        print("no %s" %item)
        print(self.__dict__["struct_members"])
        raise AttributeError
    def __setattr__(self, item, value):
        # Assignment is only allowed to already-registered members.
        if item in self.__dict__["struct_members"]:
            self.__dict__["struct_members"][item] = value
        else:
            raise AttributeError
    def pack(self):
        # Re-serialize: fixed-size integers go through struct.pack in table
        # order; variable-length byte fields are appended afterwards.
        # NOTE(review): Python-2-era code — str is bytes here; under
        # Python 3 the "" concatenation with member bytes would fail.
        extra_data = ""
        values = []
        string_fields = self.__dict__["string_fields"]
        struct_members = self.__dict__["struct_members"]
        format = self.__dict__["format"]
        for (name,_) in format:
            if name in string_fields:
                extra_data = extra_data + struct_members[name]
            else:
                values.append(struct_members[name]);
        return struct.pack(format_struct(format)[0], *values) + extra_data
# Signature of the ZIP end-of-central-directory record ("PK\x05\x06").
ENDSIG = 0x06054b50

def assert_true(cond, msg):
    # Raise with `msg` when `cond` is falsy; no-op otherwise.
    # FIX: removed an unreachable exit(1) that followed the raise.
    if not cond:
        raise Exception(msg)
class BinaryBlob:
    # Random-access reader over a whole file held in memory, with a cursor
    # (self.offset) advanced by every read.
    def __init__(self, f):
        # f: path of the file to load entirely into memory.
        self.data = open(f, "rb").read()
        self.offset = 0
        self.length = len(self.data)
    def readAt(self, pos, length):
        # Return `length` bytes at absolute `pos`; cursor moves past them.
        self.offset = pos + length
        return self.data[pos:self.offset]
    def read_struct (self, format, offset = None):
        # Parse one structure described by the (name, type) table `format`
        # at `offset` (default: current cursor) and return it as a MyStruct.
        if offset == None:
            offset = self.offset
        (fstr, string_fields) = format_struct(format)
        size = struct.calcsize(fstr)
        data = self.readAt(offset, size)
        ret = struct.unpack(fstr, data)
        retstruct = MyStruct(format, string_fields)
        i = 0
        for (name,_) in iter(format):
            member_desc = None
            if not name in string_fields:
                member_data = ret[i]
                i = i + 1
            else:
                # zip has data fields which are described by other struct fields, this does
                # additional reads to fill em in
                member_desc = string_fields[name]
                member_data = self.readAt(self.offset, retstruct.__getattr__(member_desc))
            retstruct.addMember(name, member_data)
        # sanity check serialization code
        data = self.readAt(offset, self.offset - offset)
        out_data = retstruct.pack()
        assert_true(out_data == data, "Serialization fail %d !=%d"% (len(out_data), len(data)))
        return retstruct
def optimizejar(jar, outjar, inlog = None):
    # Rewrite `jar` into `outjar`.  With `inlog` (a file listing member
    # names in first-access order): place those members first and move the
    # central directory to the front (offset 4) so startup needs one
    # contiguous read.  Without `inlog`: deoptimize back to a standard
    # layout and return the access-order list recovered from the readahead
    # prefix.  In both directions, directory entries and "extra field" data
    # are stripped and timestamps are normalized.
    # NOTE(review): Python-2 module (print statements near the end).
    if inlog is not None:
        inlog = open(inlog).read().rstrip()
        # in the case of an empty log still move the index forward
        if len(inlog) == 0:
            inlog = []
        else:
            inlog = inlog.split("\n")
    outlog = []
    jarblob = BinaryBlob(jar)
    # The end-of-central-directory record sits at the very end of the file.
    dirend = jarblob.read_struct(cdir_end, jarblob.length - size_of(cdir_end))
    assert_true(dirend.signature == ENDSIG, "no signature in the end");
    cdir_offset = dirend.cdir_offset
    readahead = 0
    # cdir_offset == 4 marks a previously optimized jar; its first 4 bytes
    # hold the suggested readahead size.
    if inlog is None and cdir_offset == 4:
        readahead = struct.unpack("<I", jarblob.readAt(0, 4))[0]
        print("%s: startup data ends at byte %d" % (outjar, readahead));
    total_stripped = 0;
    jarblob.offset = cdir_offset
    central_directory = []
    # First pass: read every central-directory entry and total the bytes
    # that stripping directory entries / extra fields will save.
    for i in range(0, dirend.cdir_entries):
        entry = jarblob.read_struct(cdir_entry)
        if entry.filename[-1:] == "/":
            total_stripped += len(entry.pack())
        else:
            total_stripped += entry.extrafield_size
        central_directory.append(entry)
    reordered_count = 0
    if inlog is not None:
        # Move the logged members (deduplicated, in log order) to the front
        # of the central directory via selection-style swaps.
        dup_guard = set()
        for ordered_name in inlog:
            if ordered_name in dup_guard:
                continue
            else:
                dup_guard.add(ordered_name)
            found = False
            for i in range(reordered_count, len(central_directory)):
                if central_directory[i].filename == ordered_name:
                    # swap the cdir entries
                    tmp = central_directory[i]
                    central_directory[i] = central_directory[reordered_count]
                    central_directory[reordered_count] = tmp
                    reordered_count = reordered_count + 1
                    found = True
                    break
            if not found:
                print( "Can't find '%s' in %s" % (ordered_name, jar))
    outfd = open(outjar, "wb")
    out_offset = 0
    if inlog is not None:
        # have to put central directory at offset 4 cos 0 confuses some tools.
        # This also lets us specify how many entries should be preread
        dirend.cdir_offset = 4
        # make room for central dir + end of dir + 4 extra bytes at front
        out_offset = dirend.cdir_offset + dirend.cdir_size + size_of(cdir_end) - total_stripped
        outfd.seek(out_offset)
    cdir_data = ""
    written_count = 0
    crc_mapping = {}
    dups_found = 0
    dupe_bytes = 0
    # store number of bytes suggested for readahead
    # Second pass: copy member data in (possibly reordered) directory order,
    # rebuilding each local header and central-directory entry.
    for entry in central_directory:
        # read in the header twice..first for comparison, second time for convenience when writing out
        jarfile = jarblob.read_struct(local_file_header, entry.offset)
        assert_true(jarfile.filename == entry.filename, "Directory/Localheader mismatch")
        # drop directory entries
        if entry.filename[-1:] == "/":
            total_stripped += len(jarfile.pack())
            dirend.cdir_entries -= 1
            continue
        # drop extra field data
        else:
            total_stripped += jarfile.extra_field_size;
            entry.extrafield = jarfile.extra_field = ""
            entry.extrafield_size = jarfile.extra_field_size = 0
            # January 1st, 2010
            entry.lastmod_date = jarfile.lastmod_date = ((2010 - 1980) << 9) | (1 << 5) | 1
            entry.lastmod_time = jarfile.lastmod_time = 0
        data = jarfile.pack()
        outfd.write(data)
        old_entry_offset = entry.offset
        entry.offset = out_offset
        out_offset = out_offset + len(data)
        entry_data = entry.pack()
        cdir_data += entry_data
        expected_len = entry.filename_size + entry.extrafield_size + entry.filecomment_size
        # NOTE(review): this condition looks inverted/miscomputed — the
        # message implies the sizes should agree, but entry_data also
        # contains the fixed-size header fields, so `!=` always holds and
        # the assert can never fire.  Left as-is (doc-only change); confirm
        # the intended check before fixing.
        assert_true(len(entry_data) != expected_len,
                    "%s entry size - expected:%d got:%d" % (entry.filename, len(entry_data), expected_len))
        written_count += 1
        if entry.crc32 in crc_mapping:
            dups_found += 1
            dupe_bytes += entry.compressed_size + len(data) + len(entry_data)
            print("%s\n\tis a duplicate of\n%s\n---"%(entry.filename, crc_mapping[entry.crc32]))
        else:
            crc_mapping[entry.crc32] = entry.filename;
        if inlog is not None:
            if written_count == reordered_count:
                readahead = out_offset
                print("%s: startup data ends at byte %d"%( outjar, readahead));
            elif written_count < reordered_count:
                pass
                #print("%s @ %d" % (entry.filename, out_offset))
        elif readahead >= old_entry_offset + len(data):
            # Deoptimizing: reconstruct the access log from the old
            # readahead region.
            outlog.append(entry.filename)
            reordered_count += 1
    if inlog is None:
        dirend.cdir_offset = out_offset
    if dups_found > 0:
        print("WARNING: Found %d duplicate files taking %d bytes"%(dups_found, dupe_bytes))
    dirend.cdir_size = len(cdir_data)
    dirend.disk_entries = dirend.cdir_entries
    dirend_data = dirend.pack()
    assert_true(size_of(cdir_end) == len(dirend_data), "Failed to serialize directory end correctly. Serialized size;%d, expected:%d"%(len(dirend_data), size_of(cdir_end)));
    outfd.seek(dirend.cdir_offset)
    outfd.write(cdir_data)
    outfd.write(dirend_data)
    # for ordered jars the central directory is written in the begining of the file, so a second central-directory
    # entry has to be written in the end of the file
    if inlog is not None:
        outfd.seek(0)
        outfd.write(struct.pack("<I", readahead));
        outfd.seek(out_offset)
        outfd.write(dirend_data)
    print "Stripped %d bytes" % total_stripped
    print "%s %d/%d in %s" % (("Ordered" if inlog is not None else "Deoptimized"),
                              reordered_count, len(central_directory), outjar)
    outfd.close()
    return outlog
# Usage check runs at import time, before the __main__ guard below.
if len(sys.argv) != 5:
    # NOTE(review): Python-2 print statement.
    print "Usage: --optimize|--deoptimize %s JAR_LOG_DIR IN_JAR_DIR OUT_JAR_DIR" % sys.argv[0]
    exit(1)

# Matches names ending in ".jar" or ".ja" (e.g. omni.ja) — the "r" is
# intentionally optional.
jar_regex = re.compile("\\.jar?$")
def optimize(JAR_LOG_DIR, IN_JAR_DIR, OUT_JAR_DIR):
    # Optimize every jar in IN_JAR_DIR into OUT_JAR_DIR, using the matching
    # "<name>.log" from JAR_LOG_DIR when one exists (None otherwise).
    for candidate in os.listdir(IN_JAR_DIR):
        if not re.search(jar_regex, candidate):
            continue
        src_jar = os.path.join(IN_JAR_DIR, candidate)
        dst_jar = os.path.join(OUT_JAR_DIR, candidate)
        log_path = os.path.join(JAR_LOG_DIR, candidate + ".log")
        # Missing log file means "no known startup order".
        optimizejar(src_jar, dst_jar,
                    log_path if os.path.isfile(log_path) else None)
def deoptimize(JAR_LOG_DIR, IN_JAR_DIR, OUT_JAR_DIR):
    # De-optimize every jar in IN_JAR_DIR into OUT_JAR_DIR and record the
    # recovered member-access order into JAR_LOG_DIR/<name>.log.
    if not os.path.exists(JAR_LOG_DIR):
        os.makedirs(JAR_LOG_DIR)
    for candidate in os.listdir(IN_JAR_DIR):
        if not re.search(jar_regex, candidate):
            continue
        src_jar = os.path.join(IN_JAR_DIR, candidate)
        dst_jar = os.path.join(OUT_JAR_DIR, candidate)
        log_path = os.path.join(JAR_LOG_DIR, candidate + ".log")
        recovered_order = optimizejar(src_jar, dst_jar, None)
        open(log_path, "wb").write("\n".join(recovered_order))
def main():
    # CLI entry point: dispatch on the mode switch given as argv[1].
    mode = sys.argv[1]
    jar_log_dir = sys.argv[2]
    in_jar_dir = sys.argv[3]
    out_jar_dir = sys.argv[4]
    handlers = {"--optimize": optimize, "--deoptimize": deoptimize}
    handler = handlers.get(mode)
    if handler is None:
        print("Unknown mode %s" % mode)
        exit(1)
    handler(jar_log_dir, in_jar_dir, out_jar_dir)

if __name__ == '__main__':
    main()

View File

@ -878,6 +878,23 @@ dnl when we can run target binaries.
AC_SUBST(UNIVERSAL_BINARY)
AC_SUBST(MOZ_CAN_RUN_PROGRAMS)
MOZ_ARG_WITH_STRING(unify-dist,
[ --with-unify-dist=dir Location of the dist directory to unify with at packaging time (Mac OS X universal build only)],
UNIFY_DIST=$withval)
if test -n "$UNIVERSAL_BINARY"; then
if test -z "$UNIFY_DIST"; then
AC_MSG_ERROR([You need to provide the --with-unify-dist=dir argument when performing a universal build])
fi
case "$UNIFY_DIST" in
/*)
;;
*)
UNIFY_DIST="${MOZ_BUILD_ROOT}/${UNIFY_DIST}"
;;
esac
fi
AC_SUBST(UNIFY_DIST)
dnl ========================================================
dnl Check for MacOS deployment target version
dnl ========================================================
@ -2327,6 +2344,8 @@ ia64*-hpux*)
RCFLAGS='-n'
MOZ_USER_DIR="Mozilla"
ZIP="$ZIP -X"
STRIP=lxlite
STRIP_FLAGS="/yua /ydd /yxd /ynl /anp /b- /cs+ /d /i- /ml1 /mr2 /mf2 /r+ /u+ /x- /zs:0 /zx /zd"
if test "$MOZTOOLS"; then
MOZ_TOOLS_DIR=`echo $MOZTOOLS | sed -e 's|\\\\|/|g'`
@ -7677,18 +7696,20 @@ dnl done during packaging with omnijar.
if test "$MOZ_CHROME_FILE_FORMAT" = "omni"; then
MOZ_OMNIJAR=1
AC_DEFINE(MOZ_OMNIJAR)
if test "$OS_ARCH" = "WINNT" -o "$OS_ARCH" = "OS2" -o "$MOZ_WIDGET_TOOLKIT" = "android"; then
MOZ_CHROME_FILE_FORMAT=flat
else
MOZ_CHROME_FILE_FORMAT=symlink
fi
elif test "$MOZ_CHROME_FILE_FORMAT" = "jar"; then
AC_DEFINE(MOZ_CHROME_FILE_FORMAT_JAR)
fi
MOZ_PACKAGER_FORMAT="$MOZ_CHROME_FILE_FORMAT"
if test "$OS_ARCH" = "WINNT" -o "$OS_ARCH" = "OS2" -o "$MOZ_WIDGET_TOOLKIT" = "android"; then
MOZ_CHROME_FILE_FORMAT=flat
else
MOZ_CHROME_FILE_FORMAT=symlink
fi
OMNIJAR_NAME=omni.ja
AC_SUBST(OMNIJAR_NAME)
AC_SUBST(MOZ_OMNIJAR)
AC_SUBST(MOZ_PACKAGER_FORMAT)
dnl ========================================================
dnl = Define default location for MOZILLA_FIVE_HOME

View File

@ -727,8 +727,6 @@ DIRS += $(foreach tier,$(TIERS),$(tier_$(tier)_dirs))
STATIC_DIRS += $(foreach tier,$(TIERS),$(tier_$(tier)_staticdirs))
endif
OPTIMIZE_JARS_CMD = $(PYTHON) $(call core_abspath,$(topsrcdir)/config/optimizejars.py)
CREATE_PRECOMPLETE_CMD = $(PYTHON) $(call core_abspath,$(topsrcdir)/config/createprecomplete.py)
# MDDEPDIR is the subdirectory where dependency files are stored

View File

@ -34,12 +34,7 @@ DEFINES += \
-DPREF_DIR=$(PREF_DIR) \
$(NULL)
ifeq ($(MOZ_CHROME_FILE_FORMAT),jar)
JAREXT=.jar
else
JAREXT=
endif
DEFINES += -DJAREXT=$(JAREXT)
DEFINES += -DJAREXT=
include $(topsrcdir)/ipc/app/defs.mk
DEFINES += -DMOZ_CHILD_PROCESS_NAME=$(MOZ_CHILD_PROCESS_NAME)
@ -48,7 +43,7 @@ ifdef MOZ_PKG_MANIFEST_P
MOZ_PKG_MANIFEST = package-manifest
endif
MOZ_POST_STAGING_CMD = find chrome -type f -name *.properties -exec $(PERL) -n -i -e 'print unless /^\#/' {} \;
MOZ_PACKAGER_MINIFY=1
include $(topsrcdir)/toolkit/mozapps/installer/packager.mk

View File

@ -34,12 +34,7 @@ DEFINES += \
-DPREF_DIR=$(PREF_DIR) \
$(NULL)
ifeq ($(MOZ_CHROME_FILE_FORMAT),jar)
JAREXT=.jar
else
JAREXT=
endif
DEFINES += -DJAREXT=$(JAREXT)
DEFINES += -DJAREXT=
include $(topsrcdir)/ipc/app/defs.mk
DEFINES += -DMOZ_CHILD_PROCESS_NAME=$(MOZ_CHILD_PROCESS_NAME)
@ -48,7 +43,7 @@ ifdef MOZ_PKG_MANIFEST_P
MOZ_PKG_MANIFEST = package-manifest
endif
MOZ_POST_STAGING_CMD = find chrome -type f -name *.properties -exec $(PERL) -n -i -e 'print unless /^\#/' {} \;
MOZ_PACKAGER_MINIFY=1
include $(topsrcdir)/toolkit/mozapps/installer/packager.mk

View File

@ -13,6 +13,7 @@ test_dirs := \
mozbuild/mozbuild/test \
mozbuild/mozbuild/test/compilation \
mozbuild/mozbuild/test/frontend \
mozbuild/mozpack/test \
$(NULL)
PYTHON_UNIT_TESTS := $(foreach dir,$(test_dirs),$(wildcard $(srcdir)/$(dir)/*.py))

View File

@ -13,6 +13,7 @@ Modules Overview
* mozbuild.frontend -- Functionality for reading build frontend files
(what defines the build system) and converting them to data structures
which are fed into build backends to produce backend configurations.
* mozpack -- Functionality related to packaging builds.
Overview
========

View File

View File

@ -0,0 +1,255 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

# mozpack.chrome.flags: parsing and matching of the flag expressions that
# may follow a chrome manifest entry (e.g. "contentaccessible=yes",
# "appversion>=3.5").
import re
# LooseVersion provides lenient version-string comparison.
from distutils.version import LooseVersion
# errors is mozpack's error reporter (project module).
from mozpack.errors import errors
from collections import OrderedDict
class Flag(object):
    '''
    A single boolean flag from a manifest entry.

    Accepted definition forms:
        "flag"            (shorthand for "flag=true")
        "flag=yes|true|1"
        "flag=no|false|0"
    '''
    def __init__(self, name):
        '''
        Create an unset flag called `name`.
        '''
        self.name = name
        self.value = None

    def add_definition(self, definition):
        '''
        Record a value for this flag, replacing any earlier value.
        '''
        if definition == self.name:
            # A bare "flag" means true.
            self.value = True
            return
        assert(definition.startswith(self.name))
        tail = definition[len(self.name):]
        if not tail.startswith('='):
            return errors.fatal('Malformed flag: %s' % definition)
        candidate = tail[1:]
        if candidate not in ('yes', 'true', '1', 'no', 'false', '0'):
            return errors.fatal('Unknown value in: %s' % definition)
        self.value = candidate

    def matches(self, value):
        '''
        Return whether the flag value matches `value`, canonicalizing
        both sides; an unset flag counts as false.
        '''
        truthy = ('yes', 'true', '1', True)
        falsy = ('no', 'false', '0', False)
        if value in truthy:
            return self.value in truthy
        if value in falsy:
            return self.value in falsy + (None,)
        raise RuntimeError('Invalid value: %s' % value)

    def __str__(self):
        '''
        Serialize back to the form given to the last add_definition() call.
        '''
        if self.value is None:
            return ''
        return self.name if self.value is True else \
            '%s=%s' % (self.name, self.value)
class StringFlag(object):
    '''
    A string-valued flag from a manifest entry, in the form:
        "flag=string"
        "flag!=string"
    '''
    def __init__(self, name):
        '''
        Create a StringFlag called `name` with no definitions.
        '''
        self.name = name
        self.values = []

    def add_definition(self, definition):
        '''
        Append one "=value" or "!=value" definition.
        '''
        assert(definition.startswith(self.name))
        rest = definition[len(self.name):]
        if rest.startswith('!='):
            self.values.append(('!=', rest[2:]))
        elif rest.startswith('='):
            self.values.append(('==', rest[1:]))
        else:
            return errors.fatal('Malformed flag: %s' % definition)

    def matches(self, value):
        '''
        Return whether any stored definition matches `value`; a flag with
        no definitions matches everything.

        For example, with 'foo!=bar' defined, matches('bar') is False and
        matches('qux') is True; with 'foo=bar' and 'foo=baz' defined,
        matches('bar') and matches('baz') are True, matches('qux') False.
        '''
        if not self.values:
            return True
        # NOTE(review): eval() of the stored operator, kept from the
        # original; operands are local names, not external input.
        for op, expected in self.values:
            if eval('value %s expected' % op):
                return True
        return False

    def __str__(self):
        '''
        Serialize the definitions in the same form they were given.
        '''
        parts = []
        for op, expected in self.values:
            sep = '=' if op == '==' else '!='
            parts.append('%s%s%s' % (self.name, sep, expected))
        return ' '.join(parts)
class VersionFlag(object):
    '''
    A version-valued flag from a manifest entry, in the form:
        "flag=version"
        "flag<=version"
        "flag<version"
        "flag>=version"
        "flag>version"
    '''
    def __init__(self, name):
        '''
        Create a VersionFlag called `name` with no definitions.
        '''
        self.name = name
        self.values = []

    def add_definition(self, definition):
        '''
        Append one comparison definition, parsing the version with
        LooseVersion.
        '''
        assert(definition.startswith(self.name))
        rest = definition[len(self.name):]
        if rest.startswith('='):
            self.values.append(('==', LooseVersion(rest[1:])))
            return
        if len(rest) > 1 and rest[0] in ('<', '>'):
            if rest[1] == '=':
                if len(rest) < 3:
                    return errors.fatal('Malformed flag: %s' % definition)
                self.values.append((rest[0:2], LooseVersion(rest[2:])))
            else:
                self.values.append((rest[0], LooseVersion(rest[1:])))
            return
        return errors.fatal('Malformed flag: %s' % definition)

    def matches(self, value):
        '''
        Return whether any stored comparison holds for `value`; a flag
        with no definitions matches everything.

        For example, with 'foo>=1.0' defined, matches('1.0') and
        matches('1.1') are True, matches('0.9') False; adding 'foo<0.5'
        also makes matches('0.4') True.
        '''
        version = LooseVersion(value)
        if not self.values:
            return True
        # NOTE(review): eval() of the stored operator, kept from the
        # original; operands are local names, not external input.
        for op, bound in self.values:
            if eval('version %s bound' % op):
                return True
        return False

    def __str__(self):
        '''
        Serialize the definitions in the same form they were given.
        '''
        parts = []
        for op, bound in self.values:
            if op == '==':
                parts.append('%s=%s' % (self.name, bound))
            else:
                parts.append('%s%s%s' % (self.name, op, bound))
        return ' '.join(parts)
class Flags(OrderedDict):
    '''
    Class to handle a set of flags definitions given on a single manifest
    entry.
    '''
    # Flag name -> handler class for each known flag.
    FLAGS = {
        'application': StringFlag,
        'appversion': VersionFlag,
        'platformversion': VersionFlag,
        'contentaccessible': Flag,
        'os': StringFlag,
        'osversion': VersionFlag,
        'abi': StringFlag,
        'platform': Flag,
        'xpcnativewrappers': Flag,
        'tablet': Flag,
    }
    # Splits a definition string on its comparison operator.
    RE = re.compile(r'([!<>=]+)')

    def __init__(self, *flags):
        '''
        Initialize a set of flags given in string form.
           flags = Flags('contentaccessible=yes', 'appversion>=3.5')
        '''
        OrderedDict.__init__(self)
        for definition in flags:
            name = self.RE.split(definition)[0]
            if not name in self.FLAGS:
                errors.fatal('Unknown flag: %s' % name)
                continue
            # Lazily create one handler per flag name; a handler may receive
            # several definitions.
            if not name in self:
                self[name] = self.FLAGS[name](name)
            self[name].add_definition(definition)

    def __str__(self):
        '''
        Serialize the set of flags.
        '''
        return ' '.join(str(self[name]) for name in self)

    def match(self, **filter):
        '''
        Return whether the set of flags match the set of given filters.
            flags = Flags('contentaccessible=yes', 'appversion>=3.5',
                          'application=foo')
            flags.match(application='foo') returns True
            flags.match(application='foo', appversion='3.5') returns True
            flags.match(application='foo', appversion='3.0') returns False
        '''
        # Filters for flags not defined on the entry are ignored.
        return all(self[name].matches(value)
                   for name, value in filter.iteritems()
                   if name in self)

View File

@ -0,0 +1,364 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import re
import os
from urlparse import urlparse
import mozpack.path
from mozpack.chrome.flags import Flags
from mozpack.errors import errors
class ManifestEntry(object):
    '''
    Base class for all manifest entry types.
    Subclasses may define the following class or member variables:
        - localized: indicates whether the manifest entry is used for localized
          data.
        - type: the manifest entry type (e.g. 'content' in
          'content global content/global/')
        - allowed_flags: a set of flags allowed to be defined for the given
          manifest entry type.
    A manifest entry is attached to a base path, defining where the manifest
    entry is bound to, and that is used to find relative paths defined in
    entries.
    '''
    localized = False
    type = None
    allowed_flags = [
        'application',
        'platformversion',
        'os',
        'osversion',
        'abi',
        'xpcnativewrappers',
        'tablet',
    ]

    def __init__(self, base, *flags):
        '''
        Initialize a manifest entry with the given base path and flags.
        '''
        self.base = base
        self.flags = Flags(*flags)
        unsupported = [f for f in self.flags if not f in self.allowed_flags]
        if unsupported:
            errors.fatal('%s unsupported for %s manifest entries' %
                         (','.join(unsupported), self.type))

    def serialize(self, *args):
        '''
        Serialize the manifest entry.
        '''
        parts = [self.type] + list(args)
        serialized_flags = str(self.flags)
        if serialized_flags:
            parts.append(serialized_flags)
        return ' '.join(parts)

    def __eq__(self, other):
        # Entries are equal when they serialize identically under the same
        # base path.
        return self.base == other.base and str(self) == str(other)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __repr__(self):
        return '<%s@%s>' % (str(self), self.base)

    def move(self, base):
        '''
        Return a new manifest entry with a different base path.
        '''
        return parse_manifest_line(base, str(self))

    def rebase(self, base):
        '''
        Return a new manifest entry with all relative paths defined in the
        entry relative to a new base directory.
        The base class doesn't define relative paths, so it is equivalent to
        move().
        '''
        return self.move(base)
class ManifestEntryWithRelPath(ManifestEntry):
    '''
    Abstract manifest entry type with a relative path definition.
    '''
    def __init__(self, base, relpath, *flags):
        ManifestEntry.__init__(self, base, *flags)
        self.relpath = relpath

    def __str__(self):
        return self.serialize(self.relpath)

    def rebase(self, base):
        '''
        Return a new manifest entry with all relative paths defined in the
        entry relative to a new base directory.
        '''
        moved = ManifestEntry.rebase(self, base)
        moved.relpath = mozpack.path.rebase(self.base, base, self.relpath)
        return moved

    @property
    def path(self):
        # Normalized path of the entry's target, including the base path.
        return mozpack.path.normpath(
            mozpack.path.join(self.base, self.relpath))
class Manifest(ManifestEntryWithRelPath):
    '''
    Class for 'manifest' entries.
        manifest some/path/to/another.manifest
    '''
    # Serialized keyword for this entry type.
    type = 'manifest'
class ManifestChrome(ManifestEntryWithRelPath):
    '''
    Abstract class for chrome entries.
    '''
    def __init__(self, base, name, relpath, *flags):
        # name is the chrome package name (e.g. 'global' in
        # 'content global content/global/').
        ManifestEntryWithRelPath.__init__(self, base, relpath, *flags)
        self.name = name

    @property
    def location(self):
        # Location of the chrome package data: base path joined with the
        # entry's relative path.
        return mozpack.path.join(self.base, self.relpath)
class ManifestContent(ManifestChrome):
    '''
    Class for 'content' entries.
        content global content/global/
    '''
    type = 'content'
    # 'content' entries support these flags in addition to the common set
    # from ManifestEntry.allowed_flags.
    allowed_flags = ManifestChrome.allowed_flags + [
        'contentaccessible',
        'platform',
    ]

    def __str__(self):
        return self.serialize(self.name, self.relpath)
class ManifestMultiContent(ManifestChrome):
    '''
    Abstract class for chrome entries with multiple definitions.
    Used for locale and skin entries.
    '''
    type = None

    def __init__(self, base, name, id, relpath, *flags):
        # id identifies the variant, e.g. the locale code for 'locale'
        # entries or the skin version for 'skin' entries.
        ManifestChrome.__init__(self, base, name, relpath, *flags)
        self.id = id

    def __str__(self):
        return self.serialize(self.name, self.id, self.relpath)
class ManifestLocale(ManifestMultiContent):
    '''
    Class for 'locale' entries.
        locale global en-US content/en-US/
        locale global fr content/fr/
    '''
    # Locale entries always point at localized data.
    localized = True
    type = 'locale'
class ManifestSkin(ManifestMultiContent):
    '''
    Class for 'skin' entries.
        skin global classic/1.0 content/skin/classic/
    '''
    type = 'skin'
class ManifestOverload(ManifestEntry):
    '''
    Abstract class for chrome entries defining some kind of overloading.
    Used for overlay, override or style entries.
    '''
    type = None

    def __init__(self, base, overloaded, overload, *flags):
        ManifestEntry.__init__(self, base, *flags)
        self.overloaded = overloaded
        self.overload = overload

    def __str__(self):
        return self.serialize(self.overloaded, self.overload)

    @property
    def localized(self):
        # The entry is localized when the overloading resource is a
        # chrome://.../locale/... url.
        url = urlparse(self.overload)
        if url.scheme != 'chrome':
            return False
        return url.path.split('/')[0:2] == ['', 'locale']
class ManifestOverlay(ManifestOverload):
    '''
    Class for 'overlay' entries.
        overlay chrome://global/content/viewSource.xul \
            chrome://browser/content/viewSourceOverlay.xul
    '''
    type = 'overlay'
class ManifestStyle(ManifestOverload):
    '''
    Class for 'style' entries.
        style chrome://global/content/customizeToolbar.xul \
            chrome://browser/skin/
    '''
    type = 'style'
class ManifestOverride(ManifestOverload):
    '''
    Class for 'override' entries.
        override chrome://global/locale/netError.dtd \
            chrome://browser/locale/netError.dtd
    '''
    type = 'override'
class ManifestResource(ManifestEntry):
    '''
    Class for 'resource' entries.
        resource gre-resources toolkit/res/
        resource services-sync resource://gre/modules/services-sync/
    The target may be a relative path or a resource or chrome url.
    '''
    type = 'resource'

    def __init__(self, base, name, target, *flags):
        ManifestEntry.__init__(self, base, *flags)
        self.name = name
        self.target = target

    def __str__(self):
        return self.serialize(self.name, self.target)

    def rebase(self, base):
        clone = ManifestEntry.rebase(self, base)
        scheme = urlparse(self.target).scheme
        # Only relative paths and jar: targets are adjusted for the new base;
        # other urls are location-independent.
        if not scheme or scheme == 'jar':
            clone.target = mozpack.path.rebase(self.base, base, self.target)
        return clone
class ManifestBinaryComponent(ManifestEntryWithRelPath):
    '''
    Class for 'binary-component' entries.
        binary-component some/path/to/a/component.dll
    '''
    type = 'binary-component'
class ManifestComponent(ManifestEntryWithRelPath):
    '''
    Class for 'component' entries.
        component {b2bba4df-057d-41ea-b6b1-94a10a8ede68} foo.js
    '''
    type = 'component'

    def __init__(self, base, cid, file, *flags):
        # cid is the component's class ID (a GUID string).
        ManifestEntryWithRelPath.__init__(self, base, file, *flags)
        self.cid = cid

    def __str__(self):
        return self.serialize(self.cid, self.relpath)
class ManifestInterfaces(ManifestEntryWithRelPath):
    '''
    Class for 'interfaces' entries.
        interfaces foo.xpt
    '''
    type = 'interfaces'
class ManifestCategory(ManifestEntry):
    '''
    Class for 'category' entries.
        category command-line-handler m-browser @mozilla.org/browser/clh;
    '''
    type = 'category'

    def __init__(self, base, category, name, value, *flags):
        # category is the category name, name the entry name within that
        # category, and value the associated value.
        ManifestEntry.__init__(self, base, *flags)
        self.category = category
        self.name = name
        self.value = value

    def __str__(self):
        return self.serialize(self.category, self.name, self.value)
class ManifestContract(ManifestEntry):
    '''
    Class for 'contract' entries.
        contract @mozilla.org/foo;1 {b2bba4df-057d-41ea-b6b1-94a10a8ede68}
    '''
    type = 'contract'

    def __init__(self, base, contractID, cid, *flags):
        # contractID maps onto the class ID (cid) of the implementing
        # component.
        ManifestEntry.__init__(self, base, *flags)
        self.contractID = contractID
        self.cid = cid

    def __str__(self):
        return self.serialize(self.contractID, self.cid)
# All manifest classes by their type name.
MANIFESTS_TYPES = dict([(c.type, c) for c in globals().values()
                        if type(c) == type and issubclass(c, ManifestEntry)
                        and hasattr(c, 'type') and c.type])

# Matches trailing comments on manifest lines, for removal.
MANIFEST_RE = re.compile(r'\s*#.*$')
def parse_manifest_line(base, line):
    '''
    Parse a line from a manifest file with the given base directory and
    return the corresponding ManifestEntry instance.
    '''
    # Remove comments, then split into whitespace-separated tokens.
    tokens = MANIFEST_RE.sub('', line).strip().split()
    if not tokens:
        return None
    directive = tokens[0]
    if not directive in MANIFESTS_TYPES:
        return errors.fatal('Unknown manifest directive: %s' % directive)
    # The remaining tokens are the entry's arguments and flags.
    return MANIFESTS_TYPES[directive](base, *tokens[1:])
def parse_manifest(root, path, fileobj=None):
    '''
    Parse a manifest file, yielding a ManifestEntry per non-empty line.
    '''
    base = mozpack.path.dirname(path)
    if root:
        path = os.path.normpath(os.path.abspath(os.path.join(root, path)))
    if not fileobj:
        fileobj = open(path)
    for linenum, line in enumerate(fileobj.readlines(), start=1):
        # Attach file/line context so parse errors point at the right spot.
        with errors.context(path, linenum):
            entry = parse_manifest_line(base, line)
            if entry:
                yield entry
def is_manifest(path):
    '''
    Return whether the given path is that of a manifest file.
    '''
    if not path.endswith('.manifest'):
        return False
    # Windows assembly manifests are not chrome manifests.
    return not (path.endswith('.CRT.manifest') or
                path.endswith('.exe.manifest'))

View File

@ -0,0 +1,250 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
from mozpack.errors import errors
from mozpack.files import (
BaseFile,
Dest,
)
import mozpack.path
import errno
from collections import OrderedDict
def ensure_parent_dir(file):
    '''Ensures the directory parent to the given file exists'''
    parent = os.path.dirname(file)
    if parent and not os.path.exists(parent):
        try:
            os.makedirs(parent)
        except OSError as exc:
            # A concurrent creation of the directory is not an error.
            if exc.errno != errno.EEXIST:
                raise
class FileRegistry(object):
    '''
    Generic container to keep track of a set of BaseFile instances. It
    preserves the order under which the files are added, but doesn't keep
    track of empty directories (directories are not stored at all).
    The paths associated with the BaseFile instances are relative to an
    unspecified (virtual) root directory.
        registry = FileRegistry()
        registry.add('foo/bar', file_instance)
    '''
    def __init__(self):
        # Ordered mapping of path -> BaseFile instance.
        self._files = OrderedDict()

    def add(self, path, content):
        '''
        Add a BaseFile instance to the container, under the given path.
        '''
        assert isinstance(content, BaseFile)
        if path in self._files:
            return errors.error("%s already added" % path)
        # Check whether any parent of the given path is already stored
        parent = mozpack.path.dirname(path)
        while parent:
            if parent in self._files:
                return errors.error("Can't add %s: %s is a file" %
                                    (path, parent))
            parent = mozpack.path.dirname(parent)
        self._files[path] = content

    def match(self, pattern):
        '''
        Return the list of paths, stored in the container, matching the
        given pattern. See the mozpack.path.match documentation for a
        description of the handled patterns.
        '''
        if '*' in pattern:
            return [path for path in self.paths()
                    if mozpack.path.match(path, pattern)]
        if pattern == '':
            return self.paths()
        if pattern in self._files:
            return [pattern]
        # Otherwise treat the pattern as a directory: match everything
        # stored under it.
        return [path for path in self.paths()
                if mozpack.path.basedir(path, [pattern]) == pattern]

    def remove(self, pattern):
        '''
        Remove paths matching the given pattern from the container. See the
        mozpack.path.match documentation for a description of the handled
        patterns.
        '''
        matches = self.match(pattern)
        if not matches:
            return errors.error("Can't remove %s: %s" % (pattern,
                "not matching anything previously added"))
        for path in matches:
            del self._files[path]

    def paths(self):
        '''
        Return all paths stored in the container, in the order they were added.
        '''
        return self._files.keys()

    def __len__(self):
        '''
        Return number of paths stored in the container.
        '''
        return len(self._files)

    def __contains__(self, pattern):
        # 'in' would be ambiguous with pattern matching; force callers to be
        # explicit by using contains().
        raise RuntimeError("'in' operator forbidden for %s. Use contains()." %
                           self.__class__.__name__)

    def contains(self, pattern):
        '''
        Return whether the container contains paths matching the given
        pattern. See the mozpack.path.match documentation for a description of
        the handled patterns.
        '''
        return len(self.match(pattern)) > 0

    def __getitem__(self, path):
        '''
        Return the BaseFile instance stored in the container for the given
        path.
        '''
        return self._files[path]

    def __iter__(self):
        '''
        Iterate over all (path, BaseFile instance) pairs from the container.
            for path, file in registry:
                (...)
        '''
        return self._files.iteritems()
class FileCopier(FileRegistry):
    '''
    FileRegistry with the ability to copy the registered files to a separate
    directory.
    '''
    def copy(self, destination):
        '''
        Copy all registered files to the given destination path. The given
        destination can be an existing directory, or not exist at all. It
        can't be e.g. a file.
        The copy process acts a bit like rsync: files are not copied when they
        don't need to (see mozpack.files for details on file.copy), and files
        existing in the destination directory that aren't registered are
        removed.
        '''
        assert isinstance(destination, basestring)
        assert not os.path.exists(destination) or os.path.isdir(destination)
        destination = os.path.normpath(destination)
        # Copy each registered file, recording the normalized destination
        # paths that are expected to exist afterwards.
        dest_files = set()
        for path, file in self:
            destfile = os.path.normpath(os.path.join(destination, path))
            dest_files.add(destfile)
            ensure_parent_dir(destfile)
            file.copy(destfile)
        # Remove files present in the destination that aren't registered.
        actual_dest_files = set()
        for root, dirs, files in os.walk(destination):
            for f in files:
                actual_dest_files.add(os.path.normpath(os.path.join(root, f)))
        for f in actual_dest_files - dest_files:
            os.remove(f)
        # Prune directories left empty by the removal above; removedirs also
        # removes empty ancestor directories, stopping at the first non-empty
        # one.
        for root, dirs, files in os.walk(destination):
            if not files and not dirs:
                os.removedirs(root)
class Jarrer(FileRegistry, BaseFile):
    '''
    FileRegistry with the ability to copy and pack the registered files as a
    jar file. Also acts as a BaseFile instance, to be copied with a FileCopier.
    '''
    def __init__(self, compress=True, optimize=True):
        '''
        Create a Jarrer instance. See mozpack.mozjar.JarWriter documentation
        for details on the compress and optimize arguments.
        '''
        self.compress = compress
        self.optimize = optimize
        # Paths registered for the jar preload section (see preload()).
        self._preload = []
        FileRegistry.__init__(self)

    def copy(self, dest):
        '''
        Pack all registered files in the given destination jar. The given
        destination jar may be a path to jar file, or a Dest instance for
        a jar file.
        If the destination jar file exists, its (compressed) contents are used
        instead of the registered BaseFile instances when appropriate.
        '''
        class DeflaterDest(Dest):
            '''
            Dest-like class, reading from a file-like object initially, but
            switching to a Deflater object if written to.
                dest = DeflaterDest(original_file)
                dest.read() # Reads original_file
                dest.write(data) # Creates a Deflater and write data there
                dest.read() # Re-opens the Deflater and reads from it
            '''
            def __init__(self, orig=None, compress=True):
                self.mode = None
                self.deflater = orig
                self.compress = compress

            def read(self, length=-1):
                if self.mode != 'r':
                    # A DeflaterDest is never switched back from write to
                    # read mode.
                    assert self.mode is None
                    self.mode = 'r'
                return self.deflater.read(length)

            def write(self, data):
                if self.mode != 'w':
                    from mozpack.mozjar import Deflater
                    self.deflater = Deflater(self.compress)
                    self.mode = 'w'
                self.deflater.write(data)

            def exists(self):
                return self.deflater is not None

        if isinstance(dest, basestring):
            dest = Dest(dest)
        assert isinstance(dest, Dest)

        from mozpack.mozjar import JarWriter, JarReader
        try:
            old_jar = JarReader(fileobj=dest)
        except Exception:
            # No existing jar, or not a readable jar: start from scratch.
            old_jar = []

        old_contents = dict([(f.filename, f) for f in old_jar])

        with JarWriter(fileobj=dest, compress=self.compress,
                       optimize=self.optimize) as jar:
            for path, file in self:
                # When the path already exists in the old jar, hand its
                # already-compressed data to the DeflaterDest; file.copy may
                # then skip recompression if the content is unchanged.
                if path in old_contents:
                    deflater = DeflaterDest(old_contents[path], self.compress)
                else:
                    deflater = DeflaterDest(compress=self.compress)
                file.copy(deflater)
                jar.add(path, deflater.deflater)
            if self._preload:
                jar.preload(self._preload)

    def open(self):
        # A Jarrer has no single readable content stream.
        raise RuntimeError('unsupported')

    def preload(self, paths):
        '''
        Add the given set of paths to the list of preloaded files. See
        mozpack.mozjar.JarWriter documentation for details on jar preloading.
        '''
        self._preload.extend(paths)

View File

@ -0,0 +1,132 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import sys
from contextlib import contextmanager
class ErrorMessage(Exception):
    '''Exception type raised from errors.error() and errors.fatal(). The
    exception message is already formatted with severity and context.'''
class AccumulatedErrors(Exception):
    '''Exception type raised from errors.accumulate() when at least one
    error was reported within the managed block; details are in the log.'''
class ErrorCollector(object):
    '''
    Error handling/logging class. A global instance, errors, is provided for
    convenience.
    Warnings, errors and fatal errors may be logged by calls to the following
    functions:
        errors.warn(message)
        errors.error(message)
        errors.fatal(message)
    Warnings only send the message on the logging output, while errors and
    fatal errors send the message and throw an ErrorMessage exception. The
    exception, however, may be deferred. See further below.
    Errors may be ignored by calling:
        errors.ignore_errors()
    After calling that function, only fatal errors throw an exception.
    The warnings, errors or fatal errors messages may be augmented with context
    information when a context is provided. Context is defined by a pair
    (filename, linenumber), and may be set with errors.context() used as a
    context manager:
        with errors.context(filename, linenumber):
            errors.warn(message)
    Arbitrary nesting is supported, both for errors.context calls:
        with errors.context(filename1, linenumber1):
            errors.warn(message)
            with errors.context(filename2, linenumber2):
                errors.warn(message)
    as well as for function calls:
        def func():
            errors.warn(message)
            with errors.context(filename, linenumber):
                func()
    Errors and fatal errors can have their exception thrown at a later time,
    allowing for several different errors to be reported at once before
    throwing. This is achieved with errors.accumulate() as a context manager:
        with errors.accumulate():
            if test1:
                errors.error(message1)
            if test2:
                errors.error(message2)
    In such cases, a single AccumulatedErrors exception is thrown, but doesn't
    contain information about the exceptions. The logged messages do.
    '''
    out = sys.stderr
    WARN = 1
    ERROR = 2
    FATAL = 3
    # Minimum level at which a message raises (instead of only logging).
    _level = ERROR
    # Stack of (file, line) context pairs; the innermost is used in messages.
    _context = []
    # Deferred-error counter; non-None while inside accumulate().
    _count = None

    def ignore_errors(self, ignore=True):
        '''
        Make error() only log instead of raising, leaving fatal() as the only
        raising call. Pass ignore=False to restore the default behavior.
        '''
        if ignore:
            self._level = self.FATAL
        else:
            self._level = self.ERROR

    def _full_message(self, level, msg):
        # Prefix the message with its severity and, if set, the innermost
        # (file, line) context.
        if level >= self._level:
            level = 'Error'
        else:
            level = 'Warning'
        if self._context:
            file, line = self._context[-1]
            return "%s: %s:%d: %s" % (level, file, line, msg)
        return "%s: %s" % (level, msg)

    def _handle(self, level, msg):
        '''
        Format and log the message; raise ErrorMessage when the level reaches
        the raising threshold, unless raising is deferred by accumulate().
        '''
        msg = self._full_message(level, msg)
        if level >= self._level:
            if self._count is None:
                raise ErrorMessage(msg)
            # Inside accumulate(): defer raising, but count the error.
            self._count += 1
        print >>self.out, msg

    def fatal(self, msg):
        self._handle(self.FATAL, msg)

    def error(self, msg):
        self._handle(self.ERROR, msg)

    def warn(self, msg):
        self._handle(self.WARN, msg)

    def get_context(self):
        '''
        Return the innermost (file, line) context pair, or None.
        '''
        if self._context:
            return self._context[-1]

    @contextmanager
    def context(self, file, line):
        if file and line:
            self._context.append((file, line))
        try:
            yield
        finally:
            # Pop the context even when the managed block raises, so later
            # messages aren't attributed to a stale context.
            if file and line:
                self._context.pop()

    @contextmanager
    def accumulate(self):
        assert self._count is None
        self._count = 0
        try:
            yield
        finally:
            # Always reset the deferred-error state, even when the managed
            # block raises, so nested/subsequent accumulate() calls work.
            count = self._count
            self._count = None
        if count:
            raise AccumulatedErrors()


errors = ErrorCollector()

View File

@ -0,0 +1,107 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import struct
from buildconfig import (
substs,
topobjdir,
)
import subprocess
from mozpack.errors import errors
# Magic numbers identifying executable/library files, as read big-endian
# (see struct.unpack('>L', ...) in is_executable).
MACHO_SIGNATURES = [
    0xfeedface,  # mach-o 32-bits big endian
    0xcefaedfe,  # mach-o 32-bits little endian
    0xfeedfacf,  # mach-o 64-bits big endian
    0xcffaedfe,  # mach-o 64-bits little endian
]

FAT_SIGNATURE = 0xcafebabe  # mach-o FAT binary

EXECUTABLE_SIGNATURES = [
    0x7f454c46,  # Elf
] + MACHO_SIGNATURES
def is_executable(path):
    '''
    Return whether a given file path points to an executable or a library,
    where an executable or library is identified by:
        - the file extension on OS/2
        - the file signature on OS/X and ELF systems (GNU/Linux, Android, BSD,
          Solaris)
    As this function is intended for use to choose between the ExecutableFile
    and File classes in FileFinder, and choosing ExecutableFile only matters
    on OS/2, OS/X and ELF systems, we don't bother detecting other kind of
    executables.
    '''
    if not os.path.exists(path):
        return False

    if substs['OS_ARCH'] == 'OS2':
        # On OS/2, go by the file extension alone.
        return path.lower().endswith((substs['DLL_SUFFIX'],
                                      substs['BIN_SUFFIX']))

    with open(path, 'rb') as f:
        magic = f.read(4)
        if len(magic) < 4:
            return False
        magic = struct.unpack('>L', magic)[0]
        if magic in EXECUTABLE_SIGNATURES:
            return True
        if magic != FAT_SIGNATURE:
            return False
        # We have to sanity check the second four bytes, because Java class
        # files use the same magic number as Mach-O fat binaries.
        # This logic is adapted from file(1), which says that Mach-O uses
        # these bytes to count the number of architectures within, while
        # Java uses it for a version number. Conveniently, there are only
        # 18 labelled Mach-O architectures, and Java's first released
        # class format used the version 43.0.
        word = f.read(4)
        if len(word) < 4:
            return False
        return struct.unpack('>L', word)[0] < 20
def may_strip(path):
    '''
    Return whether strip() should be called
    '''
    # Stripping is on unless the build configuration sets PKG_SKIP_STRIP.
    return not substs['PKG_SKIP_STRIP']
def strip(path):
    '''
    Execute the STRIP command with STRIP_FLAGS on the given path.
    '''
    # Use distinct names so the function's own name isn't shadowed.
    strip_bin = substs['STRIP']
    strip_flags = substs['STRIP_FLAGS'].split() \
        if 'STRIP_FLAGS' in substs else []
    cmd = [strip_bin] + strip_flags + [path]
    if subprocess.call(cmd) != 0:
        errors.fatal('Error executing ' + ' '.join(cmd))
def may_elfhack(path):
    '''
    Return whether elfhack() should be called
    '''
    # elfhack only supports libraries. We should check the ELF header for
    # the right flag, but checking the file extension works too.
    return 'USE_ELF_HACK' in substs and substs['USE_ELF_HACK'] and \
        path.endswith(substs['DLL_SUFFIX'])
def elfhack(path):
    '''
    Execute the elfhack command on the given path.
    '''
    elfhack_bin = os.path.join(topobjdir, 'build/unix/elfhack/elfhack')
    # Extra flags from the environment go between the binary and the path.
    extra_flags = os.environ.get('ELF_HACK_FLAGS', '').split()
    cmd = [elfhack_bin] + extra_flags + [path]
    if subprocess.call(cmd) != 0:
        errors.fatal('Error executing ' + ' '.join(cmd))

View File

@ -0,0 +1,455 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import re
import shutil
from mozpack.executables import (
is_executable,
may_strip,
strip,
may_elfhack,
elfhack,
)
from mozpack.chrome.manifest import ManifestEntry
from io import BytesIO
from mozpack.errors import ErrorMessage
import mozpack.path
class Dest(object):
    '''
    Helper interface for BaseFile.copy. The interface works as follows:
    - read() and write() can be used to sequentially read/write from the
      underlying file.
    - a call to read() after a write() will re-open the underlying file and
      read from it.
    - a call to write() after a read() will re-open the underlying file,
      emptying it, and write to it.
    '''
    def __init__(self, path):
        self.path = path
        # Current mode: None (closed), 'r' (reading) or 'w' (writing).
        self.mode = None

    def read(self, length=-1):
        if self.mode != 'r':
            # Close any open handle first: this flushes pending writes, so
            # the subsequent read sees them, and avoids leaking the handle.
            self.close()
            self.file = open(self.path, 'rb')
            self.mode = 'r'
        return self.file.read(length)

    def write(self, data):
        if self.mode != 'w':
            # Close any open read handle before truncating and rewriting.
            self.close()
            self.file = open(self.path, 'wb')
            self.mode = 'w'
        return self.file.write(data)

    def exists(self):
        return os.path.exists(self.path)

    def close(self):
        if self.mode:
            self.mode = None
            self.file.close()
class BaseFile(object):
    '''
    Base interface and helper for file copying. Derived class may implement
    their own copy function, or rely on BaseFile.copy using the open() member
    function and/or the path property.
    '''
    def copy(self, dest):
        '''
        Copy the BaseFile content to the destination given as a string or a
        Dest instance. Avoids replacing existing files if the BaseFile content
        matches that of the destination, or in case of plain files, if the
        destination is newer than the original file.
        Returns whether a copy was actually performed (True) or not (False).
        '''
        if isinstance(dest, basestring):
            dest = Dest(dest)
        else:
            assert isinstance(dest, Dest)

        # When True, the content comparison below is skipped and the copy is
        # performed unconditionally.
        can_skip_content_check = False
        if not dest.exists():
            can_skip_content_check = True
        elif getattr(self, 'path', None) and getattr(dest, 'path', None):
            # Both ends are plain files: compare timestamps first.
            # os.path.getmtime returns a result in seconds with precision up to
            # the microsecond. But microsecond is too precise because
            # shutil.copystat only copies milliseconds, and seconds is not
            # enough precision.
            if int(os.path.getmtime(self.path) * 1000) \
                    <= int(os.path.getmtime(dest.path) * 1000):
                # Destination is at least as recent as the source: skip.
                return False
            elif os.path.getsize(self.path) != os.path.getsize(dest.path):
                # Different sizes: content necessarily differs.
                can_skip_content_check = True

        if can_skip_content_check:
            if getattr(self, 'path', None) and getattr(dest, 'path', None):
                # copy2 also copies permission bits and timestamps.
                shutil.copy2(self.path, dest.path)
            else:
                # Ensure the file is always created
                if not dest.exists():
                    dest.write('')
                shutil.copyfileobj(self.open(), dest)
            return True

        # Compare content chunk by chunk; only start writing once a
        # difference is found.
        src = self.open()
        copy_content = ''
        while True:
            dest_content = dest.read(32768)
            src_content = src.read(32768)
            copy_content += src_content
            if len(dest_content) == len(src_content) == 0:
                # Both streams exhausted with identical content: no copy.
                break
            # If the read content differs between origin and destination,
            # write what was read up to now, and copy the remainder.
            if dest_content != src_content:
                dest.write(copy_content)
                shutil.copyfileobj(src, dest)
                break
        if hasattr(self, 'path') and hasattr(dest, 'path'):
            shutil.copystat(self.path, dest.path)
        return True

    def open(self):
        '''
        Return a file-like object allowing to read() the content of the
        associated file. This is meant to be overloaded in subclasses to return
        a custom file-like object.
        '''
        assert self.path is not None
        return open(self.path, 'rb')
class File(BaseFile):
    '''
    File class for plain files.
    '''
    def __init__(self, path):
        # Path of the file on the local filesystem; used by BaseFile.copy
        # and BaseFile.open.
        self.path = path
class ExecutableFile(File):
    '''
    File class for executable and library files on OS/2, OS/X and ELF systems.
    (see mozpack.executables.is_executable documentation).
    '''
    def copy(self, dest):
        # Copy the file, then post-process the copy in place (strip/elfhack).
        assert isinstance(dest, basestring)
        if not File.copy(self, dest):
            # Destination was already up to date; nothing to post-process.
            return False
        try:
            if may_strip(dest):
                strip(dest)
            if may_elfhack(dest):
                elfhack(dest)
        except ErrorMessage:
            # Don't leave a partially processed file behind.
            os.remove(dest)
            raise
        return True
class GeneratedFile(BaseFile):
    '''
    File class for content with no previous existence on the filesystem.
    '''
    def __init__(self, content):
        # Raw content served by open().
        self.content = content

    def open(self):
        return BytesIO(self.content)
class DeflatedFile(BaseFile):
    '''
    File class for members of a jar archive. DeflatedFile.copy() effectively
    extracts the file from the jar archive.
    '''
    def __init__(self, file):
        from mozpack.mozjar import JarFileReader
        assert isinstance(file, JarFileReader)
        self.file = file

    def open(self):
        # Rewind so repeated open() calls read from the beginning.
        self.file.seek(0)
        return self.file
class XPTFile(GeneratedFile):
    '''
    File class for a linked XPT file. It takes several XPT files as input
    (using the add() and remove() member functions), and links them at copy()
    time.
    '''
    def __init__(self):
        # Set of BaseFile instances for the XPT files to link.
        self._files = set()

    def add(self, xpt):
        '''
        Add the given XPT file (as a BaseFile instance) to the list of XPTs
        to link.
        '''
        assert isinstance(xpt, BaseFile)
        self._files.add(xpt)

    def remove(self, xpt):
        '''
        Remove the given XPT file (as a BaseFile instance) from the list of
        XPTs to link.
        '''
        assert isinstance(xpt, BaseFile)
        self._files.remove(xpt)

    def copy(self, dest):
        '''
        Link the registered XPTs and place the resulting linked XPT at the
        destination given as a string or a Dest instance. Avoids an expensive
        XPT linking if the interfaces in an existing destination match those of
        the individual XPTs to link.
        '''
        if isinstance(dest, basestring):
            dest = Dest(dest)
        assert isinstance(dest, Dest)

        from xpt import xpt_link, Typelib, Interface
        all_typelibs = [Typelib.read(f.open()) for f in self._files]
        if dest.exists():
            # Typelib.read() needs to seek(), so use a BytesIO for dest
            # content.
            dest_interfaces = \
                dict((i.name, i)
                     for i in Typelib.read(BytesIO(dest.read())).interfaces
                     if i.iid != Interface.UNRESOLVED_IID)
            identical = True
            for f in self._files:
                typelib = Typelib.read(f.open())
                for i in typelib.interfaces:
                    # Any resolved interface missing from, or differing
                    # against, the destination forces a relink.
                    if i.iid != Interface.UNRESOLVED_IID and \
                            not (i.name in dest_interfaces and
                                 i == dest_interfaces[i.name]):
                        identical = False
                        break
            if identical:
                return False
        s = BytesIO()
        xpt_link(all_typelibs).write(s)
        dest.write(s.getvalue())
        return True

    def open(self):
        # The linked content only materializes at copy() time.
        raise RuntimeError("Unsupported")

    def isempty(self):
        '''
        Return whether there are XPT files to link.
        '''
        return len(self._files) == 0
class ManifestFile(BaseFile):
    '''
    File class for a manifest file. It takes individual manifest entries (using
    the add() and remove() member functions), and adjusts them to be relative
    to the base path for the manifest, given at creation.
    Example:
        There is a manifest entry "content webapprt webapprt/content/" relative
        to "webapprt/chrome". When packaging, the entry will be stored in
        jar:webapprt/omni.ja!/chrome/chrome.manifest, which means the entry
        will have to be relative to "chrome" instead of "webapprt/chrome". This
        doesn't really matter when serializing the entry, since this base path
        is not written out, but it matters when moving the entry at the same
        time, e.g. to jar:webapprt/omni.ja!/chrome.manifest, which we don't do
        currently but could in the future.
    '''
    def __init__(self, base):
        self._entries = []
        self._base = base

    def add(self, entry):
        '''
        Add the given entry to the manifest. Entries are rebased at open() time
        instead of add() time so that they can be more easily remove()d.
        '''
        assert isinstance(entry, ManifestEntry)
        self._entries.append(entry)

    def remove(self, entry):
        '''
        Remove the given entry from the manifest.
        '''
        assert isinstance(entry, ManifestEntry)
        self._entries.remove(entry)

    def open(self):
        '''
        Return a file-like object allowing to read() the serialized content of
        the manifest.
        '''
        lines = ['%s\n' % entry.rebase(self._base)
                 for entry in self._entries]
        return BytesIO(''.join(lines))

    def __iter__(self):
        '''
        Iterate over entries in the manifest file.
        '''
        return iter(self._entries)

    def isempty(self):
        '''
        Return whether there are manifest entries to write
        '''
        return not self._entries
class MinifiedProperties(BaseFile):
    '''
    File class for minified properties. This wraps around a BaseFile instance,
    and removes lines starting with a # from its content.
    '''
    def __init__(self, file):
        assert isinstance(file, BaseFile)
        self._file = file

    def open(self):
        '''
        Return a file-like object allowing to read() the minified content of
        the properties file.
        '''
        # Drop comment lines; everything else is preserved verbatim.
        kept = [line for line in self._file.open().readlines()
                if not line.startswith('#')]
        return BytesIO(''.join(kept))
class FileFinder(object):
'''
Helper to get appropriate BaseFile instances from the file system.
'''
def __init__(self, base, minify=False):
    '''
    Create a FileFinder for files under the given base directory. The
    optional minify argument specifies whether file types supporting
    minification (currently only "*.properties") should be minified.
    '''
    self.base = base
    # Whether found files of supported types should be minified.
    self._minify = minify
def find(self, pattern):
'''
Yield path, BaseFile_instance pairs for all files under the base
directory and its subdirectories that match the given pattern. See the
mozpack.path.match documentation for a description of the handled
patterns. Note all files with a name starting with a '.' are ignored
when scanning directories, but are not ignored when explicitely
requested.
'''
while pattern.startswith('/'):
pattern = pattern[1:]
return self._find(pattern)
def _find(self, pattern):
'''
Actual implementation of FileFinder.find(), dispatching to specialized
member functions depending on what kind of pattern was given.
'''
if '*' in pattern:
return self._find_glob('', mozpack.path.split(pattern))
elif os.path.isdir(os.path.join(self.base, pattern)):
return self._find_dir(pattern)
else:
return self._find_file(pattern)
def _find_dir(self, path):
'''
Actual implementation of FileFinder.find() when the given pattern
corresponds to an existing directory under the base directory.
Ignores file names starting with a '.' under the given path. If the
path itself has leafs starting with a '.', they are not ignored.
'''
for p in os.listdir(os.path.join(self.base, path)):
if p.startswith('.'):
continue
for p_, f in self._find(mozpack.path.join(path, p)):
yield p_, f
def _find_file(self, path):
'''
Actual implementation of FileFinder.find() when the given pattern
corresponds to an existing file under the base directory.
'''
srcpath = os.path.join(self.base, path)
if not os.path.exists(srcpath):
return
if is_executable(srcpath):
yield path, ExecutableFile(srcpath)
else:
yield path, self._minify_file(srcpath, File(srcpath))
def _find_glob(self, base, pattern):
'''
Actual implementation of FileFinder.find() when the given pattern
contains globbing patterns ('*' or '**'). This is meant to be an
equivalent of:
for p, f in self:
if mozpack.path.match(p, pattern):
yield p, f
but avoids scanning the entire tree.
'''
if not pattern:
for p, f in self._find(base):
yield p, f
elif pattern[0] == '**':
for p, f in self._find(base):
if mozpack.path.match(p, mozpack.path.join(*pattern)):
yield p, f
elif '*' in pattern[0]:
if not os.path.exists(os.path.join(self.base, base)):
return
for p in os.listdir(os.path.join(self.base, base)):
if p.startswith('.') and not pattern[0].startswith('.'):
continue
if re.match(mozpack.path.translate(pattern[0]), p):
for p_, f in self._find_glob(mozpack.path.join(base, p),
pattern[1:]):
yield p_, f
else:
for p, f in self._find_glob(mozpack.path.join(base, pattern[0]),
pattern[1:]):
yield p, f
def __iter__(self):
'''
Iterates over all files under the base directory (excluding files
starting with a '.' and files at any level under a directory starting
with a '.').
for path, file in finder:
...
'''
return self.find('')
def __contains__(self, pattern):
raise RuntimeError("'in' operator forbidden for %s. Use contains()." %
self.__class__.__name__)
def contains(self, pattern):
'''
Return whether some files under the base directory match the given
pattern. See the mozpack.path.match documentation for a description of
the handled patterns.
'''
return any(self.find(pattern))
def _minify_file(self, path, file):
'''
Return an appropriate MinifiedSomething wrapper for the given BaseFile
instance (file), according to the file type (determined by the given
path), if the FileFinder was created with minification enabled.
Otherwise, just return the given BaseFile instance.
Currently, only "*.properties" files are handled.
'''
if self._minify:
if path.endswith('.properties'):
return MinifiedProperties(file)
return file

View File

@ -0,0 +1,732 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from io import BytesIO
import struct
import zlib
import os
from zipfile import (
ZIP_STORED,
ZIP_DEFLATED,
)
from collections import OrderedDict
JAR_STORED = ZIP_STORED      # member stored without compression
JAR_DEFLATED = ZIP_DEFLATED  # member compressed with deflate
# zlib window size; negated when (de)compressing to produce/consume raw
# deflate streams without the zlib header and checksum.
MAX_WBITS = 15
class JarReaderError(Exception):
    '''Error type raised for Jar reader errors (corrupt or non-jar data).'''
class JarWriterError(Exception):
    '''Error type raised for Jar writer errors (misuse of the writer API).'''
class JarStruct(object):
    '''
    Helper used to define ZIP archive raw data structures. Data structures
    handled by this helper all start with a magic number, defined in
    subclasses MAGIC field as a 32-bits unsigned integer, followed by data
    structured as described in subclasses STRUCT field.

    The STRUCT field contains a list of (name, type) pairs where name is a
    field name, and the type can be one of 'uint32', 'uint16' or one of the
    field names. In the latter case, the field is considered to be a string
    buffer with a length given in that field.
    For example,
        STRUCT = [
            ('version', 'uint32'),
            ('filename_size', 'uint16'),
            ('filename', 'filename_size')
        ]
    describes a structure with a 'version' 32-bits unsigned integer field,
    followed by a 'filename_size' 16-bits unsigned integer field, followed by
    a filename_size-long string buffer 'filename'.

    Fields that are used as other fields size are not stored in objects. In
    the above example, an instance of such subclass would only have two
    attributes:
        obj['version']
        obj['filename']
    filename_size would be obtained with len(obj['filename']).

    JarStruct subclasses instances can be either initialized from existing
    data (deserialized), or with empty fields.
    '''
    # struct module format character and byte size for each supported
    # fixed-width field type. All fields are little-endian, per ZIP spec.
    TYPE_MAPPING = {'uint32': ('I', 4), 'uint16': ('H', 2)}

    def __init__(self, data=None):
        '''
        Create an instance from the given data. Data may be omitted to create
        an instance with empty fields.
        '''
        assert self.MAGIC and isinstance(self.STRUCT, OrderedDict)
        # Types that name another field are length fields; their values are
        # derived from the string they describe, so they aren't stored.
        self.size_fields = set(t for t in self.STRUCT.itervalues()
                               if not t in JarStruct.TYPE_MAPPING)
        self._values = {}
        if data:
            self._init_data(data)
        else:
            self._init_empty()

    def _init_data(self, data):
        '''
        Initialize an instance from data, following the data structure
        described in self.STRUCT. The self.MAGIC signature is expected at
        data[:4].
        '''
        assert data is not None
        self.signature, size = JarStruct.get_data('uint32', data)
        if self.signature != self.MAGIC:
            raise JarReaderError('Bad magic')
        offset = size
        # For all fields used as other fields sizes, keep track of their value
        # separately.
        sizes = dict((t, 0) for t in self.size_fields)
        for name, t in self.STRUCT.iteritems():
            if t in JarStruct.TYPE_MAPPING:
                value, size = JarStruct.get_data(t, data[offset:])
            else:
                # Variable-length string field: its size was read earlier
                # into the corresponding length field.
                size = sizes[t]
                value = data[offset:offset + size]
                if isinstance(value, memoryview):
                    value = value.tobytes()
            if not name in sizes:
                self._values[name] = value
            else:
                sizes[name] = value
            offset += size

    def _init_empty(self):
        '''
        Initialize an instance with empty fields.
        '''
        self.signature = self.MAGIC
        # Length fields are skipped; strings default to '' and integers to 0.
        for name, t in self.STRUCT.iteritems():
            if name in self.size_fields:
                continue
            self._values[name] = 0 if t in JarStruct.TYPE_MAPPING else ''

    @staticmethod
    def get_data(type, data):
        '''
        Deserialize a single field of given type (must be one of
        JarStruct.TYPE_MAPPING) at the given offset in the given data.
        Return a (value, size) pair.
        '''
        assert type in JarStruct.TYPE_MAPPING
        assert data is not None
        format, size = JarStruct.TYPE_MAPPING[type]
        data = data[:size]
        # struct.unpack requires a real buffer, not a memoryview slice.
        if isinstance(data, memoryview):
            data = data.tobytes()
        return struct.unpack('<' + format, data)[0], size

    def serialize(self):
        '''
        Serialize the data structure according to the data structure
        definition from self.STRUCT.
        '''
        serialized = struct.pack('<I', self.signature)
        # Map each length-field name to the string field it describes, so
        # its value can be computed from the string's current length.
        sizes = dict((t, name) for name, t in self.STRUCT.iteritems()
                     if not t in JarStruct.TYPE_MAPPING)
        for name, t in self.STRUCT.iteritems():
            if t in JarStruct.TYPE_MAPPING:
                format, size = JarStruct.TYPE_MAPPING[t]
                if name in sizes:
                    value = len(self[sizes[name]])
                else:
                    value = self[name]
                serialized += struct.pack('<' + format, value)
            else:
                serialized += self[name]
        return serialized

    @property
    def size(self):
        '''
        Return the size of the data structure, given the current values of all
        variable length fields.
        '''
        # Start with the magic number's size.
        size = JarStruct.TYPE_MAPPING['uint32'][1]
        for name, type in self.STRUCT.iteritems():
            if type in JarStruct.TYPE_MAPPING:
                size += JarStruct.TYPE_MAPPING[type][1]
            else:
                size += len(self[name])
        return size

    def __getitem__(self, key):
        return self._values[key]

    def __setitem__(self, key, value):
        if not key in self.STRUCT:
            raise KeyError(key)
        # Length fields are derived; they can't be set directly.
        if key in self.size_fields:
            raise AttributeError("can't set attribute")
        self._values[key] = value

    def __contains__(self, key):
        return key in self._values

    def __iter__(self):
        # Iterate over (name, value) pairs of stored (non-length) fields.
        return self._values.iteritems()

    def __repr__(self):
        return "<%s %s>" % (self.__class__.__name__,
                            ' '.join('%s=%s' % (n, v) for n, v in self))
class JarCdirEnd(JarStruct):
    '''
    End of central directory record.
    '''
    MAGIC = 0x06054b50
    STRUCT = OrderedDict([
        ('disk_num', 'uint16'),
        ('cdir_disk', 'uint16'),
        ('disk_entries', 'uint16'),
        ('cdir_entries', 'uint16'),
        ('cdir_size', 'uint32'),
        ('cdir_offset', 'uint32'),
        ('comment_size', 'uint16'),
        ('comment', 'comment_size'),
    ])

# Size of an end-of-central-directory record with an empty comment: the
# starting point when scanning backwards for the record in an archive.
CDIR_END_SIZE = JarCdirEnd().size
class JarCdirEntry(JarStruct):
    '''
    Central directory file header.
    '''
    MAGIC = 0x02014b50
    STRUCT = OrderedDict([
        ('creator_version', 'uint16'),
        ('min_version', 'uint16'),
        ('general_flag', 'uint16'),
        ('compression', 'uint16'),
        ('lastmod_time', 'uint16'),
        ('lastmod_date', 'uint16'),
        ('crc32', 'uint32'),
        ('compressed_size', 'uint32'),
        ('uncompressed_size', 'uint32'),
        ('filename_size', 'uint16'),
        ('extrafield_size', 'uint16'),
        ('filecomment_size', 'uint16'),
        ('disknum', 'uint16'),
        ('internal_attr', 'uint16'),
        ('external_attr', 'uint32'),
        ('offset', 'uint32'),
        ('filename', 'filename_size'),
        ('extrafield', 'extrafield_size'),
        ('filecomment', 'filecomment_size'),
    ])
class JarLocalFileHeader(JarStruct):
    '''
    Local file header, preceding each member's data in the archive.
    '''
    MAGIC = 0x04034b50
    STRUCT = OrderedDict([
        ('min_version', 'uint16'),
        ('general_flag', 'uint16'),
        ('compression', 'uint16'),
        ('lastmod_time', 'uint16'),
        ('lastmod_date', 'uint16'),
        ('crc32', 'uint32'),
        ('compressed_size', 'uint32'),
        ('uncompressed_size', 'uint32'),
        ('filename_size', 'uint16'),
        ('extra_field_size', 'uint16'),
        ('filename', 'filename_size'),
        ('extra_field', 'extra_field_size'),
    ])
class JarFileReader(object):
    '''
    File-like class for use by JarReader to give access to individual files
    within a Jar archive.
    '''
    def __init__(self, header, data):
        '''
        Initialize a JarFileReader. header is the local file header
        corresponding to the file in the jar archive, data a buffer containing
        the file data.
        '''
        assert header['compression'] in [JAR_DEFLATED, JAR_STORED]
        self._data = data
        # Copy some local file header fields.
        for name in ['filename', 'compressed_size',
                     'uncompressed_size', 'crc32']:
            setattr(self, name, header[name])
        self.compressed = header['compression'] == JAR_DEFLATED

    def read(self, length=-1):
        '''
        Read some amount of uncompressed data.
        '''
        return self.uncompressed_data.read(length)

    def readlines(self):
        '''
        Return a list containing all the lines of data in the uncompressed
        data.
        '''
        return self.read().splitlines()

    def seek(self, pos, whence=os.SEEK_SET):
        '''
        Change the current position in the uncompressed data. Subsequent reads
        will start from there.
        '''
        return self.uncompressed_data.seek(pos, whence)

    def close(self):
        '''
        Free the uncompressed data buffer.
        '''
        self.uncompressed_data.close()

    @property
    def compressed_data(self):
        '''
        Return the raw compressed data.
        '''
        # self._data may extend past this member; only the first
        # compressed_size bytes belong to it.
        return self._data[:self.compressed_size]

    @property
    def uncompressed_data(self):
        '''
        Return the uncompressed data, lazily decompressed and cached on
        first access.
        '''
        if hasattr(self, '_uncompressed_data'):
            return self._uncompressed_data
        data = self.compressed_data
        if self.compressed:
            # Negative wbits: raw deflate stream, no zlib header.
            data = zlib.decompress(data.tobytes(), -MAX_WBITS)
        else:
            data = data.tobytes()
        if len(data) != self.uncompressed_size:
            raise JarReaderError('Corrupted file? %s' % self.filename)
        self._uncompressed_data = BytesIO(data)
        return self._uncompressed_data
class JarReader(object):
'''
Class with methods to read Jar files. Can open standard jar files as well
as Mozilla jar files (see further details in the JarWriter documentation).
'''
def __init__(self, file=None, fileobj=None):
'''
Opens the given file as a Jar archive. Use the given file-like object
if one is given instead of opening the given file name.
'''
if fileobj:
data = fileobj.read()
else:
data = open(file, 'rb').read()
self._data = memoryview(data)
# The End of Central Directory Record has a variable size because of
# comments it may contain, so scan for it from the end of the file.
offset = -CDIR_END_SIZE
while True:
signature = JarStruct.get_data('uint32', self._data[offset:])[0]
if signature == JarCdirEnd.MAGIC:
break
if offset == -len(self._data):
raise JarReaderError('Not a jar?')
offset -= 1
self._cdir_end = JarCdirEnd(self._data[offset:])
def close(self):
'''
Free some resources associated with the Jar.
'''
del self._data
@property
def entries(self):
'''
Return an ordered dict of central directory entries, indexed by
filename, in the order they appear in the Jar archive central
directory. Directory entries are skipped.
'''
if hasattr(self, '_entries'):
return self._entries
preload = 0
if self.is_optimized:
preload = JarStruct.get_data('uint32', self._data)[0]
entries = OrderedDict()
offset = self._cdir_end['cdir_offset']
for e in xrange(self._cdir_end['cdir_entries']):
entry = JarCdirEntry(self._data[offset:])
offset += entry.size
# Creator host system. 0 is MSDOS, 3 is Unix
host = entry['creator_version'] >> 16
# External attributes values depend on host above. On Unix the
# higher bits are the stat.st_mode value. On MSDOS, the lower bits
# are the FAT attributes.
xattr = entry['external_attr']
# Skip directories
if (host == 0 and xattr & 0x10) or (host == 3 and
xattr & (040000 << 16)):
continue
entries[entry['filename']] = entry
if entry['offset'] < preload:
self._last_preloaded = entry['filename']
self._entries = entries
return entries
@property
def is_optimized(self):
'''
Return whether the jar archive is optimized.
'''
# In optimized jars, the central directory is at the beginning of the
# file, after a single 32-bits value, which is the length of data
# preloaded.
return self._cdir_end['cdir_offset'] == \
JarStruct.TYPE_MAPPING['uint32'][1]
@property
def last_preloaded(self):
'''
Return the name of the last file that is set to be preloaded.
See JarWriter documentation for more details on preloading.
'''
if hasattr(self, '_last_preloaded'):
return self._last_preloaded
self._last_preloaded = None
self.entries
return self._last_preloaded
def _getreader(self, entry):
'''
Helper to create a JarFileReader corresponding to the given central
directory entry.
'''
header = JarLocalFileHeader(self._data[entry['offset']:])
for key, value in entry:
if key in header and header[key] != value:
raise JarReaderError('Central directory and file header ' +
'mismatch. Corrupted archive?')
return JarFileReader(header, self._data[entry['offset'] + header.size:])
def __iter__(self):
'''
Iterate over all files in the Jar archive, in the form of
JarFileReaders.
for file in jarReader:
...
'''
for entry in self.entries.itervalues():
yield self._getreader(entry)
def __getitem__(self, name):
'''
Get a JarFileReader for the given file name.
'''
return self._getreader(self.entries[name])
def __contains__(self, name):
'''
Return whether the given file name appears in the Jar archive.
'''
return name in self.entries
class JarWriter(object):
    '''
    Class with methods to write Jar files. Can write more-or-less standard jar
    archives as well as jar archives optimized for Gecko. See the
    documentation for the close() member function for a description of both
    layouts.
    '''
    def __init__(self, file=None, fileobj=None, compress=True, optimize=True):
        '''
        Initialize a Jar archive in the given file. Use the given file-like
        object if one is given instead of opening the given file name.
        The compress option determines the default behavior for storing data
        in the jar archive. The optimize options determines whether the jar
        archive should be optimized for Gecko or not.
        '''
        if fileobj:
            self._data = fileobj
        else:
            self._data = open(file, 'wb')
        self._compress = compress
        # Maps member name -> (JarCdirEntry, compressed data buffer).
        self._contents = OrderedDict()
        self._last_preloaded = None
        self._optimize = optimize

    def __enter__(self):
        '''
        Context manager __enter__ method for JarWriter.
        '''
        return self

    def __exit__(self, type, value, tb):
        '''
        Context manager __exit__ method for JarWriter.
        '''
        self.finish()

    def finish(self):
        '''
        Flush and close the Jar archive.

        Standard jar archives are laid out like the following:
            - Local file header 1
            - File data 1
            - Local file header 2
            - File data 2
            - (...)
            - Central directory entry pointing at Local file header 1
            - Central directory entry pointing at Local file header 2
            - (...)
            - End of central directory, pointing at first central directory
              entry.

        Jar archives optimized for Gecko are laid out like the following:
            - 32-bits unsigned integer giving the amount of data to preload.
            - Central directory entry pointing at Local file header 1
            - Central directory entry pointing at Local file header 2
            - (...)
            - End of central directory, pointing at first central directory
              entry.
            - Local file header 1
            - File data 1
            - Local file header 2
            - File data 2
            - (...)
            - End of central directory, pointing at first central directory
              entry.
        The duplication of the End of central directory is to accomodate some
        Zip reading tools that want an end of central directory structure to
        follow the central directory entries.
        '''
        offset = 0
        headers = {}
        preload_size = 0
        # Prepare central directory entries: compute each member's offset
        # within the local-headers+data region, and build the matching
        # local file header from the shared central directory fields.
        for entry, content in self._contents.itervalues():
            header = JarLocalFileHeader()
            for name in entry.STRUCT:
                if name in header:
                    header[name] = entry[name]
            entry['offset'] = offset
            offset += len(content) + header.size
            if entry['filename'] == self._last_preloaded:
                # Preload everything up to and including this member.
                preload_size = offset
            headers[entry] = header
        # Prepare end of central directory
        end = JarCdirEnd()
        end['disk_entries'] = len(self._contents)
        end['cdir_entries'] = end['disk_entries']
        end['cdir_size'] = reduce(lambda x, y: x + y[0].size,
                                  self._contents.values(), 0)
        # On optimized archives, store the preloaded size and the central
        # directory entries, followed by the first end of central directory.
        if self._optimize:
            end['cdir_offset'] = 4
            # Local headers start after the preload size word, the central
            # directory and the first end-of-central-directory record.
            offset = end['cdir_size'] + end['cdir_offset'] + end.size
            if preload_size:
                preload_size += offset
            self._data.write(struct.pack('<I', preload_size))
            for entry, _ in self._contents.itervalues():
                entry['offset'] += offset
                self._data.write(entry.serialize())
            self._data.write(end.serialize())
        # Store local file entries followed by compressed data
        for entry, content in self._contents.itervalues():
            self._data.write(headers[entry].serialize())
            self._data.write(content)
        # On non optimized archives, store the central directory entries.
        if not self._optimize:
            end['cdir_offset'] = offset
            for entry, _ in self._contents.itervalues():
                self._data.write(entry.serialize())
        # Store the end of central directory. On optimized archives this is
        # the second, trailing copy.
        self._data.write(end.serialize())
        self._data.close()

    def add(self, name, data, compress=None):
        '''
        Add a new member to the jar archive, with the given name and the given
        data.
        The compress option indicates if the given data should be compressed
        (True), not compressed (False), or compressed according to the default
        defined when creating the JarWriter (None).
        When the data should be compressed (True or None with self.compress ==
        True), it is only really compressed if the compressed size is smaller
        than the uncompressed size.
        The given data may be a buffer, a file-like instance, a Deflater or a
        JarFileReader instance. The latter two allow to avoid uncompressing
        data to recompress it.
        Raises JarWriterError on duplicate names or unsupported data types.
        '''
        if name in self._contents:
            raise JarWriterError("File %s already in JarWriter" % name)
        if compress is None:
            compress = self._compress
        # Reuse already-(de)compressed data when its compression state
        # matches what we want, avoiding a decompress/recompress cycle.
        if (isinstance(data, JarFileReader) and data.compressed == compress) \
                or (isinstance(data, Deflater) and data.compress == compress):
            deflater = data
        else:
            deflater = Deflater(compress)
            if isinstance(data, basestring):
                deflater.write(data)
            elif hasattr(data, 'read'):
                data.seek(0)
                deflater.write(data.read())
            else:
                raise JarWriterError("Don't know how to handle %s" %
                                     type(data))
        # Fill a central directory entry for this new member.
        entry = JarCdirEntry()
        # Not storing as created on unix, which avoids having to deal with
        # st_mode.
        entry['creator_version'] = 20
        if deflater.compressed:
            entry['min_version'] = 20  # Version 2.0 supports deflated streams
            entry['general_flag'] = 2  # Max compression
            entry['compression'] = JAR_DEFLATED
        else:
            entry['min_version'] = 10  # Version 1.0 for stored streams
            entry['general_flag'] = 0
            entry['compression'] = JAR_STORED
        # January 1st, 2010. See bug 592369.
        entry['lastmod_date'] = ((2010 - 1980) << 9) | (1 << 5) | 1
        entry['lastmod_time'] = 0
        entry['crc32'] = deflater.crc32
        entry['compressed_size'] = deflater.compressed_size
        entry['uncompressed_size'] = deflater.uncompressed_size
        entry['filename'] = name
        self._contents[name] = entry, deflater.compressed_data
    def preload(self, files):
        '''
        Set which members of the jar archive should be preloaded when opening
        the archive in Gecko. This reorders the members according to the order
        of given list.
        '''
        # Move the preloaded members (in the given order) to the front,
        # keeping the remaining members behind them in their original order.
        new_contents = OrderedDict()
        for f in files:
            if not f in self._contents:
                continue
            new_contents[f] = self._contents[f]
            self._last_preloaded = f
        for f in self._contents:
            if not f in new_contents:
                new_contents[f] = self._contents[f]
        self._contents = new_contents
class Deflater(object):
    '''
    File-like interface to zlib compression. The data is actually not
    compressed unless the compressed form is smaller than the uncompressed
    data.
    '''
    def __init__(self, compress=True):
        '''
        Initialize a Deflater. The compress argument determines whether to
        try to compress at all.
        '''
        # Raw (uncompressed) copy of everything written.
        self._data = BytesIO()
        self.compress = compress
        if compress:
            # Negative wbits: emit a raw deflate stream with no zlib header.
            self._deflater = zlib.compressobj(9, zlib.DEFLATED, -MAX_WBITS)
            self._deflated = BytesIO()
        else:
            self._deflater = None

    def write(self, data):
        '''
        Append a buffer to the Deflater. Raises JarWriterError if the
        deflater has already been flushed (via a size/data property).
        '''
        self._data.write(data)
        if self.compress:
            if self._deflater:
                if isinstance(data, memoryview):
                    data = data.tobytes()
                self._deflated.write(self._deflater.compress(data))
            else:
                # _flush() set _deflater to None; further writes would
                # silently miss the compressed stream.
                raise JarWriterError("Can't write after flush")

    def close(self):
        '''
        Close the Deflater.
        '''
        self._data.close()
        if self.compress:
            self._deflated.close()

    def _flush(self):
        '''
        Flush the underlying zlib compression object. After this, no more
        data can be write()n.
        '''
        if self.compress and self._deflater:
            self._deflated.write(self._deflater.flush())
            self._deflater = None

    @property
    def compressed(self):
        '''
        Return whether the data should be compressed (i.e. compression
        actually saves space).
        '''
        return self._compressed_size < self.uncompressed_size

    @property
    def _compressed_size(self):
        '''
        Return the real compressed size of the data written to the Deflater.
        If the Deflater is set not to compress, the uncompressed size is
        returned. Otherwise, the actual compressed size is returned, whether
        or not it is a win over the uncompressed size.
        '''
        if self.compress:
            # Accessing the size finalizes the stream; see _flush().
            self._flush()
            return self._deflated.tell()
        return self.uncompressed_size

    @property
    def compressed_size(self):
        '''
        Return the compressed size of the data written to the Deflater. If
        the Deflater is set not to compress, the uncompressed size is
        returned. Otherwise, if the data should not be compressed (the real
        compressed size is bigger than the uncompressed size), return the
        uncompressed size.
        '''
        if self.compressed:
            return self._compressed_size
        return self.uncompressed_size

    @property
    def uncompressed_size(self):
        '''
        Return the size of the data written to the Deflater.
        '''
        return self._data.tell()

    @property
    def crc32(self):
        '''
        Return the crc32 of the data written to the Deflater.
        '''
        # Mask to an unsigned value: py2 zlib.crc32 may return negative ints.
        return zlib.crc32(self._data.getvalue()) & 0xffffffff

    @property
    def compressed_data(self):
        '''
        Return the compressed data, if the data should be compressed (real
        compressed size smaller than the uncompressed size), or the
        uncompressed data otherwise.
        '''
        if self.compressed:
            return self._deflated.getvalue()
        return self._data.getvalue()

View File

@ -0,0 +1,252 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from Preprocessor import Preprocessor
import re
import os
from mozpack.errors import errors
from mozpack.chrome.manifest import (
Manifest,
ManifestInterfaces,
is_manifest,
parse_manifest,
)
import mozpack.path
from collections import deque
class PackageManifestParser(object):
    '''
    Class for parsing of a package manifest, after preprocessing.
    A package manifest is a list of file paths, with some syntaxic sugar:
        [] designates a toplevel component. Example: [xpcom]
        - in front of a file specifies it to be removed
        * wildcard support
        ** expands to all files and zero or more directories
        ; file comment
    The parser takes input from the preprocessor line by line, and pushes
    parsed information to a sink object. The add and remove methods of the
    sink object are called with the current component and a path.
    '''
    def __init__(self, sink):
        '''
        Initialize the package manifest parser with the given sink.
        '''
        self._sink = sink
        # Component active for subsequent paths; starts out unnamed.
        self._component = ''

    def handle_line(self, str):
        '''
        Handle a line of input and push the parsed information to the sink
        object.
        '''
        line = str.strip()
        # Blank lines and ';' comments carry no information.
        if not line or line.startswith(';'):
            return
        if line.startswith('[') and line.endswith(']'):
            # Component switch, e.g. [xpcom]. The name must be non-empty
            # and free of brackets and whitespace.
            name = line[1:-1]
            if not name or re.search(r'[\[\]\s]', name):
                errors.fatal('Malformed manifest')
            else:
                self._component = name
        elif line.startswith('-'):
            # Removal instruction: strip the leading '-'.
            self._sink.remove(self._component, line[1:])
        elif ',' in line:
            errors.fatal('Incompatible syntax')
        else:
            self._sink.add(self._component, line)
class PreprocessorOutputWrapper(object):
    '''
    File-like helper bridging a Preprocessor and a manifest parser: every
    write()n line is forwarded to the parser's handle_line method, wrapped
    in the errors.context matching the preprocessor's current file/line.
    '''
    def __init__(self, preprocessor, parser):
        self._pp = preprocessor
        self._parser = parser

    def write(self, str):
        # Normalize the preprocessor's notion of the current file so error
        # messages point at a canonical absolute path.
        cur_file = os.path.abspath(self._pp.context['FILE'])
        cur_file = os.path.normpath(cur_file)
        with errors.context(cur_file, self._pp.context['LINE']):
            self._parser.handle_line(str)
def preprocess(input, parser, defines={}):
    '''
    Preprocess the file-like input with the given defines, and send the
    preprocessed output line by line to the given parser.
    '''
    pp = Preprocessor()
    # defines is only read here, never mutated, so the mutable default is
    # harmless.
    pp.context.update(defines)
    pp.do_filter('substitution')
    # Route each emitted line to parser.handle_line with error context.
    pp.out = PreprocessorOutputWrapper(pp, parser)
    pp.do_include(input)
def preprocess_manifest(sink, manifest, defines={}):
    '''
    Preprocess the given file-like manifest with the given defines, and push
    the parsed information to a sink. See PackageManifestParser documentation
    for more details on the sink.
    '''
    parser = PackageManifestParser(sink)
    preprocess(manifest, parser, defines)
class CallDeque(deque):
    '''
    Queue of function calls to make, each remembered together with the
    errors.context active when it was queued.
    '''
    def append(self, function, *args):
        deque.append(self, (errors.get_context(), function, args))

    def execute(self):
        '''
        Pop and invoke every queued call, re-entering the errors.context it
        was recorded under (when there was one).
        '''
        while True:
            try:
                ctx, func, func_args = self.popleft()
            except IndexError:
                break
            if not ctx:
                func(*func_args)
            else:
                with errors.context(ctx[0], ctx[1]):
                    func(*func_args)
class SimplePackager(object):
    '''
    Helper used to translate and buffer instructions from the
    SimpleManifestSink to a formatter. Formatters expect some information to
    be given first that the simple manifest contents can't guarantee before
    the end of the input.
    '''
    def __init__(self, formatter):
        self.formatter = formatter
        # Queue for formatter.add_interfaces()/add_manifest() calls.
        self._queue = CallDeque()
        # Queue for formatter.add() calls.
        self._file_queue = CallDeque()
        # All manifest paths imported.
        self._manifests = set()
        # All manifest paths included from some other manifest.
        self._included_manifests = set()
        self._closed = False

    def add(self, path, file):
        '''
        Add the given BaseFile instance with the given path. The call is
        buffered (see close()) rather than forwarded immediately.
        '''
        assert not self._closed
        if is_manifest(path):
            self._add_manifest_file(path, file)
        elif path.endswith('.xpt'):
            self._queue.append(self.formatter.add_interfaces, path, file)
        else:
            self._file_queue.append(self.formatter.add, path, file)

    def _add_manifest_file(self, path, file):
        '''
        Add the given BaseFile with manifest file contents with the given
        path, parsing it to queue its entries and to track which manifests
        include which.
        '''
        self._manifests.add(path)
        base = ''
        if hasattr(file, 'path'):
            # Find the directory the given path is relative to.
            b = mozpack.path.normsep(file.path)
            if b.endswith('/' + path) or b == path:
                base = os.path.normpath(b[:-len(path)])
        for e in parse_manifest(base, path, file.open()):
            if not isinstance(e, (Manifest, ManifestInterfaces)):
                # e.move(e.base) just returns a clone of the entry.
                self._queue.append(self.formatter.add_manifest, e.move(e.base))
            if isinstance(e, Manifest):
                if e.flags:
                    errors.fatal('Flags are not supported on ' +
                                 '"manifest" entries')
                self._included_manifests.add(e.path)

    def get_bases(self):
        '''
        Return all paths under which root manifests have been found. Root
        manifests are manifests that are included in no other manifest.
        '''
        return set(mozpack.path.dirname(m)
                   for m in self._manifests - self._included_manifests)

    def close(self):
        '''
        Push all instructions to the formatter. Bases are registered first,
        then manifests/interfaces, then plain files, since formatters need
        that ordering.
        '''
        self._closed = True
        for base in self.get_bases():
            if base:
                self.formatter.add_base(base)
        self._queue.execute()
        self._file_queue.execute()
class SimpleManifestSink(object):
    '''
    Parser sink for "simple" package manifests. Simple package manifests use
    the format described in the PackageManifestParser documentation, but
    don't support file removals, and require manifests, interfaces and chrome
    data to be explicitely listed.
    Entries starting with bin/ are searched under bin/ in the FileFinder, but
    are packaged without the bin/ prefix.
    '''
    def __init__(self, finder, formatter):
        '''
        Initialize the SimpleManifestSink. The given FileFinder is used to
        get files matching the patterns given in the manifest. The given
        formatter does the packaging job.
        '''
        self._finder = finder
        self.packager = SimplePackager(formatter)
        self._closed = False
        self._manifests = set()

    @staticmethod
    def normalize_path(path):
        '''
        Remove any bin/ prefix.
        '''
        if mozpack.path.basedir(path, ['bin']) == 'bin':
            return mozpack.path.relpath(path, 'bin')
        return path

    def add(self, section, pattern):
        '''
        Add files with the given pattern. The section (component) is
        currently unused. Reports an error when the pattern matches nothing.
        '''
        assert not self._closed
        added = False
        for p, f in self._finder.find(pattern):
            added = True
            if is_manifest(p):
                self._manifests.add(p)
            self.packager.add(SimpleManifestSink.normalize_path(p), f)
        if not added:
            errors.error('Missing file(s): %s' % pattern)

    def remove(self, section, pattern):
        # Simple manifests never support removal; fail loudly.
        assert not self._closed
        errors.fatal('Removal is unsupported')

    def close(self, auto_root_manifest=True):
        '''
        Add possibly missing bits and push all instructions to the formatter.
        '''
        if auto_root_manifest:
            # Simple package manifests don't contain the root manifests, so
            # find and add them.
            paths = [mozpack.path.dirname(m) for m in self._manifests]
            path = mozpack.path.dirname(mozpack.path.commonprefix(paths))
            for p, f in self._finder.find(mozpack.path.join(path,
                                          '**', 'chrome.manifest')):
                if not p in self._manifests:
                    self.packager.add(SimpleManifestSink.normalize_path(p), f)
        self.packager.close()

View File

@ -0,0 +1,276 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from mozpack.chrome.manifest import (
Manifest,
ManifestInterfaces,
ManifestChrome,
ManifestBinaryComponent,
ManifestResource,
)
from urlparse import urlparse
import mozpack.path
from mozpack.files import (
ManifestFile,
XPTFile,
)
from mozpack.copier import (
FileRegistry,
Jarrer,
)
from mozpack.errors import errors
STARTUP_CACHE_PATHS = [
'jsloader',
'jssubloader',
]
'''
Formatters are classes receiving packaging instructions and creating the
appropriate package layout.
There are three distinct formatters, each handling one of the different chrome
formats:
- flat: essentially, copies files from the source with the same file system
layout. Manifests entries are grouped in a single manifest per directory,
as well as XPT interfaces.
- jar: chrome content is packaged in jar files.
- omni: chrome content, modules, non-binary components, and many other
elements are packaged in an omnijar file for each base directory.
The base interface provides the following methods:
- add_base(path)
Register a base directory for an application or GRE. Base directories
usually contain a root manifest (manifests not included in any other
manifest) named chrome.manifest.
- add(path, content)
Add the given content (BaseFile instance) at the given virtual path
- add_interfaces(path, content)
Add the given content (BaseFile instance) and link it to other
interfaces in the parent directory of the given virtual path.
- add_manifest(entry)
Add a ManifestEntry.
- contains(path)
Returns whether the given virtual path is known of the formatter.
The virtual paths mentioned above are paths as they would be with a flat
chrome.
Formatters all take a FileCopier instance they will fill with the packaged
data.
'''
class FlatFormatter(object):
    '''
    Formatter for the flat package format.
    '''
    def __init__(self, copier):
        assert isinstance(copier, FileRegistry)
        self.copier = copier
        # The root '' is always a valid base; more are added via add_base().
        self._bases = ['']
        # Set once _get_base() has been used; adding bases afterwards would
        # invalidate earlier base lookups.
        self._frozen_bases = False
    def add_base(self, base):
        # Only allow to add a base directory before calls to _get_base()
        assert not self._frozen_bases
        if not base in self._bases:
            self._bases.append(base)
    def _get_base(self, path):
        '''
        Return the deepest base directory containing the given path.
        '''
        self._frozen_bases = True
        return mozpack.path.basedir(path, self._bases)
    def add(self, path, content):
        self.copier.add(path, content)
    def add_manifest(self, entry):
        # Store manifest entries in a single manifest per directory, named
        # after their parent directory, except for root manifests, all named
        # chrome.manifest.
        base = self._get_base(entry.base)
        if entry.base == base:
            name = 'chrome'
        else:
            name = mozpack.path.basename(entry.base)
        path = mozpack.path.normpath(mozpack.path.join(entry.base,
            '%s.manifest' % name))
        if not self.copier.contains(path):
            assert mozpack.path.basedir(entry.base, [base]) == base
            # Add a reference to the manifest file in the parent manifest, if
            # the manifest file is not a root manifest.
            if len(entry.base) > len(base):
                parent = mozpack.path.dirname(entry.base)
                relbase = mozpack.path.basename(entry.base)
                relpath = mozpack.path.join(relbase,
                    mozpack.path.basename(path))
                # Call FlatFormatter.add_manifest explicitly so subclasses
                # reusing this method don't recurse into their overrides.
                FlatFormatter.add_manifest(self, Manifest(parent, relpath))
            self.copier.add(path, ManifestFile(entry.base))
        self.copier[path].add(entry)
    def add_interfaces(self, path, content):
        # Interfaces in the same directory are all linked together in an
        # interfaces.xpt file.
        interfaces_path = mozpack.path.join(mozpack.path.dirname(path),
            'interfaces.xpt')
        if not self.copier.contains(interfaces_path):
            FlatFormatter.add_manifest(self, ManifestInterfaces(
                mozpack.path.dirname(path), 'interfaces.xpt'))
            self.copier.add(interfaces_path, XPTFile())
        self.copier[interfaces_path].add(content)
    def contains(self, path):
        assert '*' not in path
        return self.copier.contains(path)
class JarFormatter(FlatFormatter):
    '''
    Formatter for the jar package format. Assumes manifest entries related to
    chrome are registered before the chrome data files are added.
    '''
    def __init__(self, copier, compress=True, optimize=True):
        FlatFormatter.__init__(self, copier)
        # Chrome base directories whose content goes into a .jar; filled as
        # chrome manifest entries are registered via add_manifest().
        self._chrome = set()
        # Set once _chromepath() has been used; registering more chrome
        # afterwards would invalidate earlier lookups.
        self._frozen_chrome = False
        self._compress = compress
        self._optimize = optimize
    def _chromepath(self, path):
        '''
        Return the chrome base directory under which the given path is. Used to
        detect under which .jar (if any) the path should go.
        '''
        self._frozen_chrome = True
        return mozpack.path.basedir(path, self._chrome)
    def add(self, path, content):
        chrome = self._chromepath(path)
        if chrome:
            # Path belongs to a jarred chrome directory: route it into a
            # Jarrer registered as <chrome>.jar (created on first use).
            jar = chrome + '.jar'
            if not self.copier.contains(jar):
                self.copier.add(jar, Jarrer(self._compress, self._optimize))
            if not self.copier[jar].contains(mozpack.path.relpath(path,
                                                                  chrome)):
                self.copier[jar].add(mozpack.path.relpath(path, chrome),
                                     content)
        else:
            FlatFormatter.add(self, path, content)
    def _jarize(self, entry, relpath):
        '''
        Transform a manifest entry in one pointing to chrome data in a jar.
        Return the corresponding chrome path and the new entry.
        '''
        base = entry.base
        basepath = mozpack.path.split(relpath)[0]
        chromepath = mozpack.path.join(base, basepath)
        entry = entry.rebase(chromepath) \
            .move(mozpack.path.join(base, 'jar:%s.jar!' % basepath)) \
            .rebase(base)
        return chromepath, entry
    def add_manifest(self, entry):
        if isinstance(entry, ManifestChrome):
            chromepath, entry = self._jarize(entry, entry.relpath)
            assert not self._frozen_chrome
            self._chrome.add(chromepath)
        elif isinstance(entry, ManifestResource) and \
                not urlparse(entry.target).scheme:
            chromepath, new_entry = self._jarize(entry, entry.target)
            # Only rewrite resource entries whose target falls under a
            # directory already known to be jarred.
            if chromepath in self._chrome:
                entry = new_entry
        FlatFormatter.add_manifest(self, entry)
    def contains(self, path):
        assert '*' not in path
        chrome = self._chromepath(path)
        if not chrome:
            return self.copier.contains(path)
        if not self.copier.contains(chrome + '.jar'):
            return False
        return self.copier[chrome + '.jar'].contains(mozpack.path.relpath(path,
            chrome))
class OmniJarFormatter(FlatFormatter):
    '''
    Formatter for the omnijar package format.
    '''
    def __init__(self, copier, omnijar_name, compress=True, optimize=True):
        FlatFormatter.__init__(self, copier)
        # One FlatFormatter wrapping a Jarrer per base directory, keyed by
        # base; created lazily by _get_omnijar().
        self.omnijars = {}
        self._omnijar_name = omnijar_name
        self._compress = compress
        self._optimize = optimize
    def _get_omnijar(self, path, create=True):
        '''
        Return the omnijar corresponding to the given path, its base directory
        and the path translated to be under the omnijar.
        '''
        base = self._get_base(path)
        if not base in self.omnijars:
            if not create:
                return None, '', path
            omnijar = Jarrer(self._compress, self._optimize)
            self.omnijars[base] = FlatFormatter(omnijar)
            self.copier.add(mozpack.path.join(base, self._omnijar_name),
                            omnijar)
        return self.omnijars[base], base, mozpack.path.relpath(path, base)
    def add(self, path, content):
        if self.is_resource(path):
            formatter, base, path = self._get_omnijar(path)
        else:
            formatter = self
        FlatFormatter.add(formatter, path, content)
    def add_manifest(self, entry):
        if isinstance(entry, ManifestBinaryComponent):
            # Binary component entries stay outside the omnijar, relative
            # to the top-level copier (base '').
            formatter, base = self, ''
        else:
            formatter, base, path = self._get_omnijar(entry.base)
        entry = entry.move(mozpack.path.relpath(entry.base, base))
        FlatFormatter.add_manifest(formatter, entry)
    def add_interfaces(self, path, content):
        formatter, base, path = self._get_omnijar(path)
        FlatFormatter.add_interfaces(formatter, path, content)
    def contains(self, path):
        assert '*' not in path
        # A path is known if it's either outside or inside any omnijar.
        if self.copier.contains(path):
            return True
        for base, copier in self.omnijars.iteritems():
            if copier.contains(mozpack.path.relpath(path, base)):
                return True
        return False
    def is_resource(self, path):
        '''
        Return whether the given path corresponds to a resource to be put in an
        omnijar archive.
        '''
        base = self._get_base(path)
        path = mozpack.path.split(mozpack.path.relpath(path, base))
        if path[0] == 'chrome':
            # Everything under chrome/ except chrome/icons.
            return len(path) == 1 or path[1] != 'icons'
        if path[0] == 'components':
            # Only .js components; anything else under components/ stays out.
            return path[-1].endswith('.js')
        if path[0] == 'res':
            # res/ except res/cursors and res/MainMenu.nib.
            return len(path) == 1 or \
                (path[1] != 'cursors' and path[1] != 'MainMenu.nib')
        if path[0] == 'defaults':
            # defaults/ except defaults/pref*/channel-prefs.js.
            return len(path) != 3 or \
                not (path[2] == 'channel-prefs.js' and
                     path[1] in ['pref', 'preferences'])
        return path[0] in [
            'modules',
            'greprefs.js',
            'hyphenation',
            'update.locale',
        ] or path[0] in STARTUP_CACHE_PATHS

View File

@ -0,0 +1,175 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import mozpack.path
from mozpack.files import (
FileFinder,
DeflatedFile,
ManifestFile,
)
from mozpack.chrome.manifest import (
parse_manifest,
ManifestEntryWithRelPath,
ManifestResource,
is_manifest,
)
from mozpack.mozjar import JarReader
from mozpack.copier import (
FileRegistry,
FileCopier,
)
from mozpack.packager import SimplePackager
from mozpack.packager.formats import (
FlatFormatter,
STARTUP_CACHE_PATHS,
)
from urlparse import urlparse
from collections import OrderedDict
class UnpackFinder(FileFinder):
    '''
    Special FileFinder that treats the source package directory as if it were
    in the flat chrome format, whatever chrome format it actually is in.
    This means that for example, paths like chrome/browser/content/... match
    files under jar:chrome/browser.jar!/content/... in case of jar chrome
    format.
    '''
    def __init__(self, *args, **kargs):
        FileFinder.__init__(self, *args, **kargs)
        # Registry of files as they would appear in flat format.
        self.files = FileRegistry()
        # Detected chrome format; flipped to 'omni' or 'jar' as evidence
        # is found while scanning below.
        self.kind = 'flat'
        self.omnijar = None
        # path -> list of preloaded entry names, per jar (see _open_jar).
        self.jarlogs = {}
        self.optimizedjars = False
        jars = set()
        for p, f in FileFinder.find(self, '*'):
            # Skip the precomplete file, which is generated at packaging time.
            if p == 'precomplete':
                continue
            base = mozpack.path.dirname(p)
            # If the file is a zip/jar that is not a .xpi, and contains a
            # chrome.manifest, it is an omnijar. All the files it contains
            # go in the directory containing the omnijar. Manifests are merged
            # if there is a corresponding manifest in the directory.
            if not p.endswith('.xpi') and self._maybe_zip(f) and \
                    (mozpack.path.basename(p) == self.omnijar or
                     not self.omnijar):
                jar = self._open_jar(p, f)
                if 'chrome.manifest' in jar:
                    self.kind = 'omni'
                    self.omnijar = mozpack.path.basename(p)
                    self._fill_with_omnijar(base, jar)
                    continue
            # If the file is a manifest, scan its entries for some referencing
            # jar: urls. If there are some, the files contained in the jar they
            # point to, go under a directory named after the jar.
            if is_manifest(p):
                m = self.files[p] if self.files.contains(p) \
                    else ManifestFile(base)
                for e in parse_manifest(self.base, p, f.open()):
                    m.add(self._handle_manifest_entry(e, jars))
                if self.files.contains(p):
                    continue
                f = m
            # Jar files referenced from manifests were already unpacked by
            # _handle_manifest_entry above; don't register the jar itself.
            if not p in jars:
                self.files.add(p, f)
    def _fill_with_omnijar(self, base, jar):
        # Register every omnijar member under the omnijar's directory,
        # merging manifests with same-named ones already registered.
        for j in jar:
            path = mozpack.path.join(base, j.filename)
            if is_manifest(j.filename):
                m = self.files[path] if self.files.contains(path) \
                    else ManifestFile(mozpack.path.dirname(path))
                for e in parse_manifest(None, path, j):
                    m.add(e)
                if not self.files.contains(path):
                    self.files.add(path, m)
                continue
            else:
                self.files.add(path, DeflatedFile(j))
    def _handle_manifest_entry(self, entry, jars):
        jarpath = None
        # Rewrite entries targeting jar: urls to point at the unpacked
        # location, and unpack the referenced jar.
        if isinstance(entry, ManifestEntryWithRelPath) and \
                urlparse(entry.relpath).scheme == 'jar':
            jarpath, entry = self._unjarize(entry, entry.relpath)
        elif isinstance(entry, ManifestResource) and \
                urlparse(entry.target).scheme == 'jar':
            jarpath, entry = self._unjarize(entry, entry.target)
        if jarpath:
            # Don't defer unpacking the jar file. If we already saw
            # it, take (and remove) it from the registry. If we
            # haven't, try to find it now.
            if self.files.contains(jarpath):
                jar = self.files[jarpath]
                self.files.remove(jarpath)
            else:
                jar = [f for p, f in FileFinder.find(self, jarpath)]
                assert len(jar) == 1
                jar = jar[0]
            if not jarpath in jars:
                base = mozpack.path.splitext(jarpath)[0]
                for j in self._open_jar(jarpath, jar):
                    self.files.add(mozpack.path.join(base,
                                                     j.filename),
                                   DeflatedFile(j))
            jars.add(jarpath)
            self.kind = 'jar'
        return entry
    def _open_jar(self, path, file):
        '''
        Return a JarReader for the given BaseFile instance, keeping a log of
        the preloaded entries it has.
        '''
        jar = JarReader(fileobj=file.open())
        if jar.is_optimized:
            self.optimizedjars = True
        if jar.last_preloaded:
            # Log all entry names up to and including the last preloaded one.
            jarlog = jar.entries.keys()
            self.jarlogs[path] = jarlog[:jarlog.index(jar.last_preloaded) + 1]
        return jar
    def find(self, path):
        # Lookups go through the rebuilt flat-format registry rather than
        # the underlying directory.
        for p in self.files.match(path):
            yield p, self.files[p]
    def _maybe_zip(self, file):
        '''
        Return whether the given BaseFile looks like a ZIP/Jar.
        '''
        # 'PK' is the ZIP magic; also accepted at offset 4 — presumably to
        # allow a small header before the ZIP data (TODO confirm).
        header = file.open().read(8)
        return len(header) == 8 and (header[0:2] == 'PK' or
                                     header[4:6] == 'PK')
    def _unjarize(self, entry, relpath):
        '''
        Transform a manifest entry pointing to chrome data in a jar in one
        pointing to the corresponding unpacked path. Return the jar path and
        the new entry.
        '''
        base = entry.base
        jar, relpath = urlparse(relpath).path.split('!', 1)
        entry = entry.rebase(mozpack.path.join(base, 'jar:%s!' % jar)) \
            .move(mozpack.path.join(base, mozpack.path.splitext(jar)[0])) \
            .rebase(base)
        return mozpack.path.join(base, jar), entry
def unpack(source):
    '''
    Rewrite, in place, a jar chrome or omnijar packaged directory as a flat
    package.
    '''
    registry = FileCopier()
    packager = SimplePackager(FlatFormatter(registry))
    for path, file in UnpackFinder(source).find('*'):
        # Startup cache data is regenerated at runtime; don't carry it over.
        if mozpack.path.split(path)[0] in STARTUP_CACHE_PATHS:
            continue
        packager.add(path, file)
    packager.close()
    registry.copy(source)

View File

@ -0,0 +1,137 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import posixpath
import os
import re
'''
Like os.path, with a reduced set of functions, and with normalized path
separators (always use forward slashes).
Also contains a few additional utilities not found in os.path.
'''
def normsep(path):
    '''
    Return the given path with the platform's separator (os.sep) replaced
    by forward slashes.
    '''
    if os.sep == '/':
        # Nothing to do on platforms already using forward slashes.
        return path
    return path.replace(os.sep, '/')
def relpath(path, start):
    '''
    Like os.path.relpath, but with normalized separators, and returning ''
    instead of '.' when path and start are the same.
    '''
    result = normsep(os.path.relpath(path, start))
    if result == '.':
        return ''
    return result
def join(*paths):
    '''
    Join path components with forward slashes (posixpath.join applied to
    separator-normalized inputs).
    '''
    return posixpath.join(*[normsep(p) for p in paths])
def normpath(path):
    '''
    Normalize the given path (collapsing '.', '..' and doubled slashes)
    after normalizing its separators.
    '''
    normalized = normsep(path)
    return posixpath.normpath(normalized)
def dirname(path):
    '''
    Return the directory portion of the given path, with separators
    normalized to forward slashes.
    '''
    normalized = normsep(path)
    return posixpath.dirname(normalized)
def commonprefix(paths):
    '''
    Return the longest common string prefix (not necessarily on a path
    component boundary) of the separator-normalized paths.
    '''
    normalized = [normsep(path) for path in paths]
    return posixpath.commonprefix(normalized)
def basename(path):
    '''
    Return the last component of the given path (os.path semantics, so the
    platform's separators are honored).
    '''
    # os.path.basename(p) is defined as os.path.split(p)[1].
    return os.path.split(path)[1]
def splitext(path):
    '''
    Split the given path into (root, extension), with separators normalized
    to forward slashes.
    '''
    normalized = normsep(path)
    return posixpath.splitext(normalized)
def split(path):
    '''
    Break the normalized path into its components.
        split('foo/bar/baz') returns ['foo', 'bar', 'baz']
    '''
    normalized = normsep(path)
    return normalized.split('/')
def basedir(path, bases):
    '''
    Given a list of directories (bases), return which one contains the given
    path. If several bases match, the deepest one wins.
        basedir('foo/bar/baz', ['foo', 'baz', 'foo/bar']) returns 'foo/bar'
        ('foo' and 'foo/bar' both match, but 'foo/bar' is the deepest match)
    Returns None implicitly when nothing matches.
    '''
    path = normsep(path)
    normalized_bases = [normsep(b) for b in bases]
    # An exact match wins outright.
    if path in normalized_bases:
        return path
    # Reverse lexicographic order tries nested bases before their parents
    # (e.g. 'foo/bar' before 'foo'); '' matches anything, and sorts last.
    for candidate in sorted(normalized_bases, reverse=True):
        if candidate == '' or path.startswith(candidate + '/'):
            return candidate
def match(path, pattern):
    '''
    Return whether the given path matches the given pattern.
    An asterisk can be used to match any string, including the null string, in
    one part of the path:
        'foo' matches '*', 'f*' or 'fo*o'
    However, an asterisk matching a subdirectory may not match the null string:
        'foo/bar' does *not* match 'foo/*/bar'
    If the pattern matches one of the ancestor directories of the path, the
    path is considered matching:
        'foo/bar' matches 'foo'
    Two adjacent asterisks can be used to match files and zero or more
    directories and subdirectories.
        'foo/bar' matches 'foo/**/bar', or '**/bar'
    '''
    # An empty pattern matches everything (including ancestors, per above).
    if not pattern:
        return True
    # Accept both strings and pre-split component lists, for recursion.
    if isinstance(path, basestring):
        path = split(path)
    if isinstance(pattern, basestring):
        pattern = split(pattern)
    if pattern[0] == '**':
        if len(pattern) == 1:
            # A bare '**' matches anything.
            return True
        # Try the rest of the pattern at every depth of the path.
        return any(match(path[n:], pattern[1:]) for n in xrange(len(path)))
    if len(pattern) > 1:
        # Match head components against each other, then recurse on tails.
        return match(path[:1], pattern[:1]) and match(path[1:], pattern[1:])
    if path:
        # Single non-'**' pattern component against the head path component.
        return re.match(translate(pattern[0]), path[0]) is not None
    return False
def translate(pattern):
    '''
    Turn the given globbing pattern into an anchored regular expression:
    every character is escaped except '*', which becomes '.*'.
    '''
    escaped = re.escape(pattern)
    return escaped.replace('\*', '.*') + '$'
def rebase(oldbase, base, relativepath):
    '''
    Return relativepath relative to base instead of oldbase.
    '''
    if base == oldbase:
        return relativepath
    if len(base) < len(oldbase):
        # base is an ancestor of oldbase: prefix with the path from base
        # down to oldbase.
        assert basedir(oldbase, [base]) == base
        relbase = relpath(oldbase, base)
        result = join(relbase, relativepath)
    else:
        # oldbase is an ancestor of base: strip the path from oldbase down
        # to base off the front.
        assert basedir(base, [oldbase]) == oldbase
        relbase = relpath(base, oldbase)
        result = relpath(relativepath, relbase)
    result = normpath(result)
    # normpath drops a trailing slash; preserve it from the input.
    if relativepath.endswith('/') and not result.endswith('/'):
        result += '/'
    return result

View File

View File

@ -0,0 +1,148 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import unittest
import mozunit
from mozpack.chrome.flags import (
Flag,
StringFlag,
VersionFlag,
Flags,
)
from mozpack.errors import ErrorMessage
class TestFlag(unittest.TestCase):
    '''
    Tests for mozpack.chrome.flags flag classes: Flag (boolean),
    StringFlag and VersionFlag.
    '''
    def test_flag(self):
        flag = Flag('flag')
        # An undefined flag stringifies empty and matches false-y values.
        self.assertEqual(str(flag), '')
        self.assertTrue(flag.matches(False))
        self.assertTrue(flag.matches('false'))
        self.assertFalse(flag.matches('true'))
        # Rejected definitions: empty value, non-boolean value, '!=' form.
        self.assertRaises(ErrorMessage, flag.add_definition, 'flag=')
        self.assertRaises(ErrorMessage, flag.add_definition, 'flag=42')
        self.assertRaises(ErrorMessage, flag.add_definition, 'flag!=false')
        # Each new definition replaces the previous value.
        flag.add_definition('flag=1')
        self.assertEqual(str(flag), 'flag=1')
        self.assertTrue(flag.matches(True))
        self.assertTrue(flag.matches('1'))
        self.assertFalse(flag.matches('no'))
        flag.add_definition('flag=true')
        self.assertEqual(str(flag), 'flag=true')
        self.assertTrue(flag.matches(True))
        self.assertTrue(flag.matches('true'))
        self.assertFalse(flag.matches('0'))
        flag.add_definition('flag=no')
        self.assertEqual(str(flag), 'flag=no')
        self.assertTrue(flag.matches('false'))
        self.assertFalse(flag.matches('1'))
        # A bare 'flag' definition behaves like true.
        flag.add_definition('flag')
        self.assertEqual(str(flag), 'flag')
        self.assertFalse(flag.matches('false'))
        self.assertTrue(flag.matches('true'))
        self.assertFalse(flag.matches(False))
    def test_string_flag(self):
        flag = StringFlag('flag')
        # An undefined string flag matches any value.
        self.assertEqual(str(flag), '')
        self.assertTrue(flag.matches('foo'))
        # Ordering comparisons are not supported for string flags.
        self.assertRaises(ErrorMessage, flag.add_definition, 'flag>=2')
        flag.add_definition('flag=foo')
        self.assertEqual(str(flag), 'flag=foo')
        self.assertTrue(flag.matches('foo'))
        self.assertFalse(flag.matches('bar'))
        # Multiple '=' definitions accumulate with OR semantics.
        flag.add_definition('flag=bar')
        self.assertEqual(str(flag), 'flag=foo flag=bar')
        self.assertTrue(flag.matches('foo'))
        self.assertTrue(flag.matches('bar'))
        self.assertFalse(flag.matches('baz'))
        # '!=' excludes the given value.
        flag = StringFlag('flag')
        flag.add_definition('flag!=bar')
        self.assertEqual(str(flag), 'flag!=bar')
        self.assertTrue(flag.matches('foo'))
        self.assertFalse(flag.matches('bar'))
    def test_version_flag(self):
        flag = VersionFlag('flag')
        # An undefined version flag matches any version.
        self.assertEqual(str(flag), '')
        self.assertTrue(flag.matches('1.0'))
        # '!=' is not supported for version flags.
        self.assertRaises(ErrorMessage, flag.add_definition, 'flag!=2')
        flag.add_definition('flag=1.0')
        self.assertEqual(str(flag), 'flag=1.0')
        self.assertTrue(flag.matches('1.0'))
        self.assertFalse(flag.matches('2.0'))
        # Multiple definitions accumulate with OR semantics.
        flag.add_definition('flag=2.0')
        self.assertEqual(str(flag), 'flag=1.0 flag=2.0')
        self.assertTrue(flag.matches('1.0'))
        self.assertTrue(flag.matches('2.0'))
        self.assertFalse(flag.matches('3.0'))
        flag = VersionFlag('flag')
        flag.add_definition('flag>=2.0')
        self.assertEqual(str(flag), 'flag>=2.0')
        self.assertFalse(flag.matches('1.0'))
        self.assertTrue(flag.matches('2.0'))
        self.assertTrue(flag.matches('3.0'))
        # Version comparison is component-wise: 1.9 < 1.10 < 1.20.
        flag.add_definition('flag<1.10')
        self.assertEqual(str(flag), 'flag>=2.0 flag<1.10')
        self.assertTrue(flag.matches('1.0'))
        self.assertTrue(flag.matches('1.9'))
        self.assertFalse(flag.matches('1.10'))
        self.assertFalse(flag.matches('1.20'))
        self.assertTrue(flag.matches('2.0'))
        self.assertTrue(flag.matches('3.0'))
        # Comparison operators require a value; '!=' is always rejected.
        self.assertRaises(Exception, flag.add_definition, 'flag<')
        self.assertRaises(Exception, flag.add_definition, 'flag>')
        self.assertRaises(Exception, flag.add_definition, 'flag>=')
        self.assertRaises(Exception, flag.add_definition, 'flag<=')
        self.assertRaises(Exception, flag.add_definition, 'flag!=1.0')
class TestFlags(unittest.TestCase):
    '''
    Tests for mozpack.chrome.flags.Flags: a collection of typed flags
    parsed from manifest flag strings.
    '''
    def setUp(self):
        self.flags = Flags('contentaccessible=yes',
                           'appversion>=3.5',
                           'application=foo',
                           'application=bar',
                           'appversion<2.0',
                           'platform',
                           'abi!=Linux_x86-gcc3')
    def test_flags_str(self):
        # Definitions for the same flag name are grouped in the string form.
        self.assertEqual(str(self.flags), 'contentaccessible=yes ' +
                         'appversion>=3.5 appversion<2.0 application=foo ' +
                         'application=bar platform abi!=Linux_x86-gcc3')
    def test_flags_match_unset(self):
        # Matching on a flag with no definition always succeeds.
        self.assertTrue(self.flags.match(os='WINNT'))
    def test_flags_match(self):
        self.assertTrue(self.flags.match(application='foo'))
        self.assertFalse(self.flags.match(application='qux'))
    def test_flags_match_different(self):
        # '!=' definitions match any value but the excluded one.
        self.assertTrue(self.flags.match(abi='WINNT_x86-MSVC'))
        self.assertFalse(self.flags.match(abi='Linux_x86-gcc3'))
    def test_flags_match_version(self):
        # appversion matches when >=3.5 or <2.0 (OR of the definitions).
        self.assertTrue(self.flags.match(appversion='1.0'))
        self.assertTrue(self.flags.match(appversion='1.5'))
        self.assertFalse(self.flags.match(appversion='2.0'))
        self.assertFalse(self.flags.match(appversion='3.0'))
        self.assertTrue(self.flags.match(appversion='3.5'))
        self.assertTrue(self.flags.match(appversion='3.10'))
if __name__ == '__main__':
mozunit.main()

View File

@ -0,0 +1,149 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import unittest
import mozunit
import os
from mozpack.chrome.manifest import (
ManifestContent,
ManifestLocale,
ManifestSkin,
Manifest,
ManifestResource,
ManifestOverride,
ManifestComponent,
ManifestContract,
ManifestInterfaces,
ManifestBinaryComponent,
ManifestCategory,
ManifestStyle,
ManifestOverlay,
MANIFESTS_TYPES,
parse_manifest,
parse_manifest_line,
)
from mozpack.errors import errors, AccumulatedErrors
from test_errors import TestErrors
class TestManifest(unittest.TestCase):
    '''
    Tests for parsing chrome manifests into typed ManifestEntry objects,
    and for rebasing entries between base directories.
    '''
    def test_parse_manifest(self):
        # One line for every supported manifest entry type, plus a blank
        # line and comments, which the parser must skip.
        manifest = [
            'content global content/global/',
            'content global content/global/ application=foo application=bar' +
            ' platform',
            'locale global en-US content/en-US/',
            'locale global en-US content/en-US/ application=foo',
            'skin global classic/1.0 content/skin/classic/',
            'skin global classic/1.0 content/skin/classic/ application=foo' +
            ' os=WINNT',
            '',
            'manifest pdfjs/chrome.manifest',
            'resource gre-resources toolkit/res/',
            'override chrome://global/locale/netError.dtd' +
            ' chrome://browser/locale/netError.dtd',
            '# Comment',
            'component {b2bba4df-057d-41ea-b6b1-94a10a8ede68} foo.js',
            'contract @mozilla.org/foo;1' +
            ' {b2bba4df-057d-41ea-b6b1-94a10a8ede68}',
            'interfaces foo.xpt # Inline comment',
            'binary-component bar.so',
            'category command-line-handler m-browser' +
            ' @mozilla.org/browser/clh;1' +
            ' application={ec8030f7-c20a-464f-9b0e-13a3a9e97384}',
            'style chrome://global/content/customizeToolbar.xul' +
            ' chrome://browser/skin/',
            'overlay chrome://global/content/viewSource.xul' +
            ' chrome://browser/content/viewSourceOverlay.xul',
        ]
        other_manifest = [
            'content global content/global/'
        ]
        expected_result = [
            ManifestContent('', 'global', 'content/global/'),
            ManifestContent('', 'global', 'content/global/', 'application=foo',
                            'application=bar', 'platform'),
            ManifestLocale('', 'global', 'en-US', 'content/en-US/'),
            ManifestLocale('', 'global', 'en-US', 'content/en-US/',
                           'application=foo'),
            ManifestSkin('', 'global', 'classic/1.0', 'content/skin/classic/'),
            ManifestSkin('', 'global', 'classic/1.0', 'content/skin/classic/',
                         'application=foo', 'os=WINNT'),
            Manifest('', 'pdfjs/chrome.manifest'),
            ManifestResource('', 'gre-resources', 'toolkit/res/'),
            ManifestOverride('', 'chrome://global/locale/netError.dtd',
                             'chrome://browser/locale/netError.dtd'),
            ManifestComponent('', '{b2bba4df-057d-41ea-b6b1-94a10a8ede68}',
                              'foo.js'),
            ManifestContract('', '@mozilla.org/foo;1',
                             '{b2bba4df-057d-41ea-b6b1-94a10a8ede68}'),
            ManifestInterfaces('', 'foo.xpt'),
            ManifestBinaryComponent('', 'bar.so'),
            ManifestCategory('', 'command-line-handler', 'm-browser',
                             '@mozilla.org/browser/clh;1', 'application=' +
                             '{ec8030f7-c20a-464f-9b0e-13a3a9e97384}'),
            ManifestStyle('', 'chrome://global/content/customizeToolbar.xul',
                          'chrome://browser/skin/'),
            ManifestOverlay('', 'chrome://global/content/viewSource.xul',
                            'chrome://browser/content/viewSourceOverlay.xul'),
        ]
        with mozunit.MockedOpen({'manifest': '\n'.join(manifest),
                                 'other/manifest': '\n'.join(other_manifest)}):
            # Ensure we have tests for all types of manifests.
            self.assertEqual(set(type(e) for e in expected_result),
                             set(MANIFESTS_TYPES.values()))
            self.assertEqual(list(parse_manifest(os.curdir, 'manifest')),
                             expected_result)
            # Entries parsed from a manifest in a subdirectory carry that
            # directory as their base.
            self.assertEqual(list(parse_manifest(os.curdir, 'other/manifest')),
                             [ManifestContent('other', 'global',
                                              'content/global/')])
    def test_manifest_rebase(self):
        # rebase() rewrites an entry's relative paths when the manifest
        # containing it is considered relative to a different base.
        m = parse_manifest_line('chrome', 'content global content/global/')
        m = m.rebase('')
        self.assertEqual(str(m), 'content global chrome/content/global/')
        m = m.rebase('chrome')
        self.assertEqual(str(m), 'content global content/global/')
        m = parse_manifest_line('chrome/foo', 'content global content/global/')
        m = m.rebase('chrome')
        self.assertEqual(str(m), 'content global foo/content/global/')
        m = m.rebase('chrome/foo')
        self.assertEqual(str(m), 'content global content/global/')
        m = parse_manifest_line('modules/foo', 'resource foo ./')
        m = m.rebase('modules')
        self.assertEqual(str(m), 'resource foo foo/')
        m = m.rebase('modules/foo')
        self.assertEqual(str(m), 'resource foo ./')
        # rebase/move combination as used when jarring chrome data.
        m = parse_manifest_line('chrome', 'content browser browser/content/')
        m = m.rebase('chrome/browser').move('jar:browser.jar!').rebase('')
        self.assertEqual(str(m), 'content browser jar:browser.jar!/content/')
class TestManifestErrors(TestErrors, unittest.TestCase):
    '''
    Error reporting when parsing bad manifests: errors carry file:line
    context and can be accumulated instead of raising immediately.
    '''
    def test_parse_manifest_errors(self):
        manifest = [
            'skin global classic/1.0 content/skin/classic/ platform',
            '',
            'binary-component bar.so',
            'unsupported foo',
        ]
        with mozunit.MockedOpen({'manifest': '\n'.join(manifest)}):
            with self.assertRaises(AccumulatedErrors):
                with errors.accumulate():
                    list(parse_manifest(os.curdir, 'manifest'))
            out = self.get_output()
            # Expecting 2 errors
            self.assertEqual(len(out), 2)
            path = os.path.abspath('manifest')
            # First on line 1
            self.assertTrue(out[0].startswith('Error: %s:1: ' % path))
            # Second on line 4
            self.assertTrue(out[1].startswith('Error: %s:4: ' % path))
if __name__ == '__main__':
mozunit.main()

View File

@ -0,0 +1,178 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from mozpack.copier import (
FileCopier,
FileRegistry,
Jarrer,
)
from mozpack.files import GeneratedFile
from mozpack.mozjar import JarReader
import mozpack.path
import unittest
import mozunit
import os
import shutil
from mozpack.errors import ErrorMessage
from tempfile import mkdtemp
from mozpack.test.test_files import (
MockDest,
MatchTestTemplate,
)
class TestFileRegistry(MatchTestTemplate, unittest.TestCase):
    '''
    Tests for mozpack.copier.FileRegistry, including the shared
    MatchTestTemplate pattern-matching scenarios.
    '''
    def add(self, path):
        # Hook used by MatchTestTemplate.do_match_test to populate entries.
        self.registry.add(path, GeneratedFile(path))
    def do_check(self, pattern, result):
        # Hook used by MatchTestTemplate to verify contains()/match().
        self.checked = True
        if result:
            self.assertTrue(self.registry.contains(pattern))
        else:
            self.assertFalse(self.registry.contains(pattern))
        self.assertEqual(self.registry.match(pattern), result)
    def test_file_registry(self):
        self.registry = FileRegistry()
        self.registry.add('foo', GeneratedFile('foo'))
        bar = GeneratedFile('bar')
        self.registry.add('bar', bar)
        # paths() preserves insertion order; [] returns the stored file.
        self.assertEqual(self.registry.paths(), ['foo', 'bar'])
        self.assertEqual(self.registry['bar'], bar)
        # Rejected: duplicate path, removing an unknown path, and paths
        # nested under an already-registered file path.
        self.assertRaises(ErrorMessage, self.registry.add, 'foo',
                          GeneratedFile('foo2'))
        self.assertRaises(ErrorMessage, self.registry.remove, 'qux')
        self.assertRaises(ErrorMessage, self.registry.add, 'foo/bar',
                          GeneratedFile('foobar'))
        self.assertRaises(ErrorMessage, self.registry.add, 'foo/bar/baz',
                          GeneratedFile('foobar'))
        self.assertEqual(self.registry.paths(), ['foo', 'bar'])
        self.registry.remove('foo')
        self.assertEqual(self.registry.paths(), ['bar'])
        self.registry.remove('bar')
        self.assertEqual(self.registry.paths(), [])
        # Run the shared match/contains scenarios (populates via add()).
        self.do_match_test()
        self.assertTrue(self.checked)
        self.assertEqual(self.registry.paths(), [
            'bar',
            'foo/bar',
            'foo/baz',
            'foo/qux/1',
            'foo/qux/bar',
            'foo/qux/2/test',
            'foo/qux/2/test2',
        ])
        # remove() accepts patterns (here, a whole directory subtree).
        self.registry.remove('foo/qux')
        self.assertEqual(self.registry.paths(), ['bar', 'foo/bar', 'foo/baz'])
        self.registry.add('foo/qux', GeneratedFile('fooqux'))
        self.assertEqual(self.registry.paths(), ['bar', 'foo/bar', 'foo/baz',
                                                 'foo/qux'])
        # remove() also accepts glob patterns.
        self.registry.remove('foo/b*')
        self.assertEqual(self.registry.paths(), ['bar', 'foo/qux'])
        # The registry is iterable as (path, content) pairs and sized.
        self.assertEqual([f for f, c in self.registry], ['bar', 'foo/qux'])
        self.assertEqual(len(self.registry), 2)
        # Hidden (dot) files are registered like any other.
        self.add('foo/.foo')
        self.assertTrue(self.registry.contains('foo/.foo'))
class TestFileCopier(unittest.TestCase):
    '''
    Tests for mozpack.copier.FileCopier copying to a real directory,
    including removal of stale files/directories on a subsequent copy.
    '''
    def setUp(self):
        self.tmpdir = mkdtemp()
    def tearDown(self):
        shutil.rmtree(self.tmpdir)
    def all_dirs(self, base):
        # Collect leaf directories (those without subdirectories) under base.
        all_dirs = set()
        for root, dirs, files in os.walk(base):
            if not dirs:
                all_dirs.add(mozpack.path.relpath(root, base))
        return all_dirs
    def all_files(self, base):
        # Collect all file paths under base, relative to base.
        all_files = set()
        for root, dirs, files in os.walk(base):
            for f in files:
                all_files.add(
                    mozpack.path.join(mozpack.path.relpath(root, base), f))
        return all_files
    def test_file_copier(self):
        copier = FileCopier()
        copier.add('foo/bar', GeneratedFile('foobar'))
        copier.add('foo/qux', GeneratedFile('fooqux'))
        copier.add('foo/deep/nested/directory/file', GeneratedFile('fooz'))
        copier.add('bar', GeneratedFile('bar'))
        copier.add('qux/foo', GeneratedFile('quxfoo'))
        copier.add('qux/bar', GeneratedFile(''))
        copier.copy(self.tmpdir)
        # The destination contains exactly the registered paths.
        self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
        self.assertEqual(self.all_dirs(self.tmpdir),
                         set(['foo/deep/nested/directory', 'qux']))
        # A second copy() removes files dropped from the registry, along
        # with directories left empty as a result.
        copier.remove('foo')
        copier.add('test', GeneratedFile('test'))
        copier.copy(self.tmpdir)
        self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
        self.assertEqual(self.all_dirs(self.tmpdir), set(['qux']))
class TestJarrer(unittest.TestCase):
    '''
    Tests for mozpack.copier.Jarrer writing jar archives, including
    repeated copies, content replacement, and entry preloading.
    '''
    def check_jar(self, dest, copier):
        # The written jar must contain exactly the registered paths, in
        # order, with matching uncompressed content.
        jar = JarReader(fileobj=dest)
        self.assertEqual([f.filename for f in jar], copier.paths())
        for f in jar:
            self.assertEqual(f.uncompressed_data.read(),
                             copier[f.filename].content)
    def test_jarrer(self):
        copier = Jarrer()
        copier.add('foo/bar', GeneratedFile('foobar'))
        copier.add('foo/qux', GeneratedFile('fooqux'))
        copier.add('foo/deep/nested/directory/file', GeneratedFile('fooz'))
        copier.add('bar', GeneratedFile('bar'))
        copier.add('qux/foo', GeneratedFile('quxfoo'))
        copier.add('qux/bar', GeneratedFile(''))
        dest = MockDest()
        copier.copy(dest)
        self.check_jar(dest, copier)
        # Removals and additions are reflected on re-copy.
        copier.remove('foo')
        copier.add('test', GeneratedFile('test'))
        copier.copy(dest)
        self.check_jar(dest, copier)
        # Replacing an entry's content is reflected on re-copy.
        copier.remove('test')
        copier.add('test', GeneratedFile('replaced-content'))
        copier.copy(dest)
        self.check_jar(dest, copier)
        # Copying again with no changes keeps the jar consistent.
        copier.copy(dest)
        self.check_jar(dest, copier)
        # Preloaded entries move to the front of the jar, and the reader
        # exposes the last preloaded entry.
        preloaded = ['qux/bar', 'bar']
        copier.preload(preloaded)
        copier.copy(dest)
        dest.seek(0)
        jar = JarReader(fileobj=dest)
        self.assertEqual([f.filename for f in jar], preloaded +
                         [p for p in copier.paths() if not p in preloaded])
        self.assertEqual(jar.last_preloaded, preloaded[-1])
if __name__ == '__main__':
mozunit.main()

View File

@ -0,0 +1,93 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from mozpack.errors import (
errors,
ErrorMessage,
AccumulatedErrors,
)
import unittest
import mozunit
import sys
from cStringIO import StringIO
class TestErrors(object):
    '''
    Mixin capturing mozpack.errors output into an in-memory buffer so test
    cases can inspect emitted warnings and errors via get_output().
    '''
    def setUp(self):
        # Redirect error output to a StringIO and make errors raise rather
        # than being downgraded to warnings.
        errors.out = StringIO()
        errors.ignore_errors(False)
    def tearDown(self):
        # Restore the default error stream.
        errors.out = sys.stderr
    def get_output(self):
        '''Return the captured lines, stripped of surrounding whitespace.'''
        captured = errors.out.getvalue().splitlines()
        return [line.strip() for line in captured]
class TestErrorsImpl(TestErrors, unittest.TestCase):
    '''
    Tests for the mozpack.errors reporter: severities, ignore mode,
    accumulation, and contextual file:line prefixes.
    '''
    def test_plain_error(self):
        # warn() only prints; error() and fatal() raise ErrorMessage.
        errors.warn('foo')
        self.assertRaises(ErrorMessage, errors.error, 'foo')
        self.assertRaises(ErrorMessage, errors.fatal, 'foo')
        self.assertEquals(self.get_output(), ['Warning: foo'])
    def test_ignore_errors(self):
        # With ignore_errors(), error() is downgraded to a warning, but
        # fatal() still raises.
        errors.ignore_errors()
        errors.warn('foo')
        errors.error('bar')
        self.assertRaises(ErrorMessage, errors.fatal, 'foo')
        self.assertEquals(self.get_output(), ['Warning: foo', 'Warning: bar'])
    def test_no_error(self):
        # accumulate() without any error() call must not raise.
        with errors.accumulate():
            errors.warn('1')
    def test_simple_error(self):
        # Inside accumulate(), error() doesn't raise immediately; the
        # context manager raises AccumulatedErrors on exit instead.
        with self.assertRaises(AccumulatedErrors):
            with errors.accumulate():
                errors.error('1')
        self.assertEquals(self.get_output(), ['Error: 1'])
    def test_error_loop(self):
        with self.assertRaises(AccumulatedErrors):
            with errors.accumulate():
                for i in range(3):
                    errors.error('%d' % i)
        self.assertEquals(self.get_output(),
                          ['Error: 0', 'Error: 1', 'Error: 2'])
    def test_multiple_errors(self):
        # Warnings interleave with accumulated errors in emission order.
        with self.assertRaises(AccumulatedErrors):
            with errors.accumulate():
                errors.error('foo')
                for i in range(3):
                    if i == 2:
                        errors.warn('%d' % i)
                    else:
                        errors.error('%d' % i)
                errors.error('bar')
        self.assertEquals(self.get_output(),
                          ['Error: foo', 'Error: 0', 'Error: 1',
                           'Warning: 2', 'Error: bar'])
    def test_errors_context(self):
        # context() pushes a (file, line) pair used to prefix messages;
        # contexts nest and are restored on exit.
        with self.assertRaises(AccumulatedErrors):
            with errors.accumulate():
                self.assertEqual(errors.get_context(), None)
                with errors.context('foo', 1):
                    self.assertEqual(errors.get_context(), ('foo', 1))
                    errors.error('a')
                    with errors.context('bar', 2):
                        self.assertEqual(errors.get_context(), ('bar', 2))
                        errors.error('b')
                    self.assertEqual(errors.get_context(), ('foo', 1))
                    errors.error('c')
        self.assertEqual(self.get_output(), [
            'Error: foo:1: a',
            'Error: bar:2: b',
            'Error: foo:1: c',
        ])
# Allow running this test module directly.
if __name__ == '__main__':
    mozunit.main()

View File

@ -0,0 +1,573 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from mozpack.files import (
Dest,
File,
GeneratedFile,
DeflatedFile,
ManifestFile,
XPTFile,
MinifiedProperties,
FileFinder,
)
from mozpack.mozjar import (
JarReader,
JarWriter,
)
from mozpack.chrome.manifest import (
ManifestContent,
ManifestResource,
ManifestLocale,
ManifestOverride,
)
import unittest
import mozunit
import os
import shutil
import random
import string
import mozpack.path
from mozpack.copier import ensure_parent_dir
from tempfile import mkdtemp
from io import BytesIO
from xpt import Typelib
class TestWithTmpDir(unittest.TestCase):
    '''TestCase base creating a scratch directory for each test.'''
    def setUp(self):
        # A fresh temporary directory per test.
        self.tmpdir = mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.tmpdir)

    def tmppath(self, relpath):
        '''Return the normalized path for relpath under the scratch dir.'''
        joined = os.path.join(self.tmpdir, relpath)
        return os.path.normpath(joined)
class MockDest(BytesIO, Dest):
    '''
    In-memory Dest implementation tracking whether the last operation was a
    read or a write, mimicking Dest's rewind-on-read / truncate-on-write
    behavior.
    '''
    def __init__(self):
        BytesIO.__init__(self)
        self.mode = None

    def read(self, length=-1):
        # Switching into read mode restarts reading from the beginning.
        if self.mode != 'r':
            self.mode = 'r'
            self.seek(0)
        return BytesIO.read(self, length)

    def write(self, data):
        # Switching into write mode starts over with empty content.
        if self.mode != 'w':
            self.mode = 'w'
            self.seek(0)
            self.truncate(0)
        return BytesIO.write(self, data)

    def exists(self):
        return True

    def close(self):
        # Only reset the mode; keep the underlying buffer usable.
        if self.mode:
            self.mode = None
# Dest that fails on any write attempt; used to assert that copy() performs
# no write when content is unchanged.
class DestNoWrite(Dest):
    def write(self, data):
        raise RuntimeError
class TestDest(TestWithTmpDir):
    '''Tests for the mozpack.files.Dest file wrapper.'''
    def test_dest(self):
        # Consecutive writes append; a read after writes starts from the
        # beginning; a write after reads truncates.
        dest = Dest(self.tmppath('dest'))
        self.assertFalse(dest.exists())
        dest.write('foo')
        self.assertTrue(dest.exists())
        dest.write('foo')
        self.assertEqual(dest.read(4), 'foof')
        self.assertEqual(dest.read(), 'oo')
        self.assertEqual(dest.read(), '')
        dest.write('bar')
        self.assertEqual(dest.read(4), 'bar')
        # close() resets the read/write state.
        dest.close()
        self.assertEqual(dest.read(), 'bar')
        dest.write('foo')
        dest.close()
        dest.write('qux')
        self.assertEqual(dest.read(), 'qux')
# Large random string to exercise the long-content code paths described in
# TestFile.test_file.
rand = ''.join(random.choice(string.letters) for i in xrange(131597))
# Consecutive contents copied over the same destination, covering different
# content, different content of the same size, identical content, and long
# content (see TestFile.test_file).
samples = [
    '',
    'test',
    'fooo',
    'same',
    'same',
    'Different and longer',
    rand,
    rand,
    rand[:-1] + '_',
    'test'
]
class TestFile(TestWithTmpDir):
    '''Tests for mozpack.files.File.'''
    def test_file(self):
        '''
        Check that File.copy yields the proper content in the destination file
        in all situations that trigger different code paths:
        - different content
        - different content of the same size
        - same content
        - long content
        '''
        src = self.tmppath('src')
        dest = self.tmppath('dest')
        for content in samples:
            with open(src, 'wb') as tmp:
                tmp.write(content)
            # Ensure the destination file, when it exists, is older than the
            # source
            if os.path.exists(dest):
                time = os.path.getmtime(src) - 1
                os.utime(dest, (time, time))
            f = File(src)
            f.copy(dest)
            self.assertEqual(content, open(dest, 'rb').read())
            self.assertEqual(content, f.open().read())
            self.assertEqual(content, f.open().read())
    def test_file_dest(self):
        '''
        Similar to test_file, but for a destination object instead of
        a destination file. This ensures the destination object is being
        used properly by File.copy, ensuring that other subclasses of Dest
        will work.
        '''
        src = self.tmppath('src')
        dest = MockDest()
        for content in samples:
            with open(src, 'wb') as tmp:
                tmp.write(content)
            f = File(src)
            f.copy(dest)
            self.assertEqual(content, dest.getvalue())
    def test_file_open(self):
        '''
        Test whether File.open returns an appropriately reset file object.
        '''
        src = self.tmppath('src')
        content = ''.join(samples)
        with open(src, 'wb') as tmp:
            tmp.write(content)
        f = File(src)
        self.assertEqual(content[:42], f.open().read(42))
        self.assertEqual(content, f.open().read())
    def test_file_no_write(self):
        '''
        Test various conditions where File.copy is expected not to write
        in the destination file.
        '''
        src = self.tmppath('src')
        dest = self.tmppath('dest')
        with open(src, 'wb') as tmp:
            tmp.write('test')
        # Initial copy
        f = File(src)
        f.copy(dest)
        # Ensure subsequent copies won't trigger writes
        f.copy(DestNoWrite(dest))
        self.assertEqual('test', open(dest, 'rb').read())
        # When the source file is newer, but with the same content, no copy
        # should occur
        time = os.path.getmtime(src) - 1
        os.utime(dest, (time, time))
        f.copy(DestNoWrite(dest))
        self.assertEqual('test', open(dest, 'rb').read())
        # When the source file is older than the destination file, even with
        # different content, no copy should occur.
        with open(src, 'wb') as tmp:
            tmp.write('fooo')
        time = os.path.getmtime(dest) - 1
        os.utime(src, (time, time))
        f.copy(DestNoWrite(dest))
        self.assertEqual('test', open(dest, 'rb').read())
        # Double check that under conditions where a copy occurs, we would get
        # an exception.
        time = os.path.getmtime(src) - 1
        os.utime(dest, (time, time))
        self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))
class TestGeneratedFile(TestWithTmpDir):
    '''Tests for mozpack.files.GeneratedFile.'''
    def test_generated_file(self):
        '''
        Check that GeneratedFile.copy yields the proper content in the
        destination file in all situations that trigger different code paths
        (see TestFile.test_file)
        '''
        dest = self.tmppath('dest')
        for content in samples:
            f = GeneratedFile(content)
            f.copy(dest)
            self.assertEqual(content, open(dest, 'rb').read())
    def test_generated_file_open(self):
        '''
        Test whether GeneratedFile.open returns an appropriately reset file
        object.
        '''
        content = ''.join(samples)
        f = GeneratedFile(content)
        self.assertEqual(content[:42], f.open().read(42))
        self.assertEqual(content, f.open().read())
    def test_generated_file_no_write(self):
        '''
        Test various conditions where GeneratedFile.copy is expected not to
        write in the destination file.
        '''
        dest = self.tmppath('dest')
        # Initial copy
        f = GeneratedFile('test')
        f.copy(dest)
        # Ensure subsequent copies won't trigger writes
        f.copy(DestNoWrite(dest))
        self.assertEqual('test', open(dest, 'rb').read())
        # When using a new instance with the same content, no copy should occur
        f = GeneratedFile('test')
        f.copy(DestNoWrite(dest))
        self.assertEqual('test', open(dest, 'rb').read())
        # Double check that under conditions where a copy occurs, we would get
        # an exception.
        f = GeneratedFile('fooo')
        self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))
class TestDeflatedFile(TestWithTmpDir):
    '''Tests for mozpack.files.DeflatedFile (files backed by jar entries).'''
    def test_deflated_file(self):
        '''
        Check that DeflatedFile.copy yields the proper content in the
        destination file in all situations that trigger different code paths
        (see TestFile.test_file)
        '''
        src = self.tmppath('src.jar')
        dest = self.tmppath('dest')
        contents = {}
        with JarWriter(src) as jar:
            for content in samples:
                # Random 8-character names for the jar entries.
                name = ''.join(random.choice(string.letters)
                               for i in xrange(8))
                jar.add(name, content, compress=True)
                contents[name] = content
        for j in JarReader(src):
            f = DeflatedFile(j)
            f.copy(dest)
            self.assertEqual(contents[j.filename], open(dest, 'rb').read())
    def test_deflated_file_open(self):
        '''
        Test whether DeflatedFile.open returns an appropriately reset file
        object.
        '''
        src = self.tmppath('src.jar')
        content = ''.join(samples)
        with JarWriter(src) as jar:
            jar.add('content', content)
        f = DeflatedFile(JarReader(src)['content'])
        self.assertEqual(content[:42], f.open().read(42))
        self.assertEqual(content, f.open().read())
    def test_deflated_file_no_write(self):
        '''
        Test various conditions where DeflatedFile.copy is expected not to
        write in the destination file.
        '''
        src = self.tmppath('src.jar')
        dest = self.tmppath('dest')
        with JarWriter(src) as jar:
            jar.add('test', 'test')
            jar.add('test2', 'test')
            jar.add('fooo', 'fooo')
        jar = JarReader(src)
        # Initial copy
        f = DeflatedFile(jar['test'])
        f.copy(dest)
        # Ensure subsequent copies won't trigger writes
        f.copy(DestNoWrite(dest))
        self.assertEqual('test', open(dest, 'rb').read())
        # When using a different file with the same content, no copy should
        # occur
        f = DeflatedFile(jar['test2'])
        f.copy(DestNoWrite(dest))
        self.assertEqual('test', open(dest, 'rb').read())
        # Double check that under conditions where a copy occurs, we would get
        # an exception.
        f = DeflatedFile(jar['fooo'])
        self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))
class TestManifestFile(TestWithTmpDir):
    '''Tests for mozpack.files.ManifestFile.'''
    def test_manifest_file(self):
        # Entries are serialized in the order they were added, relative to
        # the file's base ('chrome').
        f = ManifestFile('chrome')
        f.add(ManifestContent('chrome', 'global', 'toolkit/content/global/'))
        f.add(ManifestResource('chrome', 'gre-resources', 'toolkit/res/'))
        f.add(ManifestResource('chrome/pdfjs', 'pdfjs', './'))
        f.add(ManifestContent('chrome/pdfjs', 'pdfjs', 'pdfjs'))
        f.add(ManifestLocale('chrome', 'browser', 'en-US',
                             'en-US/locale/browser/'))
        f.copy(self.tmppath('chrome.manifest'))
        self.assertEqual(open(self.tmppath('chrome.manifest')).readlines(), [
            'content global toolkit/content/global/\n',
            'resource gre-resources toolkit/res/\n',
            'resource pdfjs pdfjs/\n',
            'content pdfjs pdfjs/pdfjs\n',
            'locale browser en-US en-US/locale/browser/\n',
        ])
        # Removing requires an exactly matching entry (same base, same
        # entry); anything else raises ValueError.
        self.assertRaises(
            ValueError,
            f.remove,
            ManifestContent('', 'global', 'toolkit/content/global/')
        )
        self.assertRaises(
            ValueError,
            f.remove,
            ManifestOverride('chrome', 'chrome://global/locale/netError.dtd',
                             'chrome://browser/locale/netError.dtd')
        )
        f.remove(ManifestContent('chrome', 'global',
                                 'toolkit/content/global/'))
        # Removing the same entry twice raises.
        self.assertRaises(
            ValueError,
            f.remove,
            ManifestContent('chrome', 'global', 'toolkit/content/global/')
        )
        # After removal, open() must agree with what copy() writes.
        f.copy(self.tmppath('chrome.manifest'))
        content = open(self.tmppath('chrome.manifest')).read()
        self.assertEqual(content[:42], f.open().read(42))
        self.assertEqual(content, f.open().read())
# Precompiled XPCOM typelib fixtures used by the XPTFile tests below (and by
# other test modules importing them).
# Compiled typelib for the following IDL:
#     interface foo;
#     [uuid(5f70da76-519c-4858-b71e-e3c92333e2d6)]
#     interface bar {
#         void bar(in foo f);
#     };
bar_xpt = GeneratedFile(
    b'\x58\x50\x43\x4F\x4D\x0A\x54\x79\x70\x65\x4C\x69\x62\x0D\x0A\x1A' +
    b'\x01\x02\x00\x02\x00\x00\x00\x7B\x00\x00\x00\x24\x00\x00\x00\x5C' +
    b'\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
    b'\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x5F' +
    b'\x70\xDA\x76\x51\x9C\x48\x58\xB7\x1E\xE3\xC9\x23\x33\xE2\xD6\x00' +
    b'\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x0D\x00\x66\x6F\x6F\x00' +
    b'\x62\x61\x72\x00\x62\x61\x72\x00\x00\x00\x00\x01\x00\x00\x00\x00' +
    b'\x09\x01\x80\x92\x00\x01\x80\x06\x00\x00\x00'
)
# Compiled typelib for the following IDL:
#     [uuid(3271bebc-927e-4bef-935e-44e0aaf3c1e5)]
#     interface foo {
#         void foo();
#     };
foo_xpt = GeneratedFile(
    b'\x58\x50\x43\x4F\x4D\x0A\x54\x79\x70\x65\x4C\x69\x62\x0D\x0A\x1A' +
    b'\x01\x02\x00\x01\x00\x00\x00\x57\x00\x00\x00\x24\x00\x00\x00\x40' +
    b'\x80\x00\x00\x32\x71\xBE\xBC\x92\x7E\x4B\xEF\x93\x5E\x44\xE0\xAA' +
    b'\xF3\xC1\xE5\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x09\x00' +
    b'\x66\x6F\x6F\x00\x66\x6F\x6F\x00\x00\x00\x00\x01\x00\x00\x00\x00' +
    b'\x05\x00\x80\x06\x00\x00\x00'
)
# Compiled typelib for the following IDL (same interface name as foo_xpt,
# different uuid):
#     [uuid(7057f2aa-fdc2-4559-abde-08d939f7e80d)]
#     interface foo {
#         void foo();
#     };
foo2_xpt = GeneratedFile(
    b'\x58\x50\x43\x4F\x4D\x0A\x54\x79\x70\x65\x4C\x69\x62\x0D\x0A\x1A' +
    b'\x01\x02\x00\x01\x00\x00\x00\x57\x00\x00\x00\x24\x00\x00\x00\x40' +
    b'\x80\x00\x00\x70\x57\xF2\xAA\xFD\xC2\x45\x59\xAB\xDE\x08\xD9\x39' +
    b'\xF7\xE8\x0D\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x09\x00' +
    b'\x66\x6F\x6F\x00\x66\x6F\x6F\x00\x00\x00\x00\x01\x00\x00\x00\x00' +
    b'\x05\x00\x80\x06\x00\x00\x00'
)
def read_interfaces(file):
    '''Return a dict mapping interface names to the interfaces found in the
    given typelib file.'''
    interfaces = {}
    for iface in Typelib.read(file).interfaces:
        interfaces[iface.name] = iface
    return interfaces
class TestXPTFile(TestWithTmpDir):
    '''Tests for mozpack.files.XPTFile typelib linking.'''
    def test_xpt_file(self):
        x = XPTFile()
        x.add(foo_xpt)
        x.add(bar_xpt)
        x.copy(self.tmppath('interfaces.xpt'))
        foo = read_interfaces(foo_xpt.open())
        foo2 = read_interfaces(foo2_xpt.open())
        bar = read_interfaces(bar_xpt.open())
        linked = read_interfaces(self.tmppath('interfaces.xpt'))
        self.assertEqual(foo['foo'], linked['foo'])
        self.assertEqual(bar['bar'], linked['bar'])
        # After removing foo_xpt, the 'foo' entry in the linked result comes
        # from bar_xpt (which references foo).
        x.remove(foo_xpt)
        x.copy(self.tmppath('interfaces2.xpt'))
        linked = read_interfaces(self.tmppath('interfaces2.xpt'))
        self.assertEqual(bar['foo'], linked['foo'])
        self.assertEqual(bar['bar'], linked['bar'])
        # Re-adding foo_xpt and copying over the previous identical result
        # must not write (DestNoWrite raises on write).
        x.add(foo_xpt)
        x.copy(DestNoWrite(self.tmppath('interfaces.xpt')))
        linked = read_interfaces(self.tmppath('interfaces.xpt'))
        self.assertEqual(foo['foo'], linked['foo'])
        self.assertEqual(bar['bar'], linked['bar'])
        x = XPTFile()
        x.add(foo2_xpt)
        x.add(bar_xpt)
        x.copy(self.tmppath('interfaces.xpt'))
        linked = read_interfaces(self.tmppath('interfaces.xpt'))
        self.assertEqual(foo2['foo'], linked['foo'])
        self.assertEqual(bar['bar'], linked['bar'])
        # Two different interfaces with the same name cannot be linked
        # together.
        x = XPTFile()
        x.add(foo_xpt)
        x.add(foo2_xpt)
        x.add(bar_xpt)
        from xpt import DataError
        self.assertRaises(DataError, x.copy, self.tmppath('interfaces.xpt'))
class TestMinifiedProperties(TestWithTmpDir):
    '''Tests for mozpack.files.MinifiedProperties.'''
    def test_minified_properties(self):
        # Comment lines are stripped; other lines (including empty ones)
        # are kept.
        propLines = [
            '# Comments are removed',
            'foo = bar',
            '',
            '# Another comment',
        ]
        prop = GeneratedFile('\n'.join(propLines))
        self.assertEqual(MinifiedProperties(prop).open().readlines(),
                         ['foo = bar\n', '\n'])
        open(self.tmppath('prop'), 'wb').write('\n'.join(propLines))
        MinifiedProperties(File(self.tmppath('prop'))) \
            .copy(self.tmppath('prop2'))
        self.assertEqual(open(self.tmppath('prop2')).readlines(),
                         ['foo = bar\n', '\n'])
class MatchTestTemplate(object):
    '''
    Template for tests of pattern matching over a file hierarchy.
    Subclasses provide add() to register a path and do_check() to verify a
    pattern against the expected list of matches.
    '''
    def do_match_test(self):
        self.add('bar')
        self.add('foo/bar')
        self.add('foo/baz')
        self.add('foo/qux/1')
        self.add('foo/qux/bar')
        self.add('foo/qux/2/test')
        self.add('foo/qux/2/test2')
        # Empty pattern and '*' match everything; a directory name matches
        # everything under it; '*' matches within one path component while
        # '**' spans components.
        self.do_check('', [
            'bar', 'foo/bar', 'foo/baz', 'foo/qux/1', 'foo/qux/bar',
            'foo/qux/2/test', 'foo/qux/2/test2'
        ])
        self.do_check('*', [
            'bar', 'foo/bar', 'foo/baz', 'foo/qux/1', 'foo/qux/bar',
            'foo/qux/2/test', 'foo/qux/2/test2'
        ])
        self.do_check('foo/qux', [
            'foo/qux/1', 'foo/qux/bar', 'foo/qux/2/test', 'foo/qux/2/test2'
        ])
        self.do_check('foo/b*', ['foo/bar', 'foo/baz'])
        self.do_check('baz', [])
        self.do_check('foo/foo', [])
        self.do_check('foo/*ar', ['foo/bar'])
        self.do_check('*ar', ['bar'])
        self.do_check('*/bar', ['foo/bar'])
        self.do_check('foo/*ux', [
            'foo/qux/1', 'foo/qux/bar', 'foo/qux/2/test', 'foo/qux/2/test2'
        ])
        self.do_check('foo/q*ux', [
            'foo/qux/1', 'foo/qux/bar', 'foo/qux/2/test', 'foo/qux/2/test2'
        ])
        self.do_check('foo/*/2/test*', ['foo/qux/2/test', 'foo/qux/2/test2'])
        self.do_check('**/bar', ['bar', 'foo/bar', 'foo/qux/bar'])
        self.do_check('foo/**/test', ['foo/qux/2/test'])
        self.do_check('foo/**', [
            'foo/bar', 'foo/baz', 'foo/qux/1', 'foo/qux/bar',
            'foo/qux/2/test', 'foo/qux/2/test2'
        ])
        self.do_check('**/2/test*', ['foo/qux/2/test', 'foo/qux/2/test2'])
        self.do_check('**/foo', [
            'foo/bar', 'foo/baz', 'foo/qux/1', 'foo/qux/bar',
            'foo/qux/2/test', 'foo/qux/2/test2'
        ])
        self.do_check('**/barbaz', [])
        self.do_check('f**/bar', ['foo/bar'])
class TestFileFinder(MatchTestTemplate, TestWithTmpDir):
    '''Tests for mozpack.files.FileFinder, using real files on disk.'''
    def add(self, path):
        # Create a real file whose content is its own path.
        ensure_parent_dir(self.tmppath(path))
        open(self.tmppath(path), 'wb').write(path)
    def do_check(self, pattern, result):
        if result:
            self.assertTrue(self.finder.contains(pattern))
        else:
            self.assertFalse(self.finder.contains(pattern))
        self.assertEqual(sorted(list(f for f, c in self.finder.find(pattern))),
                         sorted(result))
    def test_file_finder(self):
        self.finder = FileFinder(self.tmpdir)
        self.do_match_test()
        # Dotfiles are only found when matched explicitly, never by
        # wildcard patterns.
        self.add('foo/.foo')
        self.add('foo/.bar/foo')
        self.assertTrue(self.finder.contains('foo/.foo'))
        self.assertTrue(self.finder.contains('foo/.bar'))
        self.assertTrue('foo/.foo' in [f for f, c in
                                       self.finder.find('foo/.foo')])
        self.assertTrue('foo/.bar/foo' in [f for f, c in
                                           self.finder.find('foo/.bar')])
        self.assertEqual(sorted([f for f, c in self.finder.find('foo/.*')]),
                         ['foo/.bar/foo', 'foo/.foo'])
        for pattern in ['foo', '**', '**/*', '**/foo', 'foo/*']:
            self.assertFalse('foo/.foo' in [f for f, c in
                                            self.finder.find(pattern)])
            self.assertFalse('foo/.bar/foo' in [f for f, c in
                                                self.finder.find(pattern)])
            self.assertEqual(sorted([f for f, c in self.finder.find(pattern)]),
                             sorted([f for f, c in self.finder
                                     if mozpack.path.match(f, pattern)]))
# Allow running this test module directly.
if __name__ == '__main__':
    mozunit.main()

View File

@ -0,0 +1,259 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from mozpack.mozjar import (
JarReaderError,
JarWriterError,
JarStruct,
JarReader,
JarWriter,
Deflater,
OrderedDict,
)
from mozpack.test.test_files import MockDest
import unittest
import mozunit
class TestJarStruct(unittest.TestCase):
    '''Tests for mozpack.mozjar.JarStruct serialization and parsing.'''
    class Foo(JarStruct):
        # Sample structure: a magic number, three fixed-size fields, and
        # two variable-length strings sized by the 'length' and 'length2'
        # fields.
        MAGIC = 0x01020304
        STRUCT = OrderedDict([
            ('foo', 'uint32'),
            ('bar', 'uint16'),
            ('qux', 'uint16'),
            ('length', 'uint16'),
            ('length2', 'uint16'),
            ('string', 'length'),
            ('string2', 'length2'),
        ])
    def test_jar_struct(self):
        # A freshly-created struct has zeroed fields and empty strings;
        # the size fields are not directly accessible.
        foo = TestJarStruct.Foo()
        self.assertEqual(foo.signature, TestJarStruct.Foo.MAGIC)
        self.assertEqual(foo['foo'], 0)
        self.assertEqual(foo['bar'], 0)
        self.assertEqual(foo['qux'], 0)
        self.assertFalse('length' in foo)
        self.assertFalse('length2' in foo)
        self.assertEqual(foo['string'], '')
        self.assertEqual(foo['string2'], '')
        self.assertEqual(foo.size, 16)
        foo['foo'] = 0x42434445
        foo['bar'] = 0xabcd
        foo['qux'] = 0xef01
        foo['string'] = 'abcde'
        foo['string2'] = 'Arbitrarily long string'
        # Fields are serialized little-endian, followed by the raw strings.
        serialized = b'\x04\x03\x02\x01\x45\x44\x43\x42\xcd\xab\x01\xef' + \
            b'\x05\x00\x17\x00abcdeArbitrarily long string'
        self.assertEqual(foo.size, len(serialized))
        foo_serialized = foo.serialize()
        self.assertEqual(foo_serialized, serialized)
    def do_test_read_jar_struct(self, data):
        # Parsing must start exactly where the magic number is (offset 1
        # in the test data); starting earlier or later raises.
        self.assertRaises(JarReaderError, TestJarStruct.Foo, data)
        self.assertRaises(JarReaderError, TestJarStruct.Foo, data[2:])
        foo = TestJarStruct.Foo(data[1:])
        self.assertEqual(foo['foo'], 0x45444342)
        self.assertEqual(foo['bar'], 0xcdab)
        self.assertEqual(foo['qux'], 0x01ef)
        self.assertFalse('length' in foo)
        self.assertFalse('length2' in foo)
        self.assertEqual(foo['string'], '012345')
        self.assertEqual(foo['string2'], '67')
    def test_read_jar_struct(self):
        data = b'\x00\x04\x03\x02\x01\x42\x43\x44\x45\xab\xcd\xef' + \
            b'\x01\x06\x00\x02\x0001234567890'
        self.do_test_read_jar_struct(data)
    def test_read_jar_struct_memoryview(self):
        # Same as above, but reading from a memoryview.
        data = b'\x00\x04\x03\x02\x01\x42\x43\x44\x45\xab\xcd\xef' + \
            b'\x01\x06\x00\x02\x0001234567890'
        self.do_test_read_jar_struct(memoryview(data))
class TestDeflater(unittest.TestCase):
    '''Tests for mozpack.mozjar.Deflater.'''
    def wrap(self, data):
        # Subclasses override this to feed other buffer types to the
        # deflater.
        return data
    def test_deflater_no_compress(self):
        deflater = Deflater(False)
        deflater.write(self.wrap('abc'))
        self.assertFalse(deflater.compressed)
        self.assertEqual(deflater.uncompressed_size, 3)
        self.assertEqual(deflater.compressed_size, deflater.uncompressed_size)
        self.assertEqual(deflater.compressed_data, 'abc')
        self.assertEqual(deflater.crc32, 0x352441c2)
    def test_deflater_compress_no_gain(self):
        # Compression requested but not beneficial: the data is stored
        # uncompressed.
        deflater = Deflater(True)
        deflater.write(self.wrap('abc'))
        self.assertFalse(deflater.compressed)
        self.assertEqual(deflater.uncompressed_size, 3)
        self.assertEqual(deflater.compressed_size, deflater.uncompressed_size)
        self.assertEqual(deflater.compressed_data, 'abc')
        self.assertEqual(deflater.crc32, 0x352441c2)
    def test_deflater_compress(self):
        deflater = Deflater(True)
        deflater.write(self.wrap('aaaaaaaaaaaaanopqrstuvwxyz'))
        self.assertTrue(deflater.compressed)
        self.assertEqual(deflater.uncompressed_size, 26)
        self.assertNotEqual(deflater.compressed_size,
                            deflater.uncompressed_size)
        self.assertEqual(deflater.crc32, 0xd46b97ed)
        # The CRC is the same as when not compressed
        deflater = Deflater(False)
        self.assertFalse(deflater.compressed)
        deflater.write(self.wrap('aaaaaaaaaaaaanopqrstuvwxyz'))
        self.assertEqual(deflater.crc32, 0xd46b97ed)
class TestDeflaterMemoryView(TestDeflater):
    '''Run the TestDeflater tests with memoryview inputs.'''
    def wrap(self, data):
        return memoryview(data)
class TestJar(unittest.TestCase):
    '''
    Tests for JarWriter/JarReader round-trips, with and without per-entry
    compression. The `optimize` class attribute is forwarded to JarWriter so
    that TestOptimizeJar can run the same tests on optimized jars.
    '''
    optimize = False

    def test_jar(self):
        s = MockDest()
        with JarWriter(fileobj=s, optimize=self.optimize) as jar:
            jar.add('foo', 'foo')
            # Adding the same path twice is an error.
            self.assertRaises(JarWriterError, jar.add, 'foo', 'bar')
            jar.add('bar', 'aaaaaaaaaaaaanopqrstuvwxyz')
            jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz', False)
        files = [j for j in JarReader(fileobj=s)]
        self.assertEqual(files[0].filename, 'foo')
        self.assertFalse(files[0].compressed)
        self.assertEqual(files[0].read(), 'foo')
        self.assertEqual(files[1].filename, 'bar')
        self.assertTrue(files[1].compressed)
        self.assertEqual(files[1].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')
        self.assertEqual(files[2].filename, 'baz/qux')
        self.assertFalse(files[2].compressed)
        self.assertEqual(files[2].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')
        # Same test, with compression disabled by default and enabled
        # explicitly for one entry.
        s = MockDest()
        with JarWriter(fileobj=s, compress=False,
                       optimize=self.optimize) as jar:
            jar.add('bar', 'aaaaaaaaaaaaanopqrstuvwxyz')
            jar.add('foo', 'foo')
            jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz', True)
        jar = JarReader(fileobj=s)
        files = [j for j in jar]
        self.assertEqual(files[0].filename, 'bar')
        self.assertFalse(files[0].compressed)
        self.assertEqual(files[0].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')
        self.assertEqual(files[1].filename, 'foo')
        self.assertFalse(files[1].compressed)
        self.assertEqual(files[1].read(), 'foo')
        self.assertEqual(files[2].filename, 'baz/qux')
        self.assertTrue(files[2].compressed)
        self.assertEqual(files[2].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')
        self.assertTrue('bar' in jar)
        self.assertTrue('foo' in jar)
        self.assertFalse('baz' in jar)
        self.assertTrue('baz/qux' in jar)
        # Fixed: the original used self.assertTrue(jar['bar'], files[1]),
        # which passes the second argument as the assertion *message* and so
        # always succeeded (with mismatched indices besides). Check the
        # lookups actually return the matching entries.
        self.assertEqual(jar['bar'].filename, files[0].filename)
        self.assertEqual(jar['foo'].filename, files[1].filename)
        self.assertEqual(jar['baz/qux'].filename, files[2].filename)
        # A fresh reader over the same buffer sees the same entries.
        s.seek(0)
        jar = JarReader(fileobj=s)
        self.assertTrue('bar' in jar)
        self.assertTrue('foo' in jar)
        self.assertFalse('baz' in jar)
        self.assertTrue('baz/qux' in jar)
        files[0].seek(0)
        self.assertEqual(jar['bar'].filename, files[0].filename)
        self.assertEqual(jar['bar'].compressed, files[0].compressed)
        self.assertEqual(jar['bar'].read(), files[0].read())
        files[1].seek(0)
        self.assertEqual(jar['foo'].filename, files[1].filename)
        self.assertEqual(jar['foo'].compressed, files[1].compressed)
        self.assertEqual(jar['foo'].read(), files[1].read())
        files[2].seek(0)
        self.assertEqual(jar['baz/qux'].filename, files[2].filename)
        self.assertEqual(jar['baz/qux'].compressed, files[2].compressed)
        self.assertEqual(jar['baz/qux'].read(), files[2].read())

    def test_rejar(self):
        '''
        Entries copied from a JarReader into a new JarWriter keep their
        content; with default settings the compressible entries end up
        compressed in the new jar regardless of how they were stored.
        '''
        s = MockDest()
        with JarWriter(fileobj=s, optimize=self.optimize) as jar:
            jar.add('foo', 'foo')
            jar.add('bar', 'aaaaaaaaaaaaanopqrstuvwxyz')
            jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz', False)
        new = MockDest()
        with JarWriter(fileobj=new, optimize=self.optimize) as jar:
            for j in JarReader(fileobj=s):
                jar.add(j.filename, j)
        jar = JarReader(fileobj=new)
        files = [j for j in jar]
        self.assertEqual(files[0].filename, 'foo')
        self.assertFalse(files[0].compressed)
        self.assertEqual(files[0].read(), 'foo')
        self.assertEqual(files[1].filename, 'bar')
        self.assertTrue(files[1].compressed)
        self.assertEqual(files[1].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')
        self.assertEqual(files[2].filename, 'baz/qux')
        self.assertTrue(files[2].compressed)
        self.assertEqual(files[2].read(), 'aaaaaaaaaaaaanopqrstuvwxyz')
class TestOptimizeJar(TestJar):
    '''Run the TestJar tests with jar optimization enabled.'''
    optimize = True
class TestPreload(unittest.TestCase):
    '''Tests for JarWriter.preload and JarReader.last_preloaded.'''
    def test_preload(self):
        # Without a preload list, last_preloaded is None.
        s = MockDest()
        with JarWriter(fileobj=s) as jar:
            jar.add('foo', 'foo')
            jar.add('bar', 'abcdefghijklmnopqrstuvwxyz')
            jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz')
        jar = JarReader(fileobj=s)
        self.assertEqual(jar.last_preloaded, None)
        with JarWriter(fileobj=s) as jar:
            jar.add('foo', 'foo')
            jar.add('bar', 'abcdefghijklmnopqrstuvwxyz')
            jar.add('baz/qux', 'aaaaaaaaaaaaanopqrstuvwxyz')
            jar.preload(['baz/qux', 'bar'])
        jar = JarReader(fileobj=s)
        self.assertEqual(jar.last_preloaded, 'bar')
        # Preloaded entries come first, in the order given to preload().
        files = [j for j in jar]
        self.assertEqual(files[0].filename, 'baz/qux')
        self.assertEqual(files[1].filename, 'bar')
        self.assertEqual(files[2].filename, 'foo')
# Allow running this test module directly.
if __name__ == '__main__':
    mozunit.main()

View File

@ -0,0 +1,256 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import unittest
import mozunit
import os
from mozpack.packager import (
preprocess_manifest,
SimplePackager,
SimpleManifestSink,
CallDeque,
)
from mozpack.files import GeneratedFile
from mozpack.chrome.manifest import (
ManifestResource,
ManifestContent,
)
from mozunit import MockedOpen
from Preprocessor import Preprocessor
from mozpack.errors import (
errors,
ErrorMessage,
)
import mozpack.path
# Sample package manifest exercising plain entries, sections, removals
# (-path), comments (;) and preprocessor directives (#ifdef, @var@).
MANIFEST = '''
bar/*
[foo]
foo/*
-foo/bar
chrome.manifest
; comment
#ifdef baz
[baz]
baz@SUFFIX@
#endif
'''
class TestPreprocessManifest(unittest.TestCase):
    '''Tests for mozpack.packager.preprocess_manifest.'''
    MANIFEST_PATH = os.path.join(os.path.abspath(os.curdir), 'manifest')
    # Expected (context, action, section, path) tuples recorded by the mock
    # sink when preprocessing MANIFEST with no extra defines.
    EXPECTED_LOG = [
        ((MANIFEST_PATH, 2), 'add', '', 'bar/*'),
        ((MANIFEST_PATH, 4), 'add', 'foo', 'foo/*'),
        ((MANIFEST_PATH, 5), 'remove', 'foo', 'foo/bar'),
        ((MANIFEST_PATH, 6), 'add', 'foo', 'chrome.manifest')
    ]
    def setUp(self):
        class MockSink(object):
            # Records add/remove calls with the current error context.
            def __init__(self):
                self.log = []
            def add(self, section, path):
                self._log(errors.get_context(), 'add', section, path)
            def remove(self, section, path):
                self._log(errors.get_context(), 'remove', section, path)
            def _log(self, *args):
                self.log.append(args)
        self.sink = MockSink()
    def test_preprocess_manifest(self):
        with MockedOpen({'manifest': MANIFEST}):
            preprocess_manifest(self.sink, 'manifest')
        self.assertEqual(self.sink.log, self.EXPECTED_LOG)
    def test_preprocess_manifest_missing_define(self):
        # Defining baz (without SUFFIX) reaches the baz@SUFFIX@ line, which
        # raises since SUFFIX is undefined.
        with MockedOpen({'manifest': MANIFEST}):
            self.assertRaises(
                Preprocessor.Error,
                preprocess_manifest,
                self.sink,
                'manifest',
                {'baz': 1}
            )
    def test_preprocess_manifest_defines(self):
        with MockedOpen({'manifest': MANIFEST}):
            preprocess_manifest(self.sink, 'manifest',
                                {'baz': 1, 'SUFFIX': '.exe'})
        self.assertEqual(self.sink.log, self.EXPECTED_LOG +
                         [((self.MANIFEST_PATH, 10), 'add', 'baz', 'baz.exe')])
class MockFormatter(object):
    '''
    Formatter stub recording every call, together with the error context
    active at the time of the call, in self.log.
    '''
    def __init__(self):
        self.log = []

    def _log(self, *args):
        self.log.append(args)

    def add_base(self, *args):
        self._log(errors.get_context(), 'add_base', *args)

    def add_manifest(self, *args):
        self._log(errors.get_context(), 'add_manifest', *args)

    def add_interfaces(self, *args):
        self._log(errors.get_context(), 'add_interfaces', *args)

    def add(self, *args):
        self._log(errors.get_context(), 'add', *args)
class TestSimplePackager(unittest.TestCase):
    '''Tests for mozpack.packager.SimplePackager.'''
    def test_simple_packager(self):
        class GeneratedFileWithPath(GeneratedFile):
            # GeneratedFile carrying a path attribute, so the packager can
            # use it as the context for entries it contains.
            def __init__(self, path, content):
                GeneratedFile.__init__(self, content)
                self.path = path
        formatter = MockFormatter()
        packager = SimplePackager(formatter)
        curdir = os.path.abspath(os.curdir)
        file = GeneratedFileWithPath(os.path.join(curdir, 'foo',
                                                  'bar.manifest'),
                                     'resource bar bar/\ncontent bar bar/')
        with errors.context('manifest', 1):
            packager.add('foo/bar.manifest', file)
        file = GeneratedFileWithPath(os.path.join(curdir, 'foo',
                                                  'baz.manifest'),
                                     'resource baz baz/')
        with errors.context('manifest', 2):
            packager.add('bar/baz.manifest', file)
        with errors.context('manifest', 3):
            packager.add('qux/qux.manifest',
                         GeneratedFile('resource qux qux/'))
        bar_xpt = GeneratedFile('bar.xpt')
        qux_xpt = GeneratedFile('qux.xpt')
        foo_html = GeneratedFile('foo_html')
        bar_html = GeneratedFile('bar_html')
        with errors.context('manifest', 4):
            packager.add('foo/bar.xpt', bar_xpt)
        with errors.context('manifest', 5):
            packager.add('foo/bar/foo.html', foo_html)
            packager.add('foo/bar/bar.html', bar_html)
        file = GeneratedFileWithPath(os.path.join(curdir, 'foo.manifest'),
                                     ''.join([
                                         'manifest foo/bar.manifest\n',
                                         'manifest bar/baz.manifest\n',
                                     ]))
        with errors.context('manifest', 6):
            packager.add('foo.manifest', file)
        with errors.context('manifest', 7):
            packager.add('foo/qux.xpt', qux_xpt)
        # Nothing is forwarded to the formatter until close().
        self.assertEqual(formatter.log, [])
        with errors.context('dummy', 1):
            packager.close()
        self.maxDiff = None
        self.assertEqual(formatter.log, [
            (('dummy', 1), 'add_base', 'qux'),
            ((os.path.join(curdir, 'foo', 'bar.manifest'), 1),
             'add_manifest', ManifestResource('foo', 'bar', 'bar/')),
            ((os.path.join(curdir, 'foo', 'bar.manifest'), 2),
             'add_manifest', ManifestContent('foo', 'bar', 'bar/')),
            (('bar/baz.manifest', 1),
             'add_manifest', ManifestResource('bar', 'baz', 'baz/')),
            (('qux/qux.manifest', 1),
             'add_manifest', ManifestResource('qux', 'qux', 'qux/')),
            (('manifest', 4), 'add_interfaces', 'foo/bar.xpt', bar_xpt),
            (('manifest', 7), 'add_interfaces', 'foo/qux.xpt', qux_xpt),
            (('manifest', 5), 'add', 'foo/bar/foo.html', foo_html),
            (('manifest', 5), 'add', 'foo/bar/bar.html', bar_html),
        ])
        self.assertEqual(packager.get_bases(), set(['', 'qux']))
class TestSimpleManifestSink(unittest.TestCase):
    '''Tests for mozpack.packager.SimpleManifestSink.'''
    def test_simple_manifest_parser(self):
        class MockFinder(object):
            # Minimal finder yielding matches from a fixed path -> file
            # mapping, recording the patterns it is asked for.
            def __init__(self, files):
                self.files = files
                self.log = []
            def find(self, path):
                self.log.append(path)
                for f in sorted(self.files):
                    if mozpack.path.match(f, path):
                        yield f, self.files[f]
        formatter = MockFormatter()
        foobar = GeneratedFile('foobar')
        foobaz = GeneratedFile('foobaz')
        fooqux = GeneratedFile('fooqux')
        finder = MockFinder({
            'bin/foo/bar': foobar,
            'bin/foo/baz': foobaz,
            'bin/foo/qux': fooqux,
            'bin/foo/chrome.manifest': GeneratedFile('resource foo foo/'),
            'bin/chrome.manifest':
            GeneratedFile('manifest foo/chrome.manifest'),
        })
        parser = SimpleManifestSink(finder, formatter)
        parser.add('section0', 'bin/foo/b*')
        parser.add('section1', 'bin/foo/qux')
        parser.add('section1', 'bin/foo/chrome.manifest')
        # 'bin/bar' matches no file in the finder, which is an error.
        self.assertRaises(ErrorMessage, parser.add, 'section1', 'bin/bar')
        # Nothing is forwarded to the formatter until close().
        self.assertEqual(formatter.log, [])
        parser.close()
        self.assertEqual(formatter.log, [
            (('foo/chrome.manifest', 1),
             'add_manifest', ManifestResource('foo', 'foo', 'foo/')),
            (None, 'add', 'foo/bar', foobar),
            (None, 'add', 'foo/baz', foobaz),
            (None, 'add', 'foo/qux', fooqux),
        ])
        self.assertEqual(finder.log, [
            'bin/foo/b*',
            'bin/foo/qux',
            'bin/foo/chrome.manifest',
            'bin/bar',
            'bin/**/chrome.manifest'
        ])
class TestCallDeque(unittest.TestCase):
    '''Tests for mozpack.packager.CallDeque.'''
    def test_call_deque(self):
        class Logger(object):
            def __init__(self):
                self._log = []
            def log(self, str):
                self._log.append(str)
            @staticmethod
            def staticlog(logger, str):
                logger.log(str)
        def do_log(logger, str):
            logger.log(str)
        logger = Logger()
        d = CallDeque()
        # Bound methods, static methods and plain functions can all be
        # queued with their arguments.
        d.append(logger.log, 'foo')
        d.append(logger.log, 'bar')
        d.append(logger.staticlog, logger, 'baz')
        d.append(do_log, logger, 'qux')
        # Nothing runs until execute(); then calls run in FIFO order.
        self.assertEqual(logger._log, [])
        d.execute()
        self.assertEqual(logger._log, ['foo', 'bar', 'baz', 'qux'])
# Allow running this test module directly.
if __name__ == '__main__':
    mozunit.main()

View File

@ -0,0 +1,238 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import mozunit
from mozpack.packager.formats import (
FlatFormatter,
JarFormatter,
OmniJarFormatter,
)
from mozpack.copier import FileRegistry
from mozpack.files import GeneratedFile
from mozpack.chrome.manifest import (
ManifestContent,
ManifestResource,
ManifestBinaryComponent,
)
from mozpack.test.test_files import (
TestWithTmpDir,
foo_xpt,
bar_xpt,
read_interfaces,
)
class TestFlatFormatter(TestWithTmpDir):
    # Tests for FlatFormatter: files land flat on disk, and chrome manifest
    # entries are written to per-directory *.manifest files chained from a
    # root chrome.manifest.
    def test_flat_formatter(self):
        registry = FileRegistry()
        formatter = FlatFormatter(registry)
        formatter.add_base('app')
        formatter.add('f/oo/bar', GeneratedFile('foobar'))
        formatter.add('f/oo/baz', GeneratedFile('foobaz'))
        formatter.add('f/oo/qux', GeneratedFile('fooqux'))
        formatter.add_manifest(ManifestContent('f/oo', 'bar', 'bar'))
        formatter.add_manifest(ManifestContent('f/oo', 'qux', 'qux'))
        # Adding manifest entries creates the chained manifest files in the
        # registry alongside the data files.
        self.assertEqual(registry.paths(),
                         ['f/oo/bar', 'f/oo/baz', 'f/oo/qux',
                          'chrome.manifest', 'f/f.manifest',
                          'f/oo/oo.manifest'])
        self.assertEqual(registry['chrome.manifest'].open().read(),
                         'manifest f/f.manifest\n')
        self.assertEqual(registry['f/f.manifest'].open().read(),
                         'manifest oo/oo.manifest\n')
        self.assertEqual(registry['f/oo/oo.manifest'].open().read(), ''.join([
            'content bar bar\n',
            'content qux qux\n',
        ]))
        # Interfaces are linked into a single interfaces.xpt, referenced by a
        # components.manifest hooked into the root chrome.manifest.
        formatter.add_interfaces('components/foo.xpt', foo_xpt)
        formatter.add_interfaces('components/bar.xpt', bar_xpt)
        self.assertEqual(registry.paths(),
                         ['f/oo/bar', 'f/oo/baz', 'f/oo/qux',
                          'chrome.manifest', 'f/f.manifest',
                          'f/oo/oo.manifest', 'components/components.manifest',
                          'components/interfaces.xpt'])
        self.assertEqual(registry['chrome.manifest'].open().read(), ''.join([
            'manifest f/f.manifest\n',
            'manifest components/components.manifest\n',
        ]))
        self.assertEqual(
            registry['components/components.manifest'].open().read(),
            'interfaces interfaces.xpt\n'
        )
        # The linked interfaces.xpt must still contain both input interfaces.
        registry['components/interfaces.xpt'] \
            .copy(self.tmppath('interfaces.xpt'))
        linked = read_interfaces(self.tmppath('interfaces.xpt'))
        foo = read_interfaces(foo_xpt.open())
        bar = read_interfaces(bar_xpt.open())
        self.assertEqual(foo['foo'], linked['foo'])
        self.assertEqual(bar['bar'], linked['bar'])
        # A manifest under the 'app' base goes into app/'s own manifest chain,
        # leaving the root chrome.manifest untouched.
        formatter.add_manifest(ManifestContent('app/chrome', 'content',
                                               'foo/'))
        self.assertEqual(registry['chrome.manifest'].open().read(), ''.join([
            'manifest f/f.manifest\n',
            'manifest components/components.manifest\n',
        ]))
        self.assertEqual(registry['app/chrome.manifest'].open().read(),
                         'manifest chrome/chrome.manifest\n')
        self.assertEqual(registry['app/chrome/chrome.manifest'].open().read(),
                         'content content foo/\n')

    def test_bases(self):
        # _get_base resolves a path to the deepest registered base prefix.
        formatter = FlatFormatter(FileRegistry())
        formatter.add_base('')
        formatter.add_base('browser')
        formatter.add_base('webapprt')
        self.assertEqual(formatter._get_base('platform.ini'), '')
        self.assertEqual(formatter._get_base('browser/application.ini'),
                         'browser')
        self.assertEqual(formatter._get_base('webapprt/webapprt.ini'),
                         'webapprt')
class TestJarFormatter(TestWithTmpDir):
    # Tests for JarFormatter: files under a chrome directory are packed into
    # a jar, and manifest entries are rewritten to jar:...!/ URLs.
    def test_jar_formatter(self):
        registry = FileRegistry()
        formatter = JarFormatter(registry)
        formatter.add_manifest(ManifestContent('f', 'oo', 'oo/'))
        formatter.add_manifest(ManifestContent('f', 'bar', 'oo/bar/'))
        formatter.add('f/oo/bar/baz', GeneratedFile('foobarbaz'))
        formatter.add('f/oo/qux', GeneratedFile('fooqux'))
        # Files under f/oo/ end up inside f/oo.jar, not as flat paths.
        self.assertEqual(registry.paths(),
                         ['chrome.manifest', 'f/f.manifest', 'f/oo.jar'])
        self.assertEqual(registry['chrome.manifest'].open().read(),
                         'manifest f/f.manifest\n')
        self.assertEqual(registry['f/f.manifest'].open().read(), ''.join([
            'content oo jar:oo.jar!/\n',
            'content bar jar:oo.jar!/bar/\n',
        ]))
        # contains() answers in terms of the original (pre-jar) paths.
        self.assertTrue(formatter.contains('f/oo/bar/baz'))
        self.assertFalse(formatter.contains('foo/bar/baz'))
        self.assertEqual(registry['f/oo.jar'].paths(), ['bar/baz', 'qux'])
        # Non-content entries (resource) are appended to the manifest as-is.
        formatter.add_manifest(ManifestResource('f', 'foo', 'resource://bar/'))
        self.assertEqual(registry['f/f.manifest'].open().read(), ''.join([
            'content oo jar:oo.jar!/\n',
            'content bar jar:oo.jar!/bar/\n',
            'resource foo resource://bar/\n',
        ]))
class TestOmniJarFormatter(TestWithTmpDir):
    # Tests for OmniJarFormatter: packageable files go inside an omnijar
    # (one per base), while binary components must stay outside of it.
    def test_omnijar_formatter(self):
        registry = FileRegistry()
        formatter = OmniJarFormatter(registry, 'omni.foo')
        formatter.add_base('app')
        formatter.add('chrome/f/oo/bar', GeneratedFile('foobar'))
        formatter.add('chrome/f/oo/baz', GeneratedFile('foobaz'))
        formatter.add('chrome/f/oo/qux', GeneratedFile('fooqux'))
        formatter.add_manifest(ManifestContent('chrome/f/oo', 'bar', 'bar'))
        formatter.add_manifest(ManifestContent('chrome/f/oo', 'qux', 'qux'))
        # Everything so far lives inside the single omni.foo jar.
        self.assertEqual(registry.paths(), ['omni.foo'])
        self.assertEqual(registry['omni.foo'].paths(), [
            'chrome/f/oo/bar',
            'chrome/f/oo/baz',
            'chrome/f/oo/qux',
            'chrome.manifest',
            'chrome/chrome.manifest',
            'chrome/f/f.manifest',
            'chrome/f/oo/oo.manifest',
        ])
        self.assertEqual(registry['omni.foo']['chrome.manifest']
                         .open().read(), 'manifest chrome/chrome.manifest\n')
        self.assertEqual(registry['omni.foo']['chrome/chrome.manifest']
                         .open().read(), 'manifest f/f.manifest\n')
        self.assertEqual(registry['omni.foo']['chrome/f/f.manifest']
                         .open().read(), 'manifest oo/oo.manifest\n')
        self.assertEqual(registry['omni.foo']['chrome/f/oo/oo.manifest']
                         .open().read(), ''.join([
                             'content bar bar\n',
                             'content qux qux\n',
                         ]))
        self.assertTrue(formatter.contains('chrome/f/oo/bar'))
        self.assertFalse(formatter.contains('chrome/foo/bar'))
        # Interfaces are linked inside the omnijar, like FlatFormatter does
        # on disk.
        formatter.add_interfaces('components/foo.xpt', foo_xpt)
        formatter.add_interfaces('components/bar.xpt', bar_xpt)
        self.assertEqual(registry['omni.foo'].paths(), [
            'chrome/f/oo/bar',
            'chrome/f/oo/baz',
            'chrome/f/oo/qux',
            'chrome.manifest',
            'chrome/chrome.manifest',
            'chrome/f/f.manifest',
            'chrome/f/oo/oo.manifest',
            'components/components.manifest',
            'components/interfaces.xpt',
        ])
        self.assertEqual(registry['omni.foo']['chrome.manifest']
                         .open().read(), ''.join([
                             'manifest chrome/chrome.manifest\n',
                             'manifest components/components.manifest\n'
                         ]))
        self.assertEqual(registry['omni.foo']
                         ['components/components.manifest'].open().read(),
                         'interfaces interfaces.xpt\n')
        # The linked xpt inside the omnijar still holds both interfaces.
        registry['omni.foo'][
            'components/interfaces.xpt'].copy(self.tmppath('interfaces.xpt'))
        linked = read_interfaces(self.tmppath('interfaces.xpt'))
        foo = read_interfaces(foo_xpt.open())
        bar = read_interfaces(bar_xpt.open())
        self.assertEqual(foo['foo'], linked['foo'])
        self.assertEqual(bar['bar'], linked['bar'])
        # Files under the 'app' base get their own omnijar: app/omni.foo.
        formatter.add('app/chrome/foo/baz', GeneratedFile('foobaz'))
        formatter.add_manifest(ManifestContent('app/chrome', 'content',
                                               'foo/'))
        self.assertEqual(registry.paths(), ['omni.foo', 'app/omni.foo'])
        self.assertEqual(registry['app/omni.foo'].paths(), [
            'chrome/foo/baz',
            'chrome.manifest',
            'chrome/chrome.manifest',
        ])
        self.assertEqual(registry['app/omni.foo']['chrome.manifest']
                         .open().read(), 'manifest chrome/chrome.manifest\n')
        self.assertEqual(registry['app/omni.foo']['chrome/chrome.manifest']
                         .open().read(), 'content content foo/\n')
        # Binary components cannot live inside an omnijar: the .so and its
        # manifest chain are emitted as plain files next to the jar.
        formatter.add_manifest(ManifestBinaryComponent('components', 'foo.so'))
        formatter.add('components/foo.so', GeneratedFile('foo'))
        self.assertEqual(registry.paths(), [
            'omni.foo', 'app/omni.foo', 'chrome.manifest',
            'components/components.manifest', 'components/foo.so',
        ])
        self.assertEqual(registry['chrome.manifest'].open().read(),
                         'manifest components/components.manifest\n')
        self.assertEqual(registry['components/components.manifest']
                         .open().read(), 'binary-component foo.so\n')
        # Same for a binary component under the 'app' base.
        formatter.add_manifest(ManifestBinaryComponent('app/components',
                                                       'foo.so'))
        formatter.add('app/components/foo.so', GeneratedFile('foo'))
        self.assertEqual(registry.paths(), [
            'omni.foo', 'app/omni.foo', 'chrome.manifest',
            'components/components.manifest', 'components/foo.so',
            'app/chrome.manifest', 'app/components/components.manifest',
            'app/components/foo.so',
        ])
        self.assertEqual(registry['app/chrome.manifest'].open().read(),
                         'manifest components/components.manifest\n')
        self.assertEqual(registry['app/components/components.manifest']
                         .open().read(), 'binary-component foo.so\n')
        # A file at the root of a base (not packageable) also stays outside
        # the omnijar.
        formatter.add('app/foo', GeneratedFile('foo'))
        self.assertEqual(registry.paths(), [
            'omni.foo', 'app/omni.foo', 'chrome.manifest',
            'components/components.manifest', 'components/foo.so',
            'app/chrome.manifest', 'app/components/components.manifest',
            'app/components/foo.so', 'app/foo'
        ])
if __name__ == '__main__':
mozunit.main()

View File

@ -0,0 +1,120 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from mozpack.path import (
relpath,
join,
normpath,
dirname,
commonprefix,
basename,
split,
splitext,
basedir,
match,
rebase,
)
import unittest
import mozunit
import os
class TestPath(unittest.TestCase):
    # Tests for mozpack.path: posix-style ('/'-separated) path helpers that
    # also accept native os.path-joined inputs.
    def test_relpath(self):
        self.assertEqual(relpath('foo', 'foo'), '')
        self.assertEqual(relpath(os.path.join('foo', 'bar'), 'foo/bar'), '')
        self.assertEqual(relpath(os.path.join('foo', 'bar'), 'foo'), 'bar')
        self.assertEqual(relpath(os.path.join('foo', 'bar', 'baz'), 'foo'),
                         'bar/baz')
        self.assertEqual(relpath(os.path.join('foo', 'bar'), 'foo/bar/baz'),
                         '..')
        self.assertEqual(relpath(os.path.join('foo', 'bar'), 'foo/baz'),
                         '../bar')
        # Trailing slashes are tolerated.
        self.assertEqual(relpath('foo/', 'foo'), '')
        self.assertEqual(relpath('foo/bar/', 'foo'), 'bar')

    def test_join(self):
        self.assertEqual(join('foo', 'bar', 'baz'), 'foo/bar/baz')
        # Empty components are skipped.
        self.assertEqual(join('foo', '', 'bar'), 'foo/bar')
        self.assertEqual(join('', 'foo', 'bar'), 'foo/bar')
        # An absolute component resets the result, as with os.path.join.
        self.assertEqual(join('', 'foo', '/bar'), '/bar')

    def test_normpath(self):
        self.assertEqual(normpath(os.path.join('foo', 'bar', 'baz',
                                               '..', 'qux')), 'foo/bar/qux')

    def test_dirname(self):
        self.assertEqual(dirname('foo/bar/baz'), 'foo/bar')
        self.assertEqual(dirname('foo/bar'), 'foo')
        self.assertEqual(dirname('foo'), '')
        self.assertEqual(dirname('foo/bar/'), 'foo/bar')

    def test_commonprefix(self):
        # Unlike os.path.commonprefix, this is component-wise, not char-wise.
        self.assertEqual(commonprefix([os.path.join('foo', 'bar', 'baz'),
                                       'foo/qux', 'foo/baz/qux']), 'foo/')
        self.assertEqual(commonprefix([os.path.join('foo', 'bar', 'baz'),
                                       'foo/qux', 'baz/qux']), '')

    def test_basename(self):
        self.assertEqual(basename('foo/bar/baz'), 'baz')
        self.assertEqual(basename('foo/bar'), 'bar')
        self.assertEqual(basename('foo'), 'foo')
        self.assertEqual(basename('foo/bar/'), '')

    def test_split(self):
        self.assertEqual(split(os.path.join('foo', 'bar', 'baz')),
                         ['foo', 'bar', 'baz'])

    def test_splitext(self):
        self.assertEqual(splitext(os.path.join('foo', 'bar', 'baz.qux')),
                         ('foo/bar/baz', '.qux'))

    def test_basedir(self):
        # basedir picks the longest matching directory from the candidates.
        foobarbaz = os.path.join('foo', 'bar', 'baz')
        self.assertEqual(basedir(foobarbaz, ['foo', 'bar', 'baz']), 'foo')
        self.assertEqual(basedir(foobarbaz, ['foo', 'foo/bar', 'baz']),
                         'foo/bar')
        self.assertEqual(basedir(foobarbaz, ['foo/bar', 'foo', 'baz']),
                         'foo/bar')
        # '' matches anything, but only wins when nothing longer matches.
        self.assertEqual(basedir(foobarbaz, ['foo', 'bar', '']), 'foo')
        self.assertEqual(basedir(foobarbaz, ['bar', 'baz', '']), '')

    def test_match(self):
        # '' and directory prefixes match; '*' stays within one component,
        # '**' spans directory separators.
        self.assertTrue(match('foo', ''))
        self.assertTrue(match('foo/bar/baz.qux', 'foo/bar'))
        self.assertTrue(match('foo/bar/baz.qux', 'foo'))
        self.assertTrue(match('foo', '*'))
        self.assertTrue(match('foo/bar/baz.qux', 'foo/bar/*'))
        self.assertTrue(match('foo/bar/baz.qux', 'foo/bar/*'))
        self.assertTrue(match('foo/bar/baz.qux', 'foo/bar/*'))
        self.assertTrue(match('foo/bar/baz.qux', 'foo/bar/*'))
        self.assertTrue(match('foo/bar/baz.qux', 'foo/*/baz.qux'))
        self.assertTrue(match('foo/bar/baz.qux', '*/bar/baz.qux'))
        self.assertTrue(match('foo/bar/baz.qux', '*/*/baz.qux'))
        self.assertTrue(match('foo/bar/baz.qux', '*/*/*'))
        self.assertTrue(match('foo/bar/baz.qux', 'foo/*/*'))
        self.assertTrue(match('foo/bar/baz.qux', 'foo/*/*.qux'))
        self.assertTrue(match('foo/bar/baz.qux', 'foo/b*/*z.qux'))
        self.assertTrue(match('foo/bar/baz.qux', 'foo/b*r/ba*z.qux'))
        self.assertFalse(match('foo/bar/baz.qux', 'foo/b*z/ba*r.qux'))
        self.assertTrue(match('foo/bar/baz.qux', '**'))
        self.assertTrue(match('foo/bar/baz.qux', '**/baz.qux'))
        self.assertTrue(match('foo/bar/baz.qux', '**/bar/baz.qux'))
        self.assertTrue(match('foo/bar/baz.qux', 'foo/**/baz.qux'))
        self.assertTrue(match('foo/bar/baz.qux', 'foo/**/*.qux'))
        self.assertTrue(match('foo/bar/baz.qux', '**/foo/bar/baz.qux'))
        self.assertTrue(match('foo/bar/baz.qux', 'foo/**/bar/baz.qux'))
        self.assertTrue(match('foo/bar/baz.qux', 'foo/**/bar/*.qux'))
        self.assertTrue(match('foo/bar/baz.qux', 'foo/**/*.qux'))
        self.assertTrue(match('foo/bar/baz.qux', '**/*.qux'))
        self.assertFalse(match('foo/bar/baz.qux', '**.qux'))
        self.assertFalse(match('foo/bar', 'foo/*/bar'))

    def test_rebase(self):
        # rebase re-expresses a path relative to a different anchor.
        self.assertEqual(rebase('foo', 'foo/bar', 'bar/baz'), 'baz')
        self.assertEqual(rebase('foo', 'foo', 'bar/baz'), 'bar/baz')
        self.assertEqual(rebase('foo/bar', 'foo', 'baz'), 'bar/baz')
if __name__ == '__main__':
mozunit.main()

View File

@ -0,0 +1,97 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from mozpack.unify import (
UnifiedFinder,
UnifiedBuildFinder,
)
import mozunit
from mozpack.test.test_files import TestWithTmpDir
from mozpack.copier import ensure_parent_dir
import os
from mozpack.errors import ErrorMessage
class TestUnified(TestWithTmpDir):
    '''Base helper creating fixture files under the "a"/"b" tmp subtrees.'''
    def create_one(self, which, path, content):
        # Write content at tmpdir/which/path, creating parents as needed.
        target = self.tmppath(os.path.join(which, path))
        ensure_parent_dir(target)
        open(target, 'wb').write(content)

    def create_both(self, path, content):
        # Same file, identical content, in both subtrees.
        for side in ('a', 'b'):
            self.create_one(side, path, content)
class TestUnifiedFinder(TestUnified):
    # UnifiedFinder must yield files present and identical in both trees,
    # and error out on files missing from either tree or with differing
    # content (unless covered by the 'sorted' patterns).
    def test_unified_finder(self):
        self.create_both('foo/bar', 'foobar')
        self.create_both('foo/baz', 'foobaz')
        # 'bar' only in a, 'baz' only in b, 'qux' differs between the two.
        self.create_one('a', 'bar', 'bar')
        self.create_one('b', 'baz', 'baz')
        self.create_one('a', 'qux', 'foobar')
        self.create_one('b', 'qux', 'baz')
        # Under 'test', content compares equal after line-sorting.
        self.create_one('a', 'test/foo', 'a\nb\nc\n')
        self.create_one('b', 'test/foo', 'b\nc\na\n')
        self.create_both('test/bar', 'a\nb\nc\n')
        finder = UnifiedFinder(self.tmppath('a'), self.tmppath('b'),
                               sorted=['test'])
        self.assertEqual(sorted([(f, c.open().read())
                                 for f, c in finder.find('foo')]),
                         [('foo/bar', 'foobar'), ('foo/baz', 'foobaz')])
        # find() is a generator; any() forces iteration so errors fire.
        self.assertRaises(ErrorMessage, any, finder.find('bar'))
        self.assertRaises(ErrorMessage, any, finder.find('baz'))
        self.assertRaises(ErrorMessage, any, finder.find('qux'))
        self.assertEqual(sorted([(f, c.open().read())
                                 for f, c in finder.find('test')]),
                         [('test/bar', 'a\nb\nc\n'),
                          ('test/foo', 'a\nb\nc\n')])
class TestUnifiedBuildFinder(TestUnified):
    # UnifiedBuildFinder additionally tolerates reordered *.manifest lines
    # and merges buildconfig.html contents from both trees.
    def test_unified_build_finder(self):
        self.create_both('chrome.manifest', 'a\nb\nc\n')
        # Same manifest lines, different order: must unify without error.
        self.create_one('a', 'chrome/chrome.manifest', 'a\nb\nc\n')
        self.create_one('b', 'chrome/chrome.manifest', 'b\nc\na\n')
        self.create_one('a', 'chrome/browser/foo/buildconfig.html',
                        '\n'.join([
                            '<html>',
                            '<body>',
                            '<h1>about:buildconfig</h1>',
                            '<div>foo</div>',
                            '</body>',
                            '</html>',
                        ]))
        self.create_one('b', 'chrome/browser/foo/buildconfig.html',
                        '\n'.join([
                            '<html>',
                            '<body>',
                            '<h1>about:buildconfig</h1>',
                            '<div>bar</div>',
                            '</body>',
                            '</html>',
                        ]))
        finder = UnifiedBuildFinder(self.tmppath('a'), self.tmppath('b'))
        self.assertEqual(sorted([(f, c.open().read()) for f, c in
                                 finder.find('**/chrome.manifest')]),
                         [('chrome.manifest', 'a\nb\nc\n'),
                          ('chrome/chrome.manifest', 'a\nb\nc\n')])
        # buildconfig.html bodies are concatenated with an <hr> separator,
        # keeping a single <h1> header.
        self.assertEqual(sorted([(f, c.open().read()) for f, c in
                                 finder.find('**/buildconfig.html')]),
                         [('chrome/browser/foo/buildconfig.html', '\n'.join([
                             '<html>',
                             '<body>',
                             '<h1>about:buildconfig</h1>',
                             '<div>foo</div>',
                             '<hr> </hr>',
                             '<div>bar</div>',
                             '</body>',
                             '</html>',
                         ]))])
if __name__ == '__main__':
mozunit.main()

View File

@ -0,0 +1,173 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from mozpack.files import (
FileFinder,
ExecutableFile,
BaseFile,
GeneratedFile,
)
from mozpack.executables import (
MACHO_SIGNATURES,
strip,
)
from mozpack.errors import errors
from tempfile import mkstemp
import mozpack.path
import shutil
import struct
import os
import subprocess
from collections import OrderedDict
def may_unify_binary(file):
    '''
    Return whether the given BaseFile instance is an ExecutableFile that
    may be unified. Only non-fat Mach-O binaries are to be unified.
    '''
    if not isinstance(file, ExecutableFile):
        return False
    # Read the 4-byte magic number; anything shorter can't be Mach-O.
    header = file.open().read(4)
    if len(header) < 4:
        return False
    magic = struct.unpack('>L', header)[0]
    return magic in MACHO_SIGNATURES
class UnifiedExecutableFile(BaseFile):
    '''
    File class for executable and library files that are to be unified with
    'lipo' into a single fat binary.
    '''
    def __init__(self, path1, path2):
        '''
        Initialize a UnifiedExecutableFile with the paths to both non-fat
        Mach-O executables to be unified.
        '''
        self.path1 = path1
        self.path2 = path2

    def copy(self, dest):
        '''
        Create a fat executable at dest: copy both inputs to temporary
        files, strip them, then merge them with 'lipo -create'.
        Raises subprocess.CalledProcessError if lipo fails.
        '''
        assert isinstance(dest, basestring)
        tmpfiles = []
        try:
            for p in [self.path1, self.path2]:
                fd, f = mkstemp()
                os.close(fd)
                tmpfiles.append(f)
                shutil.copy2(p, f)
                strip(f)
            # check_call raises on a non-zero exit status; the previous
            # subprocess.call silently ignored lipo failures, leaving a
            # missing or invalid destination file.
            subprocess.check_call(['lipo', '-create'] + tmpfiles +
                                  ['-output', dest])
        finally:
            # Always remove the stripped temporary copies.
            for f in tmpfiles:
                os.unlink(f)
class UnifiedFinder(FileFinder):
    '''
    Helper to get unified BaseFile instances from two distinct trees on the
    file system.
    '''
    def __init__(self, base1, base2, sorted=[], **kargs):
        '''
        Initialize a UnifiedFinder. base1 and base2 are the base directories
        for the two trees from which files are picked. UnifiedFinder.find()
        will act as FileFinder.find() but will error out when matches can only
        be found in one of the two trees and not the other. It will also error
        out if matches can be found on both ends but their contents are not
        identical.
        The sorted argument gives a list of mozpack.path.match patterns. File
        paths matching one of these patterns will have their contents compared
        with their lines sorted.
        '''
        self._base1 = FileFinder(base1, **kargs)
        self._base2 = FileFinder(base2, **kargs)
        self._sorted = sorted

    def _find(self, path):
        '''
        UnifiedFinder.find() implementation.
        '''
        # Gather all matches from the first tree, keyed by path, preserving
        # the order FileFinder yields them in.
        files1 = OrderedDict()
        for p, f in self._base1.find(path):
            files1[p] = f
        files2 = set()
        for p, f in self._base2.find(path):
            files2.add(p)
            if p in files1:
                if may_unify_binary(files1[p]) and \
                        may_unify_binary(f):
                    # Two non-fat Mach-O binaries: merge them with lipo.
                    yield p, UnifiedExecutableFile(files1[p].path, f.path)
                else:
                    unified = self.unify_file(p, files1[p], f)
                    if unified:
                        yield p, unified
                    else:
                        self._report_difference(p, files1[p], f)
            else:
                # Present in base2 but not base1.
                errors.error('File missing in %s: %s' % (self._base1.base, p))
        # Present in base1 but never seen while scanning base2.
        for p in [p for p in files1 if not p in files2]:
            errors.error('File missing in %s: %s' % (self._base2.base, p))

    def _report_difference(self, path, file1, file2):
        '''
        Report differences between files in both trees.
        '''
        errors.error("Can't unify %s: file differs between %s and %s" %
                     (path, self._base1.base, self._base2.base))
        # For text-ish files, also emit a unified diff to help diagnosis.
        if not isinstance(file1, ExecutableFile) and \
                not isinstance(file2, ExecutableFile):
            from difflib import unified_diff
            import sys
            for line in unified_diff(file1.open().readlines(),
                                     file2.open().readlines(),
                                     os.path.join(self._base1.base, path),
                                     os.path.join(self._base2.base, path)):
                errors.out.write(line)

    def unify_file(self, path, file1, file2):
        '''
        Given two BaseFiles and the path they were found at, check whether
        their content match and return the first BaseFile if they do.
        '''
        content1 = file1.open().readlines()
        content2 = file2.open().readlines()
        if content1 == content2:
            return file1
        # For paths matching a 'sorted' pattern, order differences are OK.
        for pattern in self._sorted:
            if mozpack.path.match(path, pattern):
                if sorted(content1) == sorted(content2):
                    return file1
                break
        # None signals to the caller that the files could not be unified.
        return None
class UnifiedBuildFinder(UnifiedFinder):
    '''
    Specialized UnifiedFinder for Mozilla applications packaging. It allows
    "*.manifest" files to differ in their order, and unifies "buildconfig.html"
    files by merging their content.
    '''
    def __init__(self, base1, base2, **kargs):
        # All manifest files are compared order-insensitively.
        UnifiedFinder.__init__(self, base1, base2,
                               sorted=['**/*.manifest'], **kargs)

    def unify_file(self, path, file1, file2):
        '''
        Unify buildconfig.html contents, or defer to UnifiedFinder.unify_file.
        '''
        if mozpack.path.basename(path) == 'buildconfig.html':
            content1 = file1.open().readlines()
            content2 = file2.open().readlines()
            # Copy everything from the first file up to the end of its <body>,
            # insert a <hr> between the two files and copy the second file's
            # content beginning after its leading <h1>.
            # NOTE(review): .index() raises ValueError if either marker line
            # is absent from a buildconfig.html.
            return GeneratedFile(''.join(
                content1[:content1.index('</body>\n')] +
                ['<hr> </hr>\n'] +
                content2[content2.index('<h1>about:buildconfig</h1>\n') + 1:]
            ))
        return UnifiedFinder.unify_file(self, path, file1, file2)

View File

@ -10,6 +10,6 @@ setup(
name='mozbuild',
description='Mozilla build system functionality.',
license='MPL 2.0',
packages=['mach', 'mozbuild'],
packages=['mach', 'mozbuild', 'mozpack'],
version=VERSION
)

View File

@ -86,8 +86,7 @@ else
endif
$(NSINSTALL) -D $(DIST)/l10n-stage
cd $(DIST)/l10n-stage && \
$(UNMAKE_PACKAGE)
$(MAKE) clobber-zip AB_CD=en-US
$(INNER_UNMAKE_PACKAGE)
unpack: $(STAGEDIST)
@ -107,24 +106,13 @@ endif
endif
repackage-zip: UNPACKAGE="$(ZIP_IN)"
repackage-zip: libs-$(AB_CD)
# Adjust jar logs with the new locale (can't use sed -i because of bug 373784)
mkdir -p $(JARLOG_DIR_AB_CD)
-cp -r $(JARLOG_DIR)/en-US/*.jar.log $(JARLOG_DIR_AB_CD)
-$(PERL) -pi.old -e "s/en-US/$(AB_CD)/g" $(JARLOG_DIR_AB_CD)/*.jar.log
# call a hook for apps to put their uninstall helper.exe into the package
$(UNINSTALLER_PACKAGE_HOOK)
# call a hook for apps to build the stub installer
ifdef MOZ_STUB_INSTALLER
$(STUB_HOOK)
endif
# copy xpi-stage over, but not install.rdf and chrome.manifest,
# those are just for language packs
cd $(DIST)/xpi-stage/locale-$(AB_CD) && \
tar --exclude=install.rdf --exclude=chrome.manifest $(TAR_CREATE_FLAGS) - * | ( cd $(STAGEDIST) && tar -xf - )
mv $(STAGEDIST)/chrome/$(AB_CD).manifest $(STAGEDIST)/chrome/localized.manifest
ifdef MOZ_WEBAPP_RUNTIME
mv $(STAGEDIST)/webapprt/chrome/$(AB_CD).manifest $(STAGEDIST)/webapprt/chrome/localized.manifest
endif
$(PYTHON) $(MOZILLA_DIR)/toolkit/mozapps/installer/l10n-repack.py $(STAGEDIST) $(DIST)/xpi-stage/locale-$(AB_CD)
ifneq (en,$(AB))
ifeq (cocoa,$(MOZ_WIDGET_TOOLKIT))
mv $(_ABS_DIST)/l10n-stage/$(MOZ_PKG_DIR)/$(_APPNAME)/Contents/Resources/en.lproj $(_ABS_DIST)/l10n-stage/$(MOZ_PKG_DIR)/$(_APPNAME)/Contents/Resources/$(AB).lproj
@ -132,7 +120,7 @@ endif
endif
$(NSINSTALL) -D $(DIST)/l10n-stage/$(PKG_PATH)
cd $(DIST)/l10n-stage; \
$(PREPARE_PACKAGE) && $(MAKE_PACKAGE)
$(MAKE_PACKAGE)
ifdef MAKE_COMPLETE_MAR
$(MAKE) -C $(MOZDEPTH)/tools/update-packaging full-update AB_CD=$(AB_CD) \
MOZ_PKG_PRETTYNAMES=$(MOZ_PKG_PRETTYNAMES) \
@ -145,13 +133,6 @@ ifeq (cocoa,$(MOZ_WIDGET_TOOLKIT))
mv $(_ABS_DIST)/l10n-stage/$(MOZ_PKG_DIR)/$(_APPNAME)/Contents/Resources/$(AB).lproj $(_ABS_DIST)/l10n-stage/$(MOZ_PKG_DIR)/$(_APPNAME)/Contents/Resources/en.lproj
endif
endif
ifdef MOZ_OMNIJAR
@(cd $(STAGEDIST) && $(UNPACK_OMNIJAR))
ifdef MOZ_WEBAPP_RUNTIME
@(cd $(STAGEDIST)/webapprt && $(UNPACK_OMNIJAR_WEBAPP_RUNTIME))
endif
endif
$(MAKE) clobber-zip AB_CD=$(AB_CD)
$(NSINSTALL) -D $(DIST)/$(PKG_PATH)
mv -f "$(DIST)/l10n-stage/$(PACKAGE)" "$(ZIP_OUT)"
if test -f "$(DIST)/l10n-stage/$(PACKAGE).asc"; then mv -f "$(DIST)/l10n-stage/$(PACKAGE).asc" "$(ZIP_OUT).asc"; fi

View File

@ -1,647 +0,0 @@
#!perl -w
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
package Packager;
require 5.004;
use strict;
use File::stat;
use Cwd;
use File::Basename;
use File::Copy;
use File::Find;
use File::Path;
use File::stat;
require Exporter;
use vars qw(@ISA @EXPORT);
# Package that generates a jar manifest from an input file
@ISA = qw(Exporter);
@EXPORT = qw(
Copy
);
# initialize variables
my($saved_cwd) = cwd();
my($component) = ""; # current component being copied
my(@components) = (); # list of components to copy
my($components) = ""; # string version of @components
my($altdest) = ""; # alternate file destination
my($line) = ""; # line being processed
my($srcdir) = ""; # root directory being copied from
my($destdir) = ""; # root directory being copied to
my($package) = ""; # file listing files to copy
my($os) = ""; # os type (MSDOS, Unix)
my($lineno) = 0; # line # of package file for error text
my($debug) = 0; # controls amount of debug output
my($dirflag) = 0; # flag: are we copying a directory?
my($help) = 0; # flag: if set, print usage
my($fatal_warnings) = 0; # flag: whether package warnings (missing files or invalid entries) are fatal
my($flat) = 0; # copy everything into the package dir, not into separate
# component dirs
my($delayed_error) = 0; # flag: whether an error was found while reading the manifest but we still
# chose to finish reading it
#
# Copy
#
# Loop over each line in the specified manifest, copying into $destdir
#
sub Copy {
    # Main entry point: read the package manifest and copy each listed
    # file/directory from $srcdir into $destdir. All state is kept in the
    # package-level globals assigned here.
    ($srcdir, $destdir, $package, $os, $flat, $fatal_warnings, $help, $debug, @components) = @_;

    check_arguments();

    if ($os eq "MSDOS") {
        $srcdir =~ s|\\|/|;
        $destdir =~ s|\\|/|;
    }

    open (MANIFEST,"<$package") ||
        die "Error: couldn't open file $package for reading: $!. Exiting...\n";

    LINE: while (<MANIFEST>) {
        $line = "";
        $altdest = "";
        $lineno++;

        s/\\/\//g if ($os eq "MSDOS"); # Convert to posix path
        s/\;.*//; # it's a comment, kill it.
        s/^\s+//; # nuke leading whitespace
        s/\s+$//; # nuke trailing whitespace

        ($debug >= 2) && print "\n";
        ($debug >= 8) && print "line $lineno:$_\n";

        # it's a blank line, skip it.
        /^$/ && do {
            ($debug >= 10) && print "blank line.\n";
            next LINE;
        };

        # it's a new component
        /^\[/ && do {
            ($debug >= 10) && print "component.\n";
            $component = $_;
            do_component();
            next LINE;
        };

        # if we find a file before we have a component and we are in flat mode,
        # copy it - allows for flat only files (installed-chrome.txt)
        if (( $component eq "" ) && ($components eq "" ) && (!$flat)) {
            next LINE;
        }

        # skip line if we're only copying specific components and outside
        # those components
        if (( $component eq "" ) && ($components ne "" )) {
            ($debug >= 10) && print "Not in specifed component. Skipping $_\n";
            next LINE;
        }

        if ($line eq "") {
            $line = $_; # if $line not set, set it.
        }

        if ($os ne "MSDOS") { # hack - need to fix for dos
            $line =~ s|^/||; # strip any leading path delimiter
        }

        # delete the file or directory following the '-'
        /^-/ && do {
            $line =~ s/^-//; # strip leading '-'
            ($debug >= 10) && print "delete: $destdir/$component/$line\n";
            do_delete ("$destdir", "$component", "$line");
            next LINE;
        };

        # file/directory being copied to different target location
        /\,/ && do {
            /.*\,.*\,.*/ &&
                die "Error: multiple commas not allowed ($package, $lineno): $_.\n";
            ($line, $altdest) = split (/\s*\,\s*/, $line, 2);
            $line =~ s|/*$||; # strip any trailing path delimiters
            $altdest =~ s|/*$||; # strip any trailing delimiter
            ($debug >= 10) && print "relocate: $line => $altdest.\n";
        };

        # if it has wildcards, do recursive copy.
        /(?:\*|\?)/ && do {
            ($debug >= 10) && print "wildcard copy.\n";
            do_wildcard ("$srcdir/$line");
            next LINE;
        };

        # if it's a single file, copy it.
        ( -f "$srcdir/$line" ) && do {
            ($debug >= 10) && print "file copy.\n";
            do_copyfile ();
            next LINE;
        };

        # if it's a directory, do recursive copy.
        (-d "$srcdir/$line") && do {
            ($debug >= 10) && print "directory copy.\n";
            do_copydir ("$srcdir/$line");
            next LINE;
        };

        # if we hit this, it's either a file in the package file that is
        # not in the src directory, or it is not a valid entry.
        delayed_die_or_warn("package error or possible missing or unnecessary file: $line ($package, $lineno).");
    } # LINE

    close (MANIFEST);
    chdir ($saved_cwd);

    # Errors collected via delayed_die_or_warn() are only fatal here, after
    # the whole manifest has been processed.
    if ($delayed_error) {
        die "Error: found error(s) while packaging, see above for details.\n"
    }
}
#
# Delete the given file or directory
#
sub do_delete
{
    # Remove a previously-copied file or directory from the destination.
    # Args: destination root, component name, relative file/dir path.
    my ($targetpath) = $_[0];
    my ($targetcomp) = $_[1];
    my ($targetfile) = $_[2];
    # In flat mode there is no per-component subdirectory.
    my ($target) = ($flat) ? "$targetpath/$targetfile" : "$targetpath/$targetcomp/$targetfile";

    ($debug >= 2) && print "do_delete():\n";
    ($debug >= 1) && print "-$targetfile\n";

    if ( -f $target ) {
        (! -w $target ) &&
            die "Error: delete failed: $target not writeable ($package, $component, $lineno). Exiting...\n";
        ($debug >= 4) && print " unlink($target)\n";
        unlink ($target) ||
            die "Error: unlink() failed: $!. Exiting...\n";
    } elsif ( -d $target ) {
        (! -w $target ) &&
            die "Error: delete failed: $target not writeable ($package, $component, $lineno). Exiting...\n";
        ($debug >= 4) && print " rmtree($target)\n";
        rmtree ($target, 0, 0) ||
            die "Error: rmtree() failed: $!. Exiting...\n";
    } else {
        # Missing targets are a warning only: the manifest may list deletions
        # for files that were never packaged.
        warn "Warning: delete failed: $target is not a file or directory ($package, $component, $lineno).\n";
    }
}
#
# Copy an individual file from the srcdir to the destdir.
#
# This is called by both the individual and batch/recursive copy routines,
# using $dirflag to check if called from do_copydir. Batch copy can pass in
# directories, so be sure to check first and break if it isn't a file.
#
sub do_copyfile
{
    # Copy a single file from $srcdir to $destdir. Source comes from either
    # $File::Find::name (when $dirflag is set, i.e. called from do_copydir /
    # do_wildcard) or from the global $line (single-file manifest entry).
    my ($destpath) = "";     # destination directory path
    my ($destpathcomp) = ""; # ditto, but possibly including component dir
    my ($destname) = "";     # destination file name
    my ($destsuffix) = "";   # destination file name suffix
    my ($altpath) = "";      # alternate destination directory path
    my ($altname) = "";      # alternate destination file name
    my ($altsuffix) = "";    # alternate destination file name suffix
    my ($srcpath) = "";      # source file directory path
    my ($srcname) = "";      # source file name
    my ($srcsuffix) = "";    # source file name suffix

    ($debug >= 2) && print "do_copyfile():\n";
    ($debug >= 10) && print " cwd: " . getcwd() . "\n";

    # set srcname correctly depending on how called
    if ( $dirflag ) {
        ($srcname, $srcpath, $srcsuffix) = fileparse("$File::Find::name", '\..*?$');
    } else {
        ($srcname, $srcpath, $srcsuffix) = fileparse("$srcdir/$line", '\..*?$');
    }
    ($debug >= 4) && print " fileparse(src): '$srcpath $srcname $srcsuffix'\n";

    # return if srcname is a directory from do_copydir
    if ( -d "$srcpath$srcname$srcsuffix" ) {
        ($debug >= 10) && print " return: '$srcpath$srcname$srcsuffix' is a directory\n";
        return;
    }
    else {
        ($debug >= 10) && print " '$srcpath$srcname$srcsuffix' is not a directory\n";
    }

    # set the destination path, if alternate destination given, use it.
    if ($flat) {
        # In flat mode, chrome/components manifests and xpt files are
        # diverted into per-component staging areas under $srcdir (note:
        # under the SOURCE dir) via the $altdest mechanism.
        # WebappRuntime has manifests that shouldn't be flattened, even though it
        # gets packaged with Firefox, which does get flattened, so special-case it.
        if ($srcsuffix eq ".manifest" && $srcpath =~ m'/(chrome|components)/$' &&
            $component ne "WebappRuntime") {
            my $subdir = $1;
            if ($component eq "") {
                die ("Manifest file was not part of a component.");
            }
            $destpathcomp = "$srcdir/manifests/$component/$subdir";
            $altdest = "$srcname$srcsuffix";
        }
        elsif ($srcsuffix eq ".xpt" && $srcpath =~ m|/components/$|) {
            if ($component eq "") {
                die ("XPT file was not part of a component.");
            }
            $destpathcomp = "$srcdir/xpt/$component/components";
            $altdest = "$srcname$srcsuffix";
        }
        else {
            $destpathcomp = "$destdir";
        }
    } else {
        if ( $component ne "" ) {
            $destpathcomp = "$destdir/$component";
        }
        else {
            $destpathcomp = "$destdir";
        }
    }

    # Compute the final destination name, honoring $altdest and whether this
    # is part of a recursive directory copy.
    if ( $altdest ne "" ) {
        if ( $dirflag ) { # directory copy to altdest
            ($destname, $destpath, $destsuffix) = fileparse("$destpathcomp/$altdest/$File::Find::name", '\..*?$');
            # Todo: add MSDOS hack
            $destpath =~ s|\Q$srcdir\E/$line/||; # rm info added by find
            ($debug >= 5) &&
                print " dir copy to altdest: $destpath $destname $destsuffix\n";
        } else { # single file copy to altdest
            ($destname, $destpath, $destsuffix) = fileparse("$destpathcomp/$altdest", '\..*?$');
            ($debug >= 5) &&
                print " file copy to altdest: $destpath $destname $destsuffix\n";
        }
    } else {
        if ( $dirflag ) { # directory copy, no altdest
            my $destfile = $File::Find::name;
            if ($os eq "MSDOS") {
                $destfile =~ s|\\|/|;
            }
            $destfile =~ s|\Q$srcdir\E/||;
            ($destname, $destpath, $destsuffix) = fileparse("$destpathcomp/$destfile", '\..*?$');
            ($debug >= 5) &&
                print " dir copy w/o altdest: $destpath $destname $destsuffix\n";
        } else { # single file copy, no altdest
            ($destname, $destpath, $destsuffix) = fileparse("$destpathcomp/$line", '\..*?$');
            ($debug >= 5) &&
                print " file copy w/o altdest: $destpath $destname $destsuffix\n";
        }
    }

    # Translate: */../../*/bin/*
    # into: */../../*/*
    # (where the *'s are interpreted using shell-notation and
    # it matches not only forward slashes but also backslashes.)
    # $1 = */../../*/
    # $2 = bin/
    # $3 = *
    if ($flat) {
        $destpath =~ s|(.*[/\\]\.\.[/\\]\.\.[/\\].+[/\\])(bin[/\\])(.*)|$1$3|;
    }

    # create the destination path if it doesn't exist
    if (! -d "$destpath" ) {
        ($debug >= 5) && print " mkpath($destpath)\n";
        # For OS/2 - remove trailing '/'
        chop($destpath);
        mkpath ($destpath, 0, 0755) ||
            die "Error: mkpath() failed: $!. Exiting...\n";
        # Put delimiter back for copying...
        $destpath = "$destpath/";
    }

    # path exists, source and destination known, time to copy
    if ((-f "$srcpath$srcname$srcsuffix") && (-r "$srcpath$srcname$srcsuffix")) {
        if ( $debug >= 1 ) {
            if ( $dirflag ) {
                print "$destname$destsuffix\n"; # from unglob
            } else {
                print "$line\n"; # from single file
            }
            if ( $debug >= 3 ) {
                print " copy\t$srcpath$srcname$srcsuffix =>\n\t\t$destpath$destname$destsuffix\n";
            }
        }
        # Incremental packaging: skip the copy when the destination is
        # already newer than the source.
        if (stat("$destpath$destname$destsuffix") &&
            stat("$srcpath$srcname$srcsuffix")->mtime < stat("$destpath$destname$destsuffix")->mtime) {
            if ( $debug >= 3 ) {
                print "source file older than destination, do not copy\n";
            }
            return;
        }
        unlink("$destpath$destname$destsuffix") if ( -e "$destpath$destname$destsuffix");
        # If source is a symbolic link pointing in the same directory, create a
        # symbolic link
        if ((-l "$srcpath$srcname$srcsuffix") && (readlink("$srcpath$srcname$srcsuffix") !~ /\//)) {
            symlink(readlink("$srcpath$srcname$srcsuffix"), "$destpath$destname$destsuffix") ||
                die "Error: copy of symbolic link $srcpath$srcname$srcsuffix failed ($package, $component, $lineno): $!. Exiting...\n";
            return;
        }
        copy ("$srcpath$srcname$srcsuffix", "$destpath$destname$destsuffix") ||
            die "Error: copy of file $srcpath$srcname$srcsuffix failed ($package, $component, $lineno): $!. Exiting...\n";
        # if this is unix, set the dest file permissions
        # read permissions
        my($st) = stat("$srcpath$srcname$srcsuffix") ||
            die "Error: can't stat $srcpath$srcname$srcsuffix: $! Exiting...\n";
        # set permissions
        ($debug >= 2) && print " chmod ".$st->mode." $destpath$destname$destsuffix\n";
        chmod ($st->mode, "$destpath$destname$destsuffix") ||
            warn "Warning: chmod of $destpath$destname$destsuffix failed: $!. Exiting...\n";
    } else {
        warn "Error: file $srcpath$srcname$srcsuffix is not a file or is not readable ($package, $component, $lineno).\n";
    }
}
#
# Expand any wildcards and copy files and/or directories
#
# todo: pass individual files to do_copyfile, not do_copydir
#
sub do_wildcard
{
    # Expand a manifest entry containing '*' or '?' and copy each match:
    # files via do_copyfile, directories via do_copydir.
    my ($entry) = $_[0];
    my (@list) = ();
    my ($item) = "";

    ($debug >= 2) && print "do_wildcard():\n";

    if ( $entry =~ /(?:\*|\?)/ ) { # it's a wildcard,
        @list = glob($entry); # expand it
        ($debug >= 4) && print " glob: $entry => @list\n";

        foreach $item ( @list ) { # now copy each item in list
            if ( -f $item ) {
                ($debug >= 10) && print " do_copyfile: $item\n";
                # glob adds full path to item like find() in copydir so
                # take advantage of existing code in copyfile by using
                # $dirflag and $File::Find::name.
                $File::Find::name = $item;
                $dirflag = 1;
                do_copyfile();
                $dirflag = 0;
                $File::Find::name = "";
            } elsif ( -d $item ) {
                ($debug >= 10) && print " do_copydir($item)\n";
                do_copydir ($item);
            } else {
                warn "Warning: $item is not a file or directory ($package, $component, $lineno). Skipped...\n";
            }
        }
    }
}
#
# Recursively copy directories specified.
#
sub do_copydir
{
    # Recursively copy the given directory by walking it with File::Find,
    # handing each entry to do_copyfile().
    my ($dir) = @_;
    # Signal do_copyfile() that a directory traversal is in progress so it
    # reads paths from $File::Find::name instead of the current input line.
    $dirflag = 1;
    ($debug >= 2) && print "do_copydir():\n";
    # Only warn on a non-directory; find() is still invoked, matching the
    # original behavior.
    warn "Warning: $dir is not a directory ($package, $component, $lineno). Skipped...\n"
        unless ( -d "$dir" );
    ($debug >= 4) && print "  find($dir)\n";
    find (\&do_copyfile, $dir);
    $dirflag = 0;
}
#
# Handle new component
#
sub do_component
{
    # Begin a new component section: validate and strip the [name] brackets,
    # skip the component if a --component list was given and it isn't listed,
    # and (unless --flat) create its directory under $destdir.
    ($debug >= 2) && print "do_component():\n";
    ( $component =~ /^\[.*(?:\s|\[|\])+.*\]/ ) && # no brackets or ws
        die "Error: malformed component $component. Exiting...\n";
    $component =~ s/^\[(.*)\]/$1/; # strip []
    if ( $components ne "") {
        # Exact membership test against the comma-separated list of requested
        # components.  The previous code used a bare regexp match
        # ($components =~ /$component/), which treated the name as a pattern
        # and matched substrings, so e.g. "-c mail" wrongly selected a
        # "mailnews" component.
        if ( grep { $_ eq $component } split (/,/, $components) ) {
            ($debug >= 10) && print "Component $component is in $components.\n";
        } else {
            ($debug >= 10) && print "Component $component not in $components.\n";
            $component = "";
            return; # named specific components and this isn't it
        }
    }
    if ($debug >= 1) {
        print "[$component]\n";
    }
    # create component directory
    if (!$flat) {
        if ( -d "$destdir/$component" ) {
            warn "Warning: component directory \"$component\" already exists in \"$destdir\".\n";
        } else {
            ($debug >= 4) && print "  mkdir $destdir/$component\n";
            mkdir ("$destdir/$component", 0755) ||
                die "Error: couldn't create component directory \"$component\": $!. Exiting...\n";
        }
    }
}
#
# Print error (and die later) or warn, based on whether $fatal_warnings is set.
#
sub delayed_die_or_warn
{
    # Report a problem: a plain warning normally, or — when $fatal_warnings
    # is set — an error that marks the run as failed ($delayed_error) so the
    # caller can die after processing finishes.
    my ($msg) = @_;
    unless ($fatal_warnings) {
        warn "Warning: $msg\n";
        return;
    }
    warn "Error: $msg\n";
    $delayed_error = 1;
}
#
# Check that arguments to script are valid.
#
sub check_arguments
{
    # Validate the command-line arguments, printing one message per problem
    # and exiting with an accumulated error code if anything is wrong.
    # Error bits: 1 = bad srcdir, 2 = bad destdir, 4 = bad package file,
    # 8 = a required option missing, 16 = unknown OS, 32 = destdir under
    # srcdir (currently disabled, see below).
    my ($exitval) = 0;
    ($debug >= 2) && print "check_arguments():\n";

    # if --help print usage
    if ($help) {
        print_usage();
        exit (1);
    }

    # make sure required variables are set:
    # check source directory
    if ( $srcdir eq "" ) {
        print "Error: source directory (--source) not specified.\n";
        $exitval += 8;
    } elsif ((! -d $srcdir) || (! -r $srcdir)) {
        print "Error: source directory \"$srcdir\" is not a directory or is unreadable.\n";
        # Accumulate the error bit like every other check does (this used to
        # be "$exitval = 1", which would have clobbered previously recorded
        # bits had any check preceded it).
        $exitval += 1;
    }

    # check destination directory
    if ( $destdir eq "" ) {
        print "Error: destination directory (--destination) not specified.\n";
        $exitval += 8;
    } elsif ((! -d $destdir) || (! -w $destdir)) {
        print "Error: destination directory \"$destdir\" is not a directory or is not writeable.\n";
        $exitval += 2;
    }

    # check destdir not a subdir of srcdir
    # hack - workaround for bug 14558 that should be fixed eventually.
    if (0) { # todo - write test
        print "Error: destination directory must not be subdirectory of the source directory.\n";
        $exitval += 32;
    }

    # check package file
    if ( $package eq "" ) {
        print "Error: package file (--file) not specified.\n";
        $exitval += 8;
    } elsif (!(-f $package) || !(-r $package)) {
        print "Error: package file \"$package\" is not a file or is unreadable.\n";
        $exitval += 4;
    }

    # check OS == {unix|dos}; normalize for File::Basename path parsing
    if ($os eq "") {
        print "Error: OS type (--os) not specified.\n";
        $exitval += 8;
    } elsif ( $os =~ /dos/i ) {
        $os = "MSDOS";
        fileparse_set_fstype ($os);
    } elsif ( $os =~ /unix/i ) {
        $os = "Unix"; # can be anything but MSDOS
        fileparse_set_fstype ($os);
    } else {
        print "Error: OS type \"$os\" unknown.\n";
        $exitval += 16;
    }

    # turn components array into a string for regexp
    if ( @components > 0 ) {
        $components = join (",",@components);
    } else {
        $components = "";
    }

    if ($debug > 4) {
        print ("source dir:\t$srcdir\ndest dir:\t$destdir\npackage:\t$package\nOS:\t$os\ncomponents:\t$components\n");
    }

    if ($exitval) {
        print "See \'$0 --help\' for more information.\n";
        print "Exiting...\n";
        exit ($exitval);
    }
}
#
# display usage information
#
sub print_usage
{
    # Print the command-line usage/help text.  The heredoc terminator must
    # stay at column 0.  Fixes vs. the previous text: the example no longer
    # repeats "--os unix" twice, and "stuffes" is spelled "stuffs".
    ($debug >= 2) && print "print_usage():\n";
    print <<EOC
$0
Copy files from the source directory to component directories
in the destination directory as specified by the package file.
Options:
-s, --source <source directory>
Specifies the directory from which to copy the files
specified in the file passed via --file.
Required.
-d, --destination <destination directory>
Specifies the directory in which to create the component
directories and copy the files specified in the file passed
via --file.
Required.
NOTE: Source and destination directories must be absolute paths.
Relative paths will NOT work. Also, the destination directory
must NOT be a subdirectory of the source directory.
-f, --file <package file>
Specifies the file listing the components to be created in
the destination directory and the files to copy from the
source directory to each component directory in the
destination directory.
Required.
-o, --os [dos|unix]
Specifies which type of system this is. Used for parsing
file specifications from the package file.
Required.
-c, --component <component name>
Specifies a specific component in the package file to copy
rather than copying all the components in the package file.
Can be used more than once for multiple components (e.g.
"-c browser -c mail" to copy mail and news only).
Optional.
-l, --flat
Suppresses creation of components dirs, but stuffs everything
directly into the package destination dir. This is useful
for creating tarballs.
-h, --help
Prints this information.
Optional.
--debug [1-10]
Controls verbosity of debugging output, 10 being most verbose.
1 : same as --verbose.
2 : includes function calls.
3 : includes source and destination for each copy.
Optional.
-v, --verbose
Print component names and files copied/deleted.
Optional.
e.g.
$0 --source /builds/mozilla/dist --destination /h/lithium/install --file packages-win --os unix --verbose
Note: options can be specified by either a leading '--' or '-'.
EOC
}

View File

@ -0,0 +1,51 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import sys
import hashlib
from mozpack.packager.unpack import UnpackFinder
try:
from collections import OrderedDict
except ImportError:
from simplejson import OrderedDict
'''
Find files duplicated in a given packaged directory, independently of its
package format.
'''
def find_dupes(source):
    '''
    Scan the packaged directory `source` (any package format understood by
    UnpackFinder) and print every group of files with identical content,
    followed by a summary of the wasted uncompressed bytes.
    '''
    # digest -> (content size, [paths with that content]); OrderedDict keeps
    # the report in the order files were first encountered.
    md5s = OrderedDict()
    for p, f in UnpackFinder(source):
        content = f.open().read()
        m = hashlib.md5(content).digest()
        if not m in md5s:
            md5s[m] = (len(content), [])
        md5s[m][1].append(p)
    total = 0
    num_dupes = 0
    for m, (size, paths) in md5s.iteritems():
        if len(paths) > 1:
            print 'Duplicates %d bytes%s:' % (size,
                ' (%d times)' % (len(paths) - 1) if len(paths) > 2 else '')
            print ''.join(' %s\n' % p for p in paths)
            # Every copy beyond the first wastes `size` bytes.
            total += (len(paths) - 1) * size
            num_dupes += 1
    if num_dupes:
        print "WARNING: Found %d duplicated files taking %d bytes" % \
            (num_dupes, total) + " (uncompressed)"
def main():
    '''Command-line entry point: expects exactly one argument, the packaged
    directory to scan for duplicate files.'''
    if len(sys.argv) != 2:
        import os
        print >>sys.stderr, "Usage: %s directory" % \
            os.path.basename(sys.argv[0])
        sys.exit(1)
    find_dupes(sys.argv[1])

if __name__ == "__main__":
    main()

View File

@ -0,0 +1,185 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
'''
Replace localized parts of a packaged directory with data from a langpack
directory.
'''
import sys
import os
import mozpack.path
from mozpack.packager.formats import (
FlatFormatter,
JarFormatter,
OmniJarFormatter,
)
from mozpack.packager import SimplePackager
from mozpack.files import ManifestFile
from mozpack.copier import (
FileCopier,
Jarrer,
)
from mozpack.chrome.manifest import (
ManifestLocale,
ManifestEntryWithRelPath,
is_manifest,
ManifestChrome,
Manifest,
)
from mozpack.errors import errors
from mozpack.packager.unpack import UnpackFinder
from createprecomplete import generate_precomplete
# Set of files or directories not listed in a chrome.manifest but that are
# localized.
NON_CHROME = set([
'searchplugins',
'dictionaries',
'hyphenation',
'defaults/profile',
'defaults/pref*/*-l10n.js',
'update.locale',
'extensions/langpack-*@*',
'distribution/extensions/langpack-*@*',
])
def repack(source, l10n):
    '''
    Replace the localized parts of the packaged directory `source` with the
    corresponding files from the langpack directory `l10n`, preserving the
    original package format (flat, jar or omni), then regenerate the
    precomplete file.  Only single-locale packages and langpacks are
    supported; errors.fatal is raised otherwise.
    '''
    finder = UnpackFinder(source)
    l10n_finder = UnpackFinder(l10n)
    copier = FileCopier()
    # Mirror the original package's format in the repacked output.
    # NOTE(review): if finder.kind is none of flat/jar/omni, `formatter`
    # stays unbound and the code below raises NameError — presumably
    # UnpackFinder only ever reports these three kinds; verify.
    if finder.kind == 'flat':
        formatter = FlatFormatter(copier)
    elif finder.kind == 'jar':
        formatter = JarFormatter(copier, optimize=finder.optimizedjars)
    elif finder.kind == 'omni':
        formatter = OmniJarFormatter(copier, finder.omnijar,
                                     optimize=finder.optimizedjars)
    # Read all manifest entries from the packaged directory.
    manifests = dict((p, m) for p, m in finder.find('**/*.manifest')
                     if is_manifest(p))
    assert all(isinstance(m, ManifestFile) for m in manifests.itervalues())
    entries = [e for m in manifests.itervalues() for e in m if e.localized]
    # Find unique locales used in these manifest entries.
    locales = list(set(e.id for e in entries if isinstance(e, ManifestLocale)))
    # Find all paths whose manifest are included by no other manifest.
    includes = set(mozpack.path.join(e.base, e.relpath)
                   for m in manifests.itervalues()
                   for e in m if isinstance(e, Manifest))
    bases = [mozpack.path.dirname(p) for p in set(manifests.keys()) - includes]
    # Read all manifest entries from the langpack directory.
    manifests = [m for p, m in l10n_finder.find('**/*.manifest')
                 if is_manifest(p)]
    assert all(isinstance(m, ManifestFile) for m in manifests)
    l10n_entries = [e for m in manifests for e in m if e.localized]
    # Find unique locales used in these manifest entries.
    l10n_locales = list(set(e.id for e in l10n_entries
                            if isinstance(e, ManifestLocale)))
    # The code further below assumes there's only one locale replaced with
    # another one.
    if len(locales) > 1 or len(l10n_locales) > 1:
        errors.fatal("Multiple locales aren't supported")
    locale = locales[0]
    l10n_locale = l10n_locales[0]
    # For each base directory, store what path a locale chrome package name
    # corresponds to.
    # e.g., for the following entry under app/chrome:
    #     locale foo en-US path/to/files
    # keep track that the locale path for foo in app is
    # app/chrome/path/to/files.
    l10n_paths = {}
    for e in l10n_entries:
        if isinstance(e, ManifestChrome):
            base = mozpack.path.basedir(e.path, bases)
            if not base in l10n_paths:
                l10n_paths[base] = {}
            l10n_paths[base][e.name] = e.path
    # For chrome and non chrome files or directories, store what langpack path
    # corresponds to a package path.
    paths = dict((e.path,
                  l10n_paths[mozpack.path.basedir(e.path, bases)][e.name])
                 for e in entries if isinstance(e, ManifestEntryWithRelPath))
    # Non-chrome localized paths map to themselves in the langpack.
    for path in NON_CHROME:
        for p, f in l10n_finder.find(path):
            paths[p] = p
    # Create a new package, with non localized bits coming from the original
    # package, and localized bits coming from the langpack.
    packager = SimplePackager(formatter)
    for p, f in finder:
        if is_manifest(p):
            # Remove localized manifest entries.
            for e in [e for e in f if e.localized]:
                f.remove(e)
        base = mozpack.path.basedir(p, paths.keys())
        if base:
            # If the path is one that needs a locale replacement, use the
            # corresponding file from the langpack.
            subpath = mozpack.path.relpath(p, base)
            path = mozpack.path.normpath(mozpack.path.join(paths[base],
                                                           subpath))
            files = [f for p, f in l10n_finder.find(path)]
            if len(files) == 0 and base in NON_CHROME:
                # Non chrome paths may embed the locale name; retry with the
                # langpack locale substituted in.
                path = path.replace(locale, l10n_locale)
                files = [f for p, f in l10n_finder.find(path)]
            if len(files) == 0:
                if not base in NON_CHROME:
                    errors.error("Missing file: %s" % os.path.join(l10n, path))
            else:
                packager.add(path, files[0])
        else:
            packager.add(p, f)
    # Add localized manifest entries from the langpack.
    l10n_manifests = []
    for base in set(e.base for e in l10n_entries):
        m = ManifestFile(base)
        for e in l10n_entries:
            if e.base == base:
                m.add(e)
        path = mozpack.path.join(base, 'chrome.%s.manifest' % l10n_locale)
        l10n_manifests.append((path, m))
    bases = packager.get_bases()
    for path, m in l10n_manifests:
        base = mozpack.path.basedir(path, bases)
        packager.add(path, m)
        # Add a "manifest $path" entry in the top manifest under that base.
        m = ManifestFile(base)
        m.add(Manifest(base, mozpack.path.relpath(path, base)))
        packager.add(mozpack.path.join(base, 'chrome.manifest'), m)
    packager.close()
    # Add any remaining non chrome files.
    for base in NON_CHROME:
        for p, f in l10n_finder.find(base):
            if not formatter.contains(p):
                formatter.add(p, f)
    # Transplant jar preloading information.
    for path, log in finder.jarlogs.iteritems():
        assert isinstance(copier[path], Jarrer)
        copier[path].preload([l.replace(locale, l10n_locale) for l in log])
    copier.copy(source)
    generate_precomplete(source)
def main():
    '''Command-line entry point: repack(<directory>, <l10n-directory>).'''
    if len(sys.argv) != 3:
        print >>sys.stderr, "Usage: %s directory l10n-directory" % \
            os.path.basename(sys.argv[0])
        sys.exit(1)
    repack(sys.argv[1], sys.argv[2])

if __name__ == "__main__":
    main()

View File

@ -1,24 +0,0 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import sys, os

# Concatenate every manifest fragment found in the given directories into a
# single output manifest.
# Usage: link-manifests.py <out-manifest> <manifest-dir>...
outmanifest = sys.argv[1]      # path of the combined manifest to write
manifestdirs = sys.argv[2:]    # directories whose files are concatenated
outfd = open(outmanifest, 'w')
for manifestdir in manifestdirs:
    # Missing directories are tolerated: warn and move on.
    if not os.path.isdir(manifestdir):
        print >>sys.stderr, "Warning: trying to link manifests in missing directory '%s'" % manifestdir
        continue
    for name in os.listdir(manifestdir):
        infd = open(os.path.join(manifestdir, name))
        # Record which fragment the following lines came from.
        print >>outfd, "# %s" % name
        outfd.write(infd.read())
        print >>outfd
        infd.close()
outfd.close()

View File

@ -1,21 +0,0 @@
REM This Source Code Form is subject to the terms of the Mozilla Public
REM License, v. 2.0. If a copy of the MPL was not distributed with this
REM file, You can obtain one at http://mozilla.org/MPL/2.0/.
/* An adapted version of sign.cmd from NSS */
/* Signs <filename> with shlibsign out of <dist>\bin, after putting the
   freshly built libraries on the library search path. */
PARSE ARG dist filename
dist=forwardtoback(dist);
'@echo 'dist
'set BEGINLIBPATH='dist'\bin;%BEGINLIBPATH%'
'set LIBPATHSTRICT=T'
dist'\bin\shlibsign -v -i 'filename
exit

/* Convert forward slashes in a path to backslashes. */
forwardtoback: procedure
  arg pathname
  parse var pathname pathname'/'rest
  do while (rest <> "")
    pathname = pathname'\'rest
    parse var pathname pathname'/'rest
  end
  return pathname

View File

@ -1,10 +0,0 @@
REM This Source Code Form is subject to the terms of the Mozilla Public
REM License, v. 2.0. If a copy of the MPL was not distributed with this
REM file, You can obtain one at http://mozilla.org/MPL/2.0/.
@rem OS/2 post-build step: run lxlite over all executables and DLLs in the
@rem current directory to shrink them.
@rem compress binaries for optimum performance without disturbing chkdll32
@rem yes to: abort if in use, delete debug & extra data, leave non-resident names;
@rem align no-bounday/page-shift; no backup; use stdio; discard existing exe/dll settings;
@rem normal priority; packing: LZ, medium run lth, medium fixup; recursive search;
@rem unpack before pack; pack files; leave stub, remove debug & extra data;
lxlite *.exe *.dll /yua /ydd /yxd /ynl /anp /b- /cs+ /d /i- /ml1 /mr2 /mf2 /r+ /u+ /x- /zs:0 /zx /zd

View File

@ -1,13 +0,0 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os, sys

# Print every "bin/"-prefixed entry of the manifest named on the command
# line, with the "bin/" prefix stripped; all other lines are dropped.
filename = sys.argv[1]  # manifest file to filter
with open(filename, 'r') as f:
    for l in f.readlines():
        l = l.strip()
        if not l.startswith("bin/"):
            continue
        print l[4:]

View File

@ -96,7 +96,6 @@ endif # MOZ_NATIVE_NSPR
MAKE_JSSHELL = $(ZIP) -9j $(PKG_JSSHELL) $(JSSHELL_BINS)
endif # LIBXUL_SDK
PREPARE_PACKAGE = $(error What is a $(MOZ_PKG_FORMAT) package format?);
_ABS_DIST = $(call core_abspath,$(DIST))
JARLOG_DIR = $(call core_abspath,$(DEPTH)/jarlog/)
JARLOG_DIR_AB_CD = $(JARLOG_DIR)/$(AB_CD)
@ -447,128 +446,10 @@ endif
MAKE_SDK = $(CREATE_FINAL_TAR) - $(MOZ_APP_NAME)-sdk | bzip2 -vf > $(SDK)
endif
ifdef MOZ_OMNIJAR
# Set MOZ_CAN_RUN_PROGRAMS for trivial cases.
ifndef MOZ_CAN_RUN_PROGRAMS
ifdef UNIVERSAL_BINARY
MOZ_CAN_RUN_PROGRAMS=1
endif
ifndef CROSS_COMPILE
MOZ_CAN_RUN_PROGRAMS=1
endif
endif # MOZ_CAN_RUN_PROGRAMS
ifdef GENERATE_CACHE
ifdef MOZ_CAN_RUN_PROGRAMS
ifdef RUN_TEST_PROGRAM
_ABS_RUN_TEST_PROGRAM = $(call core_abspath,$(RUN_TEST_PROGRAM))
endif
ifdef LIBXUL_SDK
PRECOMPILE_RESOURCE=app
PRECOMPILE_GRE=$(LIBXUL_DIST)/bin
else
PRECOMPILE_RESOURCE=gre
PRECOMPILE_GRE=$$PWD
endif
# Silence the unzip step so we don't print any binary data from the comment field.
GENERATE_CACHE = \
$(_ABS_RUN_TEST_PROGRAM) $(LIBXUL_DIST)/bin/xpcshell$(BIN_SUFFIX) -g "$(PRECOMPILE_GRE)" -a "$$PWD" -f $(call core_abspath,$(MOZILLA_DIR)/toolkit/mozapps/installer/precompile_cache.js) -e "populate_startupcache('startupCache.zip');" && \
rm -rf jsloader jssubloader && \
$(UNZIP) -q startupCache.zip && \
rm startupCache.zip && \
$(ZIP) -r9m $(OMNIJAR_NAME) jsloader/resource/$(PRECOMPILE_RESOURCE) jssubloader/*/resource/$(PRECOMPILE_RESOURCE) && \
rm -rf jsloader jssubloader
else
GENERATE_CACHE = true
endif
endif
GENERATE_CACHE ?= true
OMNIJAR_FILES = \
chrome \
chrome.manifest \
components/*.js \
components/*.xpt \
components/*.manifest \
modules \
res \
defaults \
greprefs.js \
jsloader \
jssubloader \
hyphenation \
update.locale \
$(NULL)
# defaults/pref/channel-prefs.js is handled separate from other prefs due to
# bug 756325
NON_OMNIJAR_FILES += \
chrome/icons/\* \
$(PREF_DIR)/channel-prefs.js \
defaults/pref/channel-prefs.js \
res/cursors/\* \
res/MainMenu.nib/\* \
\*/.mkdir.done \
$(NULL)
PACK_OMNIJAR = \
rm -f $(OMNIJAR_NAME) components/binary.manifest && \
grep -h '^binary-component' components/*.manifest > binary.manifest ; \
for m in components/*.manifest; do \
sed -e 's/^binary-component/\#binary-component/' $$m > tmp.manifest && \
mv tmp.manifest $$m; \
done; \
$(ZIP) -r9m $(OMNIJAR_NAME) $(OMNIJAR_FILES) -x $(NON_OMNIJAR_FILES) && \
$(GENERATE_CACHE) && \
$(OPTIMIZE_JARS_CMD) --optimize $(JARLOG_DIR_AB_CD) ./ ./ && \
mv binary.manifest components && \
printf "manifest components/binary.manifest\n" > chrome.manifest
UNPACK_OMNIJAR = \
$(OPTIMIZE_JARS_CMD) --deoptimize $(JARLOG_DIR_AB_CD) ./ ./ && \
$(UNZIP) -o $(OMNIJAR_NAME) && \
rm -f components/binary.manifest && \
for m in components/*.manifest; do \
sed -e 's/^\#binary-component/binary-component/' $$m > tmp.manifest && \
mv tmp.manifest $$m; \
done
ifdef MOZ_WEBAPP_RUNTIME
# It's simpler to pack the webapp runtime, because it doesn't have any
# binary components. We also don't pre-generate the startup cache, which seems
# unnecessary, given the small size of the runtime, although it might become
# more valuable over time.
PACK_OMNIJAR_WEBAPP_RUNTIME = \
rm -f $(OMNIJAR_NAME); \
$(ZIP) -r9m $(OMNIJAR_NAME) $(OMNIJAR_FILES) -x $(NON_OMNIJAR_FILES) && \
$(OPTIMIZE_JARS_CMD) --optimize $(JARLOG_DIR_AB_CD) ./ ./
UNPACK_OMNIJAR_WEBAPP_RUNTIME = \
$(OPTIMIZE_JARS_CMD) --deoptimize $(JARLOG_DIR_AB_CD) ./ ./ && \
$(UNZIP) -o $(OMNIJAR_NAME)
PREPARE_PACKAGE = (cd $(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH) && $(PACK_OMNIJAR)) && \
(cd $(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH)/webapprt && $(PACK_OMNIJAR_WEBAPP_RUNTIME)) && \
(cd $(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH) && $(CREATE_PRECOMPLETE_CMD))
UNMAKE_PACKAGE = $(INNER_UNMAKE_PACKAGE) && \
(cd $(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH) && $(UNPACK_OMNIJAR)) && \
(cd $(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH)/webapprt && $(UNPACK_OMNIJAR_WEBAPP_RUNTIME))
else # ndef MOZ_WEBAPP_RUNTIME
PREPARE_PACKAGE = (cd $(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH) && $(PACK_OMNIJAR)) && \
(cd $(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH) && $(CREATE_PRECOMPLETE_CMD))
UNMAKE_PACKAGE = $(INNER_UNMAKE_PACKAGE) && (cd $(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH) && $(UNPACK_OMNIJAR))
endif # def MOZ_WEBAPP_RUNTIME
else # ndef MOZ_OMNIJAR
PREPARE_PACKAGE = (cd $(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH) && $(CREATE_PRECOMPLETE_CMD))
UNMAKE_PACKAGE = $(INNER_UNMAKE_PACKAGE)
endif # def MOZ_OMNIJAR
ifdef MOZ_INTERNAL_SIGNING_FORMAT
MOZ_SIGN_PREPARED_PACKAGE_CMD=$(MOZ_SIGN_CMD) $(foreach f,$(MOZ_INTERNAL_SIGNING_FORMAT),-f $(f)) $(foreach i,$(SIGN_INCLUDES),-i $(i)) $(foreach x,$(SIGN_EXCLUDES),-x $(x))
ifeq (WINNT,$(OS_ARCH))
MOZ_SIGN_PREPARED_PACKAGE_CMD += --nsscmd "$(SIGN_CMD)"
MOZ_SIGN_PREPARED_PACKAGE_CMD += --nsscmd "$(_ABS_DIST)/bin/shlibsign$(BIN_SUFFIX) -v -i"
endif
endif
@ -602,49 +483,6 @@ MAKE_SDK += && $(MOZ_SIGN_CMD) -f gpg $(SDK)
UPLOAD_EXTRA_FILES += $(SDK).asc
endif
# dummy macro if we don't have PSM built
SIGN_NSS =
ifdef MOZ_CAN_RUN_PROGRAMS
ifdef MOZ_PSM
SIGN_NSS = echo signing nss libraries;
NSS_DLL_SUFFIX = $(DLL_SUFFIX)
ifdef UNIVERSAL_BINARY
NATIVE_ARCH = $(shell uname -p | sed -e s/powerpc/ppc/)
NATIVE_DIST = $(DIST:$(DEPTH)/%=$(DEPTH)/../$(NATIVE_ARCH)/%)
SIGN_CMD = $(NATIVE_DIST)/bin/run-mozilla.sh $(NATIVE_DIST)/bin/shlibsign -v -i
else
ifeq ($(OS_ARCH),OS2)
# uppercase extension to get the correct output file from shlibsign
NSS_DLL_SUFFIX = .DLL
SIGN_CMD = $(MOZILLA_DIR)/toolkit/mozapps/installer/os2/sign.cmd $(DIST)
else
SIGN_CMD = $(strip $(RUN_TEST_PROGRAM) $(_ABS_DIST)/bin/shlibsign$(BIN_SUFFIX) -v -i)
endif
endif
SOFTOKN = $(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH)/$(DLL_PREFIX)softokn3$(NSS_DLL_SUFFIX)
NSSDBM = $(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH)/$(DLL_PREFIX)nssdbm3$(NSS_DLL_SUFFIX)
FREEBL = $(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH)/$(DLL_PREFIX)freebl3$(NSS_DLL_SUFFIX)
FREEBL_32FPU = $(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH)/$(DLL_PREFIX)freebl_32fpu_3$(DLL_SUFFIX)
FREEBL_32INT = $(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH)/$(DLL_PREFIX)freebl_32int_3$(DLL_SUFFIX)
FREEBL_32INT64 = $(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH)/$(DLL_PREFIX)freebl_32int64_3$(DLL_SUFFIX)
FREEBL_64FPU = $(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH)/$(DLL_PREFIX)freebl_64fpu_3$(DLL_SUFFIX)
FREEBL_64INT = $(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH)/$(DLL_PREFIX)freebl_64int_3$(DLL_SUFFIX)
SIGN_NSS += \
if test -f $(SOFTOKN); then $(SIGN_CMD) $(SOFTOKN); fi && \
if test -f $(NSSDBM); then $(SIGN_CMD) $(NSSDBM); fi && \
if test -f $(FREEBL); then $(SIGN_CMD) $(FREEBL); fi && \
if test -f $(FREEBL_32FPU); then $(SIGN_CMD) $(FREEBL_32FPU); fi && \
if test -f $(FREEBL_32INT); then $(SIGN_CMD) $(FREEBL_32INT); fi && \
if test -f $(FREEBL_32INT64); then $(SIGN_CMD) $(FREEBL_32INT64); fi && \
if test -f $(FREEBL_64FPU); then $(SIGN_CMD) $(FREEBL_64FPU); fi && \
if test -f $(FREEBL_64INT); then $(SIGN_CMD) $(FREEBL_64INT); fi;
endif # MOZ_PSM
endif # MOZ_CAN_RUN_PROGRAMS
NO_PKG_FILES += \
core \
bsdecho \
@ -688,29 +526,8 @@ endif
DEFINES += -DDLL_PREFIX=$(DLL_PREFIX) -DDLL_SUFFIX=$(DLL_SUFFIX) -DBIN_SUFFIX=$(BIN_SUFFIX)
ifdef MOZ_PKG_REMOVALS
MOZ_PKG_REMOVALS_GEN = removed-files
$(MOZ_PKG_REMOVALS_GEN): $(MOZ_PKG_REMOVALS) $(GLOBAL_DEPS)
cat $(MOZ_PKG_REMOVALS) | \
sed -e 's/^[ \t]*//' | \
$(PYTHON) $(MOZILLA_DIR)/config/Preprocessor.py -Fsubstitution $(DEFINES) $(ACDEFINES) > $(MOZ_PKG_REMOVALS_GEN)
GARBAGE += $(MOZ_PKG_REMOVALS_GEN)
endif
GARBAGE += $(DIST)/$(PACKAGE) $(PACKAGE)
ifeq ($(OS_ARCH),OS2)
STRIP = $(MOZILLA_DIR)/toolkit/mozapps/installer/os2/strip.cmd
endif
ifneq (,$(filter WINNT OS2,$(OS_ARCH)))
PKGCP_OS = dos
else
PKGCP_OS = unix
endif
# The following target stages files into two directories: one directory for
# core files, and one for optional extensions based on the information in
# the MOZ_PKG_MANIFEST file and the following vars:
@ -719,18 +536,6 @@ endif
PKG_ARG = , "$(pkg)"
# Define packager macro to work around make 3.81 backslash issue (bug #339933)
# Controls whether missing file warnings should be fatal
ifndef MOZ_PKG_FATAL_WARNINGS
MOZ_PKG_FATAL_WARNINGS = 0
endif
define PACKAGER_COPY
$(PERL) -I$(MOZILLA_DIR)/toolkit/mozapps/installer -e 'use Packager; \
Packager::Copy($1,$2,$3,$4,$5,$(MOZ_PKG_FATAL_WARNINGS),$6,$7);'
endef
installer-stage: prepare-package
ifndef MOZ_PKG_MANIFEST
$(error MOZ_PKG_MANIFEST unspecified!)
@ -749,115 +554,47 @@ ifeq (gonk,$(MOZ_WIDGET_TOOLKIT))
ELF_HACK_FLAGS = --fill
endif
stage-package: $(MOZ_PKG_MANIFEST) $(MOZ_PKG_REMOVALS_GEN)
@rm -rf $(DIST)/$(PKG_PATH)$(PKG_BASENAME).tar $(DIST)/$(PKG_PATH)$(PKG_BASENAME).dmg $@ $(EXCLUDE_LIST)
ifndef MOZ_FAST_PACKAGE
@rm -rf $(DIST)/$(MOZ_PKG_DIR)
endif
# NOTE: this must be a tar now that dist links into the tree so that we
# do not strip the binaries actually in the tree.
@echo "Creating package directory..."
if ! test -d $(DIST)/$(MOZ_PKG_DIR) ; then \
mkdir $(DIST)/$(MOZ_PKG_DIR); \
fi
ifndef UNIVERSAL_BINARY
# If UNIVERSAL_BINARY, the package will be made from an already-prepared
# STAGEPATH
ifdef MOZ_PKG_MANIFEST
ifndef MOZ_FAST_PACKAGE
$(RM) -rf $(DIST)/xpt $(DIST)/manifests
endif
$(call PACKAGER_COPY, "$(call core_abspath,$(DIST))",\
"$(call core_abspath,$(DIST)/$(MOZ_PKG_DIR))", \
"$(MOZ_PKG_MANIFEST)", "$(PKGCP_OS)", 1, 0, 1)
$(PERL) $(MOZILLA_DIR)/toolkit/mozapps/installer/xptlink.pl -s $(DIST) -d $(DIST)/xpt -f $(DIST)/$(MOZ_PKG_DIR)/$(_BINPATH)/components -v -x "$(XPIDL_LINK)"
$(PYTHON) $(MOZILLA_DIR)/toolkit/mozapps/installer/link-manifests.py \
$(DIST)/$(MOZ_PKG_DIR)/$(_BINPATH)/components/components.manifest \
$(patsubst %,$(DIST)/manifests/%/components,$(MOZ_NONLOCALIZED_PKG_LIST))
$(PYTHON) $(MOZILLA_DIR)/toolkit/mozapps/installer/link-manifests.py \
$(DIST)/$(MOZ_PKG_DIR)/$(_BINPATH)/chrome/nonlocalized.manifest \
$(patsubst %,$(DIST)/manifests/%/chrome,$(MOZ_NONLOCALIZED_PKG_LIST))
$(PYTHON) $(MOZILLA_DIR)/toolkit/mozapps/installer/link-manifests.py \
$(DIST)/$(MOZ_PKG_DIR)/$(_BINPATH)/chrome/localized.manifest \
$(patsubst %,$(DIST)/manifests/%/chrome,$(MOZ_LOCALIZED_PKG_LIST))
ifdef MOZ_WEBAPP_RUNTIME
mv $(DIST)/$(MOZ_PKG_DIR)/$(_BINPATH)/webapprt/chrome/$(AB_CD).manifest $(DIST)/$(MOZ_PKG_DIR)/$(_BINPATH)/webapprt/chrome/localized.manifest
sed 's/$(AB_CD)/localized/' $(DIST)/bin/webapprt/chrome.manifest > $(DIST)/$(MOZ_PKG_DIR)/$(_BINPATH)/webapprt/chrome.manifest
endif
printf "manifest components/interfaces.manifest\nmanifest components/components.manifest\nmanifest chrome/nonlocalized.manifest\nmanifest chrome/localized.manifest\n" > $(DIST)/$(MOZ_PKG_DIR)/$(_BINPATH)/chrome.manifest
else # !MOZ_PKG_MANIFEST
ifeq ($(MOZ_WIDGET_TOOLKIT),cocoa)
ifndef STAGE_SDK
@cd $(DIST) && rsync -auv --copy-unsafe-links $(_APPNAME) $(MOZ_PKG_DIR)
@echo "Linking XPT files..."
@rm -rf $(DIST)/xpt
@$(NSINSTALL) -D $(DIST)/xpt
@($(XPIDL_LINK) $(DIST)/xpt/$(MOZ_PKG_APPNAME).xpt $(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH)/components/*.xpt && rm -f $(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH)/components/*.xpt && cp $(DIST)/xpt/$(MOZ_PKG_APPNAME).xpt $(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH)/components && printf "interfaces $(MOZ_PKG_APPNAME).xpt\n" >$(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH)/components/interfaces.manifest) || echo No *.xpt files found in: $(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH)/components/. Continuing...
else
@cd $(DIST)/bin && $(TAR) $(TAR_CREATE_FLAGS) - * | (cd ../$(MOZ_PKG_DIR); tar -xf -)
endif
else
@cd $(DIST)/bin && $(TAR) $(TAR_CREATE_FLAGS) - * | (cd ../$(MOZ_PKG_DIR); tar -xf -)
@echo "Linking XPT files..."
@rm -rf $(DIST)/xpt
@$(NSINSTALL) -D $(DIST)/xpt
@($(XPIDL_LINK) $(DIST)/xpt/$(MOZ_PKG_APPNAME).xpt $(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR)/components/*.xpt && rm -f $(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR)/components/*.xpt && cp $(DIST)/xpt/$(MOZ_PKG_APPNAME).xpt $(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR)/components && printf "interfaces $(MOZ_PKG_APPNAME).xpt\n" >$(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR)/components/interfaces.manifest) || echo No *.xpt files found in: $(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR)/components/. Continuing...
endif # DMG
# MOZ_PKG_MANIFEST is the canonical way to define the package manifest (which
# the packager will preprocess), but for a smooth transition, we derive it
# from the now deprecated MOZ_PKG_MANIFEST_P when MOZ_PKG_MANIFEST is not
# defined.
ifndef MOZ_PKG_MANIFEST
ifdef MOZ_PKG_MANIFEST_P
MOZ_PKG_MANIFEST := $(MOZ_PKG_MANIFEST_P)
endif # MOZ_PKG_MANIFEST_P
endif # MOZ_PKG_MANIFEST
endif # UNIVERSAL_BINARY
$(OPTIMIZE_JARS_CMD) --optimize $(JARLOG_DIR_AB_CD) $(DIST)/bin/chrome $(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH)/chrome
ifndef PKG_SKIP_STRIP
ifeq ($(OS_ARCH),OS2)
@echo "Stripping package directory..."
@cd $(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR) && $(STRIP)
else
@echo "Stripping package directory..."
@cd $(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR); find . ! -type d \
! -name "*.js" \
! -name "*.xpt" \
! -name "*.gif" \
! -name "*.jpg" \
! -name "*.png" \
! -name "*.xpm" \
! -name "*.txt" \
! -name "*.rdf" \
! -name "*.sh" \
! -name "*.properties" \
! -name "*.dtd" \
! -name "*.html" \
! -name "*.xul" \
! -name "*.css" \
! -name "*.xml" \
! -name "*.jar" \
! -name "*.dat" \
! -name "*.tbl" \
! -name "*.src" \
! -name "*.reg" \
$(PLATFORM_EXCLUDE_LIST) \
-exec $(STRIP) $(STRIP_FLAGS) {} >/dev/null 2>&1 \;
endif
endif # PKG_SKIP_STRIP
ifdef USE_ELF_HACK
@echo ===
@echo === If you get failures below, please file a bug describing the error
@echo === and your environment \(compiler and linker versions\), and use
@echo === --disable-elf-hack until this is fixed.
@echo ===
cd $(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR); find . -name "*$(DLL_SUFFIX)" | xargs ../../build/unix/elfhack/elfhack $(ELF_HACK_FLAGS)
# For smooth transition of comm-central
ifndef MOZ_PACKAGER_FORMAT
ifeq ($(MOZ_CHROME_FILE_FORMAT),flat)
ifdef MOZ_OMNIJAR
MOZ_PACKAGER_FORMAT := omni
else
MOZ_PACKAGER_FORMAT := flat
endif
endif
endif
ifndef MOZ_PACKAGER_FORMAT
$(error MOZ_PACKAGER_FORMAT is not set)
endif
# We always sign nss because we don't do it from security/manager anymore
@$(SIGN_NSS)
@echo "Removing unpackaged files..."
ifdef NO_PKG_FILES
cd $(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH); rm -rf $(NO_PKG_FILES)
ifneq (android,$(MOZ_WIDGET_TOOLKIT))
OPTIMIZEJARS = 1
endif
ifdef MOZ_PKG_REMOVALS
$(SYSINSTALL) $(IFLAGS1) $(MOZ_PKG_REMOVALS_GEN) $(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH)
endif # MOZ_PKG_REMOVALS
ifdef MOZ_POST_STAGING_CMD
cd $(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR)$(_BINPATH) && $(MOZ_POST_STAGING_CMD)
endif # MOZ_POST_STAGING_CMD
export NO_PKG_FILES USE_ELF_HACK ELF_HACK_FLAGS _BINPATH
stage-package: $(MOZ_PKG_MANIFEST)
@rm -rf $(DIST)/$(PKG_PATH)$(PKG_BASENAME).tar $(DIST)/$(PKG_PATH)$(PKG_BASENAME).dmg $@ $(EXCLUDE_LIST)
$(PYTHON) $(MOZILLA_DIR)/toolkit/mozapps/installer/packager.py $(DEFINES) \
--format $(MOZ_PACKAGER_FORMAT) \
$(addprefix --removals ,$(MOZ_PKG_REMOVALS)) \
$(if $(filter-out 0,$(MOZ_PKG_FATAL_WARNINGS)),,--ignore-errors) \
$(if $(MOZ_PACKAGER_MINIFY),--minify) \
$(if $(JARLOG_DIR),--jarlogs $(JARLOG_DIR_AB_CD)) \
$(if $(OPTIMIZEJARS),--optimizejars) \
$(addprefix --unify ,$(UNIFY_DIST)) \
$(MOZ_PKG_MANIFEST) $(DIST) $(DIST)/$(STAGEPATH)$(MOZ_PKG_DIR)
$(PYTHON) $(MOZILLA_DIR)/toolkit/mozapps/installer/find-dupes.py $(DIST)/$(MOZ_PKG_DIR)
ifndef LIBXUL_SDK
ifdef MOZ_PACKAGE_JSSHELL
# Package JavaScript Shell
@ -868,19 +605,12 @@ endif # MOZ_PACKAGE_JSSHELL
endif # LIBXUL_SDK
prepare-package: stage-package
cd $(DIST) && $(PREPARE_PACKAGE)
make-package-internal: prepare-package make-sourcestamp-file
@echo "Compressing..."
cd $(DIST) && $(MAKE_PACKAGE)
ifdef MOZ_FAST_PACKAGE
MAKE_PACKAGE_DEPS = $(wildcard $(subst * , ,$(addprefix $(DIST)/bin/,$(shell $(PYTHON) $(topsrcdir)/toolkit/mozapps/installer/packager-deps.py $(MOZ_PKG_MANIFEST)))))
else
MAKE_PACKAGE_DEPS = FORCE
endif
make-package: $(MAKE_PACKAGE_DEPS)
make-package: FORCE
$(MAKE) make-package-internal
$(TOUCH) $@

View File

@ -0,0 +1,362 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from mozpack.packager.formats import (
FlatFormatter,
JarFormatter,
OmniJarFormatter,
)
from mozpack.packager import (
preprocess_manifest,
preprocess,
SimpleManifestSink,
)
from mozpack.files import (
GeneratedFile,
FileFinder,
File,
)
from mozpack.copier import (
FileCopier,
Jarrer,
)
from mozpack.errors import errors
from mozpack.unify import UnifiedBuildFinder
import mozpack.path
import buildconfig
from argparse import ArgumentParser
from createprecomplete import generate_precomplete
import os
import re
import sys
from StringIO import StringIO
import subprocess
import platform
# List of libraries to shlibsign.
# These are NSS shared libraries that ship with a .chk checksum file
# generated by the shlibsign tool (see LibSignFile and its use in main()).
# Names are given without the platform DLL prefix/suffix; main() wraps them
# with buildconfig's DLL_PREFIX/DLL_SUFFIX.
SIGN_LIBS = [
    'softokn3',
    'nssdbm3',
    'freebl3',
    'freebl_32fpu_3',
    'freebl_32int_3',
    'freebl_32int64_3',
    'freebl_64fpu_3',
    'freebl_64int_3',
]
class ToolLauncher(object):
    '''
    Helper to run build tools (e.g. xpcshell, shlibsign) with a suitable
    environment.

        launcher = ToolLauncher()
        launcher.tooldir = '/path/to/tools'
        launcher.launch(['xpcshell', '-e', 'foo.js'])
    '''
    def __init__(self):
        # Directory containing the bin/ subdirectory with the tools;
        # must be set before launch() is called.
        self.tooldir = None

    def launch(self, cmd, extra_linker_path=None, extra_env={}):
        '''
        Run the given command, passed as a list. The first item is the
        program name, without path or suffix; it is resolved against
        tooldir/bin with BIN_SUFFIX appended.

        extra_linker_path, if given, is an additional directory for the
        dynamic linker search path (defaults to tooldir/bin).
        extra_env is a dict of extra environment variables to export.
        Returns the tool's exit status.
        '''
        assert self.tooldir
        program = cmd[0] + buildconfig.substs['BIN_SUFFIX']
        cmd[0] = os.path.join(self.tooldir, 'bin', program)
        if not extra_linker_path:
            extra_linker_path = os.path.join(self.tooldir, 'bin')
        env = dict(os.environ)
        # Prepend the linker path to both Linux and OSX search variables.
        for var in ('LD_LIBRARY_PATH', 'DYLD_LIBRARY_PATH'):
            if var in env:
                env[var] = extra_linker_path + ':' + env[var]
            else:
                env[var] = extra_linker_path
        env.update(extra_env)
        errors.out.write('Executing ' + ' '.join(cmd) + '\n')
        errors.out.flush()
        return subprocess.call(cmd, env=env)

    def can_launch(self):
        '''Whether a tool directory has been configured.'''
        return self.tooldir is not None
launcher = ToolLauncher()
class LibSignFile(File):
    '''
    File class for shlibsign signatures: "copying" this file runs shlibsign
    on self.path to produce the .chk signature at the destination.
    '''
    def copy(self, dest):
        # Returns False when dest is already at least as new as the source,
        # mirroring File.copy's skip behavior — TODO confirm against
        # mozpack.files.File.
        assert isinstance(dest, basestring)
        # os.path.getmtime returns a result in seconds with precision up to the
        # microsecond. But microsecond is too precise because shutil.copystat
        # only copies milliseconds, and seconds is not enough precision.
        if os.path.exists(dest) and \
           int(os.path.getmtime(self.path) * 1000) <= \
           int(os.path.getmtime(dest) * 1000):
            return False
        # shlibsign writes the signature itself; a non-zero status is fatal.
        if launcher.launch(['shlibsign', '-v', '-o', dest, '-i', self.path]):
            errors.fatal('Error while signing %s' % self.path)
def precompile_cache(formatter, source_path, gre_path, app_path):
    '''
    Create startup cache for the given application directory, using the
    given GRE path.
    - formatter is a Formatter instance where to add the startup cache.
    - source_path is the base path of the package.
    - gre_path is the GRE path, relative to source_path.
    - app_path is the application path, relative to source_path.
    Startup cache for all resources under resource://app/ are generated,
    except when gre_path == app_path, in which case it's under
    resource://gre/.
    '''
    from tempfile import mkstemp
    source_path = os.path.abspath(source_path)
    # When the application is its own GRE, modules live under
    # resource://gre/ instead of resource://app/.
    if app_path != gre_path:
        resource = 'app'
    else:
        resource = 'gre'
    app_path = os.path.join(source_path, app_path)
    gre_path = os.path.join(source_path, gre_path)
    # mkstemp is only used to reserve a unique path; remove the file so the
    # cache writer starts from scratch.
    fd, cache = mkstemp('.zip')
    os.close(fd)
    os.remove(cache)
    try:
        # Run xpcshell with MOZ_STARTUP_CACHE pointing at the temporary
        # cache; precompile_cache.js loads every module under the given
        # resource:// root so their compiled form lands in the cache.
        if launcher.launch(['xpcshell', '-g', gre_path, '-a', app_path,
                            '-f', os.path.join(os.path.dirname(__file__),
                                               'precompile_cache.js'),
                            '-e', 'precompile_startupcache("resource://%s/");'
                                  % resource],
                           extra_linker_path=gre_path,
                           extra_env={'MOZ_STARTUP_CACHE': cache}):
            errors.fatal('Error while running startup cache precompilation')
            return
        from mozpack.mozjar import JarReader
        jar = JarReader(cache)
        resource = '/resource/%s/' % resource
        # Add cache entries to the formatter, but only for resources that
        # are actually part of the package.
        for f in jar:
            if resource in f.filename:
                path = f.filename[f.filename.index(resource) + len(resource):]
                if formatter.contains(path):
                    formatter.add(f.filename, GeneratedFile(f.read()))
        jar.close()
    finally:
        # Always clean up the temporary cache file.
        if os.path.exists(cache):
            os.remove(cache)
class RemovedFiles(GeneratedFile):
    '''
    File class for the removed-files list; doubles as a preprocessor output
    handler, accumulating one line per removed file.
    '''
    def __init__(self, copier):
        # The copier lets us detect when a file slated for removal is also
        # being packaged, which is reported as an error.
        self.copier = copier
        GeneratedFile.__init__(self, '')

    def handle_line(self, str):
        # Preprocessor callback: record one removed-files entry.
        entry = str.strip()
        if self.copier.contains(entry):
            errors.error('Removal of packaged file(s): %s' % entry)
        self.content += entry + '\n'
def split_define(define):
    '''
    Given a VAR[=VAL] string, return a (VAR, VAL) tuple, where VAL defaults
    to 1. Numeric VALs are returned as ints.
    '''
    if '=' not in define:
        return (define, 1)
    # Split on the first '=' only, so VAL may itself contain '='.
    name, _, value = define.partition('=')
    try:
        return (name, int(value))
    except ValueError:
        return (name, value)
class NoPkgFilesRemover(object):
    '''
    Formatter wrapper filtering out files that match the NO_PKG_FILES
    environment variable (a space-separated list of path patterns).
    '''
    def __init__(self, formatter, has_manifest):
        assert 'NO_PKG_FILES' in os.environ
        self._wrapped = formatter
        self._patterns = os.environ['NO_PKG_FILES'].split()
        # With a manifest, a match is a hard error (the manifest listed a
        # file NO_PKG_FILES says to drop); otherwise it is just skipped.
        if has_manifest:
            self._report = errors.error
            self._template = 'NO_PKG_FILES contains file listed in manifest: %s'
        else:
            self._report = errors.warn
            self._template = 'Skipping %s'

    def add_base(self, base):
        self._wrapped.add_base(base)

    def add(self, path, content):
        # Forward only paths that match none of the NO_PKG_FILES patterns.
        if any(mozpack.path.match(path, spec) for spec in self._patterns):
            self._report(self._template % path)
        else:
            self._wrapped.add(path, content)

    def add_manifest(self, entry):
        self._wrapped.add_manifest(entry)

    def add_interfaces(self, path, content):
        self._wrapped.add_interfaces(path, content)

    def contains(self, path):
        return self._wrapped.contains(path)
def main():
    '''
    Command line entry point: stage and package the files described by the
    given manifest from the source directory into the destination
    directory, in the requested chrome format (omni, jar or flat).
    '''
    parser = ArgumentParser()
    parser.add_argument('-D', dest='defines', action='append',
                        metavar="VAR[=VAL]", help='Define a variable')
    parser.add_argument('--format', default='omni',
                        help='Choose the chrome format for packaging ' +
                        '(omni, jar or flat ; default: %(default)s)')
    parser.add_argument('--removals', default=None,
                        help='removed-files source file')
    parser.add_argument('--ignore-errors', action='store_true', default=False,
                        help='Transform errors into warnings.')
    parser.add_argument('--minify', action='store_true', default=False,
                        help='Make some files more compact while packaging')
    parser.add_argument('--jarlogs', default='', help='Base directory where ' +
                        'to find jar content access logs')
    parser.add_argument('--optimizejars', action='store_true', default=False,
                        help='Enable jar optimizations')
    parser.add_argument('--unify', default='',
                        help='Base directory of another build to unify with')
    parser.add_argument('manifest', default=None, nargs='?',
                        help='Manifest file name')
    parser.add_argument('source', metavar="source",
                        help='Source directory')
    parser.add_argument('destination',
                        help='Destination directory')
    args = parser.parse_args()

    # Start from the build's defines, then apply -D overrides.
    defines = dict(buildconfig.defines)
    if args.ignore_errors:
        errors.ignore_errors()
    if args.defines:
        for name, value in [split_define(d) for d in args.defines]:
            defines[name] = value

    copier = FileCopier()
    if args.format == 'flat':
        formatter = FlatFormatter(copier)
    elif args.format == 'jar':
        formatter = JarFormatter(copier, optimize=args.optimizejars)
    elif args.format == 'omni':
        formatter = OmniJarFormatter(copier,
                                     buildconfig.substs['OMNIJAR_NAME'],
                                     optimize=args.optimizejars)
    else:
        # Fix: this used to be errors.fatal('Unknown format: %s', format),
        # which passed the format() builtin as a stray second argument
        # instead of interpolating the requested format name.
        errors.fatal('Unknown format: %s' % args.format)

    # Adjust defines according to the requested format.
    if isinstance(formatter, OmniJarFormatter):
        defines['MOZ_OMNIJAR'] = 1
    elif 'MOZ_OMNIJAR' in defines:
        del defines['MOZ_OMNIJAR']

    # Normalize BINPATH to a relative path with no leading slash.
    binpath = ''
    if 'BINPATH' in defines:
        binpath = SimpleManifestSink.normalize_path(defines['BINPATH'])
    while binpath.startswith('/'):
        binpath = binpath[1:]

    if args.unify:
        def is_native(path):
            # A build is "native" when the host machine name appears in its
            # path (e.g. .../x86_64/...).
            path = os.path.abspath(path)
            return platform.machine() in mozpack.path.split(path)

        # Invert args.unify and args.source if args.unify points to the
        # native architecture.
        args.source, args.unify = sorted([args.source, args.unify],
                                         key=is_native, reverse=True)
        if is_native(args.source):
            launcher.tooldir = args.source
    elif not buildconfig.substs['CROSS_COMPILE']:
        launcher.tooldir = buildconfig.substs['LIBXUL_DIST']

    with errors.accumulate():
        if args.unify:
            finder = UnifiedBuildFinder(args.source, args.unify,
                                        minify=args.minify)
        else:
            finder = FileFinder(args.source, minify=args.minify)
        if 'NO_PKG_FILES' in os.environ:
            sinkformatter = NoPkgFilesRemover(formatter,
                                              args.manifest is not None)
        else:
            sinkformatter = formatter
        sink = SimpleManifestSink(finder, sinkformatter)
        if args.manifest:
            preprocess_manifest(sink, args.manifest, defines)
        else:
            # No manifest: package everything under bin/.
            sink.add('', 'bin/*')
        sink.close(args.manifest is not None)

        # Preprocess the removed-files source and package the result,
        # erroring on entries that are also being packaged.
        if args.removals:
            lines = [l.lstrip() for l in open(args.removals).readlines()]
            removals_in = StringIO(''.join(lines))
            removals_in.name = args.removals
            removals = RemovedFiles(copier)
            preprocess(removals_in, removals, defines)
            copier.add(mozpack.path.join(binpath, 'removed-files'), removals)

        # shlibsign libraries: add a .chk signature for each packaged
        # NSS library from SIGN_LIBS.
        if launcher.can_launch():
            for lib in SIGN_LIBS:
                libbase = mozpack.path.join(binpath, '%s%s') \
                    % (buildconfig.substs['DLL_PREFIX'], lib)
                libname = '%s%s' % (libbase, buildconfig.substs['DLL_SUFFIX'])
                if copier.contains(libname):
                    copier.add(libbase + '.chk',
                               LibSignFile(os.path.join(args.destination,
                                                        libname)))

        # Setup preloading from jar access logs (one .log file per jar).
        if args.jarlogs:
            jarlogs = FileFinder(args.jarlogs)
            for p, log in jarlogs:
                if p.endswith('.log'):
                    p = p[:-4]
                if copier.contains(p) and isinstance(copier[p], Jarrer):
                    copier[p].preload([l.strip()
                                       for l in log.open().readlines()])

        # Fill startup cache for each omnijar base, when tools can run.
        if isinstance(formatter, OmniJarFormatter) and launcher.can_launch():
            if buildconfig.substs['LIBXUL_SDK']:
                gre_path = buildconfig.substs['LIBXUL_DIST']
            else:
                gre_path = None
            for base in sorted([[p for p in [mozpack.path.join('bin', b), b]
                                 if os.path.exists(os.path.join(args.source,
                                                                p))][0]
                                for b in sink.packager.get_bases()]):
                # The first base (sorted) doubles as the GRE path when no
                # LIBXUL_SDK GRE was configured.
                if not gre_path:
                    gre_path = base
                base_path = sink.normalize_path(base)
                if base_path in formatter.omnijars:
                    precompile_cache(formatter.omnijars[base_path],
                                     args.source, gre_path, base)

    copier.copy(args.destination)
    generate_precomplete(os.path.normpath(os.path.join(args.destination,
                                                       binpath)))


if __name__ == '__main__':
    main()

View File

@ -1,91 +0,0 @@
#!/usr/bin/perl -w
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# pkgcp.pl -
#
# Parse a package file and copy the specified files for a component
# from the given source directory into the given destination directory
# for packaging by the install builder.
#
# Todo:
# - port to MacPerl
# - change warn()s to die()s to enforce updating package files.
# - change var names to standard form
# load modules
use Getopt::Long;
use File::Basename;
use Cwd;
# initialize variables
# NOTE(review): %components (a hash) is initialized here, but GetOptions
# below fills @components (an array) — these are distinct variables in Perl.
# Likewise $flat is never initialized and stays undef unless --flat is given.
%components = (); # list of components to copy
$srcdir = ""; # root directory being copied from
$destdir = ""; # root directory being copied to
$package = ""; # file listing files to copy
$os = ""; # os type (MacOS, MSDOS, Unix, OS/2)
$verbose = 0; # shorthand for --debug 1
$debug = 0; # controls amount of debug output
$help = 0; # flag: if set, print usage

# get command line options
$return = GetOptions(
    "source|s=s", \$srcdir,
    "destination|d=s", \$destdir,
    "file|f=s", \$package,
    "os|o=s", \$os,
    "component|c=s", \@components,
    "help|h", \$help,
    "debug=i", \$debug,
    "verbose|v", \$verbose,
    "flat|l", \$flat,
    "<>", \&do_badargument
);

# set debug level
if ($verbose && !($debug)) {
    $debug = 1;
} elsif ($debug != 0) {
    $debug = abs ($debug);
    ($debug >= 2) && print "debug level: $debug\n";
}

# check usage
if (! $return)
{
    die "Error: couldn't parse command line options. See \'$0 --help' for options.\nExiting...\n";
}

# ensure that Packager.pm is in @INC, since we might not be called from
# mozilla/toolkit/mozapps/installer.
$top_path = $0;
if ( $os eq "dos" ) {
    # DOS-style invocation paths use backslashes; normalize for dirname().
    $top_path =~ s/\\/\//g;
}
push(@INC, dirname($top_path));
require Packager;

if ( $os eq "os2" ) {
    # On OS/2, make a relative source dir absolute, then treat it as unix.
    $cwd = cwd();
    if ($srcdir !~ /^.:+/) {
        $srcdir = $cwd."/".$srcdir;
    }
    $os = "unix";
}

# Perform the copy described by the package file.
Packager::Copy($srcdir, $destdir, $package, $os, $flat, $help, $debug, @components);

#
# This is called by GetOptions when there are extra command line arguments
# it doesn't understand.
#
sub do_badargument
{
    # Non-fatal: report the stray argument and let GetOptions continue.
    warn "Warning: unknown command line option specified: @_.\n";
}
# EOF

View File

@ -79,18 +79,6 @@ function resolveResource(spec) {
return Services.io.newURI(rph.resolveURI(uri), null, null);
}
function populate_startupcache(startupcacheName) {
var scFile = Services.dirsvc.get("CurWorkD", Ci.nsIFile);
scFile.append(startupcacheName);
let env = Cc["@mozilla.org/process/environment;1"].getService(Ci.nsIEnvironment);
env.set('MOZ_STARTUP_CACHE', scFile.path);
var greURI = resolveResource("resource://gre/");
var appURI = resolveResource("resource://app/");
load_modules_under("resource://gre/", greURI);
if (!appURI.equals(greURI))
load_modules_under("resource://app/", appURI);
// Load every JS module reachable under the given resource:// URI so its
// compiled form is written to the startup cache (the cache destination is
// presumably set via MOZ_STARTUP_CACHE by the caller — confirm; see the
// packager's precompile_cache()).
function precompile_startupcache(uri) {
  load_modules_under(uri, resolveResource(uri));
}

View File

@ -0,0 +1,18 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import sys
import os
from mozpack.packager.unpack import unpack
def main():
    '''Command-line entry point: unpack the package in the given directory.'''
    if len(sys.argv) != 2:
        prog = os.path.basename(sys.argv[0])
        sys.stderr.write("Usage: %s directory\n" % prog)
        sys.exit(1)
    unpack(sys.argv[1])


if __name__ == "__main__":
    main()

View File

@ -1,247 +0,0 @@
#!/usr/bin/perl -w
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# xptlink.pl -
#
# traverse directories created by pkgcp.pl and merge multiple .xpt files into
# a single .xpt file to improve startup performance.
#
use Getopt::Long;
# initialize variables
$srcdir = ""; # root directory being copied from
$destdir = ""; # root directory being copied to
$finaldir = ""; # where to put the final linked XPT
$verbose = 0; # shorthand for --debug 1
$debug = 0; # controls amount of debug output
$help = 0; # flag: if set, print usage
$xptlink = ""; # path to the xpt_link binary

# get command line options
$return = GetOptions( "source|s=s", \$srcdir,
                      "destination|d=s", \$destdir,
                      "final|f=s", \$finaldir,
                      "help|h", \$help,
                      "debug=i", \$debug,
                      "verbose|v", \$verbose,
                      "xptlink|x=s", \$xptlink,
                      "<>", \&do_badargument
                      );

# the linked .xpt output directory is mandatory
if ($finaldir eq "") {
    die "Error: -f is required";
}

my $bindir = "";

# remove extra slashes from $destdir
$destdir =~ s:/+:/:g;

# set debug level
if ($verbose && !($debug)) {
    $debug = 1;
} elsif ($debug != 0) {
    $debug = abs ($debug);
    ($debug >= 2) && print "debug level: $debug\n";
}

# check usage
if (! $return)
{
    die "Error: couldn't parse command line options. See \'$0 --help' for options.\nExiting...\n";
} else {
    check_arguments();
}
# NOTE(review): assigning an empty list to the scalar $xptdirs yields undef;
# the array @xptdirs below is what is actually used.
$xptdirs = (); # directories in the destination directory

($debug >= 1) && print "\nLinking .xpt files...\n";
($debug >= 2) && print "do_xptlink():\n";

# get list of directories on which to run xptlink
opendir (DESTDIR, "$destdir") ||
    die "Error: could not open directory $destdir. Exiting...\n";
@xptdirs = sort ( grep (!/^\./, readdir (DESTDIR) ) );
($debug >= 4) && print "xptdirs: @xptdirs\n";
closedir (DESTDIR);

# For each component directory with a components/ subdir, merge its .xpt
# files into a single $finaldir/$component.xpt and append a manifest entry.
foreach my $component (@xptdirs) {
    ($debug >= 1) && print "[$component]\n";
    print ("Checking for '$destdir/$component/$bindir"."components'\n") if $debug >= 3;
    if (-d "$destdir/$component/$bindir"."components") {
        warn "File '$destdir/$component/$bindir"."components/$component.xpt' already exists."
            if -f "$destdir/$component/$bindir"."components/$component.xpt";

        # create list of .xpt files in cwd
        my @xptfiles;
        ($debug >= 4) && print "opendir: $destdir/$component/$bindir"."components\n";
        opendir (COMPDIR, "$destdir/$component/$bindir"."components") ||
            die "Error: cannot open $destdir/$component/$bindir"."components. Exiting...\n";
        ($debug >= 3) && print "Creating list of .xpt files...\n";
        my @files = sort ( grep (!/^\./, readdir (COMPDIR)));
        foreach my $file (@files) {
            ($debug >= 6) && print "$file\n";
            if ( $file =~ /\.xpt$/ ) {
                push @xptfiles, "$destdir/$component/$bindir"."components/$file";
                ($debug >= 8) && print "xptfiles:\t@xptfiles\n";
            }
        }
        closedir (COMPDIR);

        # merge .xpt files into one if we found any in the dir
        if ( scalar(@xptfiles) ) {
            my ($merged, $manifest);
            $merged = "$finaldir/$component.xpt";
            $manifest = "$finaldir/interfaces.manifest";
            my @realxptfiles;
            my $realmerged;
            if ($^O eq "cygwin") {
                # Convert paths to mixed (drive-letter) form with cygpath —
                # presumably because xpt_link is a native win32 tool; confirm.
                @realxptfiles = map {my $file = `cygpath -t mixed $_`;
                                     chomp $file;
                                     $file} @xptfiles;
                $realmerged = `cygpath -t mixed $merged`;
                chomp $realmerged;
            }
            else {
                @realxptfiles = @xptfiles;
                $realmerged = $merged;
            }
            my $cmdline = "$xptlink $realmerged @realxptfiles";
            ($debug >= 4) && print "$cmdline\n";
            system($cmdline) == 0 || die ("'$cmdline' failed");
            # Record the merged typelib in the shared interfaces manifest.
            print "Manifest file: $manifest";
            open MANIFEST, '>>', $manifest;
            print MANIFEST "interfaces $component.xpt\n";
            close MANIFEST;
        }
    }
}
($debug >= 1) && print "Linking .xpt files completed.\n";

exit (0);
#
# Check that arguments to script are valid. Prints usage and exits on
# --help; accumulates a bitmask-style exit value for bad/missing
# directories; defaults $xptlink when unset.
#
sub check_arguments
{
    my ($exitval) = 0;
    ($debug >= 2) && print "check_arguments():\n";

    # if --help print usage
    if ($help) {
        print_usage();
        exit (1);
    }

    # make sure required variables are set:
    # check source directory
    if ( $srcdir eq "" ) {
        print "Error: source directory (--source) not specified.\n";
        $exitval += 8;
    } elsif ((! -d $srcdir) || (! -r $srcdir)) {
        print "Error: source directory \"$srcdir\" is not a directory or is unreadable.\n";
        $exitval = 1;
    }

    # check destination directory
    if ( $destdir eq "" ) {
        # Fix: the message used to say --destdir, but the option defined in
        # GetOptions is --destination.
        print "Error: destination directory (--destination) not specified.\n";
        $exitval += 8;
    } elsif ((! -d $destdir) || (! -w $destdir)) {
        print "Error: destination directory \"$destdir\" is not a directory or is not writeable.\n";
        $exitval += 2;
    }

    if ($exitval) {
        print "See \'$0 --help\' for more information.\n";
        print "Exiting...\n";
        exit ($exitval);
    }

    # default xpt_link location: alongside the other tools in the source dir
    if ($xptlink eq "") {
        $xptlink = "$srcdir/bin/xpt_link";
    }
}
#
# This is called by GetOptions when there are extra command line arguments
# it doesn't understand.
#
sub do_badargument
{
    # Non-fatal: report the stray argument and let GetOptions continue.
    warn "Warning: unknown command line option specified: @_.\n";
}
#
# display usage information
#
sub print_usage
{
    ($debug >= 2) && print "print_usage():\n";
    # NOTE(review): the text below documents an --os option that this
    # script's GetOptions call does not define — likely copied from
    # pkgcp.pl; confirm before relying on it.
    print <<EOC
$0
	Traverse component directory specified and merge multiple existing
	.xpt files into single new .xpt files for improved startup time.

Options:
	-s, --source <directory>
		Specifies the directory from which the component files were
		copied. Typically, this will be the same directory used by
		pkgcp.pl.
		Required.

	-d, --destination <directory>
		Specifies the directory in which the component directories are
		located. Typically, this will be the same directory used by
		pkgcp.pl.
		Required.

	-o, --os [dos|unix]
		Specifies which type of system this is. Used for setting path
		delimiters correctly.
		Required.

	-h, --help
		Prints this information.
		Optional.

	--debug [1-10]
		Controls verbosity of debugging output, 10 being most verbose.
		1 : same as --verbose.
		2 : includes function calls.
		3 : includes source and destination for each copy.
		Optional.

	-v, --verbose
		Print component names and files copied/deleted.
		Optional.

e.g.

$0 --os unix -source /builds/mozilla/dist --destination /h/lithium/install --os unix --verbose

Note: options can be specified by either a leading '--' or '-'.
EOC
}
# EOF