diff --git a/.lldbinit b/.lldbinit
index 17215fb7d82..8564681b0f2 100644
--- a/.lldbinit
+++ b/.lldbinit
@@ -9,3 +9,8 @@ settings set target.inline-breakpoint-strategy always
# will show a variable declared as "nsIFrame *" that points to an nsBlockFrame
# object as being of type "nsBlockFrame *" rather than "nsIFrame *".
settings set target.prefer-dynamic-value run-target
+
+# Import the module that defines complex Gecko debugging commands. Rather
+# than do any kind of searching, this assumes that you are running lldb from
+# the top level source directory.
+script sys.path.append('python/lldbutils'); import lldbutils; lldbutils.init()
diff --git a/CLOBBER b/CLOBBER
index c8f09ad5dfd..f077bd15c03 100644
--- a/CLOBBER
+++ b/CLOBBER
@@ -22,4 +22,4 @@
# changes to stick? As of bug 928195, this shouldn't be necessary! Please
# don't change CLOBBER for WebIDL changes any more.
-Bug 958185 requires clobber on Android to force a Proguard refresh
+Bug 950298 requires a clobber because it changes where nsinstall is picked up from, and it was previously installed there with the wrong permissions. Also, the directory where js is built has changed.
diff --git a/Makefile.in b/Makefile.in
index 731209a184e..ae4e161a856 100644
--- a/Makefile.in
+++ b/Makefile.in
@@ -17,15 +17,19 @@ export TOPLEVEL_BUILD := 1
default::
+ifdef MOZ_BUILD_APP
include $(topsrcdir)/$(MOZ_BUILD_APP)/build.mk
+endif
include $(topsrcdir)/config/config.mk
ifndef LIBXUL_SDK
ifdef COMPILE_ENVIRONMENT
+ifndef BUILDING_JS
BUILD_JS = 1
endif
endif
+endif
GARBAGE_DIRS += dist _javagen _profile staticlib
DIST_GARBAGE = config.cache config.log config.status* config-defs.h \
@@ -34,29 +38,40 @@ DIST_GARBAGE = config.cache config.log config.status* config-defs.h \
netwerk/necko-config.h xpcom/xpcom-config.h xpcom/xpcom-private.h \
.mozconfig.mk
+ifdef BUILDING_JS
+configure_dir = $(topsrcdir)/js/src
+else
+configure_dir = $(topsrcdir)
+endif
+
ifndef MOZ_PROFILE_USE
# We need to explicitly put backend.RecursiveMakeBackend here
# otherwise the rule in rules.mk doesn't run early enough.
-libs binaries export tools:: CLOBBER $(topsrcdir)/configure config.status backend.RecursiveMakeBackend
+libs binaries export tools:: CLOBBER $(configure_dir)/configure config.status backend.RecursiveMakeBackend
ifdef BUILD_JS
libs binaries export tools:: js-config-status
endif
endif
+ifdef BUILDING_JS
+.PHONY: CLOBBER
+CLOBBER:
+else
CLOBBER: $(topsrcdir)/CLOBBER
@echo 'STOP! The CLOBBER file has changed.'
@echo 'Please run the build through a sanctioned build wrapper, such as'
@echo '"mach build" or client.mk.'
@exit 1
+endif
-$(topsrcdir)/configure: $(topsrcdir)/configure.in
+$(configure_dir)/configure: $(configure_dir)/configure.in
@echo 'STOP! configure.in has changed, and your configure is out of date.'
@echo 'Please rerun autoconf and re-configure your build directory.'
@echo 'To ignore this message, touch "configure" in the source directory,'
@echo 'but your build might not succeed.'
@exit 1
-config.status: $(topsrcdir)/configure
+config.status: $(configure_dir)/configure
@echo 'STOP! configure has changed and needs to be run in this build directory.'
@echo 'Please rerun configure.'
@echo 'To ignore this message, touch "config.status" in the build directory,'
@@ -90,7 +105,7 @@ endif
install_manifests := bin idl include public private sdk
install_manifest_depends = \
CLOBBER \
- $(topsrcdir)/configure \
+ $(configure_dir)/configure \
config.status \
backend.RecursiveMakeBackend \
$(NULL)
@@ -102,6 +117,18 @@ endif
.PHONY: install-manifests
install-manifests: $(addprefix install-dist-,$(install_manifests))
+# process_install_manifest needs to be invoked with --no-remove when building
+# js standalone, because automated builds build nspr separately and the removal
+# step would delete the resulting files. It is also necessary when building js
+# as part of gecko, because the removal step would delete the files exported
+# from gecko. Eventually, a standalone js build should be able to build nspr
+# itself, removing the need for the former. But that is unlikely to happen
+# before the gecko and js build systems are fully merged, which will remove
+# the need for the latter.
+ifdef BUILDING_JS
+NO_REMOVE=1
+endif
+
.PHONY: $(addprefix install-dist-,$(install_manifests))
$(addprefix install-dist-,$(install_manifests)): install-dist-%: $(install_manifest_depends)
$(call py_action,process_install_manifest,$(if $(NO_REMOVE),--no-remove )$(DIST)/$* _build_manifests/install/dist_$* $(if $(BUILD_JS),js/src/_build_manifests/install/dist_$*))
@@ -139,10 +166,12 @@ endif
# with no regard for PGO passes. This decision could probably be revisited.
export:: install-dist-sdk
+ifndef BUILDING_JS
ifdef ENABLE_TESTS
# Additional makefile targets to call automated test suites
include $(topsrcdir)/testing/testsuite-targets.mk
endif
+endif
default all::
$(call BUILDSTATUS,TIERS export $(if $(COMPILE_ENVIRONMENT),$(if $(MOZ_PSEUDO_DERECURSE),compile ))libs tools)
@@ -181,6 +210,7 @@ endif
SYM_STORE_SOURCE_DIRS := $(topsrcdir)
+ifndef BUILDING_JS
include $(topsrcdir)/toolkit/mozapps/installer/package-name.mk
ifdef MOZ_SYMBOLS_EXTRA_BUILDID
@@ -225,6 +255,7 @@ endif
# so transform it to an immediate assignment.
MOZ_SOURCE_STAMP := $(MOZ_SOURCE_STAMP)
export MOZ_SOURCE_STAMP
+endif
#XXX: this is a hack, since we don't want to clobber for MSVC
# PGO support, but we can't do this test in client.mk
@@ -250,6 +281,21 @@ check::
@relcount=`find $(DIST)/bin -name '*.so' | xargs objdump -R | grep R_386_PC32 | wc -l` && if test $$relcount -gt 0; then echo 'FAILED: R_386_PC32 relocations detected in a shared library. Did you use a system header without adding it to config/system-headers?'; exit 1; else echo 'PASSED'; fi
endif
+ifdef BUILDING_JS
+# Delegate js-specific rules to js
+check-%:
+ $(MAKE) -C js/src $@
+
+source-package install:
+ $(MAKE) -C js/src $@
+
+# Every export rule depends on config/export, but the rule for config/export
+# doesn't exist when building js non-standalone.
+.PHONY: config/export
+config/export:
+
+else
+
ifdef BUILD_JS
js/src/Makefile: subsrcdir := js/src
@@ -263,3 +309,4 @@ js/src/export config/export: build/clang-plugin/export
endif
endif
endif
+endif
diff --git a/client.mk b/client.mk
index 49c0a2ae82d..869582ce92c 100644
--- a/client.mk
+++ b/client.mk
@@ -312,7 +312,6 @@ EXTRA_CONFIG_DEPS := \
$(NULL)
$(CONFIGURES): %: %.in $(EXTRA_CONFIG_DEPS)
- @$(PYTHON) $(TOPSRCDIR)/js/src/config/check-sync-dirs.py $(TOPSRCDIR)/js/src/build $(TOPSRCDIR)/build
@echo Generating $@ using autoconf
cd $(@D); $(AUTOCONF)
@@ -322,7 +321,6 @@ CONFIG_STATUS_DEPS := \
$(TOPSRCDIR)/CLOBBER \
$(TOPSRCDIR)/nsprpub/configure \
$(TOPSRCDIR)/config/milestone.txt \
- $(TOPSRCDIR)/js/src/config/milestone.txt \
$(TOPSRCDIR)/browser/config/version.txt \
$(TOPSRCDIR)/build/virtualenv_packages.txt \
$(TOPSRCDIR)/python/mozbuild/mozbuild/virtualenv.py \
@@ -396,7 +394,7 @@ endif
####################################
# Build it
-realbuild:: $(OBJDIR)/Makefile $(OBJDIR)/config.status check-sync-dirs-config
+realbuild:: $(OBJDIR)/Makefile $(OBJDIR)/config.status
$(MOZ_MAKE)
####################################
@@ -454,25 +452,6 @@ cleansrcdir:
build/autoconf/clean-config.sh; \
fi;
-# Because SpiderMonkey can be distributed and built independently
-# of the Mozilla source tree, it contains its own copies of many of
-# the files used by the top-level Mozilla build process, from the
-# 'config' and 'build' subtrees.
-#
-# To make it simpler to keep the copies in sync, we follow the policy
-# that the SpiderMonkey copies must always be exact copies of those in
-# the containing Mozilla tree. If you've made a change in one, it
-# belongs in the other as well. If the change isn't right for both
-# places, then that's something to bring up with the other developers.
-#
-# Some files are reasonable to diverge; for example,
-# js/src/config/autoconf.mk.in doesn't need most of the stuff in
-# config/autoconf.mk.in.
-.PHONY: check-sync-dirs
-check-sync-dirs: check-sync-dirs-build check-sync-dirs-config
-check-sync-dirs-%:
- @$(PYTHON) $(TOPSRCDIR)/js/src/config/check-sync-dirs.py $(TOPSRCDIR)/js/src/$* $(TOPSRCDIR)/$*
-
echo-variable-%:
@echo $($*)
diff --git a/config/Makefile.in b/config/Makefile.in
index 1487075bf0f..4c5f3812308 100644
--- a/config/Makefile.in
+++ b/config/Makefile.in
@@ -27,19 +27,21 @@ nsinstall$(HOST_BIN_SUFFIX): $(HOST_PROGRAM)
cp $^ $@.tmp
mv $@.tmp $@
-NSINSTALL_FILES := nsinstall$(HOST_BIN_SUFFIX)
+NSINSTALL_EXECUTABLES := nsinstall$(HOST_BIN_SUFFIX)
NSINSTALL_DEST := $(DIST)/bin
NSINSTALL_TARGET := export
INSTALL_TARGETS += NSINSTALL
endif
endif
+ifndef BUILDING_JS
HEADERS_FILES = \
$(DEPTH)/mozilla-config.h \
$(NULL)
HEADERS_DEST := $(DIST)/include
HEADERS_TARGET := export
INSTALL_TARGETS += HEADERS
+endif
PYTHON_UNIT_TESTS := $(wildcard $(srcdir)/tests/unit-*.py)
@@ -47,6 +49,7 @@ include $(topsrcdir)/config/rules.mk
HOST_CFLAGS += -DUNICODE -D_UNICODE
+ifndef BUILDING_JS
# Generate a new buildid every time we "export" in config... that's only
# supposed to be once per-build!
export::
@@ -55,6 +58,7 @@ ifdef MOZ_BUILD_DATE
else
$(PYTHON) $(topsrcdir)/toolkit/xre/make-platformini.py --print-buildid > buildid
endif
+endif
ifdef WRAP_SYSTEM_INCLUDES
export-preqs = \
@@ -108,10 +112,10 @@ GARBAGE += \
FORCE:
-check-preqs = \
- check-jar-mn \
- check-makefiles \
- $(NULL)
+ifndef BUILDING_JS
+check-preqs += check-jar-mn
+endif
+check-preqs += check-makefiles
check:: $(check-preqs)
diff --git a/config/asencode.cpp b/config/asencode.cpp
deleted file mode 100644
index 1478a60a92f..00000000000
--- a/config/asencode.cpp
+++ /dev/null
@@ -1,194 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-/*
- * To compile, do:
- *
- * gcc -framework ApplicationServices -l stdc++ -o asencode asencode.cpp
- */
-
-#include <ApplicationServices/ApplicationServices.h>
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-#include <libgen.h>
-#include <limits.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-
-typedef struct ASFinderInfo
-{
- FInfo ioFlFndrInfo; /* PBGetFileInfo() or PBGetCatInfo() */
- FXInfo ioFlXFndrInfo; /* PBGetCatInfo() (HFS only) */
-} ASFinderInfo; /* ASFinderInfo */
-
-#define EXIT_IF_FALSE(x) \
- do { \
- if (!(x)) { \
- printf("Assertion failure: %s\n" \
- " at %s line %d\n", \
- #x, __FILE__, __LINE__); \
- exit(1); \
- } \
- } while (0)
-
-// encodes a file with data and resource forks into a single
-// AppleSingle encoded file..
-
-static void append_file(FILE* output, const char* input_name)
-{
- FILE* input = fopen(input_name, "rb");
- EXIT_IF_FALSE(input != nullptr);
-
- while (1) {
- char buffer[4096];
- size_t amount = fread(buffer, 1, sizeof(buffer), input);
- if (amount == 0) {
- EXIT_IF_FALSE(feof(input) != 0);
- break;
- }
- fwrite(buffer, 1, amount, output);
- }
- fclose(input);
-}
-
-int main(int argc, char** argv)
-{
- if (argc < 3) {
- printf("usage: %s input output\n", argv[0]);
- exit(1);
- }
-
- const char *input_name = argv[1];
-
- struct stat input_st;
- if (stat(input_name, &input_st) != 0) {
- printf("%s: can't open file `%s'\n", argv[0], input_name);
- exit(2);
- }
-
- if ((input_st.st_mode & S_IFMT) != S_IFREG) {
- printf("%s: file `%s' not a regular file\n", argv[0], input_name);
- exit(3);
- }
-
- char rez_name[512];
- strcpy(rez_name, input_name);
- strcat(rez_name, "/rsrc");
-
- struct stat rez_st;
- EXIT_IF_FALSE(stat(rez_name, &rez_st) == 0);
-
- if (rez_st.st_size == 0) {
- printf("%s: no resource fork found on file `%s'\n", argv[0], argv[1]);
- exit(4);
- }
-
- FILE* output = fopen(argv[2], "wb");
- if (output == nullptr) {
- printf("%s: can't open file `%s'\n", argv[0], argv[2]);
- exit(5);
- }
-
- struct header {
- int magic_number;
- int version_number;
- char filler[16];
- } header;
-
- header.magic_number = 0x00051600;
- header.version_number = 0x00020000;
-
- EXIT_IF_FALSE(fwrite(&header, sizeof(header), 1, output) == 1);
-
- short entry_count = 5;
- EXIT_IF_FALSE(fwrite(&entry_count, sizeof(entry_count), 1, output) == 1);
-
- struct entry {
- unsigned int id;
- unsigned int offset;
- unsigned int length;
- };
-
- struct dates
- {
- int create; /* file creation date/time */
- int modify; /* last modification date/time */
- int backup; /* last backup date/time */
- int access; /* last access date/time */
- } dates;
-
- char *name_buf = strdup(input_name);
- char *orig_name = basename(name_buf);
- int orig_name_len = strlen(orig_name);
-
- entry entries[entry_count];
-
- int header_end = sizeof(header) + sizeof(entry_count) + sizeof(entries);
-
- entries[0].id = 1; // data fork
- entries[0].offset = header_end;
- entries[0].length = input_st.st_size;
-
- entries[1].id = 2; // data fork
- entries[1].offset = entries[0].offset + entries[0].length;
- entries[1].length = rez_st.st_size;
-
- entries[2].id = 3; // file name
- entries[2].offset = entries[1].offset + entries[1].length;
- entries[2].length = orig_name_len;
-
- entries[3].id = 8; // file dates
- entries[3].offset = entries[2].offset + entries[2].length;
- entries[3].length = sizeof(dates);
-
- entries[4].id = 9; // finder info
- entries[4].offset = entries[3].offset + entries[3].length;
- entries[4].length = sizeof(ASFinderInfo);
-
- EXIT_IF_FALSE(fwrite(entries, sizeof(entry), entry_count, output) ==
- entry_count);
-
- append_file(output, input_name);
- append_file(output, rez_name);
-
- EXIT_IF_FALSE(fwrite(orig_name, 1, orig_name_len, output) == orig_name_len);
-
- // Dates in an AppleSingle encoded file should be the number of
- // seconds since (or to) 00:00:00, January 1, 2000 UTC
-#define Y2K_SECONDS (946710000U)
-
- dates.create = input_st.st_ctime - Y2K_SECONDS;
- dates.modify = input_st.st_mtime - Y2K_SECONDS;
- dates.backup = 0x80000000; // earliest possible time
- dates.access = input_st.st_atime - Y2K_SECONDS;
-
- EXIT_IF_FALSE(fwrite(&dates, 1, sizeof(dates), output) == sizeof(dates));
-
- char abs_input_name[PATH_MAX];
- EXIT_IF_FALSE(realpath(input_name, abs_input_name) == abs_input_name);
-
- FSRef fsref;
- EXIT_IF_FALSE(FSPathMakeRef((unsigned char *)abs_input_name, &fsref, 0) == 0);
-
- FSCatalogInfo cat_info;
- memset(&cat_info, 0, sizeof(cat_info));
- EXIT_IF_FALSE(FSGetCatalogInfo(&fsref,
- kFSCatInfoGettableInfo,
- &cat_info, nullptr, nullptr, nullptr) == 0);
-
- ASFinderInfo finder_info;
- memcpy(&finder_info.ioFlFndrInfo, &cat_info.finderInfo,
- sizeof(finder_info.ioFlFndrInfo));
- memcpy(&finder_info.ioFlXFndrInfo, &cat_info.extFinderInfo,
- sizeof(finder_info.ioFlXFndrInfo));
-
- EXIT_IF_FALSE(fwrite(&finder_info, 1, sizeof(finder_info), output) ==
- sizeof(finder_info));
-
- fclose(output);
-
- return 0;
-}
diff --git a/config/baseconfig.mk b/config/baseconfig.mk
index 78e50bbfbd4..c5e50dd34bf 100644
--- a/config/baseconfig.mk
+++ b/config/baseconfig.mk
@@ -2,7 +2,18 @@ includedir := $(includedir)/$(MOZ_APP_NAME)-$(MOZ_APP_VERSION)
idldir = $(datadir)/idl/$(MOZ_APP_NAME)-$(MOZ_APP_VERSION)
installdir = $(libdir)/$(MOZ_APP_NAME)-$(MOZ_APP_VERSION)
sdkdir = $(libdir)/$(MOZ_APP_NAME)-devel-$(MOZ_APP_VERSION)
-DIST = $(DEPTH)/dist
+ifndef TOP_DIST
+TOP_DIST = dist
+endif
+ifneq (,$(filter /%,$(TOP_DIST)))
+DIST = $(TOP_DIST)
+else
+ifeq (.,$(DEPTH))
+DIST = $(TOP_DIST)
+else
+DIST = $(DEPTH)/$(TOP_DIST)
+endif
+endif
# We do magic with OBJ_SUFFIX in config.mk, the following ensures we don't
# manually use it before config.mk inclusion
diff --git a/config/config.mk b/config/config.mk
index f405445ebfd..2bf2db44a33 100644
--- a/config/config.mk
+++ b/config/config.mk
@@ -717,7 +717,7 @@ else
ifeq ($(HOST_OS_ARCH),WINNT)
NSINSTALL = $(NSINSTALL_PY)
else
-NSINSTALL = $(CONFIG_TOOLS)/nsinstall$(HOST_BIN_SUFFIX)
+NSINSTALL = $(DIST)/bin/nsinstall$(HOST_BIN_SUFFIX)
endif # WINNT
endif # OS2
endif # NSINSTALL_BIN
diff --git a/config/moz.build b/config/moz.build
index bf8ca9a80f7..04dd19e5c5f 100644
--- a/config/moz.build
+++ b/config/moz.build
@@ -10,9 +10,7 @@ NO_DIST_INSTALL = True
NO_VISIBILITY_FLAGS = True
CONFIGURE_SUBST_FILES += [
- 'autoconf.mk',
'doxygen.cfg',
- 'emptyvars.mk',
'makefiles/test/Makefile',
'tests/makefiles/autodeps/Makefile',
'tests/src-simple/Makefile',
diff --git a/config/rules.mk b/config/rules.mk
index a9887d9c5eb..945952161a0 100644
--- a/config/rules.mk
+++ b/config/rules.mk
@@ -364,6 +364,10 @@ ifdef MOZ_UPDATE_XTERM
UPDATE_TITLE = printf '\033]0;%s in %s\007' $(1) $(relativesrcdir)/$(2) ;
endif
+ifdef BUILDING_JS
+NO_BUILDSTATUS_MESSAGES=1
+endif
+
ifdef MACH
ifndef NO_BUILDSTATUS_MESSAGES
define BUILDSTATUS
@@ -1453,9 +1457,9 @@ endif
# file would be $(DIST)/include/bar/baz/qux.h instead of $(DIST)/include/qux.h
# If we're using binary nsinstall and it's not built yet, fallback to python nsinstall.
-ifneq (,$(filter $(CONFIG_TOOLS)/nsinstall$(HOST_BIN_SUFFIX),$(install_cmd)))
-ifeq (,$(wildcard $(CONFIG_TOOLS)/nsinstall$(HOST_BIN_SUFFIX)))
-nsinstall_is_usable = $(if $(wildcard $(CONFIG_TOOLS)/nsinstall$(HOST_BIN_SUFFIX)),yes)
+ifneq (,$(filter $(DIST)/bin/nsinstall$(HOST_BIN_SUFFIX),$(install_cmd)))
+ifeq (,$(wildcard $(DIST)/bin/nsinstall$(HOST_BIN_SUFFIX)))
+nsinstall_is_usable = $(if $(wildcard $(DIST)/bin/nsinstall$(HOST_BIN_SUFFIX)),yes)
define install_cmd_override
$(1): install_cmd = $$(if $$(nsinstall_is_usable),$$(INSTALL),$$(NSINSTALL_PY)) $$(1)
diff --git a/config/static-checking-config.mk b/config/static-checking-config.mk
index b134f916dc2..a780dc87102 100644
--- a/config/static-checking-config.mk
+++ b/config/static-checking-config.mk
@@ -7,6 +7,7 @@
DEHYDRA_SCRIPT = $(topsrcdir)/config/static-checking.js
+ifndef BUILDING_JS
DEHYDRA_MODULES = \
$(topsrcdir)/xpcom/analysis/final.js \
$(topsrcdir)/xpcom/analysis/must-override.js \
@@ -17,9 +18,13 @@ TREEHYDRA_MODULES = \
$(topsrcdir)/xpcom/analysis/stack.js \
$(topsrcdir)/xpcom/analysis/flow.js \
$(topsrcdir)/xpcom/analysis/static-init.js \
- $(topsrcdir)/js/src/jsstack.js \
$(topsrcdir)/layout/generic/frame-verify.js \
$(NULL)
+endif
+
+TREEHYDRA_MODULES += \
+ $(topsrcdir)/js/src/jsstack.js \
+ $(NULL)
DEHYDRA_ARG_PREFIX=-fplugin-arg-gcc_treehydra-
@@ -38,7 +43,11 @@ OS_CXXFLAGS += $(DEHYDRA_FLAGS)
endif
ifdef ENABLE_CLANG_PLUGIN
+ifndef BUILDING_JS
CLANG_PLUGIN := $(DEPTH)/build/clang-plugin/$(DLL_PREFIX)clang-plugin$(DLL_SUFFIX)
+else
+CLANG_PLUGIN := $(DEPTH)/../../build/clang-plugin/$(DLL_PREFIX)clang-plugin$(DLL_SUFFIX)
+endif
OS_CXXFLAGS += -Xclang -load -Xclang $(CLANG_PLUGIN) -Xclang -add-plugin -Xclang moz-check
OS_CFLAGS += -Xclang -load -Xclang $(CLANG_PLUGIN) -Xclang -add-plugin -Xclang moz-check
endif
diff --git a/content/media/omx/MediaOmxReader.cpp b/content/media/omx/MediaOmxReader.cpp
index dcc847a0628..cf19ada6c96 100644
--- a/content/media/omx/MediaOmxReader.cpp
+++ b/content/media/omx/MediaOmxReader.cpp
@@ -360,51 +360,6 @@ static uint64_t BytesToTime(int64_t offset, uint64_t length, uint64_t durationUs
return uint64_t(double(durationUs) * perc);
}
-nsresult MediaOmxReader::GetBuffered(mozilla::dom::TimeRanges* aBuffered, int64_t aStartTime)
-{
- if (!mOmxDecoder.get())
- return NS_OK;
-
- MediaResource* stream = mOmxDecoder->GetResource();
-
- int64_t durationUs = 0;
- mOmxDecoder->GetDuration(&durationUs);
-
- // Nothing to cache if the media takes 0us to play.
- if (!durationUs)
- return NS_OK;
-
- // Special case completely cached files. This also handles local files.
- if (stream->IsDataCachedToEndOfResource(0)) {
- aBuffered->Add(0, durationUs);
- return NS_OK;
- }
-
- int64_t totalBytes = stream->GetLength();
-
- // If we can't determine the total size, pretend that we have nothing
- // buffered. This will put us in a state of eternally-low-on-undecoded-data
- // which is not great, but about the best we can do.
- if (totalBytes == -1)
- return NS_OK;
-
- int64_t startOffset = stream->GetNextCachedData(0);
- while (startOffset >= 0) {
- int64_t endOffset = stream->GetCachedDataEnd(startOffset);
- // Bytes [startOffset..endOffset] are cached.
- NS_ASSERTION(startOffset >= 0, "Integer underflow in GetBuffered");
- NS_ASSERTION(endOffset >= 0, "Integer underflow in GetBuffered");
-
- uint64_t startUs = BytesToTime(startOffset, totalBytes, durationUs);
- uint64_t endUs = BytesToTime(endOffset, totalBytes, durationUs);
- if (startUs != endUs) {
- aBuffered->Add((double)startUs / USECS_PER_S, (double)endUs / USECS_PER_S);
- }
- startOffset = stream->GetNextCachedData(endOffset);
- }
- return NS_OK;
-}
-
void MediaOmxReader::OnDecodeThreadFinish() {
if (mOmxDecoder.get()) {
mOmxDecoder->Pause();
diff --git a/content/media/omx/MediaOmxReader.h b/content/media/omx/MediaOmxReader.h
index a8c6d714905..ef60e266e10 100644
--- a/content/media/omx/MediaOmxReader.h
+++ b/content/media/omx/MediaOmxReader.h
@@ -75,7 +75,6 @@ public:
virtual nsresult ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags);
virtual nsresult Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime, int64_t aCurrentTime);
- virtual nsresult GetBuffered(mozilla::dom::TimeRanges* aBuffered, int64_t aStartTime);
virtual void OnDecodeThreadStart() MOZ_OVERRIDE;
diff --git a/docshell/base/nsDocShell.cpp b/docshell/base/nsDocShell.cpp
index a2f95ff0e51..ca080b06df3 100644
--- a/docshell/base/nsDocShell.cpp
+++ b/docshell/base/nsDocShell.cpp
@@ -9115,13 +9115,21 @@ nsDocShell::InternalLoad(nsIURI * aURI,
aLoadType == LOAD_HISTORY ||
aLoadType == LOAD_LINK) {
- // Split mCurrentURI and aURI on the '#' character. Make sure we read
+ nsCOMPtr<nsIURI> currentURI;
+ if (sURIFixup && mCurrentURI) {
+ rv = sURIFixup->CreateExposableURI(mCurrentURI,
+ getter_AddRefs(currentURI));
+ NS_ENSURE_SUCCESS(rv, rv);
+ } else {
+ currentURI = mCurrentURI;
+ }
+ // Split currentURI and aURI on the '#' character. Make sure we read
// the return values of SplitURIAtHash; if it fails, we don't want to
// allow a short-circuited navigation.
nsAutoCString curBeforeHash, curHash, newBeforeHash, newHash;
nsresult splitRv1, splitRv2;
- splitRv1 = mCurrentURI ?
- nsContentUtils::SplitURIAtHash(mCurrentURI,
+ splitRv1 = currentURI ?
+ nsContentUtils::SplitURIAtHash(currentURI,
curBeforeHash, curHash) :
NS_ERROR_FAILURE;
splitRv2 = nsContentUtils::SplitURIAtHash(aURI, newBeforeHash, newHash);
@@ -9179,9 +9187,6 @@ nsDocShell::InternalLoad(nsIURI * aURI,
mDocumentRequest->Cancel(NS_BINDING_ABORTED);
}
- // Save the current URI; we need it if we fire a hashchange later.
- nsCOMPtr<nsIURI> oldURI = mCurrentURI;
-
// Save the position of the scrollers.
nscoord cx = 0, cy = 0;
GetCurScrollPos(ScrollOrientation_X, &cx);
@@ -9342,15 +9347,15 @@ nsDocShell::InternalLoad(nsIURI * aURI,
}
if (doHashchange) {
- // Make sure to use oldURI here, not mCurrentURI, because by
- // now, mCurrentURI has changed!
- win->DispatchAsyncHashchange(oldURI, aURI);
+ // Note that currentURI hasn't changed because it's on the
+ // stack, so we can just use it directly as the old URI.
+ win->DispatchAsyncHashchange(currentURI, aURI);
}
}
// Inform the favicon service that the favicon for oldURI also
// applies to aURI.
- CopyFavicon(oldURI, aURI, mInPrivateBrowsing);
+ CopyFavicon(currentURI, aURI, mInPrivateBrowsing);
return NS_OK;
}
@@ -10561,10 +10566,18 @@ nsDocShell::AddState(const JS::Value &aData, const nsAString& aTitle,
// Step 2: Resolve aURL
bool equalURIs = true;
- nsCOMPtr<nsIURI> oldURI = mCurrentURI;
+ nsCOMPtr<nsIURI> currentURI;
+ if (sURIFixup && mCurrentURI) {
+ rv = sURIFixup->CreateExposableURI(mCurrentURI,
+ getter_AddRefs(currentURI));
+ NS_ENSURE_SUCCESS(rv, rv);
+ } else {
+ currentURI = mCurrentURI;
+ }
+ nsCOMPtr<nsIURI> oldURI = currentURI;
nsCOMPtr<nsIURI> newURI;
if (aURL.Length() == 0) {
- newURI = mCurrentURI;
+ newURI = currentURI;
}
else {
// 2a: Resolve aURL relative to mURI
@@ -10594,7 +10607,7 @@ nsDocShell::AddState(const JS::Value &aData, const nsAString& aTitle,
// the new URI has the same origin as our current URI, we also
// check that the two URIs have the same userpass. (The
// security manager says that |http://foo.com| and
- // |http://me@foo.com| have the same origin.) mCurrentURI
+ // |http://me@foo.com| have the same origin.) currentURI
// won't contain the password part of the userpass, so this
// means that it's never valid to specify a password in a
// pushState or replaceState URI.
@@ -10604,14 +10617,14 @@ nsDocShell::AddState(const JS::Value &aData, const nsAString& aTitle,
NS_ENSURE_TRUE(secMan, NS_ERROR_FAILURE);
// It's very important that we check that newURI is of the same
- // origin as mCurrentURI, not docBaseURI, because a page can
+ // origin as currentURI, not docBaseURI, because a page can
// set docBaseURI arbitrarily to any domain.
nsAutoCString currentUserPass, newUserPass;
- NS_ENSURE_SUCCESS(mCurrentURI->GetUserPass(currentUserPass),
+ NS_ENSURE_SUCCESS(currentURI->GetUserPass(currentUserPass),
NS_ERROR_FAILURE);
NS_ENSURE_SUCCESS(newURI->GetUserPass(newUserPass),
NS_ERROR_FAILURE);
- if (NS_FAILED(secMan->CheckSameOriginURI(mCurrentURI,
+ if (NS_FAILED(secMan->CheckSameOriginURI(currentURI,
newURI, true)) ||
!currentUserPass.Equals(newUserPass)) {
@@ -10636,8 +10649,8 @@ nsDocShell::AddState(const JS::Value &aData, const nsAString& aTitle,
}
}
- if (mCurrentURI) {
- mCurrentURI->Equals(newURI, &equalURIs);
+ if (currentURI) {
+ currentURI->Equals(newURI, &equalURIs);
}
else {
equalURIs = false;
@@ -10699,7 +10712,7 @@ nsDocShell::AddState(const JS::Value &aData, const nsAString& aTitle,
// SHEntry's URI was modified in this way by a push/replaceState call
// set URIWasModified to true for the current SHEntry (bug 669671).
bool sameExceptHashes = true, oldURIWasModified = false;
- newURI->EqualsExceptRef(mCurrentURI, &sameExceptHashes);
+ newURI->EqualsExceptRef(currentURI, &sameExceptHashes);
oldOSHE->GetURIWasModified(&oldURIWasModified);
newSHEntry->SetURIWasModified(!sameExceptHashes || oldURIWasModified);
diff --git a/docshell/test/file_anchor_scroll_after_document_open.html b/docshell/test/file_anchor_scroll_after_document_open.html
new file mode 100644
index 00000000000..7903380eac6
--- /dev/null
+++ b/docshell/test/file_anchor_scroll_after_document_open.html
@@ -0,0 +1,15 @@
+
+
diff --git a/docshell/test/file_pushState_after_document_open.html b/docshell/test/file_pushState_after_document_open.html
new file mode 100644
index 00000000000..97a6954f2ee
--- /dev/null
+++ b/docshell/test/file_pushState_after_document_open.html
@@ -0,0 +1,11 @@
+
+
diff --git a/docshell/test/mochitest.ini b/docshell/test/mochitest.ini
index 4776de84e0f..06ac16ba6d7 100644
--- a/docshell/test/mochitest.ini
+++ b/docshell/test/mochitest.ini
@@ -11,6 +11,7 @@ support-files =
bug668513_redirect.html
bug668513_redirect.html^headers^
bug691547_frame.html
+ file_anchor_scroll_after_document_open.html
file_bug385434_1.html
file_bug385434_2.html
file_bug385434_3.html
@@ -31,8 +32,10 @@ support-files =
file_bug680257.html
file_bug703855.html
file_bug728939.html
+ file_pushState_after_document_open.html
historyframes.html
+[test_anchor_scroll_after_document_open.html]
[test_bfcache_plus_hash.html]
[test_bug123696.html]
[test_bug369814.html]
@@ -69,4 +72,5 @@ support-files =
[test_bug728939.html]
[test_bug797909.html]
[test_framedhistoryframes.html]
+[test_pushState_after_document_open.html]
[test_windowedhistoryframes.html]
diff --git a/docshell/test/test_anchor_scroll_after_document_open.html b/docshell/test/test_anchor_scroll_after_document_open.html
new file mode 100644
index 00000000000..93fa4615b42
--- /dev/null
+++ b/docshell/test/test_anchor_scroll_after_document_open.html
@@ -0,0 +1,55 @@
+
+
+
+
+
+ Test for Bug 881487
+
+
+
+
+
+Mozilla Bug 881487
+
+
+
+
+
+
+
+
+
+
diff --git a/docshell/test/test_pushState_after_document_open.html b/docshell/test/test_pushState_after_document_open.html
new file mode 100644
index 00000000000..51ba1050c21
--- /dev/null
+++ b/docshell/test/test_pushState_after_document_open.html
@@ -0,0 +1,37 @@
+
+
+
+
+
+ Test for Bug 957479
+
+
+
+
+
+Mozilla Bug 957479
+
+
+
+
+
+
+
+
diff --git a/dom/base/URL.h b/dom/base/URL.h
index 76a715e6491..20d0109dc4f 100644
--- a/dom/base/URL.h
+++ b/dom/base/URL.h
@@ -118,6 +118,11 @@ public:
void SetHash(const nsAString& aArg);
+ void Stringify(nsString& aRetval) const
+ {
+ GetHref(aRetval);
+ }
+
// URLSearchParamsObserver
void URLSearchParamsUpdated() MOZ_OVERRIDE;
void URLSearchParamsNeedsUpdates() MOZ_OVERRIDE;
diff --git a/dom/base/test/mochitest.ini b/dom/base/test/mochitest.ini
index 61a915c49f9..b8b227fe8e7 100644
--- a/dom/base/test/mochitest.ini
+++ b/dom/base/test/mochitest.ini
@@ -28,23 +28,24 @@ support-files =
[test_messageChannel_post.html]
[test_messageChannel_pref.html]
[test_messageChannel_start.html]
+[test_messagemanager_targetchain.html]
[test_messageChannel_transferable.html]
[test_messageChannel_unshipped.html]
[test_named_frames.html]
[test_nondomexception.html]
+[test_openDialogChromeOnly.html]
+[test_postMessage_solidus.html]
[test_screen_orientation.html]
[test_settimeout_inner.html]
[test_setting_opener.html]
[test_url.html]
+[test_url_empty_port.html]
+[test_urlExceptions.html]
+[test_urlSearchParams.html]
+[test_urlutils_stringify.html]
[test_window_constructor.html]
[test_window_cross_origin_props.html]
[test_window_enumeration.html]
[test_window_extensible.html]
[test_window_indexing.html]
[test_writable-replaceable.html]
-[test_urlExceptions.html]
-[test_openDialogChromeOnly.html]
-[test_messagemanager_targetchain.html]
-[test_url_empty_port.html]
-[test_postMessage_solidus.html]
-[test_urlSearchParams.html]
diff --git a/dom/base/test/test_url.html b/dom/base/test/test_url.html
index afa0bbfcfba..6c60d8cc9d2 100644
--- a/dom/base/test/test_url.html
+++ b/dom/base/test/test_url.html
@@ -270,6 +270,8 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=887364
if ('pathname' in test) is(test.pathname, url.pathname, "pathname");
if ('search' in test) is(test.search, url.search, "search");
if ('hash' in test) is(test.hash, url.hash, "hash");
+
+ if ('href' in test) is (test.href, url + '', 'stringify works');
}
diff --git a/dom/base/test/test_urlutils_stringify.html b/dom/base/test/test_urlutils_stringify.html
new file mode 100644
index 00000000000..a5f9dce83ad
--- /dev/null
+++ b/dom/base/test/test_urlutils_stringify.html
@@ -0,0 +1,38 @@
+
+
+
+
+
+
+ Test for Bug 959190
+
+
+
+
+Mozilla Bug 959190
+
+
+
+
+
+
+
+ foobar
+
+
+
+
diff --git a/dom/bindings/BindingUtils.cpp b/dom/bindings/BindingUtils.cpp
index 6dce37c1ed1..5aa2e68193a 100644
--- a/dom/bindings/BindingUtils.cpp
+++ b/dom/bindings/BindingUtils.cpp
@@ -951,21 +951,21 @@ XrayResolveAttribute(JSContext* cx, JS::Handle<JSObject*> wrapper,
// They all have getters, so we can just make it.
JS::Rooted<JSObject*> global(cx, JS_GetGlobalForObject(cx, wrapper));
JS::Rooted<JSFunction*> fun(cx,
- JS_NewFunctionById(cx, (JSNative)attrSpec.getter.op,
+ JS_NewFunctionById(cx, (JSNative)attrSpec.getter.propertyOp.op,
0, 0, global, id));
if (!fun)
return false;
- SET_JITINFO(fun, attrSpec.getter.info);
+ SET_JITINFO(fun, attrSpec.getter.propertyOp.info);
JSObject *funobj = JS_GetFunctionObject(fun);
desc.setGetterObject(funobj);
desc.attributesRef() |= JSPROP_GETTER;
- if (attrSpec.setter.op) {
+ if (attrSpec.setter.propertyOp.op) {
// We have a setter! Make it.
- fun = JS_NewFunctionById(cx, (JSNative)attrSpec.setter.op, 1, 0,
+ fun = JS_NewFunctionById(cx, (JSNative)attrSpec.setter.propertyOp.op, 1, 0,
global, id);
if (!fun)
return false;
- SET_JITINFO(fun, attrSpec.setter.info);
+ SET_JITINFO(fun, attrSpec.setter.propertyOp.info);
funobj = JS_GetFunctionObject(fun);
desc.setSetterObject(funobj);
desc.attributesRef() |= JSPROP_SETTER;
diff --git a/dom/bindings/CallbackFunction.h b/dom/bindings/CallbackFunction.h
index 63a48af5374..400c19942be 100644
--- a/dom/bindings/CallbackFunction.h
+++ b/dom/bindings/CallbackFunction.h
@@ -29,7 +29,6 @@ public:
nsIGlobalObject* aIncumbentGlobal)
: CallbackObject(aCallable, aIncumbentGlobal)
{
- MOZ_ASSERT(JS_ObjectIsCallable(nullptr, mCallback));
}
JS::Handle<JSObject*> Callable() const
diff --git a/dom/bindings/Codegen.py b/dom/bindings/Codegen.py
index d72cb64b6c3..753831aad85 100644
--- a/dom/bindings/Codegen.py
+++ b/dom/bindings/Codegen.py
@@ -1649,7 +1649,7 @@ class AttrDefiner(PropertyDefiner):
else:
accessor = "genericGetter"
jitinfo = "&%s_getterinfo" % attr.identifier.name
- return "{ JS_CAST_NATIVE_TO(%s, JSPropertyOp), %s }" % \
+ return "{ { JS_CAST_NATIVE_TO(%s, JSPropertyOp), %s } }" % \
(accessor, jitinfo)
def setter(attr):
@@ -1668,7 +1668,7 @@ class AttrDefiner(PropertyDefiner):
else:
accessor = "genericSetter"
jitinfo = "&%s_setterinfo" % attr.identifier.name
- return "{ JS_CAST_NATIVE_TO(%s, JSStrictPropertyOp), %s }" % \
+ return "{ { JS_CAST_NATIVE_TO(%s, JSStrictPropertyOp), %s } }" % \
(accessor, jitinfo)
def specData(attr):
@@ -2841,8 +2841,9 @@ def getJSToNativeConversionInfo(type, descriptorProvider, failureCode=None,
If lenientFloatCode is not None, it should be used in cases when
we're a non-finite float that's not unrestricted.
- If allowTreatNonCallableAsNull is true, then [TreatNonCallableAsNull]
- extended attributes on nullable callback functions will be honored.
+ If allowTreatNonCallableAsNull is true, then [TreatNonCallableAsNull] and
+ [TreatNonObjectAsNull] extended attributes on nullable callback functions
+ will be honored.
If isCallbackReturnValue is "JSImpl" or "Callback", then the declType may be
adjusted to make it easier to return from a callback. Since that type is
@@ -3737,6 +3738,8 @@ for (uint32_t i = 0; i < length; ++i) {
if type.isCallback():
assert not isEnforceRange and not isClamp
assert not type.treatNonCallableAsNull() or type.nullable()
+ assert not type.treatNonObjectAsNull() or type.nullable()
+ assert not type.treatNonObjectAsNull() or not type.treatNonCallableAsNull()
name = type.unroll().identifier.name
if type.nullable():
@@ -3758,6 +3761,17 @@ for (uint32_t i = 0; i < length; ++i) {
"} else {\n"
" ${declName} = nullptr;\n"
"}")
+ elif allowTreatNonCallableAsNull and type.treatNonObjectAsNull():
+ if not isDefinitelyObject:
+ haveObject = "${val}.isObject()"
+ if defaultValue is not None:
+ assert(isinstance(defaultValue, IDLNullValue))
+ haveObject = "${haveValue} && " + haveObject
+ template = CGIfElseWrapper(haveObject,
+ CGGeneric(conversion),
+ CGGeneric("${declName} = nullptr;")).define()
+ else:
+ template = conversion
else:
template = wrapObjectTemplate(
"if (JS_ObjectIsCallable(cx, &${val}.toObject())) {\n" +
@@ -10774,6 +10788,7 @@ class CGCallback(CGClass):
getters=[], setters=[]):
self.baseName = baseName
self._deps = idlObject.getDeps()
+ self.idlObject = idlObject
name = idlObject.identifier.name
if isJSImplementedDescriptor(descriptorProvider):
name = jsImplName(name)
@@ -10801,6 +10816,13 @@ class CGCallback(CGClass):
methods=realMethods+getters+setters)
def getConstructors(self):
+ if (not self.idlObject.isInterface() and
+ not self.idlObject._treatNonObjectAsNull):
+ body = "MOZ_ASSERT(JS_ObjectIsCallable(nullptr, mCallback));"
+ else:
+ # Not much we can assert about it, other than not being null, and
+ # CallbackObject does that already.
+ body = ""
return [ClassConstructor(
[Argument("JS::Handle", "aCallback"), Argument("nsIGlobalObject*", "aIncumbentGlobal")],
bodyInHeader=True,
@@ -10808,7 +10830,8 @@ class CGCallback(CGClass):
explicit=True,
baseConstructors=[
"%s(aCallback, aIncumbentGlobal)" % self.baseName,
- ])]
+ ],
+ body=body)]
def getMethodImpls(self, method):
assert method.needThisHandling
@@ -10816,13 +10839,13 @@ class CGCallback(CGClass):
# Strip out the JSContext*/JSObject* args
# that got added.
assert args[0].name == "cx" and args[0].argType == "JSContext*"
- assert args[1].name == "aThisObj" and args[1].argType == "JS::Handle"
+ assert args[1].name == "aThisVal" and args[1].argType == "JS::Handle"
args = args[2:]
# Record the names of all the arguments, so we can use them when we call
# the private method.
argnames = [arg.name for arg in args]
- argnamesWithThis = ["s.GetContext()", "thisObjJS"] + argnames
- argnamesWithoutThis = ["s.GetContext()", "JS::NullPtr()"] + argnames
+ argnamesWithThis = ["s.GetContext()", "thisValJS"] + argnames
+ argnamesWithoutThis = ["s.GetContext()", "JS::UndefinedHandleValue"] + argnames
# Now that we've recorded the argnames for our call to our private
# method, insert our optional argument for deciding whether the
# CallSetup should re-throw exceptions on aRv.
@@ -10846,6 +10869,8 @@ class CGCallback(CGClass):
" aRv.Throw(NS_ERROR_FAILURE);\n"
" return${errorReturn};\n"
"}\n"
+ "JS::Rooted thisValJS(s.GetContext(),\n"
+ " JS::ObjectValue(*thisObjJS));\n"
"return ${methodName}(${callArgs});").substitute({
"errorReturn" : method.getDefaultRetval(),
"callArgs" : ", ".join(argnamesWithThis),
@@ -10872,6 +10897,7 @@ class CGCallback(CGClass):
class CGCallbackFunction(CGCallback):
def __init__(self, callback, descriptorProvider):
+ self.callback = callback
CGCallback.__init__(self, callback, descriptorProvider,
"CallbackFunction",
methods=[CallCallback(callback, descriptorProvider)])
@@ -11119,10 +11145,10 @@ class CallbackMember(CGNativeMember):
args.append(Argument("ExceptionHandling", "aExceptionHandling",
"eReportExceptions"))
return args
- # We want to allow the caller to pass in a "this" object, as
+ # We want to allow the caller to pass in a "this" value, as
# well as a JSContext.
return [Argument("JSContext*", "cx"),
- Argument("JS::Handle", "aThisObj")] + args
+ Argument("JS::Handle", "aThisVal")] + args
def getCallSetup(self):
if self.needThisHandling:
@@ -11175,8 +11201,9 @@ class CallbackMethod(CallbackMember):
def getCall(self):
replacements = {
"errorReturn" : self.getDefaultRetval(),
- "thisObj": self.getThisObj(),
- "getCallable": self.getCallableDecl()
+ "thisVal": self.getThisVal(),
+ "getCallable": self.getCallableDecl(),
+ "callGuard": self.getCallGuard()
}
if self.argCount > 0:
replacements["argv"] = "argv.begin()"
@@ -11185,8 +11212,8 @@ class CallbackMethod(CallbackMember):
replacements["argv"] = "nullptr"
replacements["argc"] = "0"
return string.Template("${getCallable}"
- "if (!JS_CallFunctionValue(cx, ${thisObj}, callable,\n"
- " ${argc}, ${argv}, rval.address())) {\n"
+ "if (${callGuard}!JS::Call(cx, ${thisVal}, callable,\n"
+ " ${argc}, ${argv}, &rval)) {\n"
" aRv.Throw(NS_ERROR_UNEXPECTED);\n"
" return${errorReturn};\n"
"}\n").substitute(replacements)
@@ -11197,8 +11224,8 @@ class CallCallback(CallbackMethod):
CallbackMethod.__init__(self, callback.signatures()[0], "Call",
descriptorProvider, needThisHandling=True)
- def getThisObj(self):
- return "aThisObj"
+ def getThisVal(self):
+ return "aThisVal"
def getCallableDecl(self):
return "JS::Rooted callable(cx, JS::ObjectValue(*mCallback));\n"
@@ -11206,6 +11233,11 @@ class CallCallback(CallbackMethod):
def getPrettyName(self):
return self.callback.identifier.name
+ def getCallGuard(self):
+ if self.callback._treatNonObjectAsNull:
+ return "JS_ObjectIsCallable(cx, mCallback) && "
+ return ""
+
class CallbackOperationBase(CallbackMethod):
"""
Common class for implementing various callback operations.
@@ -11215,13 +11247,13 @@ class CallbackOperationBase(CallbackMethod):
self.methodName = descriptor.binaryNames.get(jsName, jsName)
CallbackMethod.__init__(self, signature, nativeName, descriptor, singleOperation, rethrowContentException)
- def getThisObj(self):
+ def getThisVal(self):
if not self.singleOperation:
- return "mCallback"
+ return "JS::ObjectValue(*mCallback)"
# This relies on getCallableDecl declaring a boolean
# isCallable in the case when we're a single-operation
# interface.
- return "isCallable ? aThisObj.get() : mCallback"
+ return "isCallable ? aThisVal.get() : JS::ObjectValue(*mCallback)"
def getCallableDecl(self):
replacements = {
@@ -11244,6 +11276,9 @@ class CallbackOperationBase(CallbackMethod):
'%s'
'}\n' % CGIndenter(CGGeneric(getCallableFromProp)).define())
+ def getCallGuard(self):
+ return ""
+
class CallbackOperation(CallbackOperationBase):
"""
Codegen actual WebIDL operations on callback interfaces.
diff --git a/dom/bindings/parser/WebIDL.py b/dom/bindings/parser/WebIDL.py
index 1f9fd24aa94..bede74e7675 100644
--- a/dom/bindings/parser/WebIDL.py
+++ b/dom/bindings/parser/WebIDL.py
@@ -876,6 +876,9 @@ class IDLInterface(IDLObjectWithScope):
if identifier == "TreatNonCallableAsNull":
raise WebIDLError("TreatNonCallableAsNull cannot be specified on interfaces",
[attr.location, self.location])
+ if identifier == "TreatNonObjectAsNull":
+ raise WebIDLError("TreatNonObjectAsNull cannot be specified on interfaces",
+ [attr.location, self.location])
elif identifier == "NoInterfaceObject":
if not attr.noArguments():
raise WebIDLError("[NoInterfaceObject] must take no arguments",
@@ -1411,6 +1414,10 @@ class IDLType(IDLObject):
assert self.tag() == IDLType.Tags.callback
return self.nullable() and self.inner._treatNonCallableAsNull
+ def treatNonObjectAsNull(self):
+ assert self.tag() == IDLType.Tags.callback
+ return self.nullable() and self.inner._treatNonObjectAsNull
+
def addExtendedAttributes(self, attrs):
assert len(attrs) == 0
@@ -2690,10 +2697,7 @@ class IDLAttribute(IDLInterfaceMember):
def handleExtendedAttribute(self, attr):
identifier = attr.identifier()
- if identifier == "TreatNonCallableAsNull":
- raise WebIDLError("TreatNonCallableAsNull cannot be specified on attributes",
- [attr.location, self.location])
- elif identifier == "SetterThrows" and self.readonly:
+ if identifier == "SetterThrows" and self.readonly:
raise WebIDLError("Readonly attributes must not be flagged as "
"[SetterThrows]",
[self.location])
@@ -2937,6 +2941,7 @@ class IDLCallbackType(IDLType, IDLObjectWithScope):
argument.resolve(self)
self._treatNonCallableAsNull = False
+ self._treatNonObjectAsNull = False
def isCallback(self):
return True
@@ -2982,8 +2987,13 @@ class IDLCallbackType(IDLType, IDLObjectWithScope):
for attr in attrs:
if attr.identifier() == "TreatNonCallableAsNull":
self._treatNonCallableAsNull = True
+ elif attr.identifier() == "TreatNonObjectAsNull":
+ self._treatNonObjectAsNull = True
else:
unhandledAttrs.append(attr)
+ if self._treatNonCallableAsNull and self._treatNonObjectAsNull:
+ raise WebIDLError("Cannot specify both [TreatNonCallableAsNull] "
+ "and [TreatNonObjectAsNull]", [self.location])
if len(unhandledAttrs) != 0:
IDLType.addExtendedAttributes(self, unhandledAttrs)
diff --git a/dom/bindings/parser/tests/test_treatNonCallableAsNull.py b/dom/bindings/parser/tests/test_treatNonCallableAsNull.py
index f89ed06eda3..7a0bde8a6dc 100644
--- a/dom/bindings/parser/tests/test_treatNonCallableAsNull.py
+++ b/dom/bindings/parser/tests/test_treatNonCallableAsNull.py
@@ -54,3 +54,18 @@ def WebIDLTest(parser, harness):
threw = True
harness.ok(threw, "Should have thrown.")
+
+ parser = parser.reset()
+
+ threw = False
+ try:
+ parser.parse("""
+ [TreatNonCallableAsNull, TreatNonObjectAsNull]
+ callback Function = any(any... arguments);
+ """)
+
+ results = parser.finish()
+ except:
+ threw = True
+
+ harness.ok(threw, "Should have thrown.")
diff --git a/dom/bindings/test/mochitest.ini b/dom/bindings/test/mochitest.ini
index e6fbde9ea04..30b8b5d9a17 100644
--- a/dom/bindings/test/mochitest.ini
+++ b/dom/bindings/test/mochitest.ini
@@ -17,6 +17,7 @@ support-files =
[test_bug852846.html]
[test_bug862092.html]
[test_barewordGetsWindow.html]
+[test_callback_default_thisval.html]
[test_cloneAndImportNode.html]
[test_defineProperty.html]
[test_enums.html]
@@ -31,4 +32,5 @@ support-files =
[test_namedNoIndexed.html]
[test_queryInterface.html]
[test_sequence_wrapping.html]
+[test_treat_non_object_as_null.html]
[test_traceProtos.html]
diff --git a/dom/bindings/test/test_callback_default_thisval.html b/dom/bindings/test/test_callback_default_thisval.html
new file mode 100644
index 00000000000..f67d09ccce4
--- /dev/null
+++ b/dom/bindings/test/test_callback_default_thisval.html
@@ -0,0 +1,36 @@
+
+
+
+
+
+ Test for Bug 957929
+
+
+
+
+
+Mozilla Bug 957929
+
+
+
+
+
+
+
+
diff --git a/dom/bindings/test/test_treat_non_object_as_null.html b/dom/bindings/test/test_treat_non_object_as_null.html
new file mode 100644
index 00000000000..ddcb73a87f7
--- /dev/null
+++ b/dom/bindings/test/test_treat_non_object_as_null.html
@@ -0,0 +1,39 @@
+
+
+
+
+
+ Test for Bug 952365
+
+
+
+
+
+Mozilla Bug 952365
+
+
+
+
+
+
+
+
diff --git a/dom/imptests/html/html/webappapis/scripting/events/test_event-handler-spec-example.html b/dom/imptests/html/html/webappapis/scripting/events/test_event-handler-spec-example.html
index 28273f1b95f..6852e86caa6 100644
--- a/dom/imptests/html/html/webappapis/scripting/events/test_event-handler-spec-example.html
+++ b/dom/imptests/html/html/webappapis/scripting/events/test_event-handler-spec-example.html
@@ -3,20 +3,63 @@
-Start test
diff --git a/dom/webidl/EventHandler.webidl b/dom/webidl/EventHandler.webidl
index fd1d811b16b..ee3c0eda44b 100644
--- a/dom/webidl/EventHandler.webidl
+++ b/dom/webidl/EventHandler.webidl
@@ -10,17 +10,17 @@
* Opera Software ASA. You are granted a license to use, reproduce
* and create derivative works of this document.
*/
-[TreatNonCallableAsNull]
+[TreatNonObjectAsNull]
callback EventHandlerNonNull = any (Event event);
typedef EventHandlerNonNull? EventHandler;
-[TreatNonCallableAsNull]
+[TreatNonObjectAsNull]
// https://www.w3.org/Bugs/Public/show_bug.cgi?id=23489
//callback OnBeforeUnloadEventHandlerNonNull = DOMString (Event event);
callback OnBeforeUnloadEventHandlerNonNull = DOMString? (Event event);
typedef OnBeforeUnloadEventHandlerNonNull? OnBeforeUnloadEventHandler;
-[TreatNonCallableAsNull]
+[TreatNonObjectAsNull]
callback OnErrorEventHandlerNonNull = boolean ((Event or DOMString) event, optional DOMString source, optional unsigned long lineno, optional unsigned long column);
typedef OnErrorEventHandlerNonNull? OnErrorEventHandler;
diff --git a/dom/webidl/HTMLAnchorElement.webidl b/dom/webidl/HTMLAnchorElement.webidl
index a603557b0f0..9d253bc8c4d 100644
--- a/dom/webidl/HTMLAnchorElement.webidl
+++ b/dom/webidl/HTMLAnchorElement.webidl
@@ -13,7 +13,6 @@
// http://www.whatwg.org/specs/web-apps/current-work/#the-a-element
interface HTMLAnchorElement : HTMLElement {
- stringifier;
[SetterThrows]
attribute DOMString target;
[SetterThrows]
diff --git a/dom/webidl/HTMLAreaElement.webidl b/dom/webidl/HTMLAreaElement.webidl
index 7a512f498ae..b5437f54c30 100644
--- a/dom/webidl/HTMLAreaElement.webidl
+++ b/dom/webidl/HTMLAreaElement.webidl
@@ -14,7 +14,6 @@
// http://www.whatwg.org/specs/web-apps/current-work/#the-area-element
interface HTMLAreaElement : HTMLElement {
-stringifier;
[SetterThrows]
attribute DOMString alt;
[SetterThrows]
diff --git a/dom/webidl/URLUtils.webidl b/dom/webidl/URLUtils.webidl
index 033a6fca929..12cebceb74d 100644
--- a/dom/webidl/URLUtils.webidl
+++ b/dom/webidl/URLUtils.webidl
@@ -29,5 +29,8 @@ interface URLUtils {
attribute DOMString search;
attribute URLSearchParams? searchParams;
attribute DOMString hash;
+
+ // Bug 824857 should remove this.
+ stringifier;
};
diff --git a/dom/workers/URL.h b/dom/workers/URL.h
index 922c6c837e1..69c041e4a86 100644
--- a/dom/workers/URL.h
+++ b/dom/workers/URL.h
@@ -112,6 +112,11 @@ public:
void SetHash(const nsAString& aHash);
+ void Stringify(nsString& aRetval) const
+ {
+ GetHref(aRetval);
+ }
+
// IURLSearchParamsObserver
void URLSearchParamsUpdated() MOZ_OVERRIDE;
void URLSearchParamsNeedsUpdates() MOZ_OVERRIDE;
diff --git a/dom/workers/WorkerPrivate.cpp b/dom/workers/WorkerPrivate.cpp
index f143aabe5e1..d40e2833564 100644
--- a/dom/workers/WorkerPrivate.cpp
+++ b/dom/workers/WorkerPrivate.cpp
@@ -4857,19 +4857,23 @@ WorkerPrivate::RunCurrentSyncLoop()
ProcessAllControlRunnablesLocked();
- if (normalRunnablesPending) {
+ // NB: If we processed a NotifyRunnable, we might have run non-control
+ // runnables, one of which may have shut down the sync loop.
+ if (normalRunnablesPending || loopInfo->mCompleted) {
break;
}
}
}
- // Make sure the periodic timer is running before we continue.
- SetGCTimerMode(PeriodicTimer);
+ if (normalRunnablesPending) {
+ // Make sure the periodic timer is running before we continue.
+ SetGCTimerMode(PeriodicTimer);
- MOZ_ALWAYS_TRUE(NS_ProcessNextEvent(thread, false));
+ MOZ_ALWAYS_TRUE(NS_ProcessNextEvent(thread, false));
- // Now *might* be a good time to GC. Let the JS engine make the decision.
- JS_MaybeGC(cx);
+ // Now *might* be a good time to GC. Let the JS engine make the decision.
+ JS_MaybeGC(cx);
+ }
}
// Make sure that the stack didn't change underneath us.
diff --git a/dom/workers/test/urlApi_worker.js b/dom/workers/test/urlApi_worker.js
index 5a4633b6ba3..0fb3ae1c9eb 100644
--- a/dom/workers/test/urlApi_worker.js
+++ b/dom/workers/test/urlApi_worker.js
@@ -263,6 +263,8 @@ onmessage = function() {
if ('pathname' in test) is(test.pathname, url.pathname, "pathname");
if ('search' in test) is(test.search, url.search, "search");
if ('hash' in test) is(test.hash, url.hash, "hash");
+
+ if ('href' in test) is (test.href, url + '', 'stringify works');
}
postMessage({type: 'finish' });
diff --git a/js/src/Makefile.in b/js/src/Makefile.in
index 7d165fbc077..43f12111d9b 100644
--- a/js/src/Makefile.in
+++ b/js/src/Makefile.in
@@ -72,31 +72,6 @@ endif
MOZILLA_DTRACE_SRC = $(srcdir)/devtools/javascript-trace.d
endif
-backend.RecursiveMakeBackend:
- @echo 'Build configuration changed. Regenerating backend.'
- $(PYTHON) config.status
-
-Makefile: backend.RecursiveMakeBackend
- @$(TOUCH) $@
-
-include backend.RecursiveMakeBackend.pp
-
-default:: backend.RecursiveMakeBackend
-
-default::
- $(call py_action,process_install_manifest,--no-remove $(DIST)/include _build_manifests/install/dist_include)
-
-ifneq (,$(CROSS_COMPILE)$(filter-out WINNT OS2,$(OS_ARCH)))
-# nsinstall doesn't get built until we enter config/ in the exports phase,
-# so we'll have to manually ensure it gets built here if we want to use
-# $(EXPORTS)
-export:: config/nsinstall$(HOST_BIN_SUFFIX)
-$(PUBLIC) $(SDK_PUBLIC): config/nsinstall$(HOST_BIN_SUFFIX)
-
-config/nsinstall$(HOST_BIN_SUFFIX): $(srcdir)/config/nsinstall.c $(srcdir)/config/pathsub.c
- $(MAKE) -C config/ nsinstall$(HOST_BIN_SUFFIX)
-endif
-
# Ensure symbol versions of shared library on Linux do not conflict
# with those in libxul.
ifeq (Linux,$(OS_TARGET))
@@ -135,16 +110,16 @@ ifdef ENABLE_INTL_API
ifdef MOZ_SHARED_ICU
ifeq ($(OS_ARCH),WINNT)
ifdef JS_SHARED_LIBRARY
- ICU_FILES := $(foreach libname,$(ICU_LIB_NAMES),intl/icu/target/lib/$(libname)$(ICU_LIB_SUFFIX)$(MOZ_ICU_VERSION).dll)
+ ICU_FILES := $(foreach libname,$(ICU_LIB_NAMES),$(DEPTH)/intl/icu/target/lib/$(libname)$(ICU_LIB_SUFFIX)$(MOZ_ICU_VERSION).dll)
endif
else # ! WINNT
ifeq ($(OS_ARCH),Darwin)
ifdef JS_SHARED_LIBRARY
- ICU_FILES := $(foreach libname,$(ICU_LIB_NAMES),intl/icu/target/lib/$(DLL_PREFIX)$(libname).$(MOZ_ICU_VERSION)$(DLL_SUFFIX))
+ ICU_FILES := $(foreach libname,$(ICU_LIB_NAMES),$(DEPTH)/intl/icu/target/lib/$(DLL_PREFIX)$(libname).$(MOZ_ICU_VERSION)$(DLL_SUFFIX))
endif
else # ! Darwin
ifdef JS_SHARED_LIBRARY
- ICU_FILES := $(foreach libname,$(ICU_LIB_NAMES),intl/icu/target/lib/$(DLL_PREFIX)$(libname)$(DLL_SUFFIX).$(MOZ_ICU_VERSION))
+ ICU_FILES := $(foreach libname,$(ICU_LIB_NAMES),$(DEPTH)/intl/icu/target/lib/$(DLL_PREFIX)$(libname)$(DLL_SUFFIX).$(MOZ_ICU_VERSION))
endif
endif
endif # WINNT
@@ -157,7 +132,7 @@ ifdef ENABLE_INTL_API
else # !MOZ_SHARED_ICU
ifeq ($(OS_ARCH),WINNT)
ICU_LIB_RENAME = $(foreach libname,$(ICU_LIB_NAMES),\
- cp -p intl/icu/target/lib/s$(libname)$(ICU_LIB_SUFFIX).lib intl/icu/target/lib/$(libname).lib;)
+ cp -p $(DEPTH)/intl/icu/target/lib/s$(libname)$(ICU_LIB_SUFFIX).lib $(DEPTH)/intl/icu/target/lib/$(libname).lib;)
endif
endif # MOZ_SHARED_ICU
endif # !MOZ_NATIVE_ICU
@@ -191,8 +166,8 @@ ifdef MOZ_NATIVE_ICU
LOCAL_INCLUDES += $(MOZ_ICU_CFLAGS)
else
LOCAL_INCLUDES += \
- -I$(topsrcdir)/../../intl/icu/source/common \
- -I$(topsrcdir)/../../intl/icu/source/i18n \
+ -I$(topsrcdir)/intl/icu/source/common \
+ -I$(topsrcdir)/intl/icu/source/i18n \
$(NULL)
endif
@@ -232,16 +207,16 @@ endif
buildicu:
# ICU's build system is full of races, so force non-parallel build.
ifdef CROSS_COMPILE
- +$(ICU_MAKE) -j1 -C intl/icu/host STATIC_O=$(OBJ_SUFFIX) GENRBOPTS='-k -R -C'
+ +$(ICU_MAKE) -j1 -C $(DEPTH)/intl/icu/host STATIC_O=$(OBJ_SUFFIX) GENRBOPTS='-k -R -C'
endif
- +$(ICU_MAKE) -j1 -C intl/icu/target STATIC_O=$(OBJ_SUFFIX) GENRBOPTS='-k -R'
+ +$(ICU_MAKE) -j1 -C $(DEPTH)/intl/icu/target STATIC_O=$(OBJ_SUFFIX) GENRBOPTS='-k -R'
$(ICU_LIB_RENAME)
distclean clean::
ifdef CROSS_COMPILE
- $(call SUBMAKE,$@,intl/icu/host)
+ $(call SUBMAKE,$@,$(DEPTH)/intl/icu/host)
endif
- $(call SUBMAKE,$@,intl/icu/target)
+ $(call SUBMAKE,$@,$(DEPTH)/intl/icu/target)
endif
endif
@@ -254,12 +229,12 @@ endif
# check_vanilla_allocations.py is tailored to Linux, so only run it there.
# That should be enough to catch any problems.
check-vanilla-allocations:
- $(PYTHON) $(srcdir)/config/check_vanilla_allocations.py $(REAL_LIBRARY)
+ $(PYTHON) $(topsrcdir)/config/check_vanilla_allocations.py $(REAL_LIBRARY)
# The "aggressive" variant will likely fail on some compiler/platform
# combinations, but is worth running by hand every once in a while.
check-vanilla-allocations-aggressive:
- $(PYTHON) $(srcdir)/config/check_vanilla_allocations.py --aggressive $(REAL_LIBRARY)
+ $(PYTHON) $(topsrcdir)/config/check_vanilla_allocations.py --aggressive $(REAL_LIBRARY)
ifeq ($(OS_ARCH),Linux)
check:: check-vanilla-allocations
@@ -269,7 +244,7 @@ endif
# If the number of OOM errors changes, update the number below. We intend this
# number to go down over time, by fixing OOMs.
check-ooms:
- $(wildcard $(RUN_TEST_PROGRAM)) $(PYTHON) -u $(srcdir)/config/find_OOM_errors.py --regression 125
+ $(wildcard $(RUN_TEST_PROGRAM)) $(PYTHON) -u $(topsrcdir)/config/find_OOM_errors.py --regression 125
ifeq ($(MOZ_DEBUG),1)
#check:: check-ooms
@@ -289,7 +264,7 @@ endif
endif
check-style::
- (cd $(srcdir) && $(PYTHON) config/check_spidermonkey_style.py);
+ (cd $(srcdir) && $(PYTHON) $(topsrcdir)/config/check_spidermonkey_style.py);
check-jit-test::
$(JITTEST_ASAN_ENV) $(wildcard $(RUN_TEST_PROGRAM)) $(PYTHON) -u $(srcdir)/jit-test/jit_test.py \
diff --git a/js/src/aclocal.m4 b/js/src/aclocal.m4
index 1b191092890..c11775bdd13 100644
--- a/js/src/aclocal.m4
+++ b/js/src/aclocal.m4
@@ -31,4 +31,13 @@ builtin(include, ../../build/autoconf/python-virtualenv.m4)dnl
builtin(include, ../../build/autoconf/winsdk.m4)dnl
builtin(include, ../../build/autoconf/icu.m4)dnl
+define([__MOZ_AC_INIT_PREPARE], defn([AC_INIT_PREPARE]))
+define([AC_INIT_PREPARE],
+[if test -z "$srcdir"; then
+ srcdir=`dirname "[$]0"`
+fi
+srcdir="$srcdir/../.."
+__MOZ_AC_INIT_PREPARE($1)
+])
+
MOZ_PROG_CHECKMSYS()
diff --git a/js/src/build/check-sync-exceptions b/js/src/build/check-sync-exceptions
deleted file mode 100644
index 0d20b6487c6..00000000000
--- a/js/src/build/check-sync-exceptions
+++ /dev/null
@@ -1 +0,0 @@
-*.pyc
diff --git a/js/src/build/msys-perl-wrapper b/js/src/build/msys-perl-wrapper
deleted file mode 100644
index 8ba78ed623e..00000000000
--- a/js/src/build/msys-perl-wrapper
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/sh
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-
-args=""
-
-for i in "${@}"
-do
- case "$i" in
- -I?:/*)
- i="$(echo "${i}" | sed -e 's|^-I\(.\):/|-I/\1/|')"
- ;;
- esac
-
- args="${args} '${i}'"
-done
-
-eval "exec perl $args"
diff --git a/js/src/build/qemu-wrap b/js/src/build/qemu-wrap
deleted file mode 100755
index e33938955d9..00000000000
--- a/js/src/build/qemu-wrap
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/bin/bash
-# this script creates a wrapper shell script for an executable. The idea is the actual executable cannot be
-# executed natively (it was cross compiled), but we want to run tests natively. Running this script
-# as part of the compilation process will move the non-native executable to a new location, and replace it
-# with a script that will run it under qemu.
-while [[ -n $1 ]]; do
- case $1 in
- --qemu) QEMU="$2"; shift 2;;
- --libdir) LIBDIR="$2"; shift 2;;
- --ld) LD="$2"; shift 2;;
- *) exe="$1"; shift;;
- esac
-done
-if [[ -z $LIBDIR ]]; then
- echo "You need to specify a directory for the cross libraries when you configure the shell"
- echo "You can do this with --with-cross-lib="
- exit 1
-fi
-LD=${LD:-$LIBDIR/ld-linux.so.3}
-mv $exe $exe.target
-# Just hardcode the path to the executable. It'll be pretty obvious if it is doing the wrong thing.
-
-echo $'#!/bin/bash\n' $QEMU -E LD_LIBRARY_PATH="${LIBDIR}" "$LD" "$(readlink -f "$exe.target")" '"$@"' >"$exe"
-chmod +x $exe
\ No newline at end of file
diff --git a/js/src/build/subconfigure.py b/js/src/build/subconfigure.py
deleted file mode 100644
index 17ef57d0bd4..00000000000
--- a/js/src/build/subconfigure.py
+++ /dev/null
@@ -1,124 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-# This script is used to capture the content of config.status-generated
-# files and subsequently restore their timestamp if they haven't changed.
-
-import os
-import re
-import subprocess
-import sys
-import pickle
-
-class File(object):
- def __init__(self, path):
- self._path = path
- self._content = open(path, 'rb').read()
- stat = os.stat(path)
- self._times = (stat.st_atime, stat.st_mtime)
-
- def update_time(self):
- '''If the file hasn't changed since the instance was created,
- restore its old modification time.'''
- if not os.path.exists(self._path):
- return
- if open(self._path, 'rb').read() == self._content:
- os.utime(self._path, self._times)
-
-
-# As defined in the various sub-configures in the tree
-PRECIOUS_VARS = set([
- 'build_alias',
- 'host_alias',
- 'target_alias',
- 'CC',
- 'CFLAGS',
- 'LDFLAGS',
- 'LIBS',
- 'CPPFLAGS',
- 'CPP',
- 'CCC',
- 'CXXFLAGS',
- 'CXX',
- 'CCASFLAGS',
- 'CCAS',
-])
-
-
-# Autoconf, in some of the sub-configures used in the tree, likes to error
-# out when "precious" variables change in value. The solution it gives to
-# straighten things is to either run make distclean or remove config.cache.
-# There's no reason not to do the latter automatically instead of failing,
-# doing the cleanup (which, on buildbots means a full clobber), and
-# restarting from scratch.
-def maybe_clear_cache():
- comment = re.compile(r'^\s+#')
- cache = {}
- with open('config.cache') as f:
- for line in f.readlines():
- if not comment.match(line) and '=' in line:
- key, value = line.split('=', 1)
- cache[key] = value
- for precious in PRECIOUS_VARS:
- entry = 'ac_cv_env_%s_value' % precious
- if entry in cache and (not precious in os.environ or os.environ[precious] != cache[entry]):
- os.remove('config.cache')
- return
-
-
-def dump(dump_file, shell):
- if os.path.exists('config.cache'):
- maybe_clear_cache()
- if not os.path.exists('config.status'):
- if os.path.exists(dump_file):
- os.remove(dump_file)
- return
-
- config_files = [File('config.status')]
-
- # Scan the config.status output for information about configuration files
- # it generates.
- config_status_output = subprocess.check_output(
- [shell, '-c', './config.status --help'],
- stderr=subprocess.STDOUT).splitlines()
- state = None
- for line in config_status_output:
- if line.startswith('Configuration') and line.endswith(':'):
- state = 'config'
- elif not line.startswith(' '):
- state = None
- elif state == 'config':
- for f in (couple.split(':')[0] for couple in line.split()):
- if os.path.isfile(f):
- config_files.append(File(f))
-
- with open(dump_file, 'wb') as f:
- pickle.dump(config_files, f)
-
-
-def adjust(dump_file):
- if not os.path.exists(dump_file):
- return
-
- config_files = []
-
- try:
- with open(dump_file, 'rb') as f:
- config_files = pickle.load(f)
- except Exception:
- pass
-
- for f in config_files:
- f.update_time()
-
- os.remove(dump_file)
-
-
-CONFIG_DUMP = 'config_files.pkl'
-
-if __name__ == '__main__':
- if sys.argv[1] == 'dump':
- dump(CONFIG_DUMP, sys.argv[2])
- elif sys.argv[1] == 'adjust':
- adjust(CONFIG_DUMP)
diff --git a/js/src/build/unix/add_phony_targets.py b/js/src/build/unix/add_phony_targets.py
deleted file mode 100644
index 86fc328a503..00000000000
--- a/js/src/build/unix/add_phony_targets.py
+++ /dev/null
@@ -1,33 +0,0 @@
-import pymake.data
-import pymake.parser
-import pymake.parserdata
-import sys
-
-'''
-Modifies the output of Sun Studio's -xM to look more like the output
-of gcc's -MD -MP, adding phony targets for dependencies.
-'''
-
-
-def add_phony_targets(path):
- print path
- deps = set()
- targets = set()
- for stmt in pymake.parser.parsefile(path):
- if isinstance(stmt, pymake.parserdata.Rule):
- assert isinstance(stmt.depexp, pymake.data.StringExpansion)
- assert isinstance(stmt.targetexp, pymake.data.StringExpansion)
- for d in stmt.depexp.s.split():
- deps.add(d)
- for t in stmt.targetexp.s.split():
- targets.add(t)
- phony_targets = deps - targets
- if not phony_targets:
- return
- with open(path, 'a') as f:
- f.writelines('%s:\n' % d for d in phony_targets)
-
-
-if __name__ == '__main__':
- for f in sys.argv[1:]:
- add_phony_targets(f)
diff --git a/js/src/build/unix/headers/bits/c++config.h b/js/src/build/unix/headers/bits/c++config.h
deleted file mode 100644
index c0182c12858..00000000000
--- a/js/src/build/unix/headers/bits/c++config.h
+++ /dev/null
@@ -1,2 +0,0 @@
-#include_next
-#undef _GLIBCXX_USE_FLOAT128
diff --git a/js/src/build/unix/print-failed-commands.sh b/js/src/build/unix/print-failed-commands.sh
deleted file mode 100755
index 7f6b73d33e1..00000000000
--- a/js/src/build/unix/print-failed-commands.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/sh
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-#
-# Usage from makefile:
-# ELOG = . $(topdir)/build/autoconf/print-failed-commands.sh
-# $(ELOG) $(CC) $CFLAGS -o $@ $<
-#
-# This shell script is used by the build system to print out commands that fail
-# to execute properly. It is designed to make the "make -s" command more
-# useful.
-#
-# Note that in the example we are sourcing rather than execing the script.
-# Since make already started a shell for us, we might as well use it rather
-# than starting a new one.
-
-( exec "$@" ) || {
- echo
- echo "In the directory " `pwd`
- echo "The following command failed to execute properly:"
- echo "$@"
- exit 1;
-}
diff --git a/js/src/build/unix/uniq.pl b/js/src/build/unix/uniq.pl
deleted file mode 100644
index 301240e0310..00000000000
--- a/js/src/build/unix/uniq.pl
+++ /dev/null
@@ -1,91 +0,0 @@
-#!/usr/bin/env perl
-
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-##----------------------------##
-##---] CORE/CPAN INCLUDES [---##
-##----------------------------##
-use strict;
-use warnings;
-use Getopt::Long;
-
-##-------------------##
-##---] EXPORTS [---##
-##-------------------##
-our $VERSION = qw(1.1);
-
-##-------------------##
-##---] GLOBALS [---##
-##-------------------##
-my %argv;
-my $modver = $Getopt::Long::VERSION || 0;
-my $isOldGetopt = ($modver eq '2.25') ? 1 : 0;
-
-###########################################################################
-## Intent: Script init function
-###########################################################################
-sub init
-{
- if ($isOldGetopt)
- {
- # mozilla.build/mingw perl in need of an upgrade
- # emulate Getopt::Long switch|short:init
- foreach (qw(debug regex sort))
- {
- if (defined($argv{$_}))
- {
- $argv{$_} ||= 1;
- }
- }
- }
-} # init
-
-##----------------##
-##---] MAIN [---##
-##----------------##
-my @args = ($isOldGetopt)
- ? qw(debug|d regex|r sort|s)
- : qw(debug|d:1 regex|r:1 sort|s:1)
- ;
-
-unless(GetOptions(\%argv, @args))
-{
- print "Usage: $0\n";
- print " --sort Sort list elements early\n";
- print " --regex Exclude subdirs by pattern\n";
-}
-
-init();
-my $debug = $argv{debug} || 0;
-
-my %seen;
-my @out;
-my @in = ($argv{sort}) ? sort @ARGV : @ARGV;
-
-foreach my $d (@in)
-{
- next if ($seen{$d}++);
-
- print " arg is $d\n" if ($debug);
-
- if ($argv{regex})
- {
- my $found = 0;
- foreach my $dir (@out)
- {
- my $dirM = quotemeta($dir);
- $found++, last if ($d eq $dir || $d =~ m!^${dirM}\/!);
- }
- print "Adding $d\n" if ($debug && !$found);
- push @out, $d if (!$found);
- } else {
- print "Adding: $d\n" if ($debug);
- push(@out, $d);
- }
-}
-
-print "@out\n"
-
-# EOF
diff --git a/js/src/build/win32/pgomerge.py b/js/src/build/win32/pgomerge.py
deleted file mode 100644
index 313d66870de..00000000000
--- a/js/src/build/win32/pgomerge.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/python
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-# Usage: pgomerge.py <basename> <dist/bin>
-# Gathers .pgc files from dist/bin and merges them into
-# $PWD/$basename.pgd using pgomgr, then deletes them.
-# No errors if any of these files don't exist.
-
-import sys, os, os.path, subprocess
-if not sys.platform == "win32":
- raise Exception("This script was only meant for Windows.")
-
-def MergePGOFiles(basename, pgddir, pgcdir):
- """Merge pgc files produced from an instrumented binary
- into the pgd file for the second pass of profile-guided optimization
- with MSVC. |basename| is the name of the DLL or EXE without the
- extension. |pgddir| is the path that contains .pgd
- (should be the objdir it was built in). |pgcdir| is the path
- containing basename!N.pgc files, which is probably dist/bin.
- Calls pgomgr to merge each pgc file into the pgd, then deletes
- the pgc files."""
- if not os.path.isdir(pgddir) or not os.path.isdir(pgcdir):
- return
- pgdfile = os.path.abspath(os.path.join(pgddir, basename + ".pgd"))
- if not os.path.isfile(pgdfile):
- return
- for file in os.listdir(pgcdir):
- if file.startswith(basename+"!") and file.endswith(".pgc"):
- try:
- pgcfile = os.path.normpath(os.path.join(pgcdir, file))
- subprocess.call(['pgomgr', '-merge',
- pgcfile,
- pgdfile])
- os.remove(pgcfile)
- except OSError:
- pass
-
-if __name__ == '__main__':
- if len(sys.argv) != 3:
- print >>sys.stderr, "Usage: pgomerge.py "
- sys.exit(1)
- MergePGOFiles(sys.argv[1], os.getcwd(), sys.argv[2])
diff --git a/js/src/builtin/SIMD.cpp b/js/src/builtin/SIMD.cpp
index 3d5845f990f..bae570e28d7 100644
--- a/js/src/builtin/SIMD.cpp
+++ b/js/src/builtin/SIMD.cpp
@@ -12,9 +12,13 @@
*/
#include "builtin/SIMD.h"
+
#include "jsapi.h"
#include "jsfriendapi.h"
+
#include "builtin/TypedObject.h"
+#include "js/Value.h"
+
#include "jsobjinlines.h"
using namespace js;
@@ -48,7 +52,7 @@ struct Float32x4 {
*out = v.toNumber();
}
static void setReturn(CallArgs &args, float value) {
- args.rval().setDouble(value);
+ args.rval().setDouble(JS::CanonicalizeNaN(value));
}
};
diff --git a/js/src/config/Makefile.in b/js/src/config/Makefile.in
deleted file mode 100644
index 7bed1ff4a1c..00000000000
--- a/js/src/config/Makefile.in
+++ /dev/null
@@ -1,47 +0,0 @@
-#
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-# IMPORTANT: Disable NSBUILDROOT for this directory only, otherwise we have
-# a recursive rule for finding nsinstall and the Perl scripts.
-ifdef NSBUILDROOT
-override NSBUILDROOT :=
-endif
-
-ifdef GNU_CC
-MODULE_OPTIMIZE_FLAGS = -O3
-endif
-
-include $(topsrcdir)/config/config.mk
-
-ifneq (WINNT,$(HOST_OS_ARCH))
-# Ensure nsinstall is atomically created
-nsinstall$(HOST_BIN_SUFFIX): $(HOST_PROGRAM)
- cp $^ $@.tmp
- mv $@.tmp $@
-
-NSINSTALL_FILES := nsinstall$(HOST_BIN_SUFFIX)
-NSINSTALL_DEST := $(DIST)/bin
-NSINSTALL_TARGET := export
-INSTALL_TARGETS += NSINSTALL
-endif
-
-include $(topsrcdir)/config/rules.mk
-
-HOST_CFLAGS += -DUNICODE -D_UNICODE
-
-ifdef WRAP_SYSTEM_INCLUDES
-export:: \
- $(call mkdir_deps,system_wrappers_js) \
- $(NULL)
- $(PYTHON) -m mozbuild.action.preprocessor $(DEFINES) $(ACDEFINES) \
- $(srcdir)/system-headers | $(PERL) $(srcdir)/make-system-wrappers.pl system_wrappers_js
- $(INSTALL) system_wrappers_js $(DIST)
-
-GARBAGE_DIRS += system_wrappers_js
-endif
-
-GARBAGE += $(srcdir)/*.pyc *.pyc
-
-FORCE:
diff --git a/js/src/config/Moz/Milestone.pm b/js/src/config/Moz/Milestone.pm
deleted file mode 100644
index 722203298fa..00000000000
--- a/js/src/config/Moz/Milestone.pm
+++ /dev/null
@@ -1,220 +0,0 @@
-#!/usr/bin/perl -w
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-package Moz::Milestone;
-use strict;
-
-use vars qw($officialMilestone
- $milestone);
-
-local $Moz::Milestone::milestone;
-local $Moz::Milestone::officialMilestone;
-
-#
-# Usage: getOfficialMilestone($milestoneFile)
-# Returns full milestone (x.x.x.x[ab12pre+])
-#
-sub getOfficialMilestone($) {
- my $mfile = $_[0];
- open(FILE,"$mfile") ||
- die ("Can't open $mfile for reading!");
-
- my $num = <FILE>;
- while($num =~ /^\s*#/ || $num !~ /^\d/) {
- $num = <FILE>;
- }
-
- close(FILE);
- if ($num !~ /^\d/) { return; }
- chomp($num);
- # Remove extra ^M caused by using dos-mode line-endings
- chop $num if (substr($num, -1, 1) eq "\r");
- $Moz::Milestone::officialMilestone = $num;
- $Moz::Milestone::milestone = &getMilestoneNum;
- return $num;
-}
-
-#
-# Usage: getMilestoneNum($num)
-# Returns: milestone without a + if it exists.
-#
-sub getMilestoneNum {
- if (defined($Moz::Milestone::milestone)) {
- return $Moz::Milestone::milestone;
- }
-
- if (defined($Moz::Milestone::officialMilestone)) {
- $Moz::Milestone::milestone = $Moz::Milestone::officialMilestone;
- } else {
- $Moz::Milestone::milestone = $_[0];
- }
-
- if ($Moz::Milestone::milestone =~ /\+$/) { # for x.x.x+, strip off the +
- $Moz::Milestone::milestone =~ s/\+$//;
- }
-
- return $Moz::Milestone::milestone;
-}
-
-#
-# Usage: getMilestoneQualifier($num)
-# Returns: + if it exists.
-#
-sub getMilestoneQualifier {
- my $milestoneQualifier;
- if (defined($Moz::Milestone::officialMilestone)) {
- $milestoneQualifier = $Moz::Milestone::officialMilestone;
- } else {
- $milestoneQualifier = $_[0];
- }
-
- if ($milestoneQualifier =~ /\+$/) {
- return "+";
- }
-}
-
-sub getMilestoneMajor {
- my $milestoneMajor;
- if (defined($Moz::Milestone::milestone)) {
- $milestoneMajor = $Moz::Milestone::milestone;
- } else {
- $milestoneMajor = $_[0];
- }
- my @parts = split(/\./,$milestoneMajor);
- return $parts[0];
-}
-
-sub getMilestoneMinor {
- my $milestoneMinor;
- if (defined($Moz::Milestone::milestone)) {
- $milestoneMinor = $Moz::Milestone::milestone;
- } else {
- $milestoneMinor = $_[0];
- }
- my @parts = split(/\./,$milestoneMinor);
-
- if ($#parts < 1 ) { return 0; }
- return $parts[1];
-}
-
-sub getMilestoneMini {
- my $milestoneMini;
- if (defined($Moz::Milestone::milestone)) {
- $milestoneMini = $Moz::Milestone::milestone;
- } else {
- $milestoneMini = $_[0];
- }
- my @parts = split(/\./,$milestoneMini);
-
- if ($#parts < 2 ) { return 0; }
- return $parts[2];
-}
-
-sub getMilestoneMicro {
- my $milestoneMicro;
- if (defined($Moz::Milestone::milestone)) {
- $milestoneMicro = $Moz::Milestone::milestone;
- } else {
- $milestoneMicro = $_[0];
- }
- my @parts = split(/\./,$milestoneMicro);
-
- if ($#parts < 3 ) { return 0; }
- return $parts[3];
-}
-
-sub getMilestoneAB {
- my $milestoneAB;
- if (defined($Moz::Milestone::milestone)) {
- $milestoneAB = $Moz::Milestone::milestone;
- } else {
- $milestoneAB = $_[0];
- }
-
- if ($milestoneAB =~ /a/) { return "alpha"; }
- if ($milestoneAB =~ /b/) { return "beta"; }
- return "final";
-}
-
-#
-# Usage: getMilestoneABWithNum($milestoneFile)
-# Returns the alpha and beta tag with its number (a1, a2, b3, ...)
-#
-sub getMilestoneABWithNum {
- my $milestoneABNum;
- if (defined($Moz::Milestone::milestone)) {
- $milestoneABNum = $Moz::Milestone::milestone;
- } else {
- $milestoneABNum = $_[0];
- }
-
- if ($milestoneABNum =~ /([ab]\d+)/) {
- return $1;
- } else {
- return "";
- }
-}
-
-#
-# build_file($template_file,$output_file)
-#
-sub build_file($$) {
- my @FILE;
- my @MILESTONE_PARTS;
- my $MINI_VERSION = 0;
- my $MICRO_VERSION = 0;
- my $OFFICIAL = 0;
- my $QUALIFIER = "";
-
- if (!defined($Moz::Milestone::milestone)) { die("$0: no milestone file set!\n"); }
- @MILESTONE_PARTS = split(/\./, &getMilestoneNum);
- if ($#MILESTONE_PARTS >= 2) {
- $MINI_VERSION = 1;
- } else {
- $MILESTONE_PARTS[2] = 0;
- }
- if ($#MILESTONE_PARTS >= 3) {
- $MICRO_VERSION = 1;
- } else {
- $MILESTONE_PARTS[3] = 0;
- }
- if (! &getMilestoneQualifier) {
- $OFFICIAL = 1;
- } else {
- $QUALIFIER = "+";
- }
-
- if (-e $_[0]) {
- open(FILE, "$_[0]") || die("$0: Can't open $_[0] for reading!\n");
- @FILE = <FILE>;
- close(FILE);
-
- open(FILE, ">$_[1]") || die("$0: Can't open $_[1] for writing!\n");
-
- #
- # There will be more of these based on what we need for files.
- #
- foreach(@FILE) {
- s/__MOZ_MAJOR_VERSION__/$MILESTONE_PARTS[0]/g;
- s/__MOZ_MINOR_VERSION__/$MILESTONE_PARTS[1]/g;
- s/__MOZ_MINI_VERSION__/$MILESTONE_PARTS[2]/g;
- s/__MOZ_MICRO_VERSION__/$MILESTONE_PARTS[3]/g;
- if ($MINI_VERSION) {
- s/__MOZ_OPTIONAL_MINI_VERSION__/.$MILESTONE_PARTS[2]/g;
- }
- if ($MICRO_VERSION) {
- s/__MOZ_OPTIONAL_MICRO_VERSION__/.$MILESTONE_PARTS[3]/g;
- }
-
- print FILE $_;
- }
- close(FILE);
- } else {
- die("$0: $_[0] doesn't exist for autoversioning!\n");
- }
-
-}
-
-1;
diff --git a/js/src/config/autoconf.mk.in b/js/src/config/autoconf.mk.in
deleted file mode 100644
index df6cc89badb..00000000000
--- a/js/src/config/autoconf.mk.in
+++ /dev/null
@@ -1,6 +0,0 @@
-ifndef INCLUDED_AUTOCONF_MK
-INCLUDED_AUTOCONF_MK = 1
-include $(DEPTH)/config/emptyvars.mk
-@ALLSUBSTS@
-include $(topsrcdir)/config/baseconfig.mk
-endif
diff --git a/js/src/config/baseconfig.mk b/js/src/config/baseconfig.mk
deleted file mode 100644
index 230c2564035..00000000000
--- a/js/src/config/baseconfig.mk
+++ /dev/null
@@ -1,42 +0,0 @@
-installdir = $(libdir)/$(MOZ_APP_NAME)-$(MOZ_APP_VERSION)
-sdkdir = $(libdir)/$(MOZ_APP_NAME)-devel-$(MOZ_APP_VERSION)
-
-ifneq (,$(filter /%,$(TOP_DIST)))
-DIST = $(TOP_DIST)
-else
-ifeq (.,$(DEPTH))
-DIST = $(TOP_DIST)
-else
-DIST = $(DEPTH)/$(TOP_DIST)
-endif
-endif
-
-# We do magic with OBJ_SUFFIX in config.mk, the following ensures we don't
-# manually use it before config.mk inclusion
-_OBJ_SUFFIX := $(OBJ_SUFFIX)
-OBJ_SUFFIX = $(error config/config.mk needs to be included before using OBJ_SUFFIX)
-
-ifeq ($(HOST_OS_ARCH),WINNT)
-# We only support building with pymake or a non-msys gnu make version
-# strictly above 4.0.
-ifndef .PYMAKE
-ifeq (a,$(firstword a$(subst /, ,$(abspath .))))
-$(error MSYS make is not supported)
-endif
-# 4.0- happens to be greater than 4.0, lower than the mozmake version,
-# and lower than 4.0.1 or 4.1, whatever next version of gnu make will
-# be released.
-ifneq (4.0-,$(firstword $(sort 4.0- $(MAKE_VERSION))))
-$(error Make version too old. Only versions strictly greater than 4.0 are supported.)
-endif
-endif
-ifeq (a,$(firstword a$(subst /, ,$(srcdir))))
-$(error MSYS-style srcdir are not supported for Windows builds.)
-endif
-endif # WINNT
-
-ifdef .PYMAKE
-include_deps = $(eval -includedeps $(1))
-else
-include_deps = $(eval -include $(1))
-endif
diff --git a/js/src/config/check-sync-dirs.py b/js/src/config/check-sync-dirs.py
deleted file mode 100644
index 22204b11dc5..00000000000
--- a/js/src/config/check-sync-dirs.py
+++ /dev/null
@@ -1,108 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-# check-sync-dirs.py --- check that one directory is an exact subset of another
-#
-# Usage: python check-sync-dirs.py COPY ORIGINAL
-#
-# Check that the files present in the directory tree COPY are exact
-# copies of their counterparts in the directory tree ORIGINAL. COPY
-# need not have all the files in ORIGINAL, but COPY may not have files
-# absent from ORIGINAL.
-#
-# Each directory in COPY may have a file named
-# 'check-sync-exceptions', which lists files in COPY that need not be
-# the same as the corresponding file in ORIGINAL, or exist at all in
-# ORIGINAL. (The 'check-sync-exceptions' file itself is always
-# treated as exceptional.) Blank lines and '#' comments in the file
-# are ignored.
-
-import sys
-import os
-from os.path import join
-import filecmp
-import textwrap
-import fnmatch
-
-if len(sys.argv) != 3:
- print >> sys.stderr, 'TEST-UNEXPECTED-FAIL | check-sync-dirs.py | Usage: %s COPY ORIGINAL' % sys.argv[0]
- sys.exit(1)
-
-copy = os.path.abspath(sys.argv[1])
-original = os.path.abspath(sys.argv[2])
-
-# Return the contents of FILENAME, a 'check-sync-exceptions' file, as
-# a dictionary whose keys are exactly the list of filenames, along
-# with the basename of FILENAME itself. If FILENAME does not exist,
-# return the empty dictionary.
-def read_exceptions(filename):
- if (os.path.exists(filename)):
- f = file(filename)
- exceptions = {}
- for line in f:
- line = line.strip()
- if line != '' and line[0] != '#':
- exceptions[line] = None
- exceptions[os.path.basename (filename)] = None
- f.close()
- return exceptions
- else:
- return {}
-
-# Return true if FILENAME matches any pattern in the list of filename
-# patterns PATTERNS.
-def fnmatch_any(filename, patterns):
- for pattern in patterns:
- if fnmatch.fnmatch(filename, pattern):
- return True
- return False
-
-# Check the contents of the directory tree COPY against ORIGINAL. For each
-# file that differs, apply REPORT to COPY, ORIGINAL, and the file's
-# relative path. COPY and ORIGINAL should be absolute. Ignore files
-# that match patterns given in the list IGNORE.
-def check(copy, original):
- os.chdir(copy)
- for (dirpath, dirnames, filenames) in os.walk('.'):
- exceptions = read_exceptions(join(dirpath, 'check-sync-exceptions'))
- for dirname in dirnames:
- if fnmatch_any(dirname, exceptions):
- dirnames.remove(dirname)
- break
- for filename in filenames:
- if fnmatch_any(filename, exceptions):
- continue
- relative_name = join(dirpath, filename)
- original_name = join(original, relative_name)
- if (os.path.exists(original_name)
- and filecmp.cmp(relative_name, original_name, False)):
- continue
- report(copy, original, relative_name)
-
-differences_found = False
-
-# Print an error message for DIFFERING, which was found to differ
-# between COPY and ORIGINAL. Set the global variable differences_found.
-def report(copy, original, differing):
- global differences_found
- if not differences_found:
- print >> sys.stderr, 'TEST-UNEXPECTED-FAIL | check-sync-dirs.py | build file copies are not in sync\n' \
- 'TEST-INFO | check-sync-dirs.py | file(s) found in: %s\n' \
- 'TEST-INFO | check-sync-dirs.py | differ from their originals in: %s' \
- % (copy, original)
- print >> sys.stderr, 'TEST-INFO | check-sync-dirs.py | differing file: %s' % differing
- differences_found = True
-
-check(copy, original)
-
-if differences_found:
- msg = '''In general, the files in '%s' should always be exact copies of
-originals in '%s'. A change made to one should also be made to the
-other. See 'check-sync-dirs.py' for more details.''' \
- % (copy, original)
- print >> sys.stderr, textwrap.fill(msg, 75)
- sys.exit(1)
-
-print >> sys.stderr, 'TEST-PASS | check-sync-dirs.py | %s <= %s' % (copy, original)
-sys.exit(0)
diff --git a/js/src/config/check-sync-exceptions b/js/src/config/check-sync-exceptions
deleted file mode 100644
index 07b70a5dfe1..00000000000
--- a/js/src/config/check-sync-exceptions
+++ /dev/null
@@ -1,27 +0,0 @@
-Makefile.in
-Makefile
-autoconf.mk.in
-autoconf.mk
-baseconfig.mk
-check-sync-dirs.py
-static-checking-config.mk
-nsinstall
-nsinstall.exe
-host_nsinstall.o
-host_pathsub.o
-moz.build
-
-# This is a copy of nspr's config/make-system-wrappers.pl.
-make-system-wrappers.pl
-system_wrappers_js
-
-# Ignore detritus left lying around by editing tools.
-*~
-.#*
-#*#
-*.orig
-*.rej
-
-# Ignore "compiled" python files
-*.pyc
-*.pyo
diff --git a/js/src/config/check_source_count.py b/js/src/config/check_source_count.py
deleted file mode 100755
index e347e7a55e8..00000000000
--- a/js/src/config/check_source_count.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/env python
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-
-# Usage: check_source_count.py SEARCH_TERM COUNT ERROR_LOCATION REPLACEMENT [FILES...]
-# Checks that FILES contains exactly COUNT matches of SEARCH_TERM. If it does
-# not, an error message is printed, quoting ERROR_LOCATION, which should
-# probably be the filename and line number of the erroneous call to
-# check_source_count.py.
-from __future__ import print_function
-import sys
-import os
-import re
-
-search_string = sys.argv[1]
-expected_count = int(sys.argv[2])
-error_location = sys.argv[3]
-replacement = sys.argv[4]
-files = sys.argv[5:]
-
-details = {}
-
-count = 0
-for f in files:
- text = file(f).read()
- match = re.findall(search_string, text)
- if match:
- num = len(match)
- count += num
- details[f] = num
-
-if count == expected_count:
- print("TEST-PASS | check_source_count.py {0} | {1}"
- .format(search_string, expected_count))
-
-else:
- print("TEST-UNEXPECTED-FAIL | check_source_count.py {0} | "
- .format(search_string),
- end='')
- if count < expected_count:
- print("There are fewer occurrences of /{0}/ than expected. "
- "This may mean that you have removed some, but forgotten to "
- "account for it {1}.".format(search_string, error_location))
- else:
- print("There are more occurrences of /{0}/ than expected. We're trying "
- "to prevent an increase in the number of {1}'s, using {2} if "
- "possible. If it is unavoidable, you should update the expected "
- "count {3}.".format(search_string, search_string, replacement,
- error_location))
-
- print("Expected: {0}; found: {1}".format(expected_count, count))
- for k in sorted(details):
- print("Found {0} occurences in {1}".format(details[k],k))
- sys.exit(-1)
-
diff --git a/js/src/config/check_spidermonkey_style.py b/js/src/config/check_spidermonkey_style.py
deleted file mode 100644
index f4ad12fece7..00000000000
--- a/js/src/config/check_spidermonkey_style.py
+++ /dev/null
@@ -1,588 +0,0 @@
-# vim: set ts=8 sts=4 et sw=4 tw=99:
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-#----------------------------------------------------------------------------
-# This script checks various aspects of SpiderMonkey code style. The current checks are as
-# follows.
-#
-# We check the following things in headers.
-#
-# - No cyclic dependencies.
-#
-# - No normal header should #include a inlines.h/-inl.h file.
-#
-# - #ifndef wrappers should have the right form. (XXX: not yet implemented)
-# - Every header file should have one.
-# - The guard name used should be appropriate for the filename.
-#
-# We check the following things in all files.
-#
-# - #includes should have full paths, e.g. "jit/Ion.h", not "Ion.h".
-#
-# - #includes should use the appropriate form for system headers (<...>) and
-# local headers ("...").
-#
-# - #includes should be ordered correctly.
-# - Each one should be in the correct section.
-# - Alphabetical order should be used within sections.
-# - Sections should be in the right order.
-# Note that the presence of #if/#endif blocks complicates things, to the
-# point that it's not always clear where a conditionally-compiled #include
-# statement should go, even to a human. Therefore, we check the #include
-# statements within each #if/#endif block (including nested ones) in
-# isolation, but don't try to do any order checking between such blocks.
-#----------------------------------------------------------------------------
-
-from __future__ import print_function
-
-import difflib
-import os
-import re
-import subprocess
-import sys
-import traceback
-
-# We don't bother checking files in these directories, because they're (a) auxiliary or (b)
-# imported code that doesn't follow our coding style.
-ignored_js_src_dirs = [
- 'js/src/config/', # auxiliary stuff
- 'js/src/ctypes/libffi/', # imported code
- 'js/src/devtools/', # auxiliary stuff
- 'js/src/editline/', # imported code
- 'js/src/gdb/', # auxiliary stuff
- 'js/src/vtune/' # imported code
-]
-
-# We ignore #includes of these files, because they don't follow the usual rules.
-included_inclnames_to_ignore = set([
- 'ffi.h', # generated in ctypes/libffi/
- 'devtools/sharkctl.h', # we ignore devtools/ in general
- 'devtools/Instruments.h', # we ignore devtools/ in general
- 'double-conversion.h', # strange MFBT case
- 'javascript-trace.h', # generated in $OBJDIR if HAVE_DTRACE is defined
- 'jsautokw.h', # generated in $OBJDIR
- 'jsautooplen.h', # generated in $OBJDIR
- 'jscustomallocator.h', # provided by embedders; allowed to be missing
- 'js-config.h', # generated in $OBJDIR
- 'pratom.h', # NSPR
- 'prcvar.h', # NSPR
- 'prinit.h', # NSPR
- 'prlink.h', # NSPR
- 'prlock.h', # NSPR
- 'prprf.h', # NSPR
- 'prthread.h', # NSPR
- 'prtypes.h', # NSPR
- 'selfhosted.out.h', # generated in $OBJDIR
- 'unicode/locid.h', # ICU
- 'unicode/numsys.h', # ICU
- 'unicode/ucal.h', # ICU
- 'unicode/uclean.h', # ICU
- 'unicode/ucol.h', # ICU
- 'unicode/udat.h', # ICU
- 'unicode/udatpg.h', # ICU
- 'unicode/uenum.h', # ICU
- 'unicode/unum.h', # ICU
- 'unicode/ustring.h', # ICU
- 'unicode/utypes.h', # ICU
- 'vtune/VTuneWrapper.h' # VTune
-])
-
-# These files have additional constraints on where they are #included, so we
-# ignore #includes of them when checking #include ordering.
-oddly_ordered_inclnames = set([
- 'ctypes/typedefs.h', # Included multiple times in the body of ctypes/CTypes.h
- 'jsautokw.h', # Included in the body of frontend/TokenStream.h
- 'jswin.h', # Must be #included before <windows.h>
- 'machine/endian.h', # Must be included after <sys/types.h> on BSD
- 'winbase.h', # Must precede other system headers(?)
- 'windef.h' # Must precede other system headers(?)
-])
-
-# The files in tests/style/ contain code that fails this checking in various
-# ways. Here is the output we expect. If the actual output differs from
-# this, one of the following must have happened.
-# - New SpiderMonkey code violates one of the checked rules.
-# - The tests/style/ files have changed without expected_output being changed
-# accordingly.
-# - This script has been broken somehow.
-#
-expected_output = '''\
-js/src/tests/style/BadIncludes2.h:1: error:
- vanilla header includes an inline-header file "tests/style/BadIncludes2-inl.h"
-
-js/src/tests/style/BadIncludes.h:3: error:
- the file includes itself
-
-js/src/tests/style/BadIncludes.h:6: error:
- "BadIncludes2.h" is included using the wrong path;
- did you forget a prefix, or is the file not yet committed?
-
-js/src/tests/style/BadIncludes.h:8: error:
- should be included using
- the #include "..." form
-
-js/src/tests/style/BadIncludes.h:10: error:
- "stdio.h" is included using the wrong path;
- did you forget a prefix, or is the file not yet committed?
-
-js/src/tests/style/BadIncludesOrder-inl.h:5:6: error:
- "vm/Interpreter-inl.h" should be included after "jsscriptinlines.h"
-
-js/src/tests/style/BadIncludesOrder-inl.h:6:7: error:
- "jsscriptinlines.h" should be included after "js/Value.h"
-
-js/src/tests/style/BadIncludesOrder-inl.h:7:8: error:
- "js/Value.h" should be included after "ds/LifoAlloc.h"
-
-js/src/tests/style/BadIncludesOrder-inl.h:8:9: error:
- "ds/LifoAlloc.h" should be included after "jsapi.h"
-
-js/src/tests/style/BadIncludesOrder-inl.h:9:10: error:
- "jsapi.h" should be included after
-
-js/src/tests/style/BadIncludesOrder-inl.h:10:11: error:
- should be included after "mozilla/HashFunctions.h"
-
-js/src/tests/style/BadIncludesOrder-inl.h:27:28: error:
- "jsobj.h" should be included after "jsfun.h"
-
-(multiple files): error:
- header files form one or more cycles
-
- tests/style/HeaderCycleA1.h
- -> tests/style/HeaderCycleA2.h
- -> tests/style/HeaderCycleA3.h
- -> tests/style/HeaderCycleA1.h
-
- tests/style/HeaderCycleB1-inl.h
- -> tests/style/HeaderCycleB2-inl.h
- -> tests/style/HeaderCycleB3-inl.h
- -> tests/style/HeaderCycleB4-inl.h
- -> tests/style/HeaderCycleB1-inl.h
- -> tests/style/jsheadercycleB5inlines.h
- -> tests/style/HeaderCycleB1-inl.h
- -> tests/style/HeaderCycleB4-inl.h
-
-'''.splitlines(True)
-
-actual_output = []
-
-
-def out(*lines):
- for line in lines:
- actual_output.append(line + '\n')
-
-
-def error(filename, linenum, *lines):
- location = filename
- if linenum is not None:
- location += ':' + str(linenum)
- out(location + ': error:')
- for line in (lines):
- out(' ' + line)
- out('')
-
-
-class FileKind(object):
- C = 1
- CPP = 2
- INL_H = 3
- H = 4
- TBL = 5
- MSG = 6
-
- @staticmethod
- def get(filename):
- if filename.endswith('.c'):
- return FileKind.C
-
- if filename.endswith('.cpp'):
- return FileKind.CPP
-
- if filename.endswith(('inlines.h', '-inl.h', 'Inlines.h')):
- return FileKind.INL_H
-
- if filename.endswith('.h'):
- return FileKind.H
-
- if filename.endswith('.tbl'):
- return FileKind.TBL
-
- if filename.endswith('.msg'):
- return FileKind.MSG
-
- error(filename, None, 'unknown file kind')
-
-
-def get_all_filenames():
- '''Get a list of all the files in the (Mercurial or Git) repository.'''
- cmds = [['hg', 'manifest', '-q'], ['git', 'ls-files', '--full-name', '../..']]
- for cmd in cmds:
- try:
- all_filenames = subprocess.check_output(cmd, universal_newlines=True,
- stderr=subprocess.PIPE).split('\n')
- return all_filenames
- except:
- continue
- else:
- raise Exception('failed to run any of the repo manifest commands', cmds)
-
-
-def check_style():
- # We deal with two kinds of name.
- # - A "filename" is a full path to a file from the repository root.
- # - An "inclname" is how a file is referred to in a #include statement.
- #
- # Examples (filename -> inclname)
- # - "mfbt/Attributes.h" -> "mozilla/Attributes.h"
- # - "js/public/Vector.h" -> "js/Vector.h"
- # - "js/src/vm/String.h" -> "vm/String.h"
-
- mfbt_inclnames = set() # type: set(inclname)
- js_names = dict() # type: dict(filename, inclname)
-
- # Select the appropriate files.
- for filename in get_all_filenames():
- if filename.startswith('mfbt/') and filename.endswith('.h'):
- inclname = 'mozilla/' + filename[len('mfbt/'):]
- mfbt_inclnames.add(inclname)
-
- if filename.startswith('js/public/') and filename.endswith('.h'):
- inclname = 'js/' + filename[len('js/public/'):]
- js_names[filename] = inclname
-
- if filename.startswith('js/src/') and \
- not filename.startswith(tuple(ignored_js_src_dirs)) and \
- filename.endswith(('.c', '.cpp', '.h', '.tbl', '.msg')):
- inclname = filename[len('js/src/'):]
- js_names[filename] = inclname
-
- all_inclnames = mfbt_inclnames | set(js_names.values())
-
- edges = dict() # type: dict(inclname, set(inclname))
-
- # We don't care what's inside the MFBT files, but because they are
- # #included from JS files we have to add them to the inclusion graph.
- for inclname in mfbt_inclnames:
- edges[inclname] = set()
-
- # Process all the JS files.
- for filename in js_names.keys():
- inclname = js_names[filename]
- file_kind = FileKind.get(filename)
- if file_kind == FileKind.C or file_kind == FileKind.CPP or \
- file_kind == FileKind.H or file_kind == FileKind.INL_H:
- included_h_inclnames = set() # type: set(inclname)
-
- # This script is run in js/src/, so prepend '../../' to get to the root of the Mozilla
- # source tree.
- with open(os.path.join('../..', filename)) as f:
- do_file(filename, inclname, file_kind, f, all_inclnames, included_h_inclnames)
-
- edges[inclname] = included_h_inclnames
-
- find_cycles(all_inclnames, edges)
-
- # Compare expected and actual output.
- difflines = difflib.unified_diff(expected_output, actual_output,
- fromfile='check_spider_monkey_style.py expected output',
- tofile='check_spider_monkey_style.py actual output')
- ok = True
- for diffline in difflines:
- ok = False
- print(diffline, end='')
-
- return ok
-
-
-def module_name(name):
- '''Strip the trailing .cpp, .h, inlines.h or -inl.h from a filename.'''
-
- return name.replace('inlines.h', '').replace('-inl.h', '').replace('.h', '').replace('.cpp', '')
-
-
-def is_module_header(enclosing_inclname, header_inclname):
- '''Determine if an included name is the "module header", i.e. should be
- first in the file.'''
-
- module = module_name(enclosing_inclname)
-
- # Normal case, e.g. module == "foo/Bar", header_inclname == "foo/Bar.h".
- if module == module_name(header_inclname):
- return True
-
- # A public header, e.g. module == "foo/Bar", header_inclname == "js/Bar.h".
- m = re.match(r'js\/(.*)\.h', header_inclname)
- if m is not None and module.endswith('/' + m.group(1)):
- return True
-
- return False
-
-
-class Include(object):
- '''Important information for a single #include statement.'''
-
- def __init__(self, inclname, linenum, is_system):
- self.inclname = inclname
- self.linenum = linenum
- self.is_system = is_system
-
- def isLeaf(self):
- return True
-
- def section(self, enclosing_inclname):
- '''Identify which section inclname belongs to.
-
- The section numbers are as follows.
- 0. Module header (e.g. jsfoo.h or jsfooinlines.h within jsfoo.cpp)
- 1. mozilla/Foo.h
- # 2. <foo.h> or <foo/bar.h>
- 3. jsfoo.h, prmjtime.h, etc
- 4. foo/Bar.h
- 5. jsfooinlines.h
- 6. foo/Bar-inl.h
- 7. non-.h, e.g. *.tbl, *.msg
- '''
-
- if self.is_system:
- return 2
-
- if not self.inclname.endswith('.h'):
- return 7
-
- # A couple of modules have the .h file in js/ and the .cpp file elsewhere and so need
- # special handling.
- if is_module_header(enclosing_inclname, self.inclname):
- return 0
-
- if '/' in self.inclname:
- if self.inclname.startswith('mozilla/'):
- return 1
-
- if self.inclname.endswith('-inl.h'):
- return 6
-
- return 4
-
- if self.inclname.endswith('inlines.h'):
- return 5
-
- return 3
-
- def quote(self):
- if self.is_system:
- return '<' + self.inclname + '>'
- else:
- return '"' + self.inclname + '"'
-
-
-class HashIfBlock(object):
- '''Important information about a #if/#endif block.
-
- A #if/#endif block is the contents of a #if/#endif (or similar) section.
- The top-level block, which is not within a #if/#endif pair, is also
- considered a block.
-
- Each leaf is either an Include (representing a #include), or another
- nested HashIfBlock.'''
- def __init__(self):
- self.kids = []
-
- def isLeaf(self):
- return False
-
-
-def do_file(filename, inclname, file_kind, f, all_inclnames, included_h_inclnames):
- block_stack = [HashIfBlock()]
-
- # Extract the #include statements as a tree of IBlocks and IIncludes.
- for linenum, line in enumerate(f, start=1):
- # Look for a |#include "..."| line.
- m = re.match(r'\s*#\s*include\s+"([^"]*)"', line)
- if m is not None:
- block_stack[-1].kids.append(Include(m.group(1), linenum, False))
-
- # Look for a |#include <...>| line.
- m = re.match(r'\s*#\s*include\s+<([^>]*)>', line)
- if m is not None:
- block_stack[-1].kids.append(Include(m.group(1), linenum, True))
-
- # Look for a |#{if,ifdef,ifndef}| line.
- m = re.match(r'\s*#\s*(if|ifdef|ifndef)\b', line)
- if m is not None:
- # Open a new block.
- new_block = HashIfBlock()
- block_stack[-1].kids.append(new_block)
- block_stack.append(new_block)
-
- # Look for a |#{elif,else}| line.
- m = re.match(r'\s*#\s*(elif|else)\b', line)
- if m is not None:
- # Close the current block, and open an adjacent one.
- block_stack.pop()
- new_block = HashIfBlock()
- block_stack[-1].kids.append(new_block)
- block_stack.append(new_block)
-
- # Look for a |#endif| line.
- m = re.match(r'\s*#\s*endif\b', line)
- if m is not None:
- # Close the current block.
- block_stack.pop()
-
- def check_include_statement(include):
- '''Check the style of a single #include statement.'''
-
- if include.is_system:
- # Check it is not a known local file (in which case it's probably a system header).
- if include.inclname in included_inclnames_to_ignore or \
- include.inclname in all_inclnames:
- error(filename, include.linenum,
- include.quote() + ' should be included using',
- 'the #include "..." form')
-
- else:
- if include.inclname not in included_inclnames_to_ignore:
- included_kind = FileKind.get(include.inclname)
-
- # Check the #include path has the correct form.
- if include.inclname not in all_inclnames:
- error(filename, include.linenum,
- include.quote() + ' is included ' + 'using the wrong path;',
- 'did you forget a prefix, or is the file not yet committed?')
-
- # Record inclusions of .h files for cycle detection later.
- # (Exclude .tbl and .msg files.)
- elif included_kind == FileKind.H or included_kind == FileKind.INL_H:
- included_h_inclnames.add(include.inclname)
-
- # Check a H file doesn't #include an INL_H file.
- if file_kind == FileKind.H and included_kind == FileKind.INL_H:
- error(filename, include.linenum,
- 'vanilla header includes an inline-header file ' + include.quote())
-
- # Check a file doesn't #include itself. (We do this here because the cycle
- # detection below doesn't detect this case.)
- if inclname == include.inclname:
- error(filename, include.linenum, 'the file includes itself')
-
- def check_includes_order(include1, include2):
- '''Check the ordering of two #include statements.'''
-
- if include1.inclname in oddly_ordered_inclnames or \
- include2.inclname in oddly_ordered_inclnames:
- return
-
- section1 = include1.section(inclname)
- section2 = include2.section(inclname)
- if (section1 > section2) or \
- ((section1 == section2) and (include1.inclname.lower() > include2.inclname.lower())):
- error(filename, str(include1.linenum) + ':' + str(include2.linenum),
- include1.quote() + ' should be included after ' + include2.quote())
-
- # The #include statements in the files in assembler/ and yarr/ have all manner of implicit
- # ordering requirements. Boo. Ignore them.
- skip_order_checking = inclname.startswith(('assembler/', 'yarr/'))
-
- # Check the extracted #include statements, both individually, and the ordering of
- # adjacent pairs that live in the same block.
- def pair_traverse(prev, this):
- if this.isLeaf():
- check_include_statement(this)
- if prev is not None and prev.isLeaf() and not skip_order_checking:
- check_includes_order(prev, this)
- else:
- for prev2, this2 in zip([None] + this.kids[0:-1], this.kids):
- pair_traverse(prev2, this2)
-
- pair_traverse(None, block_stack[-1])
-
-
-def find_cycles(all_inclnames, edges):
- '''Find and draw any cycles.'''
-
- SCCs = tarjan(all_inclnames, edges)
-
- # The various sorted() calls below ensure the output is deterministic.
-
- def draw_SCC(c):
- cset = set(c)
- drawn = set()
- def draw(v, indent):
- out(' ' * indent + ('-> ' if indent else ' ') + v)
- if v in drawn:
- return
- drawn.add(v)
- for succ in sorted(edges[v]):
- if succ in cset:
- draw(succ, indent + 1)
- draw(sorted(c)[0], 0)
- out('')
-
- have_drawn_an_SCC = False
- for scc in sorted(SCCs):
- if len(scc) != 1:
- if not have_drawn_an_SCC:
- error('(multiple files)', None, 'header files form one or more cycles')
- have_drawn_an_SCC = True
-
- draw_SCC(scc)
-
-
-# Tarjan's algorithm for finding the strongly connected components (SCCs) of a graph.
-# https://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm
-def tarjan(V, E):
- vertex_index = {}
- vertex_lowlink = {}
- index = 0
- S = []
- all_SCCs = []
-
- def strongconnect(v, index):
- # Set the depth index for v to the smallest unused index
- vertex_index[v] = index
- vertex_lowlink[v] = index
- index += 1
- S.append(v)
-
- # Consider successors of v
- for w in E[v]:
- if w not in vertex_index:
- # Successor w has not yet been visited; recurse on it
- index = strongconnect(w, index)
- vertex_lowlink[v] = min(vertex_lowlink[v], vertex_lowlink[w])
- elif w in S:
- # Successor w is in stack S and hence in the current SCC
- vertex_lowlink[v] = min(vertex_lowlink[v], vertex_index[w])
-
- # If v is a root node, pop the stack and generate an SCC
- if vertex_lowlink[v] == vertex_index[v]:
- i = S.index(v)
- scc = S[i:]
- del S[i:]
- all_SCCs.append(scc)
-
- return index
-
- for v in V:
- if v not in vertex_index:
- index = strongconnect(v, index)
-
- return all_SCCs
-
-
-def main():
- ok = check_style()
-
- if ok:
- print('TEST-PASS | check_spidermonkey_style.py | ok')
- else:
- print('TEST-UNEXPECTED-FAIL | check_spidermonkey_style.py | actual output does not match expected output; diff is above')
-
- sys.exit(0 if ok else 1)
-
-
-if __name__ == '__main__':
- main()
diff --git a/js/src/config/check_vanilla_allocations.py b/js/src/config/check_vanilla_allocations.py
deleted file mode 100644
index 74ea5e61d0c..00000000000
--- a/js/src/config/check_vanilla_allocations.py
+++ /dev/null
@@ -1,159 +0,0 @@
-# vim: set ts=8 sts=4 et sw=4 tw=79:
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-#----------------------------------------------------------------------------
-# All heap allocations in SpiderMonkey must go through js_malloc, js_calloc,
-# js_realloc, and js_free. This is so that any embedder who uses a custom
-# allocator (by defining JS_USE_CUSTOM_ALLOCATOR) will see all heap allocation
-# go through that custom allocator.
-#
-# Therefore, the presence of any calls to "vanilla" allocation/free functions
-# (e.g. malloc(), free()) is a bug.
-#
-# This script checks for the presence of such disallowed vanilla
-# allocation/free functions in SpiderMonkey when it's built as a library. It
-# relies on |nm| from the GNU binutils, and so only works on Linux, but one
-# platform is good enough to catch almost all violations.
-#
-# This checking is only 100% reliable in a JS_USE_CUSTOM_ALLOCATOR build in
-# which the default definitions of js_malloc et al (in Utility.h) -- which call
-# malloc et al -- are replaced with empty definitions. This is because the
-# presence and possible inlining of the default js_malloc et al can cause
-# malloc/calloc/realloc/free calls to show up in unpredictable places.
-#
-# Unfortunately, that configuration cannot be tested on Mozilla's standard
-# testing infrastructure. Instead, by default this script only tests that none
-# of the other vanilla allocation/free functions (operator new, memalign, etc)
-# are present. If given the --aggressive flag, it will also check for
-# malloc/calloc/realloc/free.
-#
-# Note: We don't check for |operator delete| and |operator delete[]|. These
-# can be present somehow due to virtual destructors, but this is not too worrying
-# because vanilla delete/delete[] calls don't make sense without corresponding
-# vanilla new/new[] calls, and any explicit calls will be caught by Valgrind's
-# mismatched alloc/free checking.
-#----------------------------------------------------------------------------
-
-from __future__ import print_function
-
-import argparse
-import re
-import subprocess
-import sys
-
-# The obvious way to implement this script is to search for occurrences of
-# malloc et al, succeed if none are found, and fail if some are found.
-# However, "none are found" does not necessarily mean "none are present" --
-# this script could be buggy. (Or the output format of |nm| might change in
-# the future.)
-#
-# So jsutil.cpp deliberately contains a (never-called) function that contains a
-# single use of all the vanilla allocation/free functions. And this script
-# fails if it (a) finds uses of those functions in files other than jsutil.cpp,
-# *or* (b) fails to find them in jsutil.cpp.
-
-# Tracks overall success of the test.
-has_failed = False
-
-
-def fail(msg):
- print('TEST-UNEXPECTED-FAIL | check_vanilla_allocations.py |', msg)
- global has_failed
- has_failed = True
-
-
-def main():
- parser = argparse.ArgumentParser()
- parser.add_argument('--aggressive', action='store_true',
- help='also check for malloc, calloc, realloc and free')
- parser.add_argument('file', type=str,
- help='name of the file to check')
- args = parser.parse_args()
-
- # Run |nm|. Options:
- # -u: show only undefined symbols
- # -C: demangle symbol names
- # -l: show a filename and line number for each undefined symbol
- cmd = ['nm', '-u', '-C', '-l', args.file]
- lines = subprocess.check_output(cmd, universal_newlines=True,
- stderr=subprocess.PIPE).split('\n')
-
- # alloc_fns contains all the vanilla allocation/free functions that we look
- # for. Regexp chars are escaped appropriately.
-
- alloc_fns = [
- # Matches |operator new(unsigned T)|, where |T| is |int| or |long|.
- r'operator new\(unsigned',
-
- # Matches |operator new[](unsigned T)|, where |T| is |int| or |long|.
- r'operator new\[\]\(unsigned',
-
- r'memalign',
- # These two aren't available on all Linux configurations.
- #r'posix_memalign',
- #r'aligned_alloc',
- r'valloc',
- r'strdup'
- ]
-
- if args.aggressive:
- alloc_fns += [
- r'malloc',
- r'calloc',
- r'realloc',
- r'free'
- ]
-
- # This is like alloc_fns, but regexp chars are not escaped.
- alloc_fns_unescaped = [fn.translate(None, r'\\') for fn in alloc_fns]
-
- # This regexp matches the relevant lines in the output of |nm|, which look
- # like the following.
- #
- # U malloc /path/to/objdir/dist/include/js/Utility.h:142
- #
- alloc_fns_re = r'U (' + r'|'.join(alloc_fns) + r').*\/([\w\.]+):(\d+)$'
-
- # This tracks which allocation/free functions have been seen in jsutil.cpp.
- jsutil_cpp = set([])
-
- for line in lines:
- m = re.search(alloc_fns_re, line)
- if m is None:
- continue
-
- fn = m.group(1)
- filename = m.group(2)
- linenum = m.group(3)
- if filename == 'jsutil.cpp':
- jsutil_cpp.add(fn)
- else:
- # An allocation is present in a non-special file. Fail!
- fail("'" + fn + "' present at " + filename + ':' + linenum)
-
-
- # Check that all functions we expect are used in jsutil.cpp. (This will
- # fail if the function-detection code breaks at any point.)
- for fn in alloc_fns_unescaped:
- if fn not in jsutil_cpp:
- fail("'" + fn + "' isn't used as expected in jsutil.cpp")
- else:
- jsutil_cpp.remove(fn)
-
- # This should never happen, but check just in case.
- if jsutil_cpp:
- fail('unexpected allocation fns used in jsutil.cpp: ' +
- ', '.join(jsutil_cpp))
-
- if has_failed:
- sys.exit(1)
-
- print('TEST-PASS | check_vanilla_allocations.py | ok')
- sys.exit(0)
-
-
-if __name__ == '__main__':
- main()
-
diff --git a/js/src/config/config.mk b/js/src/config/config.mk
deleted file mode 100644
index f405445ebfd..00000000000
--- a/js/src/config/config.mk
+++ /dev/null
@@ -1,910 +0,0 @@
-#
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-#
-# config.mk
-#
-# Determines the platform and builds the macros needed to load the
-# appropriate platform-specific .mk file, then defines all (most?)
-# of the generic macros.
-#
-
-# Define an include-at-most-once flag
-ifdef INCLUDED_CONFIG_MK
-$(error Do not include config.mk twice!)
-endif
-INCLUDED_CONFIG_MK = 1
-
-EXIT_ON_ERROR = set -e; # Shell loops continue past errors without this.
-
-ifndef topsrcdir
-topsrcdir = $(DEPTH)
-endif
-
-ifndef INCLUDED_AUTOCONF_MK
-include $(DEPTH)/config/autoconf.mk
-endif
-
--include $(DEPTH)/.mozconfig.mk
-
-# Integrate with mozbuild-generated make files. We first verify that no
-# variables provided by the automatically generated .mk files are
-# present. If they are, this is a violation of the separation of
-# responsibility between Makefile.in and mozbuild files.
-_MOZBUILD_EXTERNAL_VARIABLES := \
- ANDROID_GENERATED_RESFILES \
- ANDROID_RES_DIRS \
- CMSRCS \
- CMMSRCS \
- CPP_UNIT_TESTS \
- DIRS \
- EXTRA_PP_COMPONENTS \
- EXTRA_PP_JS_MODULES \
- FORCE_SHARED_LIB \
- FORCE_STATIC_LIB \
- FINAL_LIBRARY \
- HOST_CSRCS \
- HOST_CMMSRCS \
- HOST_LIBRARY_NAME \
- HOST_PROGRAM \
- HOST_SIMPLE_PROGRAMS \
- IS_COMPONENT \
- JAR_MANIFEST \
- JAVA_JAR_TARGETS \
- JS_MODULES_PATH \
- LIBRARY_NAME \
- LIBXUL_LIBRARY \
- MODULE \
- MSVC_ENABLE_PGO \
- NO_DIST_INSTALL \
- PARALLEL_DIRS \
- PROGRAM \
- SDK_HEADERS \
- SIMPLE_PROGRAMS \
- TEST_DIRS \
- TIERS \
- TOOL_DIRS \
- XPCSHELL_TESTS \
- XPIDL_MODULE \
- $(NULL)
-
-_DEPRECATED_VARIABLES := \
- ANDROID_RESFILES \
- MOCHITEST_FILES_PARTS \
- MOCHITEST_BROWSER_FILES_PARTS \
- SHORT_LIBNAME \
- $(NULL)
-
-ifndef EXTERNALLY_MANAGED_MAKE_FILE
-# Using $(firstword) may not be perfect. But it should be good enough for most
-# scenarios.
-_current_makefile = $(CURDIR)/$(firstword $(MAKEFILE_LIST))
-
-$(foreach var,$(_MOZBUILD_EXTERNAL_VARIABLES),$(if $(filter file override,$(subst $(NULL) ,_,$(origin $(var)))),\
- $(error Variable $(var) is defined in $(_current_makefile). It should only be defined in moz.build files),\
- ))
-
-$(foreach var,$(_DEPRECATED_VARIABLES),$(if $(filter file override,$(subst $(NULL) ,_,$(origin $(var)))),\
- $(error Variable $(var) is defined in $(_current_makefile). This variable has been deprecated. It does nothing. It must be removed in order to build)\
- ))
-
-# Import the automatically generated backend file. If this file doesn't exist,
-# the backend hasn't been properly configured. We want this to be a fatal
-# error, hence not using "-include".
-ifndef STANDALONE_MAKEFILE
-GLOBAL_DEPS += backend.mk
-include backend.mk
-endif
-
-# Freeze the values specified by moz.build to catch them if they fail.
-
-$(foreach var,$(_MOZBUILD_EXTERNAL_VARIABLES),$(eval $(var)_FROZEN := '$($(var))'))
-$(foreach var,$(_DEPRECATED_VARIABLES),$(eval $(var)_FROZEN := '$($(var))'))
-
-CHECK_MOZBUILD_VARIABLES = $(foreach var,$(_MOZBUILD_EXTERNAL_VARIABLES), \
- $(if $(subst $($(var)_FROZEN),,'$($(var))'), \
- $(error Variable $(var) is defined in $(_current_makefile). It should only be defined in moz.build files),\
- )) $(foreach var,$(_DEPRECATED_VARIABLES), \
- $(if $(subst $($(var)_FROZEN),,'$($(var))'), \
- $(error Variable $(var) is defined in $(_current_makefile). This variable has been deprecated. It does nothing. It must be removed in order to build),\
- ))
-
-endif
-
-space = $(NULL) $(NULL)
-
-# Include defs.mk files that can be found in $(srcdir)/$(DEPTH),
-# $(srcdir)/$(DEPTH-1), $(srcdir)/$(DEPTH-2), etc., and $(srcdir)
-# where $(DEPTH-1) is one level less of depth, $(DEPTH-2), two, etc.
-# i.e. for DEPTH=../../.., DEPTH-1 is ../.. and DEPTH-2 is ..
-# These defs.mk files are used to define variables in a directory
-# and all its subdirectories, recursively.
-__depth := $(subst /, ,$(DEPTH))
-ifeq (.,$(__depth))
-__depth :=
-endif
-$(foreach __d,$(__depth) .,$(eval __depth = $(wordlist 2,$(words $(__depth)),$(__depth))$(eval -include $(subst $(space),/,$(strip $(srcdir) $(__depth) defs.mk)))))
-
-COMMA = ,
-
-# Sanity check some variables
-CHECK_VARS := \
- XPI_NAME \
- LIBRARY_NAME \
- MODULE \
- DEPTH \
- XPI_PKGNAME \
- INSTALL_EXTENSION_ID \
- SHARED_LIBRARY_NAME \
- STATIC_LIBRARY_NAME \
- $(NULL)
-
-# checks for internal spaces or trailing spaces in the variable
-# named by $x
-check-variable = $(if $(filter-out 0 1,$(words $($(x))z)),$(error Spaces are not allowed in $(x)))
-
-$(foreach x,$(CHECK_VARS),$(check-variable))
-
-ifndef INCLUDED_FUNCTIONS_MK
-include $(topsrcdir)/config/makefiles/functions.mk
-endif
-
-RM = rm -f
-
-# LIBXUL_DIST is not defined under js/src, thus we make it mean DIST there.
-LIBXUL_DIST ?= $(DIST)
-
-# FINAL_TARGET specifies the location into which we copy end-user-shipped
-# build products (typelibs, components, chrome). It may already be specified by
-# a moz.build file.
-#
-# If XPI_NAME is set, the files will be shipped to $(DIST)/xpi-stage/$(XPI_NAME)
-# instead of $(DIST)/bin. In both cases, if DIST_SUBDIR is set, the files will be
-# shipped to a $(DIST_SUBDIR) subdirectory.
-FINAL_TARGET ?= $(if $(XPI_NAME),$(DIST)/xpi-stage/$(XPI_NAME),$(DIST)/bin)$(DIST_SUBDIR:%=/%)
-# Override the stored value for the check to make sure that the variable is not
-# redefined in the Makefile.in value.
-FINAL_TARGET_FROZEN := '$(FINAL_TARGET)'
-
-ifdef XPI_NAME
-DEFINES += -DXPI_NAME=$(XPI_NAME)
-endif
-
-# The VERSION_NUMBER is suffixed onto the end of the DLLs we ship.
-VERSION_NUMBER = 50
-
-ifeq ($(HOST_OS_ARCH),WINNT)
-win_srcdir := $(subst $(topsrcdir),$(WIN_TOP_SRC),$(srcdir))
-BUILD_TOOLS = $(WIN_TOP_SRC)/build/unix
-else
-win_srcdir := $(srcdir)
-BUILD_TOOLS = $(topsrcdir)/build/unix
-endif
-
-CONFIG_TOOLS = $(MOZ_BUILD_ROOT)/config
-AUTOCONF_TOOLS = $(topsrcdir)/build/autoconf
-
-# Disable MOZ_PSEUDO_DERECURSE when it contains no-pymake and we're running
-# pymake. This can be removed when no-pymake is removed from the default in
-# build/autoconf/compiler-opts.m4.
-ifdef .PYMAKE
-comma = ,
-ifneq (,$(filter no-pymake,$(subst $(comma), ,$(MOZ_PSEUDO_DERECURSE))))
-MOZ_PSEUDO_DERECURSE :=
-endif
-endif
-
-# Disable MOZ_PSEUDO_DERECURSE on PGO builds until it's fixed.
-ifneq (,$(MOZ_PROFILE_USE)$(MOZ_PROFILE_GENERATE))
-MOZ_PSEUDO_DERECURSE :=
-endif
-
-#
-# Strip off the excessively long version numbers on these platforms,
-# but save the version to allow multiple versions of the same base
-# platform to be built in the same tree.
-#
-ifneq (,$(filter FreeBSD HP-UX Linux NetBSD OpenBSD SunOS,$(OS_ARCH)))
-OS_RELEASE := $(basename $(OS_RELEASE))
-
-# Allow the user to ignore the OS_VERSION, which is usually irrelevant.
-ifdef WANT_MOZILLA_CONFIG_OS_VERSION
-OS_VERS := $(suffix $(OS_RELEASE))
-OS_VERSION := $(shell echo $(OS_VERS) | sed 's/-.*//')
-endif
-
-endif
-
-OS_CONFIG := $(OS_ARCH)$(OS_RELEASE)
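-# For example (illustrative): on Linux with OS_RELEASE=3.12.4, OS_RELEASE is
-# stripped to 3.12 and OS_CONFIG becomes Linux3.12.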
-
-ifdef _MSC_VER
-CC_WRAPPER ?= $(call py_action,cl)
-CXX_WRAPPER ?= $(call py_action,cl)
-endif # _MSC_VER
-
-CC := $(CC_WRAPPER) $(CC)
-CXX := $(CXX_WRAPPER) $(CXX)
-MKDIR ?= mkdir
-SLEEP ?= sleep
-TOUCH ?= touch
-
-ifdef .PYMAKE
-PYCOMMANDPATH += $(PYTHON_SITE_PACKAGES)
-endif
-
-PYTHON_PATH = $(PYTHON) $(topsrcdir)/config/pythonpath.py
-
-# determine debug-related options
-_DEBUG_ASFLAGS :=
-_DEBUG_CFLAGS :=
-_DEBUG_LDFLAGS :=
-
-ifdef MOZ_DEBUG
- _DEBUG_CFLAGS += $(MOZ_DEBUG_ENABLE_DEFS)
- XULPPFLAGS += $(MOZ_DEBUG_ENABLE_DEFS)
-else
- _DEBUG_CFLAGS += $(MOZ_DEBUG_DISABLE_DEFS)
- XULPPFLAGS += $(MOZ_DEBUG_DISABLE_DEFS)
-endif
-
-ifneq (,$(MOZ_DEBUG)$(MOZ_DEBUG_SYMBOLS))
- ifeq ($(AS),yasm)
- ifeq ($(OS_ARCH)_$(GNU_CC),WINNT_)
- _DEBUG_ASFLAGS += -g cv8
- else
- ifneq ($(OS_ARCH),Darwin)
- _DEBUG_ASFLAGS += -g dwarf2
- endif
- endif
- else
- _DEBUG_ASFLAGS += $(MOZ_DEBUG_FLAGS)
- endif
- _DEBUG_CFLAGS += $(MOZ_DEBUG_FLAGS)
- _DEBUG_LDFLAGS += $(MOZ_DEBUG_LDFLAGS)
-endif
-
-MOZALLOC_LIB = $(call EXPAND_LIBNAME_PATH,mozalloc,$(DIST)/lib)
-
-ASFLAGS += $(_DEBUG_ASFLAGS)
-OS_CFLAGS += $(_DEBUG_CFLAGS)
-OS_CXXFLAGS += $(_DEBUG_CFLAGS)
-OS_LDFLAGS += $(_DEBUG_LDFLAGS)
-
-# XXX: What does this do? Bug 482434 filed for better explanation.
-ifeq ($(OS_ARCH)_$(GNU_CC),WINNT_)
-ifdef MOZ_DEBUG
-ifneq (,$(MOZ_BROWSE_INFO)$(MOZ_BSCFILE))
-OS_CFLAGS += -FR
-OS_CXXFLAGS += -FR
-endif
-else # ! MOZ_DEBUG
-
-# MOZ_DEBUG_SYMBOLS generates debug symbols in separate PDB files.
-# Used for generating an optimized build with debugging symbols.
-# Used in the Windows nightlies to generate symbols for crash reporting.
-ifdef MOZ_DEBUG_SYMBOLS
-OS_CXXFLAGS += -UDEBUG -DNDEBUG
-OS_CFLAGS += -UDEBUG -DNDEBUG
-ifdef HAVE_64BIT_OS
-OS_LDFLAGS += -DEBUG -OPT:REF,ICF
-else
-OS_LDFLAGS += -DEBUG -OPT:REF
-endif
-endif
-
-#
-# Handle trace-malloc and DMD in optimized builds.
-# Disable optimization to get sane callstacks.
-#
-ifneq (,$(NS_TRACE_MALLOC)$(MOZ_DMD))
-MOZ_OPTIMIZE_FLAGS=-Zi -Od -UDEBUG -DNDEBUG
-ifdef HAVE_64BIT_OS
-OS_LDFLAGS = -DEBUG -OPT:REF,ICF
-else
-OS_LDFLAGS = -DEBUG -OPT:REF
-endif
-endif # NS_TRACE_MALLOC || MOZ_DMD
-
-endif # MOZ_DEBUG
-
-# We don't build a static CRT when building a custom CRT;
-# it appears to be broken. So don't link to jemalloc if
-# the Makefile wants static CRT linking.
-ifeq ($(MOZ_MEMORY)_$(USE_STATIC_LIBS),1_1)
-# Disable default CRT libs and add the right lib path for the linker
-MOZ_GLUE_LDFLAGS=
-endif
-
-endif # WINNT && !GNU_CC
-
-ifdef MOZ_GLUE_PROGRAM_LDFLAGS
-DEFINES += -DMOZ_GLUE_IN_PROGRAM
-else
-MOZ_GLUE_PROGRAM_LDFLAGS=$(MOZ_GLUE_LDFLAGS)
-endif
-
-#
-# Build using PIC by default
-#
-_ENABLE_PIC=1
-
-# Determine if module being compiled is destined
-# to be merged into libxul
-
-ifeq ($(FINAL_LIBRARY),xul)
- ifdef LIBXUL_LIBRARY
- $(error FINAL_LIBRARY is "xul", LIBXUL_LIBRARY is implied)
- endif
- LIBXUL_LIBRARY := 1
-endif
-
-ifdef LIBXUL_LIBRARY
-ifdef IS_COMPONENT
-$(error IS_COMPONENT is set, but is not compatible with LIBXUL_LIBRARY)
-endif
-FORCE_STATIC_LIB=1
-endif
-
-# If we are building this component into an extension/xulapp, it cannot be
-# statically linked. In the future we may want to add a xulapp meta-component
-# build option.
-
-ifdef XPI_NAME
-ifdef IS_COMPONENT
-EXPORT_LIBRARY=
-FORCE_STATIC_LIB=
-FORCE_SHARED_LIB=1
-endif
-endif
-
-ifndef SHARED_LIBRARY_NAME
-ifdef LIBRARY_NAME
-SHARED_LIBRARY_NAME=$(LIBRARY_NAME)
-endif
-endif
-
-ifndef STATIC_LIBRARY_NAME
-ifdef LIBRARY_NAME
-STATIC_LIBRARY_NAME=$(LIBRARY_NAME)
-endif
-endif
-
-# PGO on MSVC is opt-in
-ifdef _MSC_VER
-ifndef MSVC_ENABLE_PGO
-NO_PROFILE_GUIDED_OPTIMIZE = 1
-endif
-endif
-
-# No sense in profiling tools
-ifdef INTERNAL_TOOLS
-NO_PROFILE_GUIDED_OPTIMIZE = 1
-endif
-
-# Don't build SIMPLE_PROGRAMS with PGO, since they don't need it anyway,
-# and we don't have the same build logic to re-link them in the second pass.
-ifdef SIMPLE_PROGRAMS
-NO_PROFILE_GUIDED_OPTIMIZE = 1
-endif
-
-# No sense in profiling unit tests
-ifdef CPP_UNIT_TESTS
-NO_PROFILE_GUIDED_OPTIMIZE = 1
-endif
-
-# Enable profile-based feedback
-ifneq (1,$(NO_PROFILE_GUIDED_OPTIMIZE))
-ifdef MOZ_PROFILE_GENERATE
-OS_CFLAGS += $(if $(filter $(notdir $<),$(notdir $(NO_PROFILE_GUIDED_OPTIMIZE))),,$(PROFILE_GEN_CFLAGS))
-OS_CXXFLAGS += $(if $(filter $(notdir $<),$(notdir $(NO_PROFILE_GUIDED_OPTIMIZE))),,$(PROFILE_GEN_CFLAGS))
-OS_LDFLAGS += $(PROFILE_GEN_LDFLAGS)
-ifeq (WINNT,$(OS_ARCH))
-AR_FLAGS += -LTCG
-endif
-endif # MOZ_PROFILE_GENERATE
-
-ifdef MOZ_PROFILE_USE
-OS_CFLAGS += $(if $(filter $(notdir $<),$(notdir $(NO_PROFILE_GUIDED_OPTIMIZE))),,$(PROFILE_USE_CFLAGS))
-OS_CXXFLAGS += $(if $(filter $(notdir $<),$(notdir $(NO_PROFILE_GUIDED_OPTIMIZE))),,$(PROFILE_USE_CFLAGS))
-OS_LDFLAGS += $(PROFILE_USE_LDFLAGS)
-ifeq (WINNT,$(OS_ARCH))
-AR_FLAGS += -LTCG
-endif
-endif # MOZ_PROFILE_USE
-endif # NO_PROFILE_GUIDED_OPTIMIZE
-
-
-# Does the makefile specify the internal XPCOM API linkage?
-ifneq (,$(MOZILLA_INTERNAL_API)$(LIBXUL_LIBRARY))
-DEFINES += -DMOZILLA_INTERNAL_API
-endif
-
-# Force XPCOM/widget/gfx methods to be __declspec(dllexport) when we're
-# building libxul libraries
-ifdef LIBXUL_LIBRARY
-DEFINES += \
- -DIMPL_LIBXUL \
- $(NULL)
-
-ifndef JS_SHARED_LIBRARY
-DEFINES += -DSTATIC_EXPORTABLE_JS_API
-endif
-endif
-
-MAKE_JARS_FLAGS = \
- -t $(topsrcdir) \
- -f $(MOZ_CHROME_FILE_FORMAT) \
- $(NULL)
-
-ifdef USE_EXTENSION_MANIFEST
-MAKE_JARS_FLAGS += -e
-endif
-
-TAR_CREATE_FLAGS = -chf
-
-ifeq ($(OS_ARCH),OS2)
-TAR_CREATE_FLAGS = -cf
-endif
-
-#
-# Personal makefile customizations go in these optional make include files.
-#
-MY_CONFIG := $(DEPTH)/config/myconfig.mk
-MY_RULES := $(DEPTH)/config/myrules.mk
-
-#
-# Default command macros; can be overridden in .mk.
-#
-CCC = $(CXX)
-
-# Java macros
-JAVA_GEN_DIR = _javagen
-JAVA_DIST_DIR = $(DEPTH)/$(JAVA_GEN_DIR)
-JAVA_IFACES_PKG_NAME = org/mozilla/interfaces
-
-OS_INCLUDES += $(MOZ_JPEG_CFLAGS) $(MOZ_PNG_CFLAGS) $(MOZ_ZLIB_CFLAGS) $(MOZ_PIXMAN_CFLAGS)
-
-# NSPR_CFLAGS and NSS_CFLAGS must appear ahead of OS_INCLUDES to avoid Linux
-# builds wrongly picking up system NSPR/NSS header files.
-INCLUDES = \
- -I$(srcdir) \
- -I. \
- $(LOCAL_INCLUDES) \
- -I$(DIST)/include \
- $(if $(LIBXUL_SDK),-I$(LIBXUL_SDK)/include) \
- $(NSPR_CFLAGS) $(NSS_CFLAGS) \
- $(OS_INCLUDES) \
- $(NULL)
-
-include $(topsrcdir)/config/static-checking-config.mk
-
-CFLAGS = $(OS_CPPFLAGS) $(OS_CFLAGS)
-CXXFLAGS = $(OS_CPPFLAGS) $(OS_CXXFLAGS)
-LDFLAGS = $(OS_LDFLAGS) $(MOZ_FIX_LINK_PATHS)
-
-# Allow each module to override the *default* optimization settings
-# by setting MODULE_OPTIMIZE_FLAGS if the developer has not given
-# arguments to --enable-optimize
-ifdef MOZ_OPTIMIZE
-ifeq (1,$(MOZ_OPTIMIZE))
-ifdef MODULE_OPTIMIZE_FLAGS
-CFLAGS += $(MODULE_OPTIMIZE_FLAGS)
-CXXFLAGS += $(MODULE_OPTIMIZE_FLAGS)
-else
-ifneq (,$(if $(MOZ_PROFILE_GENERATE)$(MOZ_PROFILE_USE),$(MOZ_PGO_OPTIMIZE_FLAGS)))
-CFLAGS += $(MOZ_PGO_OPTIMIZE_FLAGS)
-CXXFLAGS += $(MOZ_PGO_OPTIMIZE_FLAGS)
-else
-CFLAGS += $(MOZ_OPTIMIZE_FLAGS)
-CXXFLAGS += $(MOZ_OPTIMIZE_FLAGS)
-endif # neq (,$(MOZ_PROFILE_GENERATE)$(MOZ_PROFILE_USE))
-endif # MODULE_OPTIMIZE_FLAGS
-else
-CFLAGS += $(MOZ_OPTIMIZE_FLAGS)
-CXXFLAGS += $(MOZ_OPTIMIZE_FLAGS)
-endif # MOZ_OPTIMIZE == 1
-LDFLAGS += $(MOZ_OPTIMIZE_LDFLAGS)
-endif # MOZ_OPTIMIZE
-
-ifdef CROSS_COMPILE
-HOST_CFLAGS += $(HOST_OPTIMIZE_FLAGS)
-else
-ifdef MOZ_OPTIMIZE
-ifeq (1,$(MOZ_OPTIMIZE))
-ifdef MODULE_OPTIMIZE_FLAGS
-HOST_CFLAGS += $(MODULE_OPTIMIZE_FLAGS)
-else
-HOST_CFLAGS += $(MOZ_OPTIMIZE_FLAGS)
-endif # MODULE_OPTIMIZE_FLAGS
-else
-HOST_CFLAGS += $(MOZ_OPTIMIZE_FLAGS)
-endif # MOZ_OPTIMIZE == 1
-endif # MOZ_OPTIMIZE
-endif # CROSS_COMPILE
-
-CFLAGS += $(MOZ_FRAMEPTR_FLAGS)
-CXXFLAGS += $(MOZ_FRAMEPTR_FLAGS)
-
-# Check for FAIL_ON_WARNINGS & FAIL_ON_WARNINGS_DEBUG (Shorthand for Makefiles
-# to request that we use the 'warnings as errors' compile flags)
-
-# NOTE: First, we clear FAIL_ON_WARNINGS[_DEBUG] if we're doing a Windows PGO
-# build, since WARNINGS_AS_ERRORS has been suspected of causing issues in that
-# situation. (See bug 437002.)
-ifeq (WINNT_1,$(OS_ARCH)_$(MOZ_PROFILE_GENERATE)$(MOZ_PROFILE_USE))
-FAIL_ON_WARNINGS_DEBUG=
-FAIL_ON_WARNINGS=
-endif # WINNT && (MOZ_PROFILE_GENERATE ^ MOZ_PROFILE_USE)
-
-# Now, check for debug version of flag; it turns on normal flag in debug builds.
-ifdef FAIL_ON_WARNINGS_DEBUG
-ifdef MOZ_DEBUG
-FAIL_ON_WARNINGS = 1
-endif # MOZ_DEBUG
-endif # FAIL_ON_WARNINGS_DEBUG
-
-# Check for normal version of flag, and add WARNINGS_AS_ERRORS if it's set to 1.
-ifdef FAIL_ON_WARNINGS
-CXXFLAGS += $(WARNINGS_AS_ERRORS)
-CFLAGS += $(WARNINGS_AS_ERRORS)
-endif # FAIL_ON_WARNINGS
-
-ifeq ($(OS_ARCH)_$(GNU_CC),WINNT_)
-#// Currently, unless USE_STATIC_LIBS is defined, the multithreaded
-#// DLL version of the RTL is used...
-#//
-#//------------------------------------------------------------------------
-ifdef USE_STATIC_LIBS
-RTL_FLAGS=-MT # Statically linked multithreaded RTL
-ifneq (,$(MOZ_DEBUG)$(NS_TRACE_MALLOC))
-ifndef MOZ_NO_DEBUG_RTL
-RTL_FLAGS=-MTd # Statically linked multithreaded MSVC4.0 debug RTL
-endif
-endif # MOZ_DEBUG || NS_TRACE_MALLOC
-
-else # !USE_STATIC_LIBS
-
-RTL_FLAGS=-MD # Dynamically linked, multithreaded RTL
-ifneq (,$(MOZ_DEBUG)$(NS_TRACE_MALLOC))
-ifndef MOZ_NO_DEBUG_RTL
-RTL_FLAGS=-MDd # Dynamically linked, multithreaded MSVC4.0 debug RTL
-endif
-endif # MOZ_DEBUG || NS_TRACE_MALLOC
-endif # USE_STATIC_LIBS
-endif # WINNT && !GNU_CC
-
-ifeq ($(OS_ARCH),Darwin)
-# Compiling ObjC requires an Apple compiler anyway, so it's ok to set
-# host CMFLAGS here.
-HOST_CMFLAGS += -fobjc-exceptions
-HOST_CMMFLAGS += -fobjc-exceptions
-OS_COMPILE_CMFLAGS += -fobjc-exceptions
-OS_COMPILE_CMMFLAGS += -fobjc-exceptions
-ifeq ($(MOZ_WIDGET_TOOLKIT),uikit)
-OS_COMPILE_CMFLAGS += -fobjc-abi-version=2 -fobjc-legacy-dispatch
-OS_COMPILE_CMMFLAGS += -fobjc-abi-version=2 -fobjc-legacy-dispatch
-endif
-endif
-
-COMPILE_CFLAGS = $(VISIBILITY_FLAGS) $(DEFINES) $(INCLUDES) $(DSO_CFLAGS) $(DSO_PIC_CFLAGS) $(RTL_FLAGS) $(OS_CPPFLAGS) $(OS_COMPILE_CFLAGS) $(CFLAGS) $(EXTRA_COMPILE_FLAGS)
-COMPILE_CXXFLAGS = $(STL_FLAGS) $(VISIBILITY_FLAGS) $(DEFINES) $(INCLUDES) $(DSO_CFLAGS) $(DSO_PIC_CFLAGS) $(RTL_FLAGS) $(OS_CPPFLAGS) $(OS_COMPILE_CXXFLAGS) $(CXXFLAGS) $(EXTRA_COMPILE_FLAGS)
-COMPILE_CMFLAGS = $(OS_COMPILE_CMFLAGS) $(EXTRA_COMPILE_FLAGS)
-COMPILE_CMMFLAGS = $(OS_COMPILE_CMMFLAGS) $(EXTRA_COMPILE_FLAGS)
-ASFLAGS += $(EXTRA_ASSEMBLER_FLAGS)
-
-ifndef CROSS_COMPILE
-HOST_CFLAGS += $(RTL_FLAGS)
-endif
-
-#
-# Name of the binary code directories
-#
-# Override defaults
-
-# We need to know where to find the libraries we
-# put on the link line for binaries, and whether
-# to link statically or dynamically. Assuming dynamic for now.
-
-ifneq (WINNT_,$(OS_ARCH)_$(GNU_CC))
-LIBS_DIR = -L$(DIST)/bin -L$(DIST)/lib
-ifdef LIBXUL_SDK
-LIBS_DIR += -L$(LIBXUL_SDK)/bin -L$(LIBXUL_SDK)/lib
-endif
-endif
-
-# Default location of include files
-ifndef LIBXUL_SDK
-IDL_PARSER_DIR = $(topsrcdir)/xpcom/idl-parser
-IDL_PARSER_CACHE_DIR = $(DEPTH)/xpcom/idl-parser
-else
-IDL_PARSER_DIR = $(LIBXUL_SDK)/sdk/bin
-IDL_PARSER_CACHE_DIR = $(LIBXUL_SDK)/sdk/bin
-endif
-
-SDK_LIB_DIR = $(DIST)/sdk/lib
-SDK_BIN_DIR = $(DIST)/sdk/bin
-
-DEPENDENCIES = .md
-
-MOZ_COMPONENT_LIBS=$(XPCOM_LIBS) $(MOZ_COMPONENT_NSPR_LIBS)
-
-ifdef MACOSX_DEPLOYMENT_TARGET
-export MACOSX_DEPLOYMENT_TARGET
-endif # MACOSX_DEPLOYMENT_TARGET
-
-ifdef MOZ_USING_CCACHE
-ifdef CLANG_CXX
-export CCACHE_CPP2=1
-endif
-endif
-
-# Set link flags according to whether we want a console.
-ifdef MOZ_WINCONSOLE
-ifeq ($(MOZ_WINCONSOLE),1)
-ifeq ($(OS_ARCH),OS2)
-BIN_FLAGS += -Zlinker -PM:VIO
-endif
-ifeq ($(OS_ARCH),WINNT)
-ifdef GNU_CC
-WIN32_EXE_LDFLAGS += -mconsole
-else
-WIN32_EXE_LDFLAGS += -SUBSYSTEM:CONSOLE
-endif
-endif
-else # MOZ_WINCONSOLE
-ifeq ($(OS_ARCH),OS2)
-BIN_FLAGS += -Zlinker -PM:PM
-endif
-ifeq ($(OS_ARCH),WINNT)
-ifdef GNU_CC
-WIN32_EXE_LDFLAGS += -mwindows
-else
-WIN32_EXE_LDFLAGS += -SUBSYSTEM:WINDOWS
-endif
-endif
-endif
-endif
-
-ifdef _MSC_VER
-ifeq ($(CPU_ARCH),x86_64)
-# set stack to 2MB on x64 build. See bug 582910
-WIN32_EXE_LDFLAGS += -STACK:2097152
-endif
-endif
-
-# If we're building a component on MSVC, we don't want to generate an
-# import lib, because that import lib will collide with the name of a
-# static version of the same library.
-ifeq ($(GNU_LD)$(OS_ARCH),WINNT)
-ifdef IS_COMPONENT
-LDFLAGS += -IMPLIB:fake.lib
-DELETE_AFTER_LINK = fake.lib fake.exp
-endif
-endif
-
-#
-# Include any personal overrides the user might think are needed.
-#
--include $(topsrcdir)/$(MOZ_BUILD_APP)/app-config.mk
--include $(MY_CONFIG)
-
-######################################################################
-
-GARBAGE += $(DEPENDENCIES) core $(wildcard core.[0-9]*) $(wildcard *.err) $(wildcard *.pure) $(wildcard *_pure_*.o) Templates.DB
-
-ifeq ($(OS_ARCH),Darwin)
-ifndef NSDISTMODE
-NSDISTMODE=absolute_symlink
-endif
-PWD := $(CURDIR)
-endif
-
-NSINSTALL_PY := $(PYTHON) $(abspath $(topsrcdir)/config/nsinstall.py)
-# For Pymake, wherever we use nsinstall.py we're also going to try to make it
-# a native command where possible. Since native commands can't be used outside
-# of single-line commands, we continue to provide INSTALL for general use.
-# Single-line commands should be switched over to install_cmd.
-NSINSTALL_NATIVECMD := %nsinstall nsinstall
-
-ifdef NSINSTALL_BIN
-NSINSTALL = $(NSINSTALL_BIN)
-else
-ifeq (OS2,$(CROSS_COMPILE)$(OS_ARCH))
-NSINSTALL = $(MOZ_TOOLS_DIR)/nsinstall
-else
-ifeq ($(HOST_OS_ARCH),WINNT)
-NSINSTALL = $(NSINSTALL_PY)
-else
-NSINSTALL = $(CONFIG_TOOLS)/nsinstall$(HOST_BIN_SUFFIX)
-endif # WINNT
-endif # OS2
-endif # NSINSTALL_BIN
-
-
-ifeq (,$(CROSS_COMPILE)$(filter-out WINNT OS2, $(OS_ARCH)))
-INSTALL = $(NSINSTALL) -t
-ifdef .PYMAKE
-install_cmd = $(NSINSTALL_NATIVECMD) -t $(1)
-endif # .PYMAKE
-
-else
-
-# This isn't laid out as conditional directives so that NSDISTMODE can be
-# target-specific.
-INSTALL = $(if $(filter copy, $(NSDISTMODE)), $(NSINSTALL) -t, $(if $(filter absolute_symlink, $(NSDISTMODE)), $(NSINSTALL) -L $(PWD), $(NSINSTALL) -R))
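-# For example: NSDISTMODE=copy copies files into place, absolute_symlink
-# creates symlinks anchored at $(PWD), and anything else falls back to
-# relative symlinks ($(NSINSTALL) -R).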
-
-endif # WINNT/OS2
-
-# The default for install_cmd is simply INSTALL
-install_cmd ?= $(INSTALL) $(1)
-
-# Use nsinstall in copy mode to install files on the system
-SYSINSTALL = $(NSINSTALL) -t
-# This isn't necessarily true, just here
-sysinstall_cmd = install_cmd
-
-#
-# Localization build automation
-#
-
-# Because you might wish to "make locales AB_CD=ab-CD", we don't hardcode
-# MOZ_UI_LOCALE directly, but use an intermediate variable that can be
-# overridden by the command line. (Besides, AB_CD is prettier).
-AB_CD = $(MOZ_UI_LOCALE)
-
-ifndef L10NBASEDIR
- L10NBASEDIR = $(error L10NBASEDIR not defined by configure)
-else
- IS_LANGUAGE_REPACK = 1
-endif
-
-EXPAND_LOCALE_SRCDIR = $(if $(filter en-US,$(AB_CD)),$(topsrcdir)/$(1)/en-US,$(or $(realpath $(L10NBASEDIR)),$(abspath $(L10NBASEDIR)))/$(AB_CD)/$(subst /locales,,$(1)))
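-# For example (illustrative): with AB_CD=de,
-# $(call EXPAND_LOCALE_SRCDIR,browser/locales) resolves to
-# $(L10NBASEDIR)/de/browser, while with AB_CD=en-US it resolves to
-# $(topsrcdir)/browser/locales/en-US.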
-
-ifdef relativesrcdir
-LOCALE_SRCDIR ?= $(call EXPAND_LOCALE_SRCDIR,$(relativesrcdir))
-endif
-
-ifdef relativesrcdir
-MAKE_JARS_FLAGS += --relativesrcdir=$(relativesrcdir)
-ifneq (en-US,$(AB_CD))
-ifdef LOCALE_MERGEDIR
-MAKE_JARS_FLAGS += --locale-mergedir=$(LOCALE_MERGEDIR)
-endif
-ifdef IS_LANGUAGE_REPACK
-MAKE_JARS_FLAGS += --l10n-base=$(L10NBASEDIR)/$(AB_CD)
-endif
-else
-MAKE_JARS_FLAGS += -c $(LOCALE_SRCDIR)
-endif # en-US
-else
-MAKE_JARS_FLAGS += -c $(LOCALE_SRCDIR)
-endif # ! relativesrcdir
-
-ifdef LOCALE_MERGEDIR
-MERGE_FILE = $(firstword \
- $(wildcard $(LOCALE_MERGEDIR)/$(subst /locales,,$(relativesrcdir))/$(1)) \
- $(wildcard $(LOCALE_SRCDIR)/$(1)) \
- $(srcdir)/en-US/$(1) )
-else
-MERGE_FILE = $(LOCALE_SRCDIR)/$(1)
-endif
-MERGE_FILES = $(foreach f,$(1),$(call MERGE_FILE,$(f)))
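-# For example (illustrative file name): $(call MERGE_FILE,chrome.properties)
-# prefers the merged copy in LOCALE_MERGEDIR, then the localized copy in
-# LOCALE_SRCDIR, and finally the en-US file in $(srcdir); without
-# LOCALE_MERGEDIR the localized copy is used unconditionally.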
-
-ifeq (OS2,$(OS_ARCH))
-RUN_TEST_PROGRAM = $(topsrcdir)/build/os2/test_os2.cmd '$(LIBXUL_DIST)'
-else
-ifneq (WINNT,$(OS_ARCH))
-RUN_TEST_PROGRAM = $(LIBXUL_DIST)/bin/run-mozilla.sh
-endif # ! WINNT
-endif # ! OS2
-
-#
-# Java macros
-#
-
-# Make sure any compiled classes work with at least JVM 1.4
-JAVAC_FLAGS += -source 1.4
-
-ifdef MOZ_DEBUG
-JAVAC_FLAGS += -g
-endif
-
-CREATE_PRECOMPLETE_CMD = $(PYTHON) $(abspath $(topsrcdir)/config/createprecomplete.py)
-
-# MDDEPDIR is the subdirectory where dependency files are stored
-MDDEPDIR := .deps
-
-EXPAND_LIBS_EXEC = $(PYTHON) $(topsrcdir)/config/expandlibs_exec.py $(if $@,--depend $(MDDEPDIR)/$@.pp --target $@)
-EXPAND_LIBS_GEN = $(PYTHON) $(topsrcdir)/config/expandlibs_gen.py $(if $@,--depend $(MDDEPDIR)/$@.pp)
-EXPAND_AR = $(EXPAND_LIBS_EXEC) --extract -- $(AR)
-EXPAND_CC = $(EXPAND_LIBS_EXEC) --uselist -- $(CC)
-EXPAND_CCC = $(EXPAND_LIBS_EXEC) --uselist -- $(CCC)
-EXPAND_LD = $(EXPAND_LIBS_EXEC) --uselist -- $(LD)
-EXPAND_MKSHLIB_ARGS = --uselist
-ifdef SYMBOL_ORDER
-EXPAND_MKSHLIB_ARGS += --symbol-order $(SYMBOL_ORDER)
-endif
-EXPAND_MKSHLIB = $(EXPAND_LIBS_EXEC) $(EXPAND_MKSHLIB_ARGS) -- $(MKSHLIB)
-
-ifneq (,$(MOZ_LIBSTDCXX_TARGET_VERSION)$(MOZ_LIBSTDCXX_HOST_VERSION))
-ifneq ($(OS_ARCH),Darwin)
-CHECK_STDCXX = objdump -p $(1) | grep -e 'GLIBCXX_3\.4\.\(9\|[1-9][0-9]\)' > /dev/null && echo 'TEST-UNEXPECTED-FAIL | | We do not want these libstdc++ symbols to be used:' && objdump -T $(1) | grep -e 'GLIBCXX_3\.4\.\(9\|[1-9][0-9]\)' && exit 1 || exit 0
-endif
-
-ifdef MOZ_LIBSTDCXX_TARGET_VERSION
-EXTRA_LIBS += $(call EXPAND_LIBNAME_PATH,stdc++compat,$(DEPTH)/build/unix/stdc++compat)
-endif
-ifdef MOZ_LIBSTDCXX_HOST_VERSION
-HOST_EXTRA_LIBS += $(call EXPAND_LIBNAME_PATH,host_stdc++compat,$(DEPTH)/build/unix/stdc++compat)
-endif
-endif
-
-# autoconf.mk sets OBJ_SUFFIX to an error to avoid use before including
-# this file
-OBJ_SUFFIX := $(_OBJ_SUFFIX)
-
-# PGO builds with GCC build instrumented objects in a first pass, then
-# optimized objects, without instrumentation, in a second pass. If we
-# overwrite the objects from the first pass with those from the second,
-# we end up not getting instrumentation data for better optimization on
-# incremental builds. As a consequence, we use a different object suffix
-# for the first pass.
-ifndef NO_PROFILE_GUIDED_OPTIMIZE
-ifdef MOZ_PROFILE_GENERATE
-ifdef GNU_CC
-OBJ_SUFFIX := i_o
-endif
-endif
-endif
-
-# EXPAND_LIBNAME - $(call EXPAND_LIBNAME,foo)
-# expands to $(LIB_PREFIX)foo.$(LIB_SUFFIX) or -lfoo, depending on linker
-# arguments syntax. Should only be used for system libraries
-
-# EXPAND_LIBNAME_PATH - $(call EXPAND_LIBNAME_PATH,foo,dir)
-# expands to dir/$(LIB_PREFIX)foo.$(LIB_SUFFIX)
-
-# EXPAND_MOZLIBNAME - $(call EXPAND_MOZLIBNAME,foo)
-# expands to $(DIST)/lib/$(LIB_PREFIX)foo.$(LIB_SUFFIX)
-
-ifdef GNU_CC
-EXPAND_LIBNAME = $(addprefix -l,$(1))
-else
-EXPAND_LIBNAME = $(foreach lib,$(1),$(LIB_PREFIX)$(lib).$(LIB_SUFFIX))
-endif
-EXPAND_LIBNAME_PATH = $(foreach lib,$(1),$(2)/$(LIB_PREFIX)$(lib).$(LIB_SUFFIX))
-EXPAND_MOZLIBNAME = $(foreach lib,$(1),$(DIST)/lib/$(LIB_PREFIX)$(lib).$(LIB_SUFFIX))
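-# For example (illustrative, LIB_PREFIX=lib, LIB_SUFFIX=a, GNU toolchain):
-# $(call EXPAND_LIBNAME,z) gives -lz, and
-# $(call EXPAND_LIBNAME_PATH,nspr4,$(DIST)/lib) gives $(DIST)/lib/libnspr4.a.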
-
-PLY_INCLUDE = -I$(topsrcdir)/other-licenses/ply
-
-export CL_INCLUDES_PREFIX
-# Make sure that the build system can handle non-ASCII characters
-# in environment variables to prevent it from breaking silently on
-# non-English systems.
-export NONASCII
-
-ifdef MOZ_GTK2_CFLAGS
-MOZ_GTK2_CFLAGS := -I$(topsrcdir)/widget/gtk/compat $(MOZ_GTK2_CFLAGS)
-endif
-
-DEFINES += -DNO_NSPR_10_SUPPORT
-
-ifdef IS_GYP_DIR
-LOCAL_INCLUDES += \
- -I$(topsrcdir)/ipc/chromium/src \
- -I$(topsrcdir)/ipc/glue \
- -I$(DEPTH)/ipc/ipdl/_ipdlheaders \
- $(NULL)
-
-ifeq (WINNT,$(OS_TARGET))
-# These get set via VC project file settings for normal GYP builds.
-DEFINES += -DUNICODE -D_UNICODE
-LOCAL_INCLUDES += -I'$(MOZ_DIRECTX_SDK_PATH)/include'
-endif
-
-STL_FLAGS=
-# Skip most Mozilla-specific include locations.
-INCLUDES = -I. $(LOCAL_INCLUDES) -I$(DEPTH)/dist/include
-endif
diff --git a/js/src/config/emptyvars.mk.in b/js/src/config/emptyvars.mk.in
deleted file mode 100644
index 388cf2a3598..00000000000
--- a/js/src/config/emptyvars.mk.in
+++ /dev/null
@@ -1 +0,0 @@
-@ALLEMPTYSUBSTS@
diff --git a/js/src/config/expandlibs.py b/js/src/config/expandlibs.py
deleted file mode 100644
index b63492b0e5b..00000000000
--- a/js/src/config/expandlibs.py
+++ /dev/null
@@ -1,145 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-'''Expandlibs is a system that allows replacing some libraries with a
-descriptor file containing some linking information about them.
-
-The descriptor file format is as follows:
----8<-----
-OBJS = a.o b.o ...
-LIBS = libfoo.a libbar.a ...
---->8-----
-
-(In the example above, OBJ_SUFFIX is o and LIB_SUFFIX is a).
-
-Expandlibs also canonicalizes how to pass libraries to the linker, such
-that only the ${LIB_PREFIX}${ROOT}.${LIB_SUFFIX} form needs to be used:
-given a list of files, expandlibs will replace items with the form
-${LIB_PREFIX}${ROOT}.${LIB_SUFFIX} following these rules:
-
-- If a ${DLL_PREFIX}${ROOT}.${DLL_SUFFIX} or
- ${DLL_PREFIX}${ROOT}.${IMPORT_LIB_SUFFIX} file exists, use that instead
-- If the ${LIB_PREFIX}${ROOT}.${LIB_SUFFIX} file exists, use it
-- If a ${LIB_PREFIX}${ROOT}.${LIB_SUFFIX}.${LIB_DESC_SUFFIX} file exists,
- replace ${LIB_PREFIX}${ROOT}.${LIB_SUFFIX} with the OBJS and LIBS the
- descriptor contains. And for each of these LIBS, also apply the same
- rules.
-'''
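-# For example (illustrative, assuming LIB_SUFFIX=.a and LIBS_DESC_SUFFIX=.desc):
-# "libfoo.a" on a command line is replaced by the matching DLL or import
-# library if one exists, kept as-is if libfoo.a itself exists, and otherwise
-# expanded into the OBJS and LIBS listed in "libfoo.a.desc".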
-from __future__ import with_statement
-import sys, os, errno
-import expandlibs_config as conf
-
-def ensureParentDir(file):
-    '''Ensures the parent directory of the given file exists'''
- dir = os.path.dirname(file)
- if dir and not os.path.exists(dir):
- try:
- os.makedirs(dir)
- except OSError, error:
- if error.errno != errno.EEXIST:
- raise
-
-def relativize(path):
- '''Returns a path relative to the current working directory, if it is
- shorter than the given path'''
- def splitpath(path):
- dir, file = os.path.split(path)
- if os.path.splitdrive(dir)[1] == os.sep:
- return [file]
- return splitpath(dir) + [file]
-
- if not os.path.exists(path):
- return path
- curdir = splitpath(os.path.abspath(os.curdir))
- abspath = splitpath(os.path.abspath(path))
- while curdir and abspath and curdir[0] == abspath[0]:
- del curdir[0]
- del abspath[0]
- if not curdir and not abspath:
- return '.'
- relpath = os.path.join(*[os.pardir for i in curdir] + abspath)
- if len(path) > len(relpath):
- return relpath
- return path
-
-def isObject(path):
- '''Returns whether the given path points to an object file, that is,
- ends with OBJ_SUFFIX or .i_o'''
- return os.path.splitext(path)[1] in [conf.OBJ_SUFFIX, '.i_o']
-
-def isDynamicLib(path):
- '''Returns whether the given path points to a dynamic library, that is,
- ends with DLL_SUFFIX.'''
- # On mac, the xul library is named XUL, instead of libxul.dylib. Assume any
- # file by that name is a dynamic library.
- return os.path.splitext(path)[1] == conf.DLL_SUFFIX or os.path.basename(path) == 'XUL'
-
-class LibDescriptor(dict):
- KEYS = ['OBJS', 'LIBS']
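-    # Illustrative example: LibDescriptor(['OBJS = a.o b.o\n']) yields
-    # {'OBJS': ['a.o', 'b.o'], 'LIBS': []}, and str() on it gives
-    # 'OBJS = a.o b.o'.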
-
- def __init__(self, content=None):
- '''Creates an instance of a lib descriptor, initialized with contents
- from a list of strings when given. This is intended for use with
- file.readlines()'''
- if isinstance(content, list) and all([isinstance(item, str) for item in content]):
- pass
- elif content is not None:
- raise TypeError("LibDescriptor() arg 1 must be None or a list of strings")
- super(LibDescriptor, self).__init__()
- for key in self.KEYS:
- self[key] = []
- if not content:
- return
- for key, value in [(s.strip() for s in item.split('=', 2)) for item in content if item.find('=') >= 0]:
- if key in self.KEYS:
- self[key] = value.split()
-
- def __str__(self):
- '''Serializes the lib descriptor'''
- return '\n'.join('%s = %s' % (k, ' '.join(self[k])) for k in self.KEYS if len(self[k]))
-
-class ExpandArgs(list):
- def __init__(self, args):
- '''Creates a clone of the |args| list and performs file expansion on
- each item it contains'''
- super(ExpandArgs, self).__init__()
- for arg in args:
- self += self._expand(arg)
-
- def _expand(self, arg):
- '''Internal function doing the actual work'''
- (root, ext) = os.path.splitext(arg)
- if ext != conf.LIB_SUFFIX or not os.path.basename(root).startswith(conf.LIB_PREFIX):
- return [relativize(arg)]
- if len(conf.IMPORT_LIB_SUFFIX):
- dll = root + conf.IMPORT_LIB_SUFFIX
- else:
- dll = root.replace(conf.LIB_PREFIX, conf.DLL_PREFIX, 1) + conf.DLL_SUFFIX
- if os.path.exists(dll):
- return [relativize(dll)]
- if os.path.exists(arg):
- return [relativize(arg)]
- return self._expand_desc(arg)
-
- def _expand_desc(self, arg):
- '''Internal function taking care of lib descriptor expansion only'''
- if os.path.exists(arg + conf.LIBS_DESC_SUFFIX):
- with open(arg + conf.LIBS_DESC_SUFFIX, 'r') as f:
- desc = LibDescriptor(f.readlines())
- objs = [relativize(o) for o in desc['OBJS']]
- for lib in desc['LIBS']:
- objs += self._expand(lib)
- return objs
- return [arg]
-
-class ExpandLibsDeps(ExpandArgs):
- '''Same as ExpandArgs, but also adds the library descriptor to the list'''
- def _expand_desc(self, arg):
- objs = super(ExpandLibsDeps, self)._expand_desc(arg)
- if os.path.exists(arg + conf.LIBS_DESC_SUFFIX):
- objs += [relativize(arg + conf.LIBS_DESC_SUFFIX)]
- return objs
-
-if __name__ == '__main__':
- print " ".join(ExpandArgs(sys.argv[1:]))
diff --git a/js/src/config/expandlibs_config.py b/js/src/config/expandlibs_config.py
deleted file mode 100644
index 8365f77cd12..00000000000
--- a/js/src/config/expandlibs_config.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-from buildconfig import substs
-
-def normalize_suffix(suffix):
- '''Returns a normalized suffix, i.e. ensures it starts with a dot and
- doesn't starts or ends with whitespace characters'''
- value = suffix.strip()
- if len(value) and not value.startswith('.'):
- value = '.' + value
- return value
-
-# Variables from the build system
-AR = substs['AR']
-AR_EXTRACT = substs['AR_EXTRACT'].replace('$(AR)', AR)
-DLL_PREFIX = substs['DLL_PREFIX']
-LIB_PREFIX = substs['LIB_PREFIX']
-OBJ_SUFFIX = normalize_suffix(substs['OBJ_SUFFIX'])
-LIB_SUFFIX = normalize_suffix(substs['LIB_SUFFIX'])
-DLL_SUFFIX = normalize_suffix(substs['DLL_SUFFIX'])
-IMPORT_LIB_SUFFIX = normalize_suffix(substs['IMPORT_LIB_SUFFIX'])
-LIBS_DESC_SUFFIX = normalize_suffix(substs['LIBS_DESC_SUFFIX'])
-EXPAND_LIBS_LIST_STYLE = substs['EXPAND_LIBS_LIST_STYLE']
-EXPAND_LIBS_ORDER_STYLE = substs['EXPAND_LIBS_ORDER_STYLE']
-LD_PRINT_ICF_SECTIONS = substs['LD_PRINT_ICF_SECTIONS']
diff --git a/js/src/config/expandlibs_exec.py b/js/src/config/expandlibs_exec.py
deleted file mode 100644
index f6ac5a033c2..00000000000
--- a/js/src/config/expandlibs_exec.py
+++ /dev/null
@@ -1,361 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-'''expandlibs-exec.py applies expandlibs rules, and some more (see below) to
-a given command line, and executes that command line with the expanded
-arguments.
-
-With the --extract argument (useful for e.g. $(AR)), it extracts object files
-from static libraries (or use those listed in library descriptors directly).
-
-With the --uselist argument (useful for e.g. $(CC)), it replaces all object
-files with a list file. This can be used to avoid limitations in the length
-of a command line. The kind of list file format used depends on the
-EXPAND_LIBS_LIST_STYLE variable: 'list' for MSVC style lists (@file.list)
-or 'linkerscript' for GNU ld linker scripts.
-See https://bugzilla.mozilla.org/show_bug.cgi?id=584474#c59 for more details.
-
-With the --symbol-order argument, followed by a file name, it will add the
-relevant linker options to change the order in which the symbols appear
-in the resulting binary. Only works for ELF targets.
-'''
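-# Illustrative invocations, as wired up by config.mk (command names are
-# placeholders for whatever the build configuration uses):
-#   expandlibs_exec.py --uselist -- $(CC) -o program $(OBJS) libbar.a
-#   expandlibs_exec.py --extract -- $(AR) cr libfoo.a libbar.a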
-from __future__ import with_statement
-import sys
-import os
-from expandlibs import (
- ExpandArgs,
- relativize,
- isDynamicLib,
- isObject,
- ensureParentDir,
- ExpandLibsDeps,
-)
-import expandlibs_config as conf
-from optparse import OptionParser
-import subprocess
-import tempfile
-import shutil
-import subprocess
-import re
-from mozbuild.makeutil import Makefile
-
-# The are the insert points for a GNU ld linker script, assuming a more
-# or less "standard" default linker script. This is not a dict because
-# order is important.
-SECTION_INSERT_BEFORE = [
- ('.text', '.fini'),
- ('.rodata', '.rodata1'),
- ('.data.rel.ro', '.dynamic'),
- ('.data', '.data1'),
-]
-
-class ExpandArgsMore(ExpandArgs):
- ''' Meant to be used as 'with ExpandArgsMore(args) as ...: '''
- def __enter__(self):
- self.tmp = []
- return self
-
- def __exit__(self, type, value, tb):
- '''Automatically remove temporary files'''
- for tmp in self.tmp:
- if os.path.isdir(tmp):
- shutil.rmtree(tmp, True)
- else:
- os.remove(tmp)
-
- def extract(self):
- self[0:] = self._extract(self)
-
- def _extract(self, args):
- '''When a static library name is found, either extract its contents
-        into a temporary directory or use the information found in the
- corresponding lib descriptor.
- '''
- ar_extract = conf.AR_EXTRACT.split()
- newlist = []
- for arg in args:
- if os.path.splitext(arg)[1] == conf.LIB_SUFFIX:
- if os.path.exists(arg + conf.LIBS_DESC_SUFFIX):
- newlist += self._extract(self._expand_desc(arg))
- continue
- elif os.path.exists(arg) and (len(ar_extract) or conf.AR == 'lib'):
- tmp = tempfile.mkdtemp(dir=os.curdir)
- self.tmp.append(tmp)
- if conf.AR == 'lib':
- out = subprocess.check_output([conf.AR, '-NOLOGO', '-LIST', arg])
- files = out.splitlines()
- # If lib -list returns a list full of dlls, it's an
- # import lib.
- if all(isDynamicLib(f) for f in files):
- newlist += [arg]
- continue
- for f in files:
- subprocess.call([conf.AR, '-NOLOGO', '-EXTRACT:%s' % f, os.path.abspath(arg)], cwd=tmp)
- else:
- subprocess.call(ar_extract + [os.path.abspath(arg)], cwd=tmp)
- objs = []
- for root, dirs, files in os.walk(tmp):
- objs += [relativize(os.path.join(root, f)) for f in files if isObject(f)]
- newlist += sorted(objs)
- continue
- newlist += [arg]
- return newlist
-
- def makelist(self):
- '''Replaces object file names with a temporary list file, using a
- list format depending on the EXPAND_LIBS_LIST_STYLE variable
- '''
- objs = [o for o in self if isObject(o)]
- if not len(objs): return
- fd, tmp = tempfile.mkstemp(suffix=".list",dir=os.curdir)
- if conf.EXPAND_LIBS_LIST_STYLE == "linkerscript":
- content = ['INPUT("%s")\n' % obj for obj in objs]
- ref = tmp
- elif conf.EXPAND_LIBS_LIST_STYLE == "filelist":
- content = ["%s\n" % obj for obj in objs]
- ref = "-Wl,-filelist," + tmp
- elif conf.EXPAND_LIBS_LIST_STYLE == "list":
- content = ["%s\n" % obj for obj in objs]
- ref = "@" + tmp
- else:
- os.close(fd)
- os.remove(tmp)
- return
- self.tmp.append(tmp)
- f = os.fdopen(fd, "w")
- f.writelines(content)
- f.close()
- idx = self.index(objs[0])
- newlist = self[0:idx] + [ref] + [item for item in self[idx:] if item not in objs]
- self[0:] = newlist
-
- def _getFoldedSections(self):
- '''Returns a dict about folded sections.
- When section A and B are folded into section C, the dict contains:
- { 'A': 'C',
- 'B': 'C',
- 'C': ['A', 'B'] }'''
- if not conf.LD_PRINT_ICF_SECTIONS:
- return {}
-
- proc = subprocess.Popen(self + [conf.LD_PRINT_ICF_SECTIONS], stdout = subprocess.PIPE, stderr = subprocess.PIPE)
- (stdout, stderr) = proc.communicate()
- result = {}
- # gold's --print-icf-sections output looks like the following:
- # ld: ICF folding section '.section' in file 'file.o'into '.section' in file 'file.o'
- # In terms of words, chances are this will change in the future,
- # especially considering "into" is misplaced. Splitting on quotes
- # seems safer.
- for l in stderr.split('\n'):
- quoted = l.split("'")
- if len(quoted) > 5 and quoted[1] != quoted[5]:
- result[quoted[1]] = [quoted[5]]
- if quoted[5] in result:
- result[quoted[5]].append(quoted[1])
- else:
- result[quoted[5]] = [quoted[1]]
- return result
-
- def _getOrderedSections(self, ordered_symbols):
- '''Given an ordered list of symbols, returns the corresponding list
- of sections following the order.'''
- if not conf.EXPAND_LIBS_ORDER_STYLE in ['linkerscript', 'section-ordering-file']:
- raise Exception('EXPAND_LIBS_ORDER_STYLE "%s" is not supported' % conf.EXPAND_LIBS_ORDER_STYLE)
- finder = SectionFinder([arg for arg in self if isObject(arg) or os.path.splitext(arg)[1] == conf.LIB_SUFFIX])
- folded = self._getFoldedSections()
- sections = set()
- ordered_sections = []
- for symbol in ordered_symbols:
- symbol_sections = finder.getSections(symbol)
- all_symbol_sections = []
- for section in symbol_sections:
- if section in folded:
- if isinstance(folded[section], str):
- section = folded[section]
- all_symbol_sections.append(section)
- all_symbol_sections.extend(folded[section])
- else:
- all_symbol_sections.append(section)
- for section in all_symbol_sections:
- if not section in sections:
- ordered_sections.append(section)
- sections.add(section)
- return ordered_sections
-
- def orderSymbols(self, order):
- '''Given a file containing a list of symbols, adds the appropriate
- argument to make the linker put the symbols in that order.'''
- with open(order) as file:
- sections = self._getOrderedSections([l.strip() for l in file.readlines() if l.strip()])
- split_sections = {}
- linked_sections = [s[0] for s in SECTION_INSERT_BEFORE]
- for s in sections:
- for linked_section in linked_sections:
- if s.startswith(linked_section):
- if linked_section in split_sections:
- split_sections[linked_section].append(s)
- else:
- split_sections[linked_section] = [s]
- break
- content = []
- # Order is important
- linked_sections = [s for s in linked_sections if s in split_sections]
-
- if conf.EXPAND_LIBS_ORDER_STYLE == 'section-ordering-file':
- option = '-Wl,--section-ordering-file,%s'
- content = sections
- for linked_section in linked_sections:
- content.extend(split_sections[linked_section])
- content.append('%s.*' % linked_section)
- content.append(linked_section)
-
- elif conf.EXPAND_LIBS_ORDER_STYLE == 'linkerscript':
- option = '-Wl,-T,%s'
- section_insert_before = dict(SECTION_INSERT_BEFORE)
- for linked_section in linked_sections:
- content.append('SECTIONS {')
- content.append(' %s : {' % linked_section)
- content.extend(' *(%s)' % s for s in split_sections[linked_section])
- content.append(' }')
- content.append('}')
- content.append('INSERT BEFORE %s' % section_insert_before[linked_section])
- else:
- raise Exception('EXPAND_LIBS_ORDER_STYLE "%s" is not supported' % conf.EXPAND_LIBS_ORDER_STYLE)
-
- fd, tmp = tempfile.mkstemp(dir=os.curdir)
- f = os.fdopen(fd, "w")
- f.write('\n'.join(content)+'\n')
- f.close()
- self.tmp.append(tmp)
- self.append(option % tmp)
-
-class SectionFinder(object):
-    '''Instances of this class allow mapping symbol names to sections in
- object files.'''
-
- def __init__(self, objs):
- '''Creates an instance, given a list of object files.'''
- if not conf.EXPAND_LIBS_ORDER_STYLE in ['linkerscript', 'section-ordering-file']:
- raise Exception('EXPAND_LIBS_ORDER_STYLE "%s" is not supported' % conf.EXPAND_LIBS_ORDER_STYLE)
- self.mapping = {}
- for obj in objs:
- if not isObject(obj) and os.path.splitext(obj)[1] != conf.LIB_SUFFIX:
- raise Exception('%s is not an object nor a static library' % obj)
- for symbol, section in SectionFinder._getSymbols(obj):
- sym = SectionFinder._normalize(symbol)
- if sym in self.mapping:
- if not section in self.mapping[sym]:
- self.mapping[sym].append(section)
- else:
- self.mapping[sym] = [section]
-
- def getSections(self, symbol):
- '''Given a symbol, returns a list of sections containing it or the
- corresponding thunks. When the given symbol is a thunk, returns the
- list of sections containing its corresponding normal symbol and the
- other thunks for that symbol.'''
- sym = SectionFinder._normalize(symbol)
- if sym in self.mapping:
- return self.mapping[sym]
- return []
-
- @staticmethod
- def _normalize(symbol):
- '''For normal symbols, return the given symbol. For thunks, return
- the corresponding normal symbol.'''
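-        # For example (illustrative mangled names): the thunk
-        # '_ZThn8_N3FooD1Ev' normalizes to '_ZN3FooD1Ev'.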
- if re.match('^_ZThn[0-9]+_', symbol):
- return re.sub('^_ZThn[0-9]+_', '_Z', symbol)
- return symbol
-
- @staticmethod
- def _getSymbols(obj):
- '''Returns a list of (symbol, section) contained in the given object
- file.'''
- proc = subprocess.Popen(['objdump', '-t', obj], stdout = subprocess.PIPE, stderr = subprocess.PIPE)
- (stdout, stderr) = proc.communicate()
- syms = []
- for line in stdout.splitlines():
- # Each line has the following format:
-            # <addr> [lgu!][w ][C ][W ][Ii ][dD ][FfO ] <section>\t<size> <symbol>
- tmp = line.split(' ',1)
-            # This gives us ["<addr>", "[lgu!][w ][C ][W ][Ii ][dD ][FfO ] <section>\t<size> <symbol>"]
-            # We only need to consider cases where "<section>\t<size>" is present,
- # and where the [FfO] flag is either F (function) or O (object).
- if len(tmp) > 1 and len(tmp[1]) > 6 and tmp[1][6] in ['O', 'F']:
- tmp = tmp[1][8:].split()
-                # That gives us ["<section>","<size>", "<symbol>"]
- syms.append((tmp[-1], tmp[0]))
- return syms
-
-def print_command(out, args):
- print >>out, "Executing: " + " ".join(args)
- for tmp in [f for f in args.tmp if os.path.isfile(f)]:
- print >>out, tmp + ":"
- with open(tmp) as file:
- print >>out, "".join([" " + l for l in file.readlines()])
- out.flush()
-
-def main():
- parser = OptionParser()
- parser.add_option("--depend", dest="depend", metavar="FILE",
- help="generate dependencies for the given execution and store it in the given file")
- parser.add_option("--target", dest="target", metavar="FILE",
- help="designate the target for dependencies")
- parser.add_option("--extract", action="store_true", dest="extract",
- help="when a library has no descriptor file, extract it first, when possible")
- parser.add_option("--uselist", action="store_true", dest="uselist",
- help="use a list file for objects when executing a command")
- parser.add_option("--verbose", action="store_true", dest="verbose",
- help="display executed command and temporary files content")
- parser.add_option("--symbol-order", dest="symbol_order", metavar="FILE",
- help="use the given list of symbols to order symbols in the resulting binary when using with a linker")
-
- (options, args) = parser.parse_args()
-
- if not options.target:
- options.depend = False
- if options.depend:
- deps = ExpandLibsDeps(args)
- # Filter out common command wrappers
- while os.path.basename(deps[0]) in ['ccache', 'distcc']:
- deps.pop(0)
- # Remove command
- deps.pop(0)
- with ExpandArgsMore(args) as args:
- if options.extract:
- args.extract()
- if options.symbol_order:
- args.orderSymbols(options.symbol_order)
- if options.uselist:
- args.makelist()
-
- if options.verbose:
- print_command(sys.stderr, args)
- try:
- proc = subprocess.Popen(args, stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
- except Exception, e:
- print >>sys.stderr, 'error: Launching', args, ':', e
- raise e
- (stdout, stderr) = proc.communicate()
- if proc.returncode and not options.verbose:
- print_command(sys.stderr, args)
- sys.stderr.write(stdout)
- sys.stderr.flush()
- if proc.returncode:
- exit(proc.returncode)
- if not options.depend:
- return
- ensureParentDir(options.depend)
- mk = Makefile()
- deps = [dep for dep in deps if os.path.isfile(dep) and dep != options.target]
- no_dynamic_lib = [dep for dep in deps if not isDynamicLib(dep)]
- mk.create_rule([options.target]).add_dependencies(no_dynamic_lib)
- if len(deps) != len(no_dynamic_lib):
- mk.create_rule(['%s_order_only' % options.target]).add_dependencies(dep for dep in deps if isDynamicLib(dep))
-
- with open(options.depend, 'w') as depfile:
- mk.dump(depfile, removal_guard=True)
-
-if __name__ == '__main__':
- main()
diff --git a/js/src/config/expandlibs_gen.py b/js/src/config/expandlibs_gen.py
deleted file mode 100644
index 25962f60c94..00000000000
--- a/js/src/config/expandlibs_gen.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-'''Given a list of object files and library names, prints a library
-descriptor to standard output'''
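-# Illustrative usage (assuming a.o and b.o exist in the current directory):
-#   expandlibs_gen.py -o libfoo.a.desc a.o b.o
-# writes a descriptor whose OBJS line lists the absolute paths of a.o and b.o.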
-
-from __future__ import with_statement
-import sys
-import os
-import expandlibs_config as conf
-from expandlibs import LibDescriptor, isObject, ensureParentDir, ExpandLibsDeps
-from optparse import OptionParser
-
-def generate(args):
- desc = LibDescriptor()
- for arg in args:
- if isObject(arg):
- if os.path.exists(arg):
- desc['OBJS'].append(os.path.abspath(arg))
- else:
- raise Exception("File not found: %s" % arg)
- elif os.path.splitext(arg)[1] == conf.LIB_SUFFIX:
- if os.path.exists(arg) or os.path.exists(arg + conf.LIBS_DESC_SUFFIX):
- desc['LIBS'].append(os.path.abspath(arg))
- else:
- raise Exception("File not found: %s" % arg)
- return desc
-
-if __name__ == '__main__':
- parser = OptionParser()
- parser.add_option("--depend", dest="depend", metavar="FILE",
- help="generate dependencies for the given execution and store it in the given file")
- parser.add_option("-o", dest="output", metavar="FILE",
- help="send output to the given file")
-
- (options, args) = parser.parse_args()
- if not options.output:
- raise Exception("Missing option: -o")
-
- ensureParentDir(options.output)
- with open(options.output, 'w') as outfile:
- print >>outfile, generate(args)
- if options.depend:
- ensureParentDir(options.depend)
- with open(options.depend, 'w') as depfile:
- deps = ExpandLibsDeps(args)
- depfile.write("%s : %s\n" % (options.output, ' '.join(deps)))
- for dep in deps:
- depfile.write("%s :\n" % dep)
diff --git a/js/src/config/find_OOM_errors.py b/js/src/config/find_OOM_errors.py
deleted file mode 100644
index 16065119b78..00000000000
--- a/js/src/config/find_OOM_errors.py
+++ /dev/null
@@ -1,352 +0,0 @@
-#!/usr/bin/env python
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-from __future__ import print_function
-
-usage = """%prog: A test for OOM conditions in the shell.
-
-%prog finds segfaults and other errors caused by incorrect handling of
-allocation during OOM (out-of-memory) conditions.
-"""
-
-help = """Check for regressions only. This runs a set of files with a known
-number of OOM errors (specified by REGRESSION_COUNT), and exits with a non-zero
-result if more or less errors are found. See js/src/Makefile.in for invocation.
-"""
-
-
-import hashlib
-import re
-import shlex
-import subprocess
-import sys
-import threading
-import time
-
-from optparse import OptionParser
-
-#####################################################################
-# Utility functions
-#####################################################################
-def run(args, stdin=None):
- class ThreadWorker(threading.Thread):
- def __init__(self, pipe):
- super(ThreadWorker, self).__init__()
- self.all = ""
- self.pipe = pipe
- self.setDaemon(True)
-
- def run(self):
- while True:
- line = self.pipe.readline()
- if line == '': break
- else:
- self.all += line
-
- try:
- if type(args) == str:
- args = shlex.split(args)
-
- args = [str(a) for a in args] # convert to strs
-
- stdin_pipe = subprocess.PIPE if stdin else None
- proc = subprocess.Popen(args, stdin=stdin_pipe, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- if stdin_pipe:
- proc.stdin.write(stdin)
- proc.stdin.close()
-
- stdout_worker = ThreadWorker(proc.stdout)
- stderr_worker = ThreadWorker(proc.stderr)
- stdout_worker.start()
- stderr_worker.start()
-
- proc.wait()
- stdout_worker.join()
- stderr_worker.join()
-
- except KeyboardInterrupt as e:
- sys.exit(-1)
-
- stdout, stderr = stdout_worker.all, stderr_worker.all
- result = (stdout, stderr, proc.returncode)
- return result
-
-def get_js_files():
- (out, err, exit) = run('find ../jit-test/tests -name "*.js"')
- if (err, exit) != ("", 0):
- sys.exit("Wrong directory, run from an objdir")
- return out.split()
-
-
-
-#####################################################################
-# Blacklisting
-#####################################################################
-def in_blacklist(sig):
- return sig in blacklist
-
-def add_to_blacklist(sig):
- blacklist[sig] = blacklist.get(sig, 0)
- blacklist[sig] += 1
-
-# Track how often particular lines occur; the most frequent ones matter most here.
-def count_lines():
- """Keep track of the amount of times individual lines occur, in order to
- prioritize the errors which occur most frequently."""
- counts = {}
- for string,count in blacklist.items():
- for line in string.split("\n"):
- counts[line] = counts.get(line, 0) + count
-
- lines = []
- for k,v in counts.items():
- lines.append("{0:6}: {1}".format(v, k))
-
- lines.sort()
-
- countlog = file("../OOM_count_log", "w")
- countlog.write("\n".join(lines))
- countlog.flush()
- countlog.close()
-
-
-#####################################################################
-# Output cleaning
-#####################################################################
-def clean_voutput(err):
- # Skip what we can't reproduce
- err = re.sub(r"^--\d+-- run: /usr/bin/dsymutil \"shell/js\"$", "", err, flags=re.MULTILINE)
- err = re.sub(r"^==\d+==", "", err, flags=re.MULTILINE)
- err = re.sub(r"^\*\*\d+\*\*", "", err, flags=re.MULTILINE)
- err = re.sub(r"^\s+by 0x[0-9A-Fa-f]+: ", "by: ", err, flags=re.MULTILINE)
- err = re.sub(r"^\s+at 0x[0-9A-Fa-f]+: ", "at: ", err, flags=re.MULTILINE)
- err = re.sub(r"(^\s+Address 0x)[0-9A-Fa-f]+( is not stack'd)", r"\1\2", err, flags=re.MULTILINE)
- err = re.sub(r"(^\s+Invalid write of size )\d+", r"\1x", err, flags=re.MULTILINE)
- err = re.sub(r"(^\s+Invalid read of size )\d+", r"\1x", err, flags=re.MULTILINE)
- err = re.sub(r"(^\s+Address 0x)[0-9A-Fa-f]+( is )\d+( bytes inside a block of size )[0-9,]+( free'd)", r"\1\2\3\4", err, flags=re.MULTILINE)
-
- # Skip the repeating bit due to the segfault
- lines = []
- for l in err.split('\n'):
- if l == " Process terminating with default action of signal 11 (SIGSEGV)":
- break
- lines.append(l)
- err = '\n'.join(lines)
-
- return err
-
-def remove_failed_allocation_backtraces(err):
- lines = []
-
- add = True
- for l in err.split('\n'):
-
- # Set start and end conditions for including text
- if l == " The site of the failed allocation is:":
- add = False
-    elif l[:4] not in ['by: ', 'at: ']:
- add = True
-
- if add:
- lines.append(l)
-
-
- err = '\n'.join(lines)
-
- return err
-
-
-def clean_output(err):
- err = re.sub(r"^js\(\d+,0x[0-9a-f]+\) malloc: \*\*\* error for object 0x[0-9a-f]+: pointer being freed was not allocated\n\*\*\* set a breakppoint in malloc_error_break to debug\n$", "pointer being freed was not allocated", err, flags=re.MULTILINE)
-
- return err
-
-
-#####################################################################
-# Consts, etc
-#####################################################################
-
-command_template = 'shell/js' \
- + ' -m -j -p' \
- + ' -e "const platform=\'darwin\'; const libdir=\'../jit-test/lib/\';"' \
- + ' -f ../jit-test/lib/prolog.js' \
- + ' -f {0}'
-
-
-# Blacklists are things we don't want to see in our logs again (though we do
-# want to count them when they happen). Whitelists we do want to see in our
-# logs again, principally because the information we have isn't enough.
-
-blacklist = {}
-add_to_blacklist(r"('', '', 1)") # 1 means OOM if the shell hasn't launched yet.
-add_to_blacklist(r"('', 'out of memory\n', 1)")
-
-whitelist = set()
-whitelist.add(r"('', 'out of memory\n', -11)") # -11 means OOM
-whitelist.add(r"('', 'out of memory\nout of memory\n', -11)")
-
-
-
-#####################################################################
-# Program
-#####################################################################
-
-# Options
-parser = OptionParser(usage=usage)
-parser.add_option("-r", "--regression", action="store", metavar="REGRESSION_COUNT", help=help,
- type="int", dest="regression", default=None)
-
-(OPTIONS, args) = parser.parse_args()
-
-
-if OPTIONS.regression != None:
-  # TODO: This should be expanded as we get a better handle on the OOM problems.
- # For now, we'll just check that the number of OOMs in one short file does not
- # increase.
- files = ["../jit-test/tests/arguments/args-createontrace.js"]
-else:
- files = get_js_files()
-
- # Use a command-line arg to reduce the set of files
- if len (args):
- files = [f for f in files if f.find(args[0]) != -1]
-
-
-if OPTIONS.regression == None:
-  # Only use a logfile when running manually; the regression run is automated for tinderbox.
- log = file("../OOM_log", "w")
-
-
-num_failures = 0
-for f in files:
-
- # Run it once to establish boundaries
- command = (command_template + ' -O').format(f)
- out, err, exit = run(command)
- max = re.match(".*OOM max count: (\d+).*", out, flags=re.DOTALL).groups()[0]
- max = int(max)
-
- # OOMs don't recover well for the first 20 allocations or so.
- # TODO: revisit this.
- for i in range(20, max):
-
- if OPTIONS.regression == None:
- print("Testing allocation {0}/{1} in {2}".format(i,max,f))
- else:
- sys.stdout.write('.') # something short for tinderbox, no space or \n
-
- command = (command_template + ' -A {0}').format(f, i)
- out, err, exit = run(command)
-
- # Success (5 is SM's exit code for controlled errors)
- if exit == 5 and err.find("out of memory") != -1:
- continue
-
- # Failure
- else:
-
- if OPTIONS.regression != None:
- # Just count them
- num_failures += 1
- continue
-
- #########################################################################
-      # The regression test ends above. The rest of this is for running the
- # script manually.
- #########################################################################
-
- problem = str((out, err, exit))
- if in_blacklist(problem) and problem not in whitelist:
- add_to_blacklist(problem)
- continue
-
- add_to_blacklist(problem)
-
-
- # Get valgrind output for a good stack trace
- vcommand = "valgrind --dsymutil=yes -q --log-file=OOM_valgrind_log_file " + command
- run(vcommand)
- vout = file("OOM_valgrind_log_file").read()
- vout = clean_voutput(vout)
- sans_alloc_sites = remove_failed_allocation_backtraces(vout)
-
- # Don't print duplicate information
- if in_blacklist(sans_alloc_sites):
- add_to_blacklist(sans_alloc_sites)
- continue
-
- add_to_blacklist(sans_alloc_sites)
-
- log.write ("\n")
- log.write ("\n")
- log.write ("=========================================================================")
- log.write ("\n")
- log.write ("An allocation failure at\n\tallocation {0}/{1} in {2}\n\t"
- "causes problems (detected using bug 624094)"
- .format(i, max, f))
- log.write ("\n")
- log.write ("\n")
-
- log.write ("Command (from obj directory, using patch from bug 624094):\n " + command)
- log.write ("\n")
- log.write ("\n")
- log.write ("stdout, stderr, exitcode:\n " + problem)
- log.write ("\n")
- log.write ("\n")
-
- double_free = err.find("pointer being freed was not allocated") != -1
- oom_detected = err.find("out of memory") != -1
- multiple_oom_detected = err.find("out of memory\nout of memory") != -1
- segfault_detected = exit == -11
-
- log.write ("Diagnosis: ")
- log.write ("\n")
- if multiple_oom_detected:
- log.write (" - Multiple OOMs reported")
- log.write ("\n")
- if segfault_detected:
- log.write (" - segfault")
- log.write ("\n")
- if not oom_detected:
- log.write (" - No OOM checking")
- log.write ("\n")
- if double_free:
- log.write (" - Double free")
- log.write ("\n")
-
- log.write ("\n")
-
- log.write ("Valgrind info:\n" + vout)
- log.write ("\n")
- log.write ("\n")
- log.flush()
-
- if OPTIONS.regression == None:
- count_lines()
-
-print()
-
-# Do the actual regression check
-if OPTIONS.regression != None:
- expected_num_failures = OPTIONS.regression
-
- if num_failures != expected_num_failures:
-
- print("TEST-UNEXPECTED-FAIL |", end='')
- if num_failures > expected_num_failures:
- print("More out-of-memory errors were found ({0}) than expected ({1}). "
- "This probably means an allocation site has been added without a "
- "NULL-check. If this is unavoidable, you can account for it by "
- "updating Makefile.in.".format(num_failures, expected_num_failures),
- end='')
- else:
- print("Congratulations, you have removed {0} out-of-memory error(s) "
- "({1} remain)! Please account for it by updating Makefile.in."
- .format(expected_num_failures - num_failures, num_failures),
- end='')
- sys.exit(-1)
- else:
- print('TEST-PASS | find_OOM_errors | Found the expected number of OOM '
- 'errors ({0})'.format(expected_num_failures))
-
diff --git a/js/src/config/gcc_hidden.h b/js/src/config/gcc_hidden.h
deleted file mode 100644
index 075e68c88bf..00000000000
--- a/js/src/config/gcc_hidden.h
+++ /dev/null
@@ -1,6 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-/* Begin all files as hidden visibility */
-#pragma GCC visibility push(hidden)
diff --git a/js/src/config/make-system-wrappers.pl b/js/src/config/make-system-wrappers.pl
deleted file mode 100644
index fa0873a78e0..00000000000
--- a/js/src/config/make-system-wrappers.pl
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/perl
-#
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-$output_dir = shift;
-
-while (<STDIN>) {
- chomp;
- if (-e "$output_dir/$_") {
- next;
- }
-
-    if (/(.*)\/[^\/]*$/) {
- mkdir "$output_dir/$1";
- }
-
- open OUT, ">$output_dir/$_";
- print OUT "#pragma GCC system_header\n"; # suppress include_next warning
- print OUT "#pragma GCC visibility push(default)\n";
- print OUT "#include_next \<$_\>\n";
- print OUT "#pragma GCC visibility pop\n";
- close OUT;
-}
-
diff --git a/js/src/config/makefiles/autotargets.mk b/js/src/config/makefiles/autotargets.mk
deleted file mode 100644
index 16e06fb2a41..00000000000
--- a/js/src/config/makefiles/autotargets.mk
+++ /dev/null
@@ -1,94 +0,0 @@
-# -*- makefile -*-
-# vim:set ts=8 sw=8 sts=8 noet:
-#
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this file,
-# You can obtain one at http://mozilla.org/MPL/2.0/.
-#
-
-ifndef INCLUDED_AUTOTARGETS_MK #{
-
-# Conditional does not wrap the entire file so multiple
-# includes will be able to accumulate dependencies.
-
-###########################################################################
-# AUTO_DEPS - A list of deps/targets derived from other macros.
-###########################################################################
-
-MKDIR ?= mkdir -p
-TOUCH ?= touch
-
-# declare for local use, rules.mk may not have been loaded
-space = $(NULL) $(NULL)
-
-# Deps will be considered intermediate when used as a pre-requisite for
-# wildcard targets. Inhibit their removal, mkdir -p is a standalone op.
-.PRECIOUS: %/.mkdir.done
-
-#########################
-##---] FUNCTIONS [---##
-#########################
-
-# Squeeze can be overzealous, restore root for abspath
-getPathPrefix =$(if $(filter /%,$(1)),/)
-
-# Squeeze '//' from the path, easily created by string functions
-_slashSqueeze =$(foreach val,$(getargv),$(call getPathPrefix,$(val))$(subst $(space),/,$(strip $(subst /,$(space),$(val)))))
-
-# Squeeze extraneous directory slashes from the path
-# o protect embedded spaces within the path
-# o replace //+ sequences with /
-slash_strip = \
- $(strip \
- $(subst <--[**]-->,$(space),\
- $(call _slashSqueeze,\
- $(subst $(space),<--[**]-->,$(1))\
- )))
-
-# Extract directory path from a dependency file.
-mkdir_stem =$(foreach val,$(getargv),$(subst /.mkdir.done,$(NULL),$(val)))
-
-## Generate timestamp file for threadsafe directory creation
-mkdir_deps =$(foreach dir,$(getargv),$(call slash_strip,$(dir)/.mkdir.done))
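-# For example: $(call mkdir_deps,GENERATED_DIRS) (used below) turns each
-# generated directory foo into a foo/.mkdir.done dependency, which the
-# pattern rule below creates with mkdir -p and timestamps.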
-
-#######################
-##---] TARGETS [---##
-#######################
-
-%/.mkdir.done: # mkdir -p -p => mkdir -p
- $(subst $(space)-p,$(null),$(MKDIR)) -p '$(dir $@)'
-# Make the timestamp old enough that it is not a problem for symbolic-link
-# targets depending on it. Use Jan 3, 1980 to accommodate any timezone where
-# 198001010000 would translate to something older than the FAT epoch.
- @$(TOUCH) -t 198001030000 '$@'
-
-# A handful of makefiles are attempting "mkdir dot".
-# tbpl/valgrind builds are using this target
-# https://bugzilla.mozilla.org/show_bug.cgi?id=837754
-.mkdir.done:
- @echo 'WARNING: $(MKDIR) -dot- requested by $(MAKE) -C $(CURDIR) $(MAKECMDGOALS)'
- @$(TOUCH) -t 198001030000 '$@'
-
-INCLUDED_AUTOTARGETS_MK = 1
-endif #}
-
-
-## Accumulate deps and cleanup
-ifneq (,$(GENERATED_DIRS))
- GENERATED_DIRS := $(strip $(sort $(GENERATED_DIRS)))
- tmpauto :=$(call mkdir_deps,GENERATED_DIRS)
- GENERATED_DIRS_DEPS +=$(tmpauto)
- GARBAGE_DIRS +=$(GENERATED_DIRS)
-endif
-
-#################################################################
-# One ring/dep to rule them all:
-# config/rules.mk::all target is available by default
-# Add $(AUTO_DEPS) as an explicit target dependency when needed.
-#################################################################
-
-AUTO_DEPS +=$(GENERATED_DIRS_DEPS)
-AUTO_DEPS := $(strip $(sort $(AUTO_DEPS)))
-
-# Complain loudly if deps have not loaded so getargv != $(NULL)
-$(call requiredfunction,getargv)
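Reviewer note: autotargets.mk, deleted above, makes directory creation usable as a make prerequisite by touching a .mkdir.done sentinel inside each generated directory and backdating it to January 3, 1980, so rebuilding the directory never makes dependents look out of date. A minimal Python sketch of that sentinel trick, with an illustrative directory name:

    import os
    import time

    # Jan 3, 1980, local time; matches the "touch -t 198001030000" in the removed rule.
    OLD_TIMESTAMP = time.mktime((1980, 1, 3, 0, 0, 0, 0, 0, -1))

    def mkdir_deps(directory):
        """Create `directory` and return the path of a backdated sentinel inside it."""
        sentinel = os.path.join(directory, ".mkdir.done")
        os.makedirs(directory, exist_ok=True)
        with open(sentinel, "a"):
            pass
        os.utime(sentinel, (OLD_TIMESTAMP, OLD_TIMESTAMP))  # keep the stamp older than any real target
        return sentinel

    if __name__ == "__main__":
        print(mkdir_deps("_build/generated"))   # hypothetical generated directory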
diff --git a/js/src/config/makefiles/debugmake.mk b/js/src/config/makefiles/debugmake.mk
deleted file mode 100644
index 3c5a39741a7..00000000000
--- a/js/src/config/makefiles/debugmake.mk
+++ /dev/null
@@ -1,121 +0,0 @@
-# -*- makefile -*-
-# vim:set ts=8 sw=8 sts=8 noet:
-#
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this file,
-# You can obtain one at http://mozilla.org/MPL/2.0/.
-#
-
-###########################################################################
-## Intent: Helper targets for displaying variables and state information
-###########################################################################
-
-# Support usage outside of config/rules.mk
-ifndef INCLUDED_DEBUGMAKE_MK #{
-
-define shell_quote
-'$(subst ','\'',$(1))'
-endef
-
-echo-variable-%:
- @echo $(call shell_quote,$($*))
-
-echo-tiers:
- @echo $(TIERS)
-
-echo-tier-dirs:
- @$(foreach tier,$(TIERS),echo '$(tier):'; echo ' dirs: $(tier_$(tier)_dirs)'; $(if $(tier_$(tier)_staticdirs),echo ' staticdirs: $(tier_$(tier)_staticdirs)';) )
-
-echo-dirs:
- @echo $(call shell_quote,$(DIRS))
-
-define print_var
-@printf '%20s = %s\n' $1 $(call shell_quote,$($1))
-
-endef
-
-define print_vars
-$(foreach var,$1,$(call print_var,$(var)))
-endef
-
-showtargs:
-ifneq (,$(filter $(PROGRAM) $(HOST_PROGRAM) $(SIMPLE_PROGRAMS) $(HOST_LIBRARY) $(LIBRARY) $(SHARED_LIBRARY),$(TARGETS)))
- @echo --------------------------------------------------------------------------------
- $(call print_vars,\
- PROGRAM \
- SIMPLE_PROGRAMS \
- LIBRARY \
- SHARED_LIBRARY \
- SHARED_LIBRARY_LIBS \
- LIBS \
- DEF_FILE \
- IMPORT_LIBRARY \
- STATIC_LIBS \
- EXTRA_DSO_LDOPTS \
- DEPENDENT_LIBS \
- )
- @echo --------------------------------------------------------------------------------
-endif
- $(LOOP_OVER_PARALLEL_DIRS)
- $(LOOP_OVER_DIRS)
- $(LOOP_OVER_TOOL_DIRS)
-
-showbuild:
- $(call print_vars,\
- MOZ_BUILD_ROOT \
- MOZ_WIDGET_TOOLKIT \
- CC \
- CXX \
- CCC \
- CPP \
- LD \
- AR \
- IMPLIB \
- FILTER \
- MKSHLIB \
- MKCSHLIB \
- RC \
- MC \
- CFLAGS \
- OS_CFLAGS \
- COMPILE_CFLAGS \
- CXXFLAGS \
- OS_CXXFLAGS \
- COMPILE_CXXFLAGS \
- COMPILE_CMFLAGS \
- COMPILE_CMMFLAGS \
- LDFLAGS \
- OS_LDFLAGS \
- DSO_LDOPTS \
- OS_INCLUDES \
- OS_LIBS \
- EXTRA_LIBS \
- BIN_FLAGS \
- INCLUDES \
- DEFINES \
- ACDEFINES \
- BIN_SUFFIX \
- LIB_SUFFIX \
- DLL_SUFFIX \
- IMPORT_LIB_SUFFIX \
- INSTALL \
- VPATH \
- )
-
-showhost:
- $(call print_vars,\
- HOST_CC \
- HOST_CXX \
- HOST_CFLAGS \
- HOST_LDFLAGS \
- HOST_LIBS \
- HOST_EXTRA_LIBS \
- HOST_EXTRA_DEPS \
- HOST_PROGRAM \
- HOST_OBJS \
- HOST_PROGOBJS \
- HOST_LIBRARY \
- )
-
-INCLUDED_DEBUGMAKE_MK = 1
-endif #}
diff --git a/js/src/config/makefiles/functions.mk b/js/src/config/makefiles/functions.mk
deleted file mode 100644
index d97b604f994..00000000000
--- a/js/src/config/makefiles/functions.mk
+++ /dev/null
@@ -1,34 +0,0 @@
-#
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-#
-# functions.mk
-#
-# Defines functions that are needed by various Makefiles throughout the build
-# system, which are needed before config.mk can be included.
-#
-
-# Define an include-at-most-once flag
-ifdef INCLUDED_FUNCTIONS_MK
-$(error Do not include functions.mk twice!)
-endif
-INCLUDED_FUNCTIONS_MK = 1
-
-core_abspath = $(error core_abspath is unsupported, use $$(abspath) instead)
-core_realpath = $(error core_realpath is unsupported)
-
-core_winabspath = $(error core_winabspath is unsupported)
-
-# Run a named Python build action. The first argument is the name of the build
-# action. The second argument are the arguments to pass to the action (space
-# delimited arguments). e.g.
-#
-# libs::
-# $(call py_action,purge_manifests,_build_manifests/purge/foo.manifest)
-ifdef .PYMAKE
-py_action = %mozbuild.action.$(1) main $(2)
-else
-py_action = $(PYTHON) -m mozbuild.action.$(1) $(2)
-endif
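Reviewer note: the py_action helper in the file removed above expands to `python -m mozbuild.action.<name> <args>` (or pymake's in-process %mozbuild form). The only contract it relies on is a module with a main() entry point runnable via -m; the module and argument names below are invented stand-ins, not real mozbuild actions.

    # Hypothetical stand-in for a build action module, e.g. myactions/purge_manifests.py.
    import sys

    def main(args):
        # A real action would do its work here; this sketch only echoes its arguments.
        for manifest in args:
            print("would process", manifest)
        return 0

    if __name__ == "__main__":
        sys.exit(main(sys.argv[1:]))

With such a module importable as myactions.purge_manifests, the non-pymake branch of py_action would effectively run `python -m myactions.purge_manifests foo.manifest`; the real actions live under the mozbuild.action package.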
diff --git a/js/src/config/makefiles/java-build.mk b/js/src/config/makefiles/java-build.mk
deleted file mode 100644
index b3f34ab1d03..00000000000
--- a/js/src/config/makefiles/java-build.mk
+++ /dev/null
@@ -1,113 +0,0 @@
-# -*- makefile -*-
-# vim:set ts=8 sw=8 sts=8 noet:
-#
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this file,
-# You can obtain one at http://mozilla.org/MPL/2.0/.
-
-ifndef INCLUDED_JAVA_BUILD_MK #{
-
-ifdef JAVAFILES #{
-GENERATED_DIRS += classes
-
-export:: classes
-classes: $(call mkdir_deps,classes)
-endif #} JAVAFILES
-
-
-ifdef ANDROID_APK_NAME #{
-android_res_dirs := $(addprefix $(srcdir)/,$(or $(ANDROID_RES_DIRS),res))
-_ANDROID_RES_FLAG := $(addprefix -S ,$(android_res_dirs))
-_ANDROID_ASSETS_FLAG := $(addprefix -A ,$(ANDROID_ASSETS_DIR))
-
-GENERATED_DIRS += classes
-
-classes.dex: $(call mkdir_deps,classes)
-classes.dex: R.java
-classes.dex: $(ANDROID_APK_NAME).ap_
-classes.dex: $(JAVAFILES)
- $(JAVAC) $(JAVAC_FLAGS) -d classes $(filter %.java,$^)
- $(DX) --dex --output=$@ classes $(ANDROID_EXTRA_JARS)
-
-# R.java and $(ANDROID_APK_NAME).ap_ are both produced by aapt. To
-# save an aapt invocation, we produce them both at the same time.
-
-R.java: .aapt.deps
-$(ANDROID_APK_NAME).ap_: .aapt.deps
-
-# This uses the fact that Android resource directories list all
-# resource files one subdirectory below the parent resource directory.
-android_res_files := $(wildcard $(addsuffix /*,$(wildcard $(addsuffix /*,$(android_res_dirs)))))
-
-.aapt.deps: AndroidManifest.xml $(android_res_files) $(wildcard $(ANDROID_ASSETS_DIR))
- $(AAPT) package -f -M $< -I $(ANDROID_SDK)/android.jar $(_ANDROID_RES_FLAG) $(_ANDROID_ASSETS_FLAG) \
- -J ${@D} \
- -F $(ANDROID_APK_NAME).ap_
- @$(TOUCH) $@
-
-$(ANDROID_APK_NAME)-unsigned-unaligned.apk: $(ANDROID_APK_NAME).ap_ classes.dex
- cp $< $@
- $(ZIP) -0 $@ classes.dex
-
-$(ANDROID_APK_NAME)-unaligned.apk: $(ANDROID_APK_NAME)-unsigned-unaligned.apk
- cp $< $@
- $(DEBUG_JARSIGNER) $@
-
-$(ANDROID_APK_NAME).apk: $(ANDROID_APK_NAME)-unaligned.apk
- $(ZIPALIGN) -f -v 4 $< $@
-
-GARBAGE += \
- R.java \
- classes.dex \
- $(ANDROID_APK_NAME).ap_ \
- $(ANDROID_APK_NAME)-unsigned-unaligned.apk \
- $(ANDROID_APK_NAME)-unaligned.apk \
- $(ANDROID_APK_NAME).apk \
- $(NULL)
-
-JAVA_CLASSPATH := $(ANDROID_SDK)/android.jar
-ifdef ANDROID_EXTRA_JARS #{
-JAVA_CLASSPATH := $(JAVA_CLASSPATH):$(subst $(NULL) ,:,$(strip $(ANDROID_EXTRA_JARS)))
-endif #} ANDROID_EXTRA_JARS
-
-# Include Android specific java flags, instead of what's in rules.mk.
-include $(topsrcdir)/config/android-common.mk
-endif #} ANDROID_APK_NAME
-
-
-ifdef JAVA_JAR_TARGETS #{
-# Arg 1: Output target name with .jar suffix, like jars/jarfile.jar.
-# Intermediate class files are generated in jars/jarfile-classes.
-# Arg 2: Java sources list. We use VPATH and $^ so sources can be
-# relative to $(srcdir) or $(CURDIR).
-# Arg 3: List of extra jars to link against. We do not use VPATH so
-# jars must be relative to $(CURDIR).
-# Arg 4: Additional JAVAC_FLAGS.
-define java_jar_template
-$(1): $(2) $(3)
- $$(REPORT_BUILD)
- @$$(NSINSTALL) -D $(1:.jar=)-classes
- @$$(if $$(filter-out .,$$(@D)),$$(NSINSTALL) -D $$(@D))
- $$(JAVAC) $$(JAVAC_FLAGS)\
- $(4)\
- -d $(1:.jar=)-classes\
- $(if $(strip $(3)),-classpath $(subst $(NULL) ,:,$(strip $(3))))\
- $$(filter %.java,$$^)
- $$(JAR) cMf $$@ -C $(1:.jar=)-classes .
-
-GARBAGE += $(1)
-
-GARBAGE_DIRS += $(1:.jar=)-classes
-endef
-
-$(foreach jar,$(JAVA_JAR_TARGETS),\
- $(if $($(jar)_DEST),,$(error Missing $(jar)_DEST))\
- $(if $($(jar)_JAVAFILES),,$(error Missing $(jar)_JAVAFILES))\
- $(eval $(call java_jar_template,$($(jar)_DEST),$($(jar)_JAVAFILES) $($(jar)_PP_JAVAFILES),$($(jar)_EXTRA_JARS),$($(jar)_JAVAC_FLAGS)))\
-)
-endif #} JAVA_JAR_TARGETS
-
-
-INCLUDED_JAVA_BUILD_MK := 1
-
-endif #} INCLUDED_JAVA_BUILD_MK
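Reviewer note: the APK rules deleted above chain aapt, javac, dx, zip, jarsigner and zipalign to turn resources and Java sources into a signed, aligned package. The Python sketch below mirrors that command sequence for illustration only; the helper name, the argument defaults, and the bare jarsigner call (the real rule uses $(DEBUG_JARSIGNER), which carries keystore options) are assumptions.

    import shutil
    import subprocess

    def build_apk(apk_name, java_files, android_jar, res_flags=(), assets_flags=(), extra_jars=()):
        run = lambda *cmd: subprocess.check_call(list(cmd))

        # aapt emits R.java and the resource package (.ap_) in a single invocation.
        run("aapt", "package", "-f", "-M", "AndroidManifest.xml", "-I", android_jar,
            *res_flags, *assets_flags, "-J", ".", "-F", apk_name + ".ap_")

        # Compile the Java sources, then translate the class files to Dalvik bytecode.
        run("javac", "-d", "classes", *java_files)
        run("dx", "--dex", "--output=classes.dex", "classes", *extra_jars)

        # Add classes.dex to the resource package, sign the result, then align it.
        shutil.copy(apk_name + ".ap_", apk_name + "-unsigned-unaligned.apk")
        run("zip", "-0", apk_name + "-unsigned-unaligned.apk", "classes.dex")
        shutil.copy(apk_name + "-unsigned-unaligned.apk", apk_name + "-unaligned.apk")
        run("jarsigner", apk_name + "-unaligned.apk")   # placeholder for $(DEBUG_JARSIGNER) and its keystore options
        run("zipalign", "-f", "-v", "4", apk_name + "-unaligned.apk", apk_name + ".apk")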
diff --git a/js/src/config/makefiles/makeutils.mk b/js/src/config/makefiles/makeutils.mk
deleted file mode 100644
index 0ae1044643c..00000000000
--- a/js/src/config/makefiles/makeutils.mk
+++ /dev/null
@@ -1,121 +0,0 @@
-# -*- makefile -*-
-# vim:set ts=8 sw=8 sts=8 noet:
-#
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this file,
-# You can obtain one at http://mozilla.org/MPL/2.0/.
-
-## Identify function argument types
-istype =$(if $(value ${1}),list,scalar)
-isval =$(if $(filter-out list,$(call istype,${1})),true)
-isvar =$(if $(filter-out scalar,$(call istype,${1})),true)
-
-# Access up to 9 arguments passed, option needed to emulate $*
-# Inline for function expansion, do not use $(call )
-argv =$(strip
-argv +=$(if $(1), $(1))$(if $(2), $(2))$(if $(3), $(3))$(if $(4), $(4))
-argv +=$(if $(5), $(5))$(if $(6), $(6))$(if $(7), $(7))$(if $(8), $(8))
-argv +=$(if $(9), $(9))
-argv +=$(if $(10), $(error makeutils.mk::argv can only handle 9 arguments))
-argv +=)
-
-###########################################################################
-## Access function args as a simple list, inline within user functions.
-## Usage: $(info ** $(call banner,$(getargv)))
-## $(call banner,scalar)
-## $(call banner,list0 list1 list2)
-## $(call banner,ref) ; ref=foo bar tans
-## getarglist() would be a more accurate name but is longer to type
-getargv = $(if $(call isvar,$(1)),$($(1)),$(argv))
-
-###########################################################################
-# Strip [n] leading options from an argument list. This will allow passing
-# extra args to user functions that will not propagate to sub-$(call )'s
-# Usage: $(call subargv,2)
-subargv =$(wordlist $(1),$(words $(getargv)),$(getargv))
-
-###########################################################################
-# Intent: Display a distinct banner heading in the output stream
-# Usage: $(call banner,BUILDING: foo bar tans)
-# Debug:
-# target-preqs = \
-# $(call banner,target-preqs-BEGIN) \
-# foo bar tans \
-# $(call banner,target-preqs-END) \
-# $(NULL)
-# target: $(target-preqs)
-
-banner = \
-$(info ) \
-$(info ***************************************************************************) \
-$(info ** $(getargv)) \
-$(info ***************************************************************************) \
-$(NULL)
-
-#####################################################################
-# Intent: Determine if a string or pattern is contained in a list
-# Usage: strcmp - $(call is_XinY,clean,$(MAKECMDGOALS))
-# : pattern - $(call is_XinY,clean%,$(MAKECMDGOALS))
-is_XinY =$(filter $(1),$(call subargv,3,$(getargv)))
-
-#####################################################################
-# Provide an alternate var to support testing
-ifdef MAKEUTILS_UNIT_TEST
- mcg_goals=TEST_MAKECMDGOALS
-else
- mcg_goals=MAKECMDGOALS
-endif
-
-# Intent: Conditionals for detecting common/tier target use
-isTargetStem = $(sort \
- $(foreach var,$(getargv),\
- $(foreach pat,$(var)% %$(var),\
- $(call is_XinY,$(pat),${$(mcg_goals)})\
- )))
-isTargetStemClean = $(call isTargetStem,clean)
-isTargetStemExport = $(call isTargetStem,export)
-isTargetStemLibs = $(call isTargetStem,libs)
-isTargetStemTools = $(call isTargetStem,tools)
-
-##################################################
-# Intent: Validation functions / unit test helpers
-
-errorifneq =$(if $(subst $(strip $(1)),$(NULL),$(strip $(2))),$(error expected [$(1)] but found [$(2)]))
-
-# Intent: verify function declaration exists
-requiredfunction =$(foreach func,$(1) $(2) $(3) $(4) $(5) $(6) $(7) $(8) $(9),$(if $(value $(func)),$(NULL),$(error required function [$(func)] is unavailable)))
-
-
-
-## http://www.gnu.org/software/make/manual/make.html#Call-Function
-## Usage: o = $(call map,origin,o map $(MAKE))
-map = $(foreach val,$(2),$(call $(1),$(val)))
-
-
-## Disable checking for clean targets
-ifeq (,$(filter %clean clean%,$(MAKECMDGOALS))) #{
-
-# Usage: $(call checkIfEmpty,[error|warning] foo NULL bar)
-checkIfEmpty =$(foreach var,$(wordlist 2,100,$(argv)),$(if $(strip $($(var))),$(NOP),$(call $(1),Variable $(var) does not contain a value)))
-
-# Usage: $(call errorIfEmpty,foo NULL bar)
-errorIfEmpty =$(call checkIfEmpty,error $(argv))
-warnIfEmpty =$(call checkIfEmpty,warning $(argv))
-
-endif #}
-
-###########################################################################
-## Common makefile library loader
-###########################################################################
-topORerr =$(if $(topsrcdir),$(topsrcdir),$(error topsrcdir is not defined))
-
-ifdef USE_AUTOTARGETS_MK # mkdir_deps
- include $(topORerr)/config/makefiles/autotargets.mk
-endif
-
-ifdef USE_RCS_MK
- include $(topORerr)/config/makefiles/rcs.mk
-endif
-
-## copy(src, dst): recursive copy
-copy_dir = (cd $(1)/. && $(TAR) $(TAR_CREATE_FLAGS) - .) | (cd $(2)/. && tar -xf -)
diff --git a/js/src/config/makefiles/mochitest.mk b/js/src/config/makefiles/mochitest.mk
deleted file mode 100644
index 82182cbb998..00000000000
--- a/js/src/config/makefiles/mochitest.mk
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- makefile -*-
-# vim:set ts=8 sw=8 sts=8 noet:
-#
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this file,
-# You can obtain one at http://mozilla.org/MPL/2.0/.
-
-ifndef INCLUDED_TESTS_MOCHITEST_MK #{
-
-# $1- test directory name
-# $2- optional: if passed, a dot is used so the copy flattens the directory
-#     hierarchy; otherwise files are installed under $(relativesrcdir)
-mochitestdir = \
- $(strip \
- $(if $(2),$(DEPTH)/_tests/testing/mochitest/$1/. \
- ,$(DEPTH)/_tests/testing/mochitest/$1/$(relativesrcdir) \
- ))
-
-
-ifdef MOCHITEST_FILES
-MOCHITEST_DEST := $(call mochitestdir,tests)
-INSTALL_TARGETS += MOCHITEST
-endif
-
-ifdef MOCHITEST_CHROME_FILES
-MOCHITEST_CHROME_DEST := $(call mochitestdir,chrome)
-INSTALL_TARGETS += MOCHITEST_CHROME
-endif
-
-ifdef MOCHITEST_BROWSER_FILES
-MOCHITEST_BROWSER_DEST := $(call mochitestdir,browser)
-INSTALL_TARGETS += MOCHITEST_BROWSER
-endif
-
-ifdef MOCHITEST_A11Y_FILES
-MOCHITEST_A11Y_DEST := $(call mochitestdir,a11y)
-INSTALL_TARGETS += MOCHITEST_A11Y
-endif
-
-ifdef MOCHITEST_METRO_FILES
-MOCHITEST_METRO_DEST := $(call mochitestdir,metro)
-INSTALL_TARGETS += MOCHITEST_METRO
-endif
-
-ifdef MOCHITEST_ROBOCOP_FILES
-MOCHITEST_ROBOCOP_DEST := $(call mochitestdir,tests/robocop,flat_hierarchy)
-INSTALL_TARGETS += MOCHITEST_ROBOCOP
-endif
-
-INCLUDED_TESTS_MOCHITEST_MK := 1
-
-endif #} INCLUDED_TESTS_MOCHITEST_MK
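Reviewer note: the mochitestdir helper above maps a test flavour to its install directory under _tests/testing/mochitest, either mirroring the source directory or, when the second argument is passed, flattening the hierarchy (as for robocop). A small Python rendering of the same mapping; the example values are made up and the printed paths assume a POSIX host.

    import os

    def mochitestdir(depth, flavour, relativesrcdir, flatten=False):
        base = os.path.join(depth, "_tests", "testing", "mochitest", flavour)
        return base if flatten else os.path.join(base, relativesrcdir)

    # Hypothetical example values:
    print(mochitestdir("../..", "tests", "dom/tests/mochitest"))
    # -> ../../_tests/testing/mochitest/tests/dom/tests/mochitest
    print(mochitestdir("../..", "tests/robocop", "mobile/android/base/tests", flatten=True))
    # -> ../../_tests/testing/mochitest/tests/robocop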
diff --git a/js/src/config/makefiles/nonrecursive.mk b/js/src/config/makefiles/nonrecursive.mk
deleted file mode 100644
index 498de568e72..00000000000
--- a/js/src/config/makefiles/nonrecursive.mk
+++ /dev/null
@@ -1,68 +0,0 @@
-# -*- makefile -*-
-# vim:set ts=8 sw=8 sts=8 noet:
-#
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-# The purpose of this file is to pull in non-recursive targets when performing
-# a partial tree (not top-level) build. This will allow people to continue to
-# build individual directories while some of the targets may not be normally
-# defined in that make file.
-#
-# Non-recursive targets are attached to existing make targets. The
-# NONRECURSIVE_TARGETS variable lists the make targets that are modified. For
-# each target in this list, the NONRECURSIVE_TARGETS_<target> variable will
-# contain a list of partial variable names. We will then look in variables
-# named NONRECURSIVE_TARGETS_<target>_<partial>_* for information describing
-# how to evaluate non-recursive make targets.
-#
-# Targets are defined by the following variables:
-#
-# FILE - The make file to evaluate. This is equivalent to
-# |make -f <FILE>|
-# DIRECTORY - The directory whose Makefile to evaluate. This is
-# equivalent to |make -C <DIRECTORY>|.
-# TARGETS - Targets to evaluate in that make file.
-#
-# Only 1 of FILE or DIRECTORY may be defined.
-#
-# For example:
-#
-# NONRECURSIVE_TARGETS = export libs
-# NONRECURSIVE_TARGETS_export = headers
-# NONRECURSIVE_TARGETS_export_headers_FILE = /path/to/exports.mk
-# NONRECURSIVE_TARGETS_export_headers_TARGETS = $(DIST)/include/foo.h $(DIST)/include/bar.h
-# NONRECURSIVE_TARGETS_libs = cppsrcs
-# NONRECURSIVE_TARGETS_libs_cppsrcs_DIRECTORY = $(DEPTH)/foo
-# NONRECURSIVE_TARGETS_libs_cppsrcs_TARGETS = /path/to/foo.o /path/to/bar.o
-#
-# Will get turned into the following:
-#
-# exports::
-# $(MAKE) -C $(DEPTH) -f /path/to/exports.mk $(DIST)/include/foo.h $(DIST)/include/bar.h
-#
-# libs::
-# $(MAKE) -C $(DEPTH)/foo /path/to/foo.o /path/to/bar.o
-
-ifndef INCLUDED_NONRECURSIVE_MK
-
-define define_nonrecursive_target
-$(1)::
- $$(MAKE) -C $(or $(4),$$(DEPTH)) $(addprefix -f ,$(3)) $(2)
-endef
-
-$(foreach target,$(NONRECURSIVE_TARGETS), \
- $(foreach entry,$(NONRECURSIVE_TARGETS_$(target)), \
- $(eval $(call define_nonrecursive_target, \
- $(target), \
- $(NONRECURSIVE_TARGETS_$(target)_$(entry)_TARGETS), \
- $(NONRECURSIVE_TARGETS_$(target)_$(entry)_FILE), \
- $(NONRECURSIVE_TARGETS_$(target)_$(entry)_DIRECTORY), \
- )) \
- ) \
-)
-
-INCLUDED_NONRECURSIVE_MK := 1
-endif
-
diff --git a/js/src/config/makefiles/rcs.mk b/js/src/config/makefiles/rcs.mk
deleted file mode 100644
index b9acada47b3..00000000000
--- a/js/src/config/makefiles/rcs.mk
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- makefile -*-
-# vim:set ts=8 sw=8 sts=8 noet:
-#
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this file,
-# You can obtain one at http://mozilla.org/MPL/2.0/.
-#
-
-ifdef USE_RCS_MK #{
-
-ifndef INCLUDED_RCS_MK #{
-
-MOZ_RCS_TYPE_HG ?= $(notdir $(wildcard $(topsrcdir)/.hg))
-MOZ_RCS_TYPE_GIT ?= $(notdir $(wildcard $(topsrcdir)/.git))
-
-
-###########################################################################
-# HAVE_MERCURIAL_RCS
-###########################################################################
-ifeq (.hg,$(MOZ_RCS_TYPE_HG)) #{
-
-# Intent: Retrieve the http:// repository path for a directory.
-# Usage: $(call getSourceRepo[,repo_dir|args])
-# Args:
-# path (optional): repository to query. Defaults to $(topsrcdir)
-getSourceRepo = \
- $(call FUNC_getSourceRepo,$(if $(1),cd $(1) && hg,hg --repository $(topsrcdir)))
-
-# return: http://hg.mozilla.org/mozilla-central
-FUNC_getSourceRepo = \
- $(strip \
- $(patsubst %/,%,\
- $(patsubst ssh://%,http://%,\
- $(firstword $(shell $(getargv) showconfig paths.default))\
- )))
-
-#} HAVE_MERCURIAL_RCS
-
-###########################################################################
-# HAVE_GIT_RCS
-###########################################################################
-else ifeq (.git,$(MOZ_RCS_TYPE_GIT)) #{
-
-GIT ?= git
-getSourceRepo = \
- $(shell cd $(topsrcdir) && $(GIT) rev-parse --verify HEAD)
-
-endif #} HAVE_GIT_RCS
-
-
-INCLUDED_RCS_MK := 1
-endif #}
-
-endif #}
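Reviewer note: rcs.mk, removed above, chooses its repository query by checking for a .hg or .git directory at the top of the source tree: Mercurial trees report the default pull path normalized to http://, git trees report the current HEAD. A Python sketch of the same detection, using the same hg and git commands the make code shells out to:

    import os
    import subprocess

    def get_source_repo(topsrcdir):
        if os.path.isdir(os.path.join(topsrcdir, ".hg")):
            # Same query as the Mercurial branch above: the default pull path,
            # with ssh:// rewritten to http:// and any trailing slash dropped.
            out = subprocess.check_output(
                ["hg", "--repository", topsrcdir, "showconfig", "paths.default"],
                universal_newlines=True)
            return out.split()[0].replace("ssh://", "http://", 1).rstrip("/")
        if os.path.isdir(os.path.join(topsrcdir, ".git")):
            # The git branch above reports the current HEAD revision instead.
            return subprocess.check_output(
                ["git", "rev-parse", "--verify", "HEAD"],
                cwd=topsrcdir, universal_newlines=True).strip()
        return None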
diff --git a/js/src/config/makefiles/target_binaries.mk b/js/src/config/makefiles/target_binaries.mk
deleted file mode 100644
index 6bb79244761..00000000000
--- a/js/src/config/makefiles/target_binaries.mk
+++ /dev/null
@@ -1,122 +0,0 @@
-# -*- makefile -*-
-# vim:set ts=8 sw=8 sts=8 noet:
-#
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-ifdef EXPORT_LIBRARY
-ifeq ($(EXPORT_LIBRARY),1)
-ifdef IS_COMPONENT
-EXPORT_LIBRARY = $(DEPTH)/staticlib/components
-else
-EXPORT_LIBRARY = $(DEPTH)/staticlib
-endif
-else
-# If EXPORT_LIBRARY has a value, we'll be installing there. We also need to clean up there.
-GARBAGE += $(foreach lib,$(LIBRARY),$(EXPORT_LIBRARY)/$(lib))
-endif
-endif # EXPORT_LIBRARY
-
-binaries libs:: $(SUBMAKEFILES) $(TARGETS)
-ifndef NO_DIST_INSTALL
-ifdef SHARED_LIBRARY
-ifdef IS_COMPONENT
- $(INSTALL) $(IFLAGS2) $(SHARED_LIBRARY) $(FINAL_TARGET)/components
-ifndef NO_COMPONENTS_MANIFEST
- $(call py_action,buildlist,$(FINAL_TARGET)/chrome.manifest 'manifest components/components.manifest')
- $(call py_action,buildlist,$(FINAL_TARGET)/components/components.manifest 'binary-component $(SHARED_LIBRARY)')
-endif
-endif # IS_COMPONENT
-endif # SHARED_LIBRARY
-endif # !NO_DIST_INSTALL
-
-ifndef NO_DIST_INSTALL
-
-ifneq (,$(strip $(PROGRAM)$(SIMPLE_PROGRAMS)))
-PROGRAMS_EXECUTABLES = $(SIMPLE_PROGRAMS) $(PROGRAM)
-PROGRAMS_DEST ?= $(FINAL_TARGET)
-PROGRAMS_TARGET := binaries libs
-INSTALL_TARGETS += PROGRAMS
-endif
-
-ifdef LIBRARY
-ifdef EXPORT_LIBRARY
-LIBRARY_FILES = $(LIBRARY)
-LIBRARY_DEST ?= $(EXPORT_LIBRARY)
-LIBRARY_TARGET = binaries libs
-INSTALL_TARGETS += LIBRARY
-endif
-ifdef DIST_INSTALL
-ifdef IS_COMPONENT
-$(error Shipping static component libs makes no sense.)
-else
-DIST_LIBRARY_FILES = $(LIBRARY)
-DIST_LIBRARY_DEST ?= $(DIST)/lib
-DIST_LIBRARY_TARGET = binaries libs
-INSTALL_TARGETS += DIST_LIBRARY
-endif
-endif # DIST_INSTALL
-endif # LIBRARY
-
-
-ifdef SHARED_LIBRARY
-ifndef IS_COMPONENT
-SHARED_LIBRARY_FILES = $(SHARED_LIBRARY)
-SHARED_LIBRARY_DEST ?= $(FINAL_TARGET)
-SHARED_LIBRARY_TARGET = binaries libs
-INSTALL_TARGETS += SHARED_LIBRARY
-
-ifneq (,$(filter OS2 WINNT,$(OS_ARCH)))
-ifndef NO_INSTALL_IMPORT_LIBRARY
-IMPORT_LIB_FILES = $(IMPORT_LIBRARY)
-endif # NO_INSTALL_IMPORT_LIBRARY
-else
-IMPORT_LIB_FILES = $(SHARED_LIBRARY)
-endif
-
-IMPORT_LIB_DEST ?= $(DIST)/lib
-IMPORT_LIB_TARGET = binaries libs
-ifdef IMPORT_LIB_FILES
-INSTALL_TARGETS += IMPORT_LIB
-endif
-
-endif # ! IS_COMPONENT
-endif # SHARED_LIBRARY
-
-ifneq (,$(strip $(HOST_SIMPLE_PROGRAMS)$(HOST_PROGRAM)))
-HOST_PROGRAMS_EXECUTABLES = $(HOST_SIMPLE_PROGRAMS) $(HOST_PROGRAM)
-HOST_PROGRAMS_DEST ?= $(DIST)/host/bin
-HOST_PROGRAMS_TARGET = binaries libs
-INSTALL_TARGETS += HOST_PROGRAMS
-endif
-
-ifdef HOST_LIBRARY
-HOST_LIBRARY_FILES = $(HOST_LIBRARY)
-HOST_LIBRARY_DEST ?= $(DIST)/host/lib
-HOST_LIBRARY_TARGET = binaries libs
-INSTALL_TARGETS += HOST_LIBRARY
-endif
-
-endif # !NO_DIST_INSTALL
-
-ifdef MOZ_PSEUDO_DERECURSE
-BINARIES_INSTALL_TARGETS := $(foreach category,$(INSTALL_TARGETS),$(if $(filter binaries,$($(category)_TARGET)),$(category)))
-
-# Fill a dependency file with all the binaries installed somewhere in $(DIST)
-# and with dependencies on the relevant backend files.
-BINARIES_PP := $(MDDEPDIR)/binaries.pp
-
-$(BINARIES_PP): Makefile $(wildcard backend.mk) $(call mkdir_deps,$(MDDEPDIR))
- @echo '$(strip $(foreach category,$(BINARIES_INSTALL_TARGETS),\
- $(foreach file,$($(category)_FILES) $($(category)_EXECUTABLES),\
- $($(category)_DEST)/$(notdir $(file)): $(file)%\
- )\
- ))binaries: Makefile $(wildcard backend.mk)' | tr % '\n' > $@
-
-else
-binaries::
- $(error The binaries target is not supported without MOZ_PSEUDO_DERECURSE)
-endif
-
-# EOF
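Reviewer note: the $(BINARIES_PP) rule above writes a small makefile fragment mapping each binary installed under $(DIST) back to the file it was built from, plus a 'binaries' target that depends only on the backend files; a later binaries pass includes it to decide which directories need revisiting. A hedged Python sketch of producing such a fragment; the function name and example paths are illustrative.

    def write_binaries_pp(path, install_map, backend_files=("Makefile", "backend.mk")):
        """install_map: {installed_path: built_file} for each binary placed under $(DIST)."""
        with open(path, "w") as out:
            for dest, source in sorted(install_map.items()):
                out.write("%s: %s\n" % (dest, source))
            # The 'binaries' target itself only depends on the backend files.
            out.write("binaries: %s\n" % " ".join(backend_files))

    # Hypothetical example:
    write_binaries_pp("binaries.pp",
                      {"dist/bin/js": "js", "dist/lib/libjs.a": "libjs.a"})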
diff --git a/js/src/config/milestone.pl b/js/src/config/milestone.pl
deleted file mode 100644
index 9aad212d75f..00000000000
--- a/js/src/config/milestone.pl
+++ /dev/null
@@ -1,94 +0,0 @@
-#!/usr/bin/perl -w
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-use Getopt::Long;
-
-use strict;
-use vars qw(
- $OBJDIR
- $SRCDIR
- $TOPSRCDIR
- $SCRIPTDIR
- @TEMPLATE_FILE
- $MILESTONE_FILE
- $MILESTONE
- $MILESTONE_NUM
- @MILESTONE_PARTS
- $MINI_VERSION
- $MICRO_VERSION
- $opt_debug
- $opt_template
- $opt_uaversion
- $opt_symbolversion
- $opt_help
- );
-
-$SCRIPTDIR = $0;
-$SCRIPTDIR =~ s/[^\/]*$//;
-push(@INC,$SCRIPTDIR);
-
-require "Moz/Milestone.pm";
-
-&GetOptions('topsrcdir=s' => \$TOPSRCDIR, 'srcdir=s' => \$SRCDIR, 'objdir=s' => \$OBJDIR, 'debug', 'help', 'template', 'uaversion', 'symbolversion');
-
-if (defined($opt_help)) {
- &usage();
- exit;
-}
-
-if (defined($opt_template)) {
- @TEMPLATE_FILE = @ARGV;
- if ($opt_debug) {
- print("TEMPLATE_FILE = --@TEMPLATE_FILE--\n");
- }
-}
-
-if (!defined($SRCDIR)) { $SRCDIR = '.'; }
-if (!defined($OBJDIR)) { $OBJDIR = '.'; }
-
-$MILESTONE_FILE = "$TOPSRCDIR/config/milestone.txt";
-@MILESTONE_PARTS = (0, 0, 0, 0);
-
-#
-# Grab milestone (top line of $MILESTONE_FILE that starts with a digit)
-#
-my $milestone = Moz::Milestone::getOfficialMilestone($MILESTONE_FILE);
-
-if (@TEMPLATE_FILE) {
- my $TFILE;
-
- foreach $TFILE (@TEMPLATE_FILE) {
- my $BUILT_FILE = "$OBJDIR/$TFILE";
- $TFILE = "$SRCDIR/$TFILE.tmpl";
-
- if (-e $TFILE) {
-
- Moz::Milestone::build_file($TFILE,$BUILT_FILE);
-
- } else {
- warn("$0: No such file $TFILE!\n");
- }
- }
-} elsif(defined($opt_uaversion)) {
- # Only expose the major milestone in the UA string, hide the patch level
- # (bugs 572659 and 870868).
- my $uaversion = Moz::Milestone::getMilestoneMajor($milestone) . ".0";
- print "$uaversion\n";
-} elsif(defined($opt_symbolversion)) {
- # Only expose major milestone and alpha version. Used for symbol versioning
- # on Linux.
- my $symbolversion = Moz::Milestone::getMilestoneMajor($milestone) .
- Moz::Milestone::getMilestoneABWithNum($milestone);
- print "$symbolversion\n";
-} else {
- print "$milestone\n";
-}
-
-sub usage() {
-  print <<END
diff --git a/js/src/config/nsinstall.c b/js/src/config/nsinstall.c
deleted file mode 100644
--- a/js/src/config/nsinstall.c
+++ /dev/null
-#include <sys/types.h> /* OSF/1 requires this before grp.h, so put it first */
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include "pathsub.h"
-
-#ifdef HAVE_GETOPT_H
-#include <getopt.h>
-#endif
-
-#ifdef SUNOS4
-#include "sunos4.h"
-#endif
-
-#ifdef NEXTSTEP
-#include
-#endif
-
-#ifdef __QNX__
-#include
-#endif
-
-#ifdef NEED_S_ISLNK
-#if !defined(S_ISLNK) && defined(S_IFLNK)
-#define S_ISLNK(a) (((a) & S_IFMT) == S_IFLNK)
-#endif
-#endif
-
-#ifndef _DIRECTORY_SEPARATOR
-#define _DIRECTORY_SEPARATOR "/"
-#endif /* _DIRECTORY_SEPARATOR */
-
-#ifdef NEED_FCHMOD_PROTO
-extern int fchmod(int fildes, mode_t mode);
-#endif
-
-static void
-usage(void)
-{
- fprintf(stderr,
- "usage: %s [-C cwd] [-L linkprefix] [-m mode] [-o owner] [-g group]\n"
- " %*s [-DdltR] file [file ...] directory\n",
- program, (int) strlen(program), "");
- exit(2);
-}
-
-static int
-mkdirs(char *path, mode_t mode)
-{
- char *cp;
- struct stat sb;
- int res;
- int l;
-
- /* strip trailing "/." */
- l = strlen(path);
- if(l > 1 && path[l - 1] == '.' && path[l - 2] == '/')
- path[l - 2] = 0;
-
- while (*path == '/' && path[1] == '/')
- path++;
- for (cp = strrchr(path, '/'); cp && cp != path && *(cp - 1) == '/'; cp--);
- if (cp && cp != path) {
- *cp = '\0';
- if ((lstat(path, &sb) < 0 || !S_ISDIR(sb.st_mode)) &&
- mkdirs(path, mode) < 0) {
- return -1;
- }
- *cp = '/';
- }
-
- res = mkdir(path, mode);
- if ((res != 0) && (errno == EEXIST))
- return 0;
- else
- return res;
-}
-
-static uid_t
-touid(char *owner)
-{
- struct passwd *pw;
- uid_t uid;
- char *cp;
-
- pw = getpwnam(owner);
- if (pw)
- return pw->pw_uid;
- uid = strtol(owner, &cp, 0);
- if (uid == 0 && cp == owner)
- fail("cannot find uid for %s", owner);
- return uid;
-}
-
-static gid_t
-togid(char *group)
-{
- struct group *gr;
- gid_t gid;
- char *cp;
-
- gr = getgrnam(group);
- if (gr)
- return gr->gr_gid;
- gid = strtol(group, &cp, 0);
- if (gid == 0 && cp == group)
- fail("cannot find gid for %s", group);
- return gid;
-}
-
-static void
-copyfile( char *name, char *toname, mode_t mode, char *group, char *owner,
- int dotimes, uid_t uid, gid_t gid )
-{
- int fromfd, tofd = -1, cc, wc, exists;
- char buf[BUFSIZ], *bp;
- struct stat sb, tosb;
- struct utimbuf utb;
-
- exists = (lstat(toname, &tosb) == 0);
-
- fromfd = open(name, O_RDONLY);
- if (fromfd < 0 || fstat(fromfd, &sb) < 0)
- fail("cannot access %s", name);
- if (exists) {
- if (S_ISREG(tosb.st_mode)) {
- /* See if we can open it. This is more reliable than 'access'. */
- tofd = open(toname, O_CREAT | O_WRONLY, 0666);
- }
- if (tofd < 0) {
- (void) (S_ISDIR(tosb.st_mode) ? rmdir : unlink)(toname);
- }
- }
- if (tofd < 0) {
- tofd = open(toname, O_CREAT | O_WRONLY, 0666);
- if (tofd < 0)
- fail("cannot create %s", toname);
- }
-
- bp = buf;
- while ((cc = read(fromfd, bp, sizeof buf)) > 0)
- {
- while ((wc = write(tofd, bp, (unsigned int)cc)) > 0)
- {
- if ((cc -= wc) == 0)
- break;
- bp += wc;
- }
- if (wc < 0)
- fail("cannot write to %s", toname);
- }
- if (cc < 0)
- fail("cannot read from %s", name);
-
- if (ftruncate(tofd, sb.st_size) < 0)
- fail("cannot truncate %s", toname);
-#if !defined(VMS)
- if (dotimes)
- {
- utb.actime = sb.st_atime;
- utb.modtime = sb.st_mtime;
- if (utime(toname, &utb) < 0)
- fail("cannot set times of %s", toname);
- }
-#ifdef HAVE_FCHMOD
- if (fchmod(tofd, mode) < 0)
-#else
- if (chmod(toname, mode) < 0)
-#endif
- fail("cannot change mode of %s", toname);
-#endif
- if ((owner || group) && fchown(tofd, uid, gid) < 0)
- fail("cannot change owner of %s", toname);
-
- /* Must check for delayed (NFS) write errors on close. */
- if (close(tofd) < 0)
- fail("cannot write to %s", toname);
- close(fromfd);
-#if defined(VMS)
- if (chmod(toname, (mode & (S_IREAD | S_IWRITE))) < 0)
- fail("cannot change mode of %s", toname);
- if (dotimes)
- {
- utb.actime = sb.st_atime;
- utb.modtime = sb.st_mtime;
- if (utime(toname, &utb) < 0)
- fail("cannot set times of %s", toname);
- }
-#endif
-}
-
-static void
-copydir( char *from, char *to, mode_t mode, char *group, char *owner,
- int dotimes, uid_t uid, gid_t gid)
-{
- int i;
- DIR *dir;
- struct dirent *ep;
- struct stat sb;
- char *base, *destdir, *direntry, *destentry;
-
- base = xbasename(from);
-
- /* create destination directory */
- destdir = xmalloc((unsigned int)(strlen(to) + 1 + strlen(base) + 1));
- sprintf(destdir, "%s%s%s", to, _DIRECTORY_SEPARATOR, base);
- if (mkdirs(destdir, mode) != 0) {
- fail("cannot make directory %s\n", destdir);
- free(destdir);
- return;
- }
-
- if (!(dir = opendir(from))) {
- fail("cannot open directory %s\n", from);
- free(destdir);
- return;
- }
-
- direntry = xmalloc((unsigned int)PATH_MAX);
- destentry = xmalloc((unsigned int)PATH_MAX);
-
- while ((ep = readdir(dir)))
- {
- if (strcmp(ep->d_name, ".") == 0 || strcmp(ep->d_name, "..") == 0)
- continue;
-
- sprintf(direntry, "%s/%s", from, ep->d_name);
- sprintf(destentry, "%s%s%s", destdir, _DIRECTORY_SEPARATOR, ep->d_name);
-
- if (stat(direntry, &sb) == 0 && S_ISDIR(sb.st_mode))
- copydir( direntry, destdir, mode, group, owner, dotimes, uid, gid );
- else
- copyfile( direntry, destentry, mode, group, owner, dotimes, uid, gid );
- }
-
- free(destdir);
- free(direntry);
- free(destentry);
- closedir(dir);
-}
-
-int
-main(int argc, char **argv)
-{
- int onlydir, dodir, dolink, dorelsymlink, dotimes, opt, len, lplen, tdlen, bnlen, exists, fromfd, tofd, cc, wc;
- mode_t mode = 0755;
- char *linkprefix, *owner, *group, *cp, *cwd, *todir, *toname, *name, *base, *linkname, *bp, buf[BUFSIZ];
- uid_t uid;
- gid_t gid;
- struct stat sb, tosb, fromsb;
- struct utimbuf utb;
-
- program = argv[0];
- cwd = linkname = linkprefix = owner = group = 0;
- onlydir = dodir = dolink = dorelsymlink = dotimes = lplen = 0;
-
- while ((opt = getopt(argc, argv, "C:DdlL:Rm:o:g:t")) != EOF) {
- switch (opt) {
- case 'C':
- cwd = optarg;
- break;
- case 'D':
- onlydir = 1;
- break;
- case 'd':
- dodir = 1;
- break;
- case 'L':
- linkprefix = optarg;
- lplen = strlen(linkprefix);
- dolink = 1;
- break;
- case 'R':
- dolink = dorelsymlink = 1;
- break;
- case 'm':
- mode = strtoul(optarg, &cp, 8);
- if (mode == 0 && cp == optarg)
- usage();
- break;
- case 'o':
- owner = optarg;
- break;
- case 'g':
- group = optarg;
- break;
- case 't':
- dotimes = 1;
- break;
- default:
- usage();
- }
- }
-
- argc -= optind;
- argv += optind;
- if (argc < 2 - onlydir)
- usage();
-
- todir = argv[argc-1];
- if ((stat(todir, &sb) < 0 || !S_ISDIR(sb.st_mode)) &&
- mkdirs(todir, 0777) < 0) {
- fail("cannot make directory %s", todir);
- }
- if (onlydir)
- return 0;
-
- if (!cwd) {
-#ifndef NEEDS_GETCWD
-#ifndef GETCWD_CANT_MALLOC
- cwd = getcwd(0, PATH_MAX);
-#else
- cwd = malloc(PATH_MAX + 1);
- cwd = getcwd(cwd, PATH_MAX);
-#endif
-#else
- cwd = malloc(PATH_MAX + 1);
- cwd = getwd(cwd);
-#endif
- }
-
- xchdir(todir);
-#ifndef NEEDS_GETCWD
-#ifndef GETCWD_CANT_MALLOC
- todir = getcwd(0, PATH_MAX);
-#else
- todir = malloc(PATH_MAX + 1);
- todir = getcwd(todir, PATH_MAX);
-#endif
-#else
- todir = malloc(PATH_MAX + 1);
- todir = getwd(todir);
-#endif
- tdlen = strlen(todir);
- xchdir(cwd);
- tdlen = strlen(todir);
-
- uid = owner ? touid(owner) : (uid_t)(-1);
- gid = group ? togid(group) : (gid_t)(-1);
-
- while (--argc > 0) {
- name = *argv++;
- len = strlen(name);
- base = xbasename(name);
- bnlen = strlen(base);
- toname = xmalloc((unsigned int)(tdlen + 1 + bnlen + 1));
- sprintf(toname, "%s%s%s", todir, _DIRECTORY_SEPARATOR, base);
- exists = (lstat(toname, &tosb) == 0);
-
- if (dodir) {
- /* -d means create a directory, always */
- if (exists && !S_ISDIR(tosb.st_mode)) {
- (void) unlink(toname);
- exists = 0;
- }
- if (!exists && mkdir(toname, mode) < 0)
- fail("cannot make directory %s", toname);
- if ((owner || group) && chown(toname, uid, gid) < 0)
- fail("cannot change owner of %s", toname);
- } else if (dolink) {
- if (access(name, R_OK) != 0) {
- fail("cannot access %s", name);
- }
- if (*name == '/') {
- /* source is absolute pathname, link to it directly */
- linkname = 0;
- } else {
- if (linkprefix) {
- /* -L prefixes names with a $cwd arg. */
- len += lplen + 1;
- linkname = xmalloc((unsigned int)(len + 1));
- sprintf(linkname, "%s/%s", linkprefix, name);
- } else if (dorelsymlink) {
- /* Symlink the relative path from todir to source name. */
- linkname = xmalloc(PATH_MAX);
-
- if (*todir == '/') {
- /* todir is absolute: skip over common prefix. */
- lplen = relatepaths(todir, cwd, linkname);
- strcpy(linkname + lplen, name);
- } else {
- /* todir is named by a relative path: reverse it. */
- reversepath(todir, name, len, linkname);
- xchdir(cwd);
- }
-
- len = strlen(linkname);
- }
- name = linkname;
- }
-
- /* Check for a pre-existing symlink with identical content. */
- if ((exists && (!S_ISLNK(tosb.st_mode) ||
- readlink(toname, buf, sizeof buf) != len ||
- strncmp(buf, name, (unsigned int)len) != 0)) ||
- ((stat(name, &fromsb) == 0) &&
- (fromsb.st_mtime > tosb.st_mtime))) {
- (void) (S_ISDIR(tosb.st_mode) ? rmdir : unlink)(toname);
- exists = 0;
- }
- if (!exists && symlink(name, toname) < 0)
- fail("cannot make symbolic link %s", toname);
-#ifdef HAVE_LCHOWN
- if ((owner || group) && lchown(toname, uid, gid) < 0)
- fail("cannot change owner of %s", toname);
-#endif
-
- if (linkname) {
- free(linkname);
- linkname = 0;
- }
- } else {
- /* Copy from name to toname, which might be the same file. */
- if( stat(name, &sb) == 0 && S_IFDIR & sb.st_mode )
- {
- /* then is directory: must explicitly create destination dir */
- /* and manually copy files over */
- copydir( name, todir, mode, group, owner, dotimes, uid, gid );
- }
- else
- {
- copyfile(name, toname, mode, group, owner, dotimes, uid, gid);
- }
- }
-
- free(toname);
- }
-
- free(cwd);
- free(todir);
- return 0;
-}
diff --git a/js/src/config/nsinstall.py b/js/src/config/nsinstall.py
deleted file mode 100644
index ae6132752bb..00000000000
--- a/js/src/config/nsinstall.py
+++ /dev/null
@@ -1,180 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-# This is a partial python port of nsinstall.
-# It's intended to be used when there's no natively compiled nsinstall
-# available, and doesn't intend to be fully equivalent.
-# Its major use is for l10n repackaging on systems that don't have
-# a full build environment set up.
-# The basic limitation is, it doesn't even try to link and ignores
-# all related options.
-from __future__ import print_function
-from optparse import OptionParser
-import os
-import os.path
-import sys
-import shutil
-import stat
-
-def _nsinstall_internal(argv):
- usage = "usage: %prog [options] arg1 [arg2 ...] target-directory"
- p = OptionParser(usage=usage)
-
- p.add_option('-D', action="store_true",
- help="Create a single directory only")
- p.add_option('-t', action="store_true",
- help="Preserve time stamp")
- p.add_option('-m', action="store",
- help="Set mode", metavar="mode")
- p.add_option('-d', action="store_true",
- help="Create directories in target")
- p.add_option('-R', action="store_true",
- help="Use relative symbolic links (ignored)")
- p.add_option('-L', action="store", metavar="linkprefix",
- help="Link prefix (ignored)")
- p.add_option('-X', action="append", metavar="file",
- help="Ignore a file when installing a directory recursively.")
-
- # The remaining arguments are not used in our tree, thus they're not
-# implemented.
- def BadArg(option, opt, value, parser):
- parser.error('option not supported: {0}'.format(opt))
-
- p.add_option('-C', action="callback", metavar="CWD",
- callback=BadArg,
- help="NOT SUPPORTED")
- p.add_option('-o', action="callback", callback=BadArg,
- help="Set owner (NOT SUPPORTED)", metavar="owner")
- p.add_option('-g', action="callback", callback=BadArg,
- help="Set group (NOT SUPPORTED)", metavar="group")
-
- (options, args) = p.parse_args(argv)
-
- if options.m:
- # mode is specified
- try:
- options.m = int(options.m, 8)
- except:
- sys.stderr.write('nsinstall: {0} is not a valid mode\n'
- .format(options.m))
- return 1
-
- # just create one directory?
- def maybe_create_dir(dir, mode, try_again):
- dir = os.path.abspath(dir)
- if os.path.exists(dir):
- if not os.path.isdir(dir):
- print('nsinstall: {0} is not a directory'.format(dir), file=sys.stderr)
- return 1
- if mode:
- os.chmod(dir, mode)
- return 0
-
- try:
- if mode:
- os.makedirs(dir, mode)
- else:
- os.makedirs(dir)
- except Exception as e:
- # We might have hit EEXIST due to a race condition (see bug 463411) -- try again once
- if try_again:
- return maybe_create_dir(dir, mode, False)
- print("nsinstall: failed to create directory {0}: {1}".format(dir, e))
- return 1
- else:
- return 0
-
- if options.X:
- options.X = [os.path.abspath(p) for p in options.X]
-
- if options.D:
- return maybe_create_dir(args[0], options.m, True)
-
- # nsinstall arg1 [...] directory
- if len(args) < 2:
- p.error('not enough arguments')
-
- def copy_all_entries(entries, target):
- for e in entries:
- e = os.path.abspath(e)
- if options.X and e in options.X:
- continue
-
- dest = os.path.join(target, os.path.basename(e))
- dest = os.path.abspath(dest)
- handleTarget(e, dest)
- if options.m:
- os.chmod(dest, options.m)
-
- # set up handler
- if options.d:
- # we're supposed to create directories
- def handleTarget(srcpath, targetpath):
- # target directory was already created, just use mkdir
- os.mkdir(targetpath)
- else:
- # we're supposed to copy files
- def handleTarget(srcpath, targetpath):
- if os.path.isdir(srcpath):
- if not os.path.exists(targetpath):
- os.mkdir(targetpath)
- entries = [os.path.join(srcpath, e) for e in os.listdir(srcpath)]
- copy_all_entries(entries, targetpath)
- # options.t is not relevant for directories
- if options.m:
- os.chmod(targetpath, options.m)
- else:
- if os.path.exists(targetpath):
- # On Windows, read-only files can't be deleted
- os.chmod(targetpath, stat.S_IWUSR)
- os.remove(targetpath)
- if options.t:
- shutil.copy2(srcpath, targetpath)
- else:
- shutil.copy(srcpath, targetpath)
-
- # the last argument is the target directory
- target = args.pop()
- # ensure target directory (importantly, we do not apply a mode to the directory
- # because we want to copy files into it and the mode might be read-only)
- rv = maybe_create_dir(target, None, True)
- if rv != 0:
- return rv
-
- copy_all_entries(args, target)
- return 0
-
-# nsinstall as a native command is always UTF-8
-def nsinstall(argv):
- return _nsinstall_internal([unicode(arg, "utf-8") for arg in argv])
-
-if __name__ == '__main__':
-  # sys.argv corrupts characters outside the system code page on Windows,
-  # so use ctypes instead. This is also
- # useful because switching to Unicode strings makes python use the wide
- # Windows APIs, which is what we want here since the wide APIs normally do a
- # better job at handling long paths and such.
- if sys.platform == "win32":
- import ctypes
- from ctypes import wintypes
- GetCommandLine = ctypes.windll.kernel32.GetCommandLineW
- GetCommandLine.argtypes = []
- GetCommandLine.restype = wintypes.LPWSTR
-
- CommandLineToArgv = ctypes.windll.shell32.CommandLineToArgvW
- CommandLineToArgv.argtypes = [wintypes.LPWSTR, ctypes.POINTER(ctypes.c_int)]
- CommandLineToArgv.restype = ctypes.POINTER(wintypes.LPWSTR)
-
- argc = ctypes.c_int(0)
- argv_arr = CommandLineToArgv(GetCommandLine(), ctypes.byref(argc))
- # The first argv will be "python", the second will be the .py file
- argv = argv_arr[1:argc.value]
- else:
- # For consistency, do it on Unix as well
- if sys.stdin.encoding is not None:
- argv = [unicode(arg, sys.stdin.encoding) for arg in sys.argv]
- else:
- argv = [unicode(arg) for arg in sys.argv]
-
- sys.exit(_nsinstall_internal(argv[1:]))
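Reviewer note: because _nsinstall_internal takes its option list directly, the port above can also be driven from other Python code. Two hypothetical calls, using only options the parser above defines; the file and directory names are placeholders.

    from nsinstall import _nsinstall_internal   # assumes this file is importable as nsinstall.py

    # Equivalent of: nsinstall -D dist/include
    _nsinstall_internal([u'-D', u'dist/include'])

    # Equivalent of: nsinstall -t -m 644 foo.h bar.h dist/include
    _nsinstall_internal([u'-t', u'-m', u'644', u'foo.h', u'bar.h', u'dist/include'])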
diff --git a/js/src/config/pathsub.c b/js/src/config/pathsub.c
deleted file mode 100644
index 18a597b2d3b..00000000000
--- a/js/src/config/pathsub.c
+++ /dev/null
@@ -1,215 +0,0 @@
-/* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-/*
-** Pathname subroutines.
-**
-** Brendan Eich, 8/29/95
-*/
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include "pathsub.h"
-
-#ifdef USE_REENTRANT_LIBC
-#include
-#endif
-
-#ifdef SUNOS4
-#include "sunos4.h"
-#endif
-
-#ifndef D_INO
-#define D_INO d_ino
-#endif
-
-char *program;
-
-void
-fail(char *format, ...)
-{
- int error;
- va_list ap;
-
-#ifdef USE_REENTRANT_LIBC
- R_STRERROR_INIT_R();
-#endif
-
- error = errno;
- fprintf(stderr, "%s: ", program);
- va_start(ap, format);
- vfprintf(stderr, format, ap);
- va_end(ap);
- if (error) {
-
-#ifdef USE_REENTRANT_LIBC
- R_STRERROR_R(errno);
- fprintf(stderr, ": %s", r_strerror_r);
-#else
- fprintf(stderr, ": %s", strerror(errno));
-#endif
- }
-
- putc('\n', stderr);
- exit(1);
-}
-
-char *
-getcomponent(char *path, char *name)
-{
- if (*path == '\0')
- return 0;
- if (*path == '/') {
- *name++ = '/';
- } else {
- do {
- *name++ = *path++;
- } while (*path != '/' && *path != '\0');
- }
- *name = '\0';
- while (*path == '/')
- path++;
- return path;
-}
-
-#ifdef LAME_READDIR
-#include <sys/param.h>
-/*
-** The static buffer in Unixware's readdir is too small.
-*/
-struct dirent *readdir(DIR *d)
-{
- static struct dirent *buf = NULL;
-
- if(buf == NULL)
- buf = (struct dirent *) malloc(sizeof(struct dirent) + MAXPATHLEN);
- return(readdir_r(d, buf));
-}
-#endif
-
-char *
-ino2name(ino_t ino)
-{
- DIR *dp;
- struct dirent *ep;
- char *name;
-
- dp = opendir("..");
- if (!dp)
- fail("cannot read parent directory");
- for (;;) {
- if (!(ep = readdir(dp)))
- fail("cannot find current directory");
- if (ep->D_INO == ino)
- break;
- }
- name = xstrdup(ep->d_name);
- closedir(dp);
- return name;
-}
-
-void *
-xmalloc(size_t size)
-{
- void *p = malloc(size);
- if (!p)
- fail("cannot allocate %u bytes", size);
- return p;
-}
-
-char *
-xstrdup(char *s)
-{
- return strcpy(xmalloc(strlen(s) + 1), s);
-}
-
-char *
-xbasename(char *path)
-{
- char *cp;
-
- while ((cp = strrchr(path, '/')) && cp[1] == '\0')
- *cp = '\0';
- if (!cp) return path;
- return cp + 1;
-}
-
-void
-xchdir(char *dir)
-{
- if (chdir(dir) < 0)
- fail("cannot change directory to %s", dir);
-}
-
-int
-relatepaths(char *from, char *to, char *outpath)
-{
- char *cp, *cp2;
- int len;
- char buf[NAME_MAX];
-
- assert(*from == '/' && *to == '/');
- for (cp = to, cp2 = from; *cp == *cp2; cp++, cp2++)
- if (*cp == '\0')
- break;
- while (cp[-1] != '/')
- cp--, cp2--;
- if (cp - 1 == to) {
- /* closest common ancestor is /, so use full pathname */
- len = strlen(strcpy(outpath, to));
- if (outpath[len] != '/') {
- outpath[len++] = '/';
- outpath[len] = '\0';
- }
- } else {
- len = 0;
- while ((cp2 = getcomponent(cp2, buf)) != 0) {
- strcpy(outpath + len, "../");
- len += 3;
- }
- while ((cp = getcomponent(cp, buf)) != 0) {
- sprintf(outpath + len, "%s/", buf);
- len += strlen(outpath + len);
- }
- }
- return len;
-}
-
-void
-reversepath(char *inpath, char *name, int len, char *outpath)
-{
- char *cp, *cp2;
- char buf[NAME_MAX];
- struct stat sb;
-
- cp = strcpy(outpath + PATH_MAX - (len + 1), name);
- cp2 = inpath;
- while ((cp2 = getcomponent(cp2, buf)) != 0) {
- if (strcmp(buf, ".") == 0)
- continue;
- if (strcmp(buf, "..") == 0) {
- if (stat(".", &sb) < 0)
- fail("cannot stat current directory");
- name = ino2name(sb.st_ino);
- len = strlen(name);
- cp -= len + 1;
- strcpy(cp, name);
- cp[len] = '/';
- free(name);
- xchdir("..");
- } else {
- cp -= 3;
- strncpy(cp, "../", 3);
- xchdir(buf);
- }
- }
- strcpy(outpath, cp);
-}
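Reviewer note: relatepaths() above builds the ../-style path from one absolute directory to another by stripping the common prefix; reversepath() additionally resolves ".." components by stat()ing and scanning parent directories, which is why it chdir()s as it goes. For the first of the two, modern Python gets an equivalent answer from os.path.relpath; a tiny illustration with made-up paths:

    import os

    # Same spirit as relatepaths(from, to, outpath): both arguments are absolute directories.
    def relate_paths(from_dir, to_dir):
        return os.path.relpath(to_dir, from_dir) + "/"   # the C helper keeps a trailing slash so a name can be appended

    print(relate_paths("/builds/obj/dist/bin", "/builds/src/config"))
    # -> ../../../src/config/   (made-up example paths)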
diff --git a/js/src/config/pathsub.h b/js/src/config/pathsub.h
deleted file mode 100644
index adb46032d2d..00000000000
--- a/js/src/config/pathsub.h
+++ /dev/null
@@ -1,42 +0,0 @@
-/* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#ifndef pathsub_h___
-#define pathsub_h___
-/*
-** Pathname subroutines.
-**
-** Brendan Eich, 8/29/95
-*/
-#include
-#include
-
-#ifndef PATH_MAX
-#define PATH_MAX 1024
-#endif
-
-/*
- * Just prevent stupidity
- */
-#undef NAME_MAX
-#define NAME_MAX 256
-
-extern char *program;
-
-extern void fail(char *format, ...);
-extern char *getcomponent(char *path, char *name);
-extern char *ino2name(ino_t ino);
-extern void *xmalloc(size_t size);
-extern char *xstrdup(char *s);
-extern char *xbasename(char *path);
-extern void xchdir(char *dir);
-
-/* Relate absolute pathnames from and to returning the result in outpath. */
-extern int relatepaths(char *from, char *to, char *outpath);
-
-/* XXX changes current working directory -- caveat emptor */
-extern void reversepath(char *inpath, char *name, int len, char *outpath);
-
-#endif /* pathsub_h___ */
diff --git a/js/src/config/pythonpath.py b/js/src/config/pythonpath.py
deleted file mode 100644
index 03180661481..00000000000
--- a/js/src/config/pythonpath.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-"""
-Run a python script, adding extra directories to the python path.
-"""
-
-
-def main(args):
- def usage():
- print >>sys.stderr, "pythonpath.py -I directory script.py [args...]"
- sys.exit(150)
-
- paths = []
-
- while True:
- try:
- arg = args[0]
- except IndexError:
- usage()
-
- if arg == '-I':
- args.pop(0)
- try:
- path = args.pop(0)
- except IndexError:
- usage()
-
- paths.append(os.path.abspath(path))
- continue
-
- if arg.startswith('-I'):
- paths.append(os.path.abspath(args.pop(0)[2:]))
- continue
-
- break
-
- script = args[0]
-
- sys.path[0:0] = [os.path.abspath(os.path.dirname(script))] + paths
- sys.argv = args
- sys.argc = len(args)
-
- frozenglobals['__name__'] = '__main__'
- frozenglobals['__file__'] = script
-
- execfile(script, frozenglobals)
-
-# Freeze scope here ... why this makes things work I have no idea ...
-frozenglobals = globals()
-
-import sys, os
-
-if __name__ == '__main__':
- main(sys.argv[1:])
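Reviewer note: a quick illustration of driving the wrapper above from Python rather than the command line; the directory and script names are placeholders, and the named script must exist for execfile() to succeed.

    import pythonpath   # assumes this file is importable as pythonpath.py

    # '-I' entries are prepended to sys.path before the named script runs as __main__.
    pythonpath.main(['-I', 'other/python/libs', 'scripts/do_something.py', '--verbose'])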
diff --git a/js/src/config/rebuild_check.py b/js/src/config/rebuild_check.py
deleted file mode 100644
index a6c3dc87acd..00000000000
--- a/js/src/config/rebuild_check.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-import os
-import errno
-
-def mtime(path):
- try:
- return os.stat(path).st_mtime
- except OSError as e:
- if e.errno == errno.ENOENT:
- return -1
- raise
-
-def rebuild_check(args):
- target = args[0]
- deps = args[1:]
- t = mtime(target)
- if t < 0:
- print target
- return
-
- newer = []
- removed = []
- for dep in deps:
- deptime = mtime(dep)
- if deptime < 0:
- removed.append(dep)
- elif mtime(dep) > t:
- newer.append(dep)
-
- if newer and removed:
- print 'Rebuilding %s because %s changed and %s was removed' % (target, ', '.join(newer), ', '.join(removed))
- elif newer:
- print 'Rebuilding %s because %s changed' % (target, ', '.join(newer))
- elif removed:
- print 'Rebuilding %s because %s was removed' % (target, ', '.join(removed))
- else:
- print 'Rebuilding %s for an unknown reason' % target
-
-if __name__ == '__main__':
- import sys
- rebuild_check(sys.argv[1:])
diff --git a/js/src/config/recurse.mk b/js/src/config/recurse.mk
deleted file mode 100644
index 07dfa9d51e2..00000000000
--- a/js/src/config/recurse.mk
+++ /dev/null
@@ -1,243 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this file,
-# You can obtain one at http://mozilla.org/MPL/2.0/.
-
-ifndef INCLUDED_RULES_MK
-include $(topsrcdir)/config/rules.mk
-endif
-
-# The traditional model of directory traversal with make is as follows:
-# make -C foo
-# Entering foo
-# make -C bar
-# Entering foo/bar
-# make -C baz
-# Entering foo/baz
-# make -C qux
-# Entering qux
-#
-# Pseudo derecurse transforms the above into:
-# make -C foo
-# make -C foo/bar
-# make -C foo/baz
-# make -C qux
-
-# MOZ_PSEUDO_DERECURSE can have values other than 1.
-ifeq (1_.,$(if $(MOZ_PSEUDO_DERECURSE),1)_$(DEPTH))
-
-include root.mk
-
-# Disable build status for mach in top directories without TIERS.
-# In practice this disables it when recursing under js/src, which confuses mach.
-ifndef TIERS
-BUILDSTATUS =
-endif
-
-# Main rules (export, compile, binaries, libs and tools) call recurse_* rules.
-# This wrapping is only really useful for build status.
-compile binaries libs export tools::
- $(call BUILDSTATUS,TIER_START $@ $($@_subtiers))
- +$(MAKE) recurse_$@
- $(call BUILDSTATUS,TIER_FINISH $@)
-
-# Carefully avoid $(eval) type of rule generation, which makes pymake slower
-# than necessary.
-# Get current tier and corresponding subtiers from the data in root.mk.
-CURRENT_TIER := $(filter $(foreach tier,compile binaries libs export tools,recurse_$(tier) $(tier)-deps),$(MAKECMDGOALS))
-ifneq (,$(filter-out 0 1,$(words $(CURRENT_TIER))))
-$(error $(CURRENT_TIER) not supported on the same make command line)
-endif
-CURRENT_TIER := $(subst recurse_,,$(CURRENT_TIER:-deps=))
-CURRENT_SUBTIERS := $($(CURRENT_TIER)_subtiers)
-
-# The rules here are doing directory traversal, so we don't want further
-# recursion to happen when running make -C subdir $tier. But some make files
-# further call make -C something else, and sometimes expect recursion to
-# happen in that case (see browser/metro/locales/Makefile.in for example).
-# Conveniently, every invocation of make increases MAKELEVEL, so only stop
-# recursion from happening at current MAKELEVEL + 1.
-ifdef CURRENT_TIER
-ifeq (0,$(MAKELEVEL))
-export NO_RECURSE_MAKELEVEL=1
-else
-export NO_RECURSE_MAKELEVEL=$(word $(MAKELEVEL),2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20)
-endif
-endif
-
-# Get all directories traversed for all subtiers in the current tier, or use
-# directly the $(*_dirs) variables available in root.mk when there is no
-# TIERS (like for js/src).
-CURRENT_DIRS := $(or $($(CURRENT_TIER)_dirs),$(foreach subtier,$(CURRENT_SUBTIERS),$($(CURRENT_TIER)_subtier_$(subtier))))
-
-ifneq (,$(filter binaries libs,$(CURRENT_TIER)))
-WANT_STAMPS = 1
-STAMP_TOUCH = $(TOUCH) $(@D)/binaries
-endif
-
-# Subtier delimiter rules
-$(addprefix subtiers/,$(addsuffix _start/$(CURRENT_TIER),$(CURRENT_SUBTIERS))): subtiers/%_start/$(CURRENT_TIER): $(if $(WANT_STAMPS),$(call mkdir_deps,subtiers/%_start))
- $(call BUILDSTATUS,SUBTIER_START $(CURRENT_TIER) $* $(if $(BUG_915535_FIXED),$($(CURRENT_TIER)_subtier_$*)))
- @$(STAMP_TOUCH)
-
-$(addprefix subtiers/,$(addsuffix _finish/$(CURRENT_TIER),$(CURRENT_SUBTIERS))): subtiers/%_finish/$(CURRENT_TIER): $(if $(WANT_STAMPS),$(call mkdir_deps,subtiers/%_finish))
- $(call BUILDSTATUS,SUBTIER_FINISH $(CURRENT_TIER) $*)
- @$(STAMP_TOUCH)
-
-$(addprefix subtiers/,$(addsuffix /$(CURRENT_TIER),$(CURRENT_SUBTIERS))): %/$(CURRENT_TIER): $(if $(WANT_STAMPS),$(call mkdir_deps,%))
- @$(STAMP_TOUCH)
-
-GARBAGE_DIRS += subtiers
-
-# Recursion rule for all directories traversed for all subtiers in the
-# current tier.
-# root.mk defines subtier_of_* variables, that map a normalized subdir path to
-# a subtier name (e.g. subtier_of_memory_jemalloc = base)
-$(addsuffix /$(CURRENT_TIER),$(CURRENT_DIRS)): %/$(CURRENT_TIER):
-ifdef BUG_915535_FIXED
- $(call BUILDSTATUS,TIERDIR_START $(CURRENT_TIER) $(subtier_of_$(subst /,_,$*)) $*)
-endif
- +@$(MAKE) -C $* $(if $(filter $*,$(tier_$(subtier_of_$(subst /,_,$*))_staticdirs)),,$(CURRENT_TIER))
-# Ensure existing stamps are up-to-date, but don't create one if submake didn't create one.
- $(if $(wildcard $@),@$(STAMP_TOUCH))
-ifdef BUG_915535_FIXED
- $(call BUILDSTATUS,TIERDIR_FINISH $(CURRENT_TIER) $(subtier_of_$(subst /,_,$*)) $*)
-endif
-
-# Dummy rules for possibly nonexistent dependencies for the above tier targets
-$(addsuffix /Makefile,$(CURRENT_DIRS)) $(addsuffix /backend.mk,$(CURRENT_DIRS)):
-
-# The export tier requires nsinstall, which is built from config. So every
-# subdirectory traversal needs to happen after traversing config.
-ifeq ($(CURRENT_TIER),export)
-$(addsuffix /$(CURRENT_TIER),$(filter-out config,$(CURRENT_DIRS))): config/$(CURRENT_TIER)
-endif
-
-ifdef COMPILE_ENVIRONMENT
-ifneq (,$(filter libs binaries,$(CURRENT_TIER)))
-# When doing a "libs" build, target_libs.mk ensures the interesting dependency data
-# is available in the "binaries" stamp. Once recursion is done, aggregate all that
-# dependency info so that stamps depend on relevant files and relevant other stamps.
-# When doing a "binaries" build, the aggregate dependency file and those stamps are
-# used and make it possible to skip recursing into directories where changes will not
-# require a rebuild. A few directories, however, are still traversed all the time, mostly
-# the gyp-managed ones and js/src.
-# A few things that are not traversed by a "binaries" build, but should be in an ideal
-# world, are nspr, nss, icu and ffi.
-recurse_$(CURRENT_TIER):
- @$(MAKE) binaries-deps
-
-# Creating binaries-deps.mk directly would make us build it twice: once when beginning
-# the build because of the include, and once at the end because of the stamps.
-binaries-deps: $(addsuffix /binaries,$(CURRENT_DIRS))
- @$(call py_action,link_deps,-o $@.mk --group-by-depfile --topsrcdir $(topsrcdir) --topobjdir $(DEPTH) --dist $(DIST) --guard $(addprefix ',$(addsuffix ',$^)))
- @$(TOUCH) $@
-
-ifeq (recurse_binaries,$(MAKECMDGOALS))
-$(call include_deps,binaries-deps.mk)
-endif
-
-endif
-
-DIST_GARBAGE += binaries-deps.mk binaries-deps
-
-endif
-
-else
-
-# Don't recurse if MAKELEVEL is NO_RECURSE_MAKELEVEL as defined above, but
-# still recurse for externally managed make files (gyp-generated ones).
-ifeq ($(EXTERNALLY_MANAGED_MAKE_FILE)_$(NO_RECURSE_MAKELEVEL),_$(MAKELEVEL))
-
-compile binaries libs export tools::
-
-else
-#########################
-# Tier traversal handling
-#########################
-
-ifdef TIERS
-
-libs export tools::
- $(call BUILDSTATUS,TIER_START $@ $(filter-out $(if $(filter export,$@),,precompile),$(TIERS)))
- $(foreach tier,$(TIERS), $(if $(filter-out libs_precompile tools_precompile,$@_$(tier)), \
- $(call BUILDSTATUS,SUBTIER_START $@ $(tier) $(if $(filter libs,$@),$(tier_$(tier)_staticdirs)) $(tier_$(tier)_dirs)) \
- $(if $(filter libs,$@),$(foreach dir, $(tier_$(tier)_staticdirs), $(call TIER_DIR_SUBMAKE,$@,$(tier),$(dir),,1))) \
- $(foreach dir, $(tier_$(tier)_dirs), $(call TIER_DIR_SUBMAKE,$@,$(tier),$(dir),$@)) \
- $(call BUILDSTATUS,SUBTIER_FINISH $@ $(tier))))
- $(call BUILDSTATUS,TIER_FINISH $@)
-
-else
-
-define CREATE_SUBTIER_TRAVERSAL_RULE
-PARALLEL_DIRS_$(1) = $$(addsuffix _$(1),$$(PARALLEL_DIRS))
-
-.PHONY: $(1) $$(PARALLEL_DIRS_$(1))
-
-ifdef PARALLEL_DIRS
-$$(PARALLEL_DIRS_$(1)): %_$(1): %/Makefile
- +@$$(call SUBMAKE,$(1),$$*)
-endif
-
-$(1):: $$(SUBMAKEFILES)
-ifdef PARALLEL_DIRS
- +@$(MAKE) $$(PARALLEL_DIRS_$(1))
-endif
- $$(LOOP_OVER_DIRS)
-
-endef
-
-$(foreach subtier,export compile binaries libs tools,$(eval $(call CREATE_SUBTIER_TRAVERSAL_RULE,$(subtier))))
-
-tools export:: $(SUBMAKEFILES)
- $(LOOP_OVER_TOOL_DIRS)
-
-endif # ifdef TIERS
-
-endif # ifeq ($(EXTERNALLY_MANAGED_MAKE_FILE)_$(NO_RECURSE_MAKELEVEL),_$(MAKELEVEL))
-
-endif # ifeq (1_.,$(MOZ_PSEUDO_DERECURSE)_$(DEPTH))
-
-ifdef MOZ_PSEUDO_DERECURSE
-ifdef EXTERNALLY_MANAGED_MAKE_FILE
-# gyp-managed directories
-recurse_targets := $(addsuffix /binaries,$(DIRS) $(PARALLEL_DIRS))
-else
-ifeq (.,$(DEPTH))
-# top-level directories
-recurse_targets := $(addsuffix /binaries,$(binaries_dirs))
-ifdef recurse_targets
-# only js/src has binaries_dirs, and we want to adjust paths for it.
-want_abspaths = 1
-endif
-endif
-endif
-
-ifdef COMPILE_ENVIRONMENT
-
-# Aggregate all dependency files relevant to a binaries build except in
-# the mozilla top-level directory.
-ifneq (_.,$(recurse_targets)_$(DEPTH))
-ALL_DEP_FILES := \
- $(BINARIES_PP) \
- $(addsuffix .pp,$(addprefix $(MDDEPDIR)/,$(sort \
- $(TARGETS) \
- $(filter-out $(SOBJS) $(ASOBJS) $(EXCLUDED_OBJS),$(OBJ_TARGETS)) \
- ))) \
- $(recurse_targets) \
- $(NULL)
-endif
-
-binaries libs:: $(TARGETS) $(BINARIES_PP)
-ifneq (_.,$(recurse_targets)_$(DEPTH))
- @$(if $(or $(recurse_targets),$^),$(call py_action,link_deps,-o binaries --group-all $(if $(want_abspaths),--abspaths )--topsrcdir $(topsrcdir) --topobjdir $(DEPTH) --dist $(DIST) $(ALL_DEP_FILES)))
-endif
-
-endif
-
-endif # ifdef MOZ_PSEUDO_DERECURSE
-
-recurse:
- @$(RECURSED_COMMAND)
- $(LOOP_OVER_PARALLEL_DIRS)
- $(LOOP_OVER_DIRS)
- $(LOOP_OVER_TOOL_DIRS)
diff --git a/js/src/config/rules.mk b/js/src/config/rules.mk
deleted file mode 100644
index a9887d9c5eb..00000000000
--- a/js/src/config/rules.mk
+++ /dev/null
@@ -1,1703 +0,0 @@
-# -*- makefile -*-
-# vim:set ts=8 sw=8 sts=8 noet:
-#
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this file,
-# You can obtain one at http://mozilla.org/MPL/2.0/.
-#
-
-ifndef topsrcdir
-$(error topsrcdir was not set)
-endif
-
-# Define an include-at-most-once flag
-ifdef INCLUDED_RULES_MK
-$(error Do not include rules.mk twice!)
-endif
-INCLUDED_RULES_MK = 1
-
-# Make sure that anything that needs to be defined in moz.build wasn't
-# overwritten.
-_eval_for_side_effects := $(CHECK_MOZBUILD_VARIABLES)
-
-ifndef MOZILLA_DIR
-MOZILLA_DIR = $(topsrcdir)
-endif
-
-ifndef INCLUDED_CONFIG_MK
-include $(topsrcdir)/config/config.mk
-endif
-
-ifndef INCLUDED_VERSION_MK
-include $(topsrcdir)/config/version.mk
-endif
-
-USE_AUTOTARGETS_MK = 1
-include $(topsrcdir)/config/makefiles/makeutils.mk
-
-# Only build with Pymake (not GNU make) on Windows.
-ifeq ($(HOST_OS_ARCH),WINNT)
-ifndef L10NBASEDIR
-ifndef .PYMAKE
-$(error Pymake is required to build on Windows. Run |./mach build| to \
-automatically use pymake or invoke pymake directly via \
-|python build/pymake/make.py|.)
-endif
-endif
-endif
-
-ifdef REBUILD_CHECK
-ifdef .PYMAKE
-REPORT_BUILD = @%rebuild_check rebuild_check $@ $^
-else
-REPORT_BUILD = $(info $(shell $(PYTHON) $(MOZILLA_DIR)/config/rebuild_check.py $@ $^))
-endif
-else
-REPORT_BUILD = $(info $(notdir $@))
-endif
-
-ifeq ($(OS_ARCH),OS2)
-EXEC =
-else
-EXEC = exec
-endif
-
-# Don't copy xulrunner files at install time, when using system xulrunner
-ifdef SYSTEM_LIBXUL
- SKIP_COPY_XULRUNNER=1
-endif
-
-# ELOG prints out the failed command when building silently (gmake -s). Pymake
-# prints out failed commands anyway, so ELOG just makes things worse by
-# forcing shell invocations.
-ifndef .PYMAKE
-ifneq (,$(findstring s, $(filter-out --%, $(MAKEFLAGS))))
- ELOG := $(EXEC) sh $(BUILD_TOOLS)/print-failed-commands.sh
-else
- ELOG :=
-endif # -s
-else
- ELOG :=
-endif # ifndef .PYMAKE
-
-_VPATH_SRCS = $(abspath $<)
-
-################################################################################
-# Testing frameworks support
-################################################################################
-
-testxpcobjdir = $(DEPTH)/_tests/xpcshell
-
-ifdef ENABLE_TESTS
-
-# Add test directories to the regular directories list. TEST_DIRS should
-# arguably have the same status as TOOL_DIRS and other *_DIRS variables. It is
-# coded this way until Makefiles stop using the "ifdef ENABLE_TESTS; DIRS +="
-# convention.
-#
-# The current developer workflow expects tests to be updated when processing
-# the default target. If we ever change this implementation, the behavior
-# should be preserved or the change should be widely communicated. A
-# consequence of not processing test dir targets during the default target is
-# that changes to tests may not be picked up, and code could appear to pass
-# locally against out-of-date test code.
-DIRS += $(TEST_DIRS)
-
-ifndef INCLUDED_TESTS_MOCHITEST_MK #{
- include $(topsrcdir)/config/makefiles/mochitest.mk
-endif #}
-
-ifdef CPP_UNIT_TESTS
-ifdef COMPILE_ENVIRONMENT
-
-# Compile the tests to $(DIST)/bin. Make lots of niceties available by default
-# through TestHarness.h, by modifying the list of includes and the libs against
-# which stuff links.
-CPPSRCS += $(CPP_UNIT_TESTS)
-CPP_UNIT_TEST_BINS := $(CPP_UNIT_TESTS:.cpp=$(BIN_SUFFIX))
-SIMPLE_PROGRAMS += $(CPP_UNIT_TEST_BINS)
-INCLUDES += -I$(DIST)/include/testing
-LIBS += $(XPCOM_GLUE_LDOPTS) $(NSPR_LIBS)
-
-ifndef MOZ_PROFILE_GENERATE
-libs:: $(CPP_UNIT_TEST_BINS) $(call mkdir_deps,$(DIST)/cppunittests)
- $(NSINSTALL) $(CPP_UNIT_TEST_BINS) $(DIST)/cppunittests
-endif
-
-cppunittests-remote: DM_TRANS?=adb
-cppunittests-remote:
- @if [ '${TEST_DEVICE}' != '' -o '$(DM_TRANS)' = 'adb' ]; then \
- $(PYTHON) -u $(topsrcdir)/testing/remotecppunittests.py \
- --xre-path=$(DEPTH)/dist/bin \
- --localLib=$(DEPTH)/dist/$(MOZ_APP_NAME) \
- --dm_trans=$(DM_TRANS) \
- --deviceIP=${TEST_DEVICE} \
- $(subst .cpp,$(BIN_SUFFIX),$(CPP_UNIT_TESTS)) $(EXTRA_TEST_ARGS); \
- else \
- echo 'please prepare your host with environment variables for TEST_DEVICE'; \
- fi
-
-endif # COMPILE_ENVIRONMENT
-endif # CPP_UNIT_TESTS
-
-.PHONY: check
-
-ifdef PYTHON_UNIT_TESTS
-
-RUN_PYTHON_UNIT_TESTS := $(addsuffix -run,$(PYTHON_UNIT_TESTS))
-
-.PHONY: $(RUN_PYTHON_UNIT_TESTS)
-
-check:: $(RUN_PYTHON_UNIT_TESTS)
-
-$(RUN_PYTHON_UNIT_TESTS): %-run: %
- @PYTHONDONTWRITEBYTECODE=1 $(PYTHON) $<
-
-endif # PYTHON_UNIT_TESTS
-
-endif # ENABLE_TESTS
-
-
-#
-# Library rules
-#
-# If FORCE_STATIC_LIB is set, build a static library.
-# Otherwise, build a shared library.
-#
-
-ifndef LIBRARY
-ifdef STATIC_LIBRARY_NAME
-REAL_LIBRARY := $(LIB_PREFIX)$(STATIC_LIBRARY_NAME).$(LIB_SUFFIX)
-# Only build the actual library if it is installed in DIST/lib or the SDK
-ifeq (,$(SDK_LIBRARY)$(DIST_INSTALL)$(NO_EXPAND_LIBS))
-LIBRARY := $(REAL_LIBRARY).$(LIBS_DESC_SUFFIX)
-else
-LIBRARY := $(REAL_LIBRARY) $(REAL_LIBRARY).$(LIBS_DESC_SUFFIX)
-endif
-endif # STATIC_LIBRARY_NAME
-endif # LIBRARY
-
-ifndef HOST_LIBRARY
-ifdef HOST_LIBRARY_NAME
-HOST_LIBRARY := $(LIB_PREFIX)$(HOST_LIBRARY_NAME).$(LIB_SUFFIX)
-endif
-endif
-
-ifdef LIBRARY
-ifdef FORCE_SHARED_LIB
-ifdef MKSHLIB
-
-ifdef LIB_IS_C_ONLY
-MKSHLIB = $(MKCSHLIB)
-endif
-
-ifneq (,$(filter OS2 WINNT,$(OS_ARCH)))
-IMPORT_LIBRARY := $(LIB_PREFIX)$(SHARED_LIBRARY_NAME).$(IMPORT_LIB_SUFFIX)
-endif
-
-ifdef MAKE_FRAMEWORK
-SHARED_LIBRARY := $(SHARED_LIBRARY_NAME)
-else
-SHARED_LIBRARY := $(DLL_PREFIX)$(SHARED_LIBRARY_NAME)$(DLL_SUFFIX)
-endif
-
-ifeq ($(OS_ARCH),OS2)
-DEF_FILE := $(SHARED_LIBRARY:.dll=.def)
-endif
-
-EMBED_MANIFEST_AT=2
-
-endif # MKSHLIB
-endif # FORCE_SHARED_LIB
-endif # LIBRARY
-
-ifdef FORCE_STATIC_LIB
-ifndef FORCE_SHARED_LIB
-SHARED_LIBRARY := $(NULL)
-DEF_FILE := $(NULL)
-IMPORT_LIBRARY := $(NULL)
-endif
-endif
-
-ifdef FORCE_SHARED_LIB
-ifndef FORCE_STATIC_LIB
-LIBRARY := $(NULL)
-endif
-endif
-
-ifeq ($(OS_ARCH),WINNT)
-ifndef GNU_CC
-
-#
-# Unless we're building SIMPLE_PROGRAMS, all C++ files share a PDB file per
-# directory. For parallel builds, this PDB file is shared and locked by
-# MSPDBSRV.EXE, starting with MSVC8 SP1. If you're using MSVC 7.1 or MSVC8
-# without SP1, don't do parallel builds.
-#
-# The final PDB for libraries and programs is created by the linker and uses
-# a different name from the single PDB file created by the compiler. See
-# bug 462740.
-#
-
-ifdef SIMPLE_PROGRAMS
-COMPILE_PDBFILE = $(basename $(@F)).pdb
-else
-COMPILE_PDBFILE = generated.pdb
-endif
-
-LINK_PDBFILE = $(basename $(@F)).pdb
-ifdef MOZ_DEBUG
-CODFILE=$(basename $(@F)).cod
-endif
-
-ifdef DEFFILE
-OS_LDFLAGS += -DEF:$(call normalizepath,$(DEFFILE))
-EXTRA_DEPS += $(DEFFILE)
-endif
-
-else #!GNU_CC
-
-ifdef DEFFILE
-OS_LDFLAGS += $(call normalizepath,$(DEFFILE))
-EXTRA_DEPS += $(DEFFILE)
-endif
-
-endif # !GNU_CC
-
-endif # WINNT
-
-ifeq ($(SOLARIS_SUNPRO_CXX),1)
-ifeq (86,$(findstring 86,$(OS_TEST)))
-OS_LDFLAGS += -M $(topsrcdir)/config/solaris_ia32.map
-endif # x86
-endif # Solaris Sun Studio C++
-
-ifeq ($(HOST_OS_ARCH),WINNT)
-HOST_PDBFILE=$(basename $(@F)).pdb
-endif
-
-# Don't build SIMPLE_PROGRAMS during the MOZ_PROFILE_GENERATE pass
-ifdef MOZ_PROFILE_GENERATE
-EXCLUDED_OBJS := $(SIMPLE_PROGRAMS:$(BIN_SUFFIX)=.$(OBJ_SUFFIX))
-SIMPLE_PROGRAMS :=
-endif
-
-ifdef COMPILE_ENVIRONMENT
-ifndef TARGETS
-TARGETS = $(LIBRARY) $(SHARED_LIBRARY) $(PROGRAM) $(SIMPLE_PROGRAMS) $(HOST_LIBRARY) $(HOST_PROGRAM) $(HOST_SIMPLE_PROGRAMS)
-endif
-
-COBJS = $(notdir $(CSRCS:.c=.$(OBJ_SUFFIX)))
-SOBJS = $(notdir $(SSRCS:.S=.$(OBJ_SUFFIX)))
-# CPPSRCS can have different extensions (eg: .cpp, .cc)
-CPPOBJS = $(notdir $(addsuffix .$(OBJ_SUFFIX),$(basename $(CPPSRCS))))
-CMOBJS = $(notdir $(CMSRCS:.m=.$(OBJ_SUFFIX)))
-CMMOBJS = $(notdir $(CMMSRCS:.mm=.$(OBJ_SUFFIX)))
-ASOBJS = $(notdir $(ASFILES:.$(ASM_SUFFIX)=.$(OBJ_SUFFIX)))
-ifndef OBJS
-_OBJS = $(COBJS) $(SOBJS) $(CPPOBJS) $(CMOBJS) $(CMMOBJS) $(ASOBJS)
-OBJS = $(strip $(_OBJS))
-endif
-
-HOST_COBJS = $(addprefix host_,$(notdir $(HOST_CSRCS:.c=.$(OBJ_SUFFIX))))
-# HOST_CPPOBJS can have different extensions (eg: .cpp, .cc)
-HOST_CPPOBJS = $(addprefix host_,$(notdir $(addsuffix .$(OBJ_SUFFIX),$(basename $(HOST_CPPSRCS)))))
-HOST_CMOBJS = $(addprefix host_,$(notdir $(HOST_CMSRCS:.m=.$(OBJ_SUFFIX))))
-HOST_CMMOBJS = $(addprefix host_,$(notdir $(HOST_CMMSRCS:.mm=.$(OBJ_SUFFIX))))
-ifndef HOST_OBJS
-_HOST_OBJS = $(HOST_COBJS) $(HOST_CPPOBJS) $(HOST_CMOBJS) $(HOST_CMMOBJS)
-HOST_OBJS = $(strip $(_HOST_OBJS))
-endif
-else
-LIBRARY :=
-SHARED_LIBRARY :=
-IMPORT_LIBRARY :=
-REAL_LIBRARY :=
-PROGRAM :=
-SIMPLE_PROGRAMS :=
-HOST_LIBRARY :=
-HOST_PROGRAM :=
-HOST_SIMPLE_PROGRAMS :=
-SDK_BINARY := $(filter %.py,$(SDK_BINARY))
-SDK_LIBRARY :=
-endif
-
-ALL_TRASH = \
- $(GARBAGE) $(TARGETS) $(OBJS) $(PROGOBJS) LOGS TAGS a.out \
- $(filter-out $(ASFILES),$(OBJS:.$(OBJ_SUFFIX)=.s)) $(OBJS:.$(OBJ_SUFFIX)=.ii) \
- $(OBJS:.$(OBJ_SUFFIX)=.i) $(OBJS:.$(OBJ_SUFFIX)=.i_o) \
- $(HOST_PROGOBJS) $(HOST_OBJS) $(IMPORT_LIBRARY) $(DEF_FILE)\
- $(EXE_DEF_FILE) so_locations _gen _stubs $(wildcard *.res) $(wildcard *.RES) \
- $(wildcard *.pdb) $(CODFILE) $(IMPORT_LIBRARY) \
- $(SHARED_LIBRARY:$(DLL_SUFFIX)=.exp) $(wildcard *.ilk) \
- $(PROGRAM:$(BIN_SUFFIX)=.exp) $(SIMPLE_PROGRAMS:$(BIN_SUFFIX)=.exp) \
- $(PROGRAM:$(BIN_SUFFIX)=.lib) $(SIMPLE_PROGRAMS:$(BIN_SUFFIX)=.lib) \
- $(SIMPLE_PROGRAMS:$(BIN_SUFFIX)=.$(OBJ_SUFFIX)) \
- $(wildcard gts_tmp_*) $(LIBRARY:%.a=.%.timestamp)
-ALL_TRASH_DIRS = \
- $(GARBAGE_DIRS) /no-such-file
-
-ifdef QTDIR
-GARBAGE += $(MOCSRCS)
-endif
-
-ifdef SIMPLE_PROGRAMS
-GARBAGE += $(SIMPLE_PROGRAMS:%=%.$(OBJ_SUFFIX))
-endif
-
-ifdef HOST_SIMPLE_PROGRAMS
-GARBAGE += $(HOST_SIMPLE_PROGRAMS:%=%.$(OBJ_SUFFIX))
-endif
-
-#
-# the Solaris WorkShop template repository cache. it occasionally can get
-# out of sync, so targets like clobber should kill it.
-#
-ifeq ($(SOLARIS_SUNPRO_CXX),1)
-GARBAGE_DIRS += SunWS_cache
-endif
-
-ifdef MOZ_UPDATE_XTERM
-# It's good not to have a newline at the end of the titlebar string because it
-# makes the make -s output easier to read. Echo -n does not work on all
-# platforms, but we can trick printf into doing it.
-UPDATE_TITLE = printf '\033]0;%s in %s\007' $(1) $(relativesrcdir)/$(2) ;
-endif
-
-ifdef MACH
-ifndef NO_BUILDSTATUS_MESSAGES
-define BUILDSTATUS
-@echo 'BUILDSTATUS $1'
-
-endef
-endif
-endif
-
-define SUBMAKE # $(call SUBMAKE,target,directory,static)
-+@$(UPDATE_TITLE)
-+$(MAKE) $(if $(2),-C $(2)) $(1)
-
-endef # The extra line is important here! don't delete it
-
-define TIER_DIR_SUBMAKE
-$(call BUILDSTATUS,TIERDIR_START $(1) $(2) $(3))
-$(call SUBMAKE,$(4),$(3),$(5))
-$(call BUILDSTATUS,TIERDIR_FINISH $(1) $(2) $(3))
-
-endef # This empty line is important.
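A small usage sketch (editorial; the directory names are placeholders, not taken from this patch): a rule can drive per-directory sub-makes through the SUBMAKE helper, which also updates the xterm title via UPDATE_TITLE when MOZ_UPDATE_XTERM is set.

# Hypothetical rule recursing into two subdirectories for the libs target:
libs::
	$(call SUBMAKE,libs,dom)
	$(call SUBMAKE,libs,toolkit)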
-
-ifneq (,$(strip $(DIRS)))
-LOOP_OVER_DIRS = \
- $(foreach dir,$(DIRS),$(call SUBMAKE,$@,$(dir)))
-endif
-
-# We only use this for the makefiles target and other stuff that doesn't matter
-ifneq (,$(strip $(PARALLEL_DIRS)))
-LOOP_OVER_PARALLEL_DIRS = \
- $(foreach dir,$(PARALLEL_DIRS),$(call SUBMAKE,$@,$(dir)))
-endif
-
-ifneq (,$(strip $(TOOL_DIRS)))
-LOOP_OVER_TOOL_DIRS = \
- $(foreach dir,$(TOOL_DIRS),$(call SUBMAKE,$@,$(dir)))
-endif
-
-#
-# Now we can differentiate between objects used to build a library, and
-# objects used to build an executable in the same directory.
-#
-ifndef PROGOBJS
-PROGOBJS = $(OBJS)
-endif
-
-ifndef HOST_PROGOBJS
-HOST_PROGOBJS = $(HOST_OBJS)
-endif
-
-GARBAGE_DIRS += $(wildcard $(CURDIR)/$(MDDEPDIR))
-
-#
-# Tags: emacs (etags), vi (ctags)
-# TAG_PROGRAM := ctags -L -
-#
-TAG_PROGRAM = xargs etags -a
-
-#
-# Turn on C++ linking if we have any .cpp or .mm files
-# (moved this from config.mk so that config.mk can be included
-# before the CPPSRCS are defined)
-#
-ifneq ($(HOST_CPPSRCS)$(HOST_CMMSRCS),)
-HOST_CPP_PROG_LINK = 1
-endif
-
-#
-# This will strip out symbols that the component should not be
-# exporting from the .dynsym section.
-#
-ifdef IS_COMPONENT
-EXTRA_DSO_LDOPTS += $(MOZ_COMPONENTS_VERSION_SCRIPT_LDFLAGS)
-endif # IS_COMPONENT
-
-#
-# MacOS X specific stuff
-#
-
-ifeq ($(OS_ARCH),Darwin)
-ifdef SHARED_LIBRARY
-ifdef IS_COMPONENT
-EXTRA_DSO_LDOPTS += -bundle
-else
-EXTRA_DSO_LDOPTS += -dynamiclib -install_name @executable_path/$(SHARED_LIBRARY) -compatibility_version 1 -current_version 1 -single_module
-endif
-endif
-endif
-
-#
-# On NetBSD a.out systems, use -Bsymbolic. This fixes what would otherwise be
-# fatal symbol name clashes between components.
-#
-ifeq ($(OS_ARCH),NetBSD)
-ifeq ($(DLL_SUFFIX),.so.1.0)
-ifdef IS_COMPONENT
-EXTRA_DSO_LDOPTS += -Wl,-Bsymbolic
-endif
-endif
-endif
-
-ifeq ($(OS_ARCH),FreeBSD)
-ifdef IS_COMPONENT
-EXTRA_DSO_LDOPTS += -Wl,-Bsymbolic
-endif
-endif
-
-ifeq ($(OS_ARCH),NetBSD)
-ifneq (,$(filter arc cobalt hpcmips mipsco newsmips pmax sgimips,$(OS_TEST)))
-ifneq (,$(filter layout/%,$(relativesrcdir)))
-OS_CFLAGS += -Wa,-xgot
-OS_CXXFLAGS += -Wa,-xgot
-endif
-endif
-endif
-
-#
-# HP-UX specific section: for COMPONENTS only, add -Bsymbolic flag
-# which uses internal symbols first
-#
-ifeq ($(OS_ARCH),HP-UX)
-ifdef IS_COMPONENT
-ifeq ($(GNU_CC)$(GNU_CXX),)
-EXTRA_DSO_LDOPTS += -Wl,-Bsymbolic
-ifneq ($(HAS_EXTRAEXPORTS),1)
-MKSHLIB += -Wl,+eNSGetModule -Wl,+eerrno
-MKCSHLIB += +eNSGetModule +eerrno
-ifneq ($(OS_TEST),ia64)
-MKSHLIB += -Wl,+e_shlInit
-MKCSHLIB += +e_shlInit
-endif # !ia64
-endif # !HAS_EXTRAEXPORTS
-endif # non-gnu compilers
-endif # IS_COMPONENT
-endif # HP-UX
-
-ifeq ($(OS_ARCH),AIX)
-ifdef IS_COMPONENT
-ifneq ($(HAS_EXTRAEXPORTS),1)
-MKSHLIB += -bE:$(MOZILLA_DIR)/build/unix/aix.exp -bnoexpall
-MKCSHLIB += -bE:$(MOZILLA_DIR)/build/unix/aix.exp -bnoexpall
-endif # HAS_EXTRAEXPORTS
-endif # IS_COMPONENT
-endif # AIX
-
-#
-# Linux: add -Bsymbolic flag for components
-#
-ifeq ($(OS_ARCH),Linux)
-ifdef IS_COMPONENT
-EXTRA_DSO_LDOPTS += -Wl,-Bsymbolic
-endif
-endif
-
-#
-# GNU (Hurd) doesn't have a path length limitation
-#
-
-ifeq ($(OS_ARCH),GNU)
-OS_CPPFLAGS += -DPATH_MAX=1024 -DMAXPATHLEN=1024
-endif
-
-#
-# MINGW32
-#
-ifeq ($(OS_ARCH),WINNT)
-ifdef GNU_CC
-ifndef IS_COMPONENT
-DSO_LDOPTS += -Wl,--out-implib -Wl,$(IMPORT_LIBRARY)
-endif
-endif
-endif
-
-ifeq ($(USE_TVFS),1)
-IFLAGS1 = -rb
-IFLAGS2 = -rb
-else
-IFLAGS1 = -m 644
-IFLAGS2 = -m 755
-endif
-
-ifeq (_WINNT,$(GNU_CC)_$(OS_ARCH))
-OUTOPTION = -Fo# eol
-PREPROCESS_OPTION = -P -Fi# eol
-else
-OUTOPTION = -o # eol
-PREPROCESS_OPTION = -E -o #eol
-endif # WINNT && !GNU_CC
-
-ifneq (,$(filter ml%,$(AS)))
-ASOUTOPTION = -Fo# eol
-else
-ASOUTOPTION = -o # eol
-endif
-
-ifeq (,$(CROSS_COMPILE))
-HOST_OUTOPTION = $(OUTOPTION)
-else
-HOST_OUTOPTION = -o # eol
-endif
-################################################################################
-
-# Ensure the build config is up to date. This is done automatically when builds
-# are performed through |mach build|. The check here is to catch people not
-# using mach. If we ever enforce builds through mach, this code can be removed.
-ifndef MOZBUILD_BACKEND_CHECKED
-ifndef MACH
-ifndef TOPLEVEL_BUILD
-$(DEPTH)/backend.RecursiveMakeBackend:
- $(error Build configuration changed. Build with |mach build| or run |mach build-backend| to regenerate build config)
-
-include $(DEPTH)/backend.RecursiveMakeBackend.pp
-
-default:: $(DEPTH)/backend.RecursiveMakeBackend
-
-export MOZBUILD_BACKEND_CHECKED=1
-endif
-endif
-endif
-
-# The root makefile doesn't want to do a plain export/libs, because
-# of the tiers and because of libxul. Suppress the default rules in favor
-# of something else. Makefiles which use this var *must* provide a sensible
-# default rule before including rules.mk
-ifndef SUPPRESS_DEFAULT_RULES
-default all::
- $(MAKE) export
-ifdef MOZ_PSEUDO_DERECURSE
-ifdef COMPILE_ENVIRONMENT
- $(MAKE) compile
-endif
-endif
- $(MAKE) libs
- $(MAKE) tools
-endif # SUPPRESS_DEFAULT_RULES
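A hedged sketch of the escape hatch described above (the ordering shown is hypothetical): a Makefile that needs its own top-level ordering sets SUPPRESS_DEFAULT_RULES and provides a default rule before including rules.mk.

# Hypothetical Makefile.in snippet:
SUPPRESS_DEFAULT_RULES = 1

default::
	$(MAKE) export
	$(MAKE) libs

include $(topsrcdir)/config/rules.mk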
-
-ifeq ($(findstring s,$(filter-out --%, $(MAKEFLAGS))),)
-ECHO := echo
-QUIET :=
-else
-ECHO := true
-QUIET := -q
-endif
-
-# Do everything from scratch
-everything::
- $(MAKE) clean
- $(MAKE) all
-
-ifneq (,$(filter-out %.$(LIB_SUFFIX),$(SHARED_LIBRARY_LIBS)))
-$(error SHARED_LIBRARY_LIBS must contain .$(LIB_SUFFIX) files only)
-endif
-
-HOST_LIBS_DEPS = $(filter %.$(LIB_SUFFIX),$(HOST_LIBS))
-
-# Dependencies which, if modified, should cause everything to rebuild
-GLOBAL_DEPS += Makefile $(DEPTH)/config/autoconf.mk $(topsrcdir)/config/config.mk
-
-##############################################
-ifdef COMPILE_ENVIRONMENT
-OBJ_TARGETS = $(OBJS) $(PROGOBJS) $(HOST_OBJS) $(HOST_PROGOBJS)
-
-compile:: $(OBJ_TARGETS)
-
-include $(topsrcdir)/config/makefiles/target_binaries.mk
-endif
-
-ifdef IS_TOOL_DIR
-# One would think "tools:: libs" would work, but it turns out that combined with
-# bug 907365, this makes make forget to run some rules sometimes.
-tools::
- @$(MAKE) libs
-endif
-
-##############################################
-ifneq (1,$(NO_PROFILE_GUIDED_OPTIMIZE))
-ifdef MOZ_PROFILE_USE
-ifeq ($(OS_ARCH)_$(GNU_CC), WINNT_)
-# When building with PGO, we have to make sure to re-link
-# in the MOZ_PROFILE_USE phase if we linked in the
-# MOZ_PROFILE_GENERATE phase. We'll touch this pgo.relink
-# file in the link rule in the GENERATE phase to indicate
-# that we need a relink.
-ifdef SHARED_LIBRARY
-$(SHARED_LIBRARY): pgo.relink
-endif
-ifdef PROGRAM
-$(PROGRAM): pgo.relink
-endif
-
-# In the second pass, we need to merge the pgc files into the pgd file.
-# The compiler would do this for us automatically if they were in the right
-# place, but they're in dist/bin.
-ifneq (,$(SHARED_LIBRARY)$(PROGRAM))
-export::
-ifdef PROGRAM
- $(PYTHON) $(topsrcdir)/build/win32/pgomerge.py \
- $(PROGRAM:$(BIN_SUFFIX)=) $(DIST)/bin
-endif
-ifdef SHARED_LIBRARY
- $(PYTHON) $(topsrcdir)/build/win32/pgomerge.py \
- $(SHARED_LIBRARY_NAME) $(DIST)/bin
-endif
-endif # SHARED_LIBRARY || PROGRAM
-endif # WINNT_
-endif # MOZ_PROFILE_USE
-ifdef MOZ_PROFILE_GENERATE
-# Clean up profiling data during PROFILE_GENERATE phase
-export::
-ifeq ($(OS_ARCH)_$(GNU_CC), WINNT_)
- $(foreach pgd,$(wildcard *.pgd),pgomgr -clear $(pgd);)
-else
-ifdef GNU_CC
- -$(RM) *.gcda
-endif
-endif
-endif
-
-ifneq (,$(MOZ_PROFILE_GENERATE)$(MOZ_PROFILE_USE))
-ifdef GNU_CC
-# Force rebuilding libraries and programs in both passes because each
-# pass uses different object files.
-$(PROGRAM) $(SHARED_LIBRARY) $(LIBRARY): FORCE
-endif
-endif
-
-endif # NO_PROFILE_GUIDED_OPTIMIZE
-
-##############################################
-
-checkout:
- $(MAKE) -C $(topsrcdir) -f client.mk checkout
-
-clean clobber realclean clobber_all::
- -$(RM) $(ALL_TRASH)
- -$(RM) -r $(ALL_TRASH_DIRS)
-
-ifdef TIERS
-clean clobber realclean clobber_all distclean::
- $(foreach dir, \
- $(foreach tier, $(TIERS), $(tier_$(tier)_staticdirs) $(tier_$(tier)_dirs)), \
- -$(call SUBMAKE,$@,$(dir)))
-else
-clean clobber realclean clobber_all distclean::
- $(foreach dir,$(PARALLEL_DIRS) $(DIRS) $(TOOL_DIRS),-$(call SUBMAKE,$@,$(dir)))
-
-distclean::
- $(foreach dir,$(PARALLEL_DIRS) $(DIRS) $(TOOL_DIRS),-$(call SUBMAKE,$@,$(dir)))
-endif
-
-distclean::
- -$(RM) -r $(ALL_TRASH_DIRS)
- -$(RM) $(ALL_TRASH) \
- Makefile .HSancillary \
- $(wildcard *.$(OBJ_SUFFIX)) $(wildcard *.ho) $(wildcard host_*.o*) \
- $(wildcard *.$(LIB_SUFFIX)) $(wildcard *$(DLL_SUFFIX)) \
- $(wildcard *.$(IMPORT_LIB_SUFFIX))
-ifeq ($(OS_ARCH),OS2)
- -$(RM) $(PROGRAM:.exe=.map)
-endif
-
-alltags:
- $(RM) TAGS
- find $(topsrcdir) -name dist -prune -o \( -name '*.[hc]' -o -name '*.cp' -o -name '*.cpp' -o -name '*.idl' \) -print | $(TAG_PROGRAM)
-
-#
-# PROGRAM = Foo
-# creates OBJS, links with LIBS to create Foo
-#
-$(PROGRAM): $(PROGOBJS) $(EXTRA_DEPS) $(EXE_DEF_FILE) $(RESFILE) $(GLOBAL_DEPS)
- $(REPORT_BUILD)
- @$(RM) $@.manifest
-ifeq (_WINNT,$(GNU_CC)_$(OS_ARCH))
- $(EXPAND_LD) -NOLOGO -OUT:$@ -PDB:$(LINK_PDBFILE) $(WIN32_EXE_LDFLAGS) $(LDFLAGS) $(MOZ_GLUE_PROGRAM_LDFLAGS) $(PROGOBJS) $(RESFILE) $(LIBS) $(EXTRA_LIBS) $(OS_LIBS)
-ifdef MSMANIFEST_TOOL
- @if test -f $@.manifest; then \
- if test -f '$(srcdir)/$@.manifest'; then \
- echo 'Embedding manifest from $(srcdir)/$@.manifest and $@.manifest'; \
- mt.exe -NOLOGO -MANIFEST '$(win_srcdir)/$@.manifest' $@.manifest -OUTPUTRESOURCE:$@\;1; \
- else \
- echo 'Embedding manifest from $@.manifest'; \
- mt.exe -NOLOGO -MANIFEST $@.manifest -OUTPUTRESOURCE:$@\;1; \
- fi; \
- elif test -f '$(srcdir)/$@.manifest'; then \
- echo 'Embedding manifest from $(srcdir)/$@.manifest'; \
- mt.exe -NOLOGO -MANIFEST '$(win_srcdir)/$@.manifest' -OUTPUTRESOURCE:$@\;1; \
- fi
-endif # MSVC with manifest tool
-ifdef MOZ_PROFILE_GENERATE
-# touch it a few seconds into the future to work around FAT's
-# 2-second granularity
- touch -t `date +%Y%m%d%H%M.%S -d 'now+5seconds'` pgo.relink
-endif
-else # !WINNT || GNU_CC
- $(EXPAND_CCC) -o $@ $(CXXFLAGS) $(PROGOBJS) $(RESFILE) $(WIN32_EXE_LDFLAGS) $(LDFLAGS) $(WRAP_LDFLAGS) $(LIBS_DIR) $(LIBS) $(MOZ_GLUE_PROGRAM_LDFLAGS) $(OS_LIBS) $(EXTRA_LIBS) $(BIN_FLAGS) $(EXE_DEF_FILE) $(STLPORT_LIBS)
- @$(call CHECK_STDCXX,$@)
-endif # WINNT && !GNU_CC
-
-ifdef ENABLE_STRIP
- $(STRIP) $(STRIP_FLAGS) $@
-endif
-ifdef MOZ_POST_PROGRAM_COMMAND
- $(MOZ_POST_PROGRAM_COMMAND) $@
-endif
-
-$(HOST_PROGRAM): $(HOST_PROGOBJS) $(HOST_LIBS_DEPS) $(HOST_EXTRA_DEPS) $(GLOBAL_DEPS)
- $(REPORT_BUILD)
-ifeq (_WINNT,$(GNU_CC)_$(HOST_OS_ARCH))
- $(EXPAND_LIBS_EXEC) -- $(HOST_LD) -NOLOGO -OUT:$@ -PDB:$(HOST_PDBFILE) $(HOST_OBJS) $(WIN32_EXE_LDFLAGS) $(HOST_LDFLAGS) $(HOST_LIBS) $(HOST_EXTRA_LIBS)
-ifdef MSMANIFEST_TOOL
- @if test -f $@.manifest; then \
- if test -f '$(srcdir)/$@.manifest'; then \
- echo 'Embedding manifest from $(srcdir)/$@.manifest and $@.manifest'; \
- mt.exe -NOLOGO -MANIFEST '$(win_srcdir)/$@.manifest' $@.manifest -OUTPUTRESOURCE:$@\;1; \
- else \
- echo 'Embedding manifest from $@.manifest'; \
- mt.exe -NOLOGO -MANIFEST $@.manifest -OUTPUTRESOURCE:$@\;1; \
- fi; \
- elif test -f '$(srcdir)/$@.manifest'; then \
- echo 'Embedding manifest from $(srcdir)/$@.manifest'; \
- mt.exe -NOLOGO -MANIFEST '$(win_srcdir)/$@.manifest' -OUTPUTRESOURCE:$@\;1; \
- fi
-endif # MSVC with manifest tool
-else
-ifeq ($(HOST_CPP_PROG_LINK),1)
- $(EXPAND_LIBS_EXEC) -- $(HOST_CXX) -o $@ $(HOST_CXXFLAGS) $(HOST_LDFLAGS) $(HOST_PROGOBJS) $(HOST_LIBS) $(HOST_EXTRA_LIBS)
-else
- $(EXPAND_LIBS_EXEC) -- $(HOST_CC) -o $@ $(HOST_CFLAGS) $(HOST_LDFLAGS) $(HOST_PROGOBJS) $(HOST_LIBS) $(HOST_EXTRA_LIBS)
-endif # HOST_CPP_PROG_LINK
-endif
-
-#
-# This is an attempt to support generation of multiple binaries
-# in one directory, it assumes everything to compile Foo is in
-# Foo.o (from either Foo.c or Foo.cpp).
-#
-# SIMPLE_PROGRAMS = Foo Bar
-# creates Foo.o Bar.o, links with LIBS to create Foo, Bar.
-#
-$(SIMPLE_PROGRAMS): %$(BIN_SUFFIX): %.$(OBJ_SUFFIX) $(EXTRA_DEPS) $(GLOBAL_DEPS)
- $(REPORT_BUILD)
-ifeq (_WINNT,$(GNU_CC)_$(OS_ARCH))
- $(EXPAND_LD) -nologo -out:$@ -pdb:$(LINK_PDBFILE) $< $(WIN32_EXE_LDFLAGS) $(LDFLAGS) $(MOZ_GLUE_PROGRAM_LDFLAGS) $(LIBS) $(EXTRA_LIBS) $(OS_LIBS)
-ifdef MSMANIFEST_TOOL
- @if test -f $@.manifest; then \
- mt.exe -NOLOGO -MANIFEST $@.manifest -OUTPUTRESOURCE:$@\;1; \
- rm -f $@.manifest; \
- fi
-endif # MSVC with manifest tool
-else
- $(EXPAND_CCC) $(CXXFLAGS) -o $@ $< $(WIN32_EXE_LDFLAGS) $(LDFLAGS) $(WRAP_LDFLAGS) $(LIBS_DIR) $(LIBS) $(MOZ_GLUE_PROGRAM_LDFLAGS) $(OS_LIBS) $(EXTRA_LIBS) $(BIN_FLAGS) $(STLPORT_LIBS)
- @$(call CHECK_STDCXX,$@)
-endif # WINNT && !GNU_CC
-
-ifdef ENABLE_STRIP
- $(STRIP) $(STRIP_FLAGS) $@
-endif
-ifdef MOZ_POST_PROGRAM_COMMAND
- $(MOZ_POST_PROGRAM_COMMAND) $@
-endif
-
-$(HOST_SIMPLE_PROGRAMS): host_%$(HOST_BIN_SUFFIX): host_%.$(OBJ_SUFFIX) $(HOST_LIBS_DEPS) $(HOST_EXTRA_DEPS) $(GLOBAL_DEPS)
- $(REPORT_BUILD)
-ifeq (WINNT_,$(HOST_OS_ARCH)_$(GNU_CC))
- $(EXPAND_LIBS_EXEC) -- $(HOST_LD) -NOLOGO -OUT:$@ -PDB:$(HOST_PDBFILE) $< $(WIN32_EXE_LDFLAGS) $(HOST_LIBS) $(HOST_EXTRA_LIBS)
-else
-ifneq (,$(HOST_CPPSRCS)$(USE_HOST_CXX))
- $(EXPAND_LIBS_EXEC) -- $(HOST_CXX) $(HOST_OUTOPTION)$@ $(HOST_CXXFLAGS) $(INCLUDES) $< $(HOST_LIBS) $(HOST_EXTRA_LIBS)
-else
- $(EXPAND_LIBS_EXEC) -- $(HOST_CC) $(HOST_OUTOPTION)$@ $(HOST_CFLAGS) $(INCLUDES) $< $(HOST_LIBS) $(HOST_EXTRA_LIBS)
-endif
-endif
-
-ifdef DTRACE_PROBE_OBJ
-EXTRA_DEPS += $(DTRACE_PROBE_OBJ)
-OBJS += $(DTRACE_PROBE_OBJ)
-endif
-
-$(filter %.$(LIB_SUFFIX),$(LIBRARY)): $(OBJS) $(EXTRA_DEPS) $(GLOBAL_DEPS)
- $(REPORT_BUILD)
- $(RM) $(LIBRARY)
- $(EXPAND_AR) $(AR_FLAGS) $(OBJS) $(SHARED_LIBRARY_LIBS)
-
-$(filter-out %.$(LIB_SUFFIX),$(LIBRARY)): $(filter %.$(LIB_SUFFIX),$(LIBRARY)) $(OBJS) $(EXTRA_DEPS) $(GLOBAL_DEPS)
-# When we only build a library descriptor, blow out any existing library
- $(REPORT_BUILD)
- $(if $(filter %.$(LIB_SUFFIX),$(LIBRARY)),,$(RM) $(REAL_LIBRARY) $(EXPORT_LIBRARY:%=%/$(REAL_LIBRARY)))
- $(EXPAND_LIBS_GEN) -o $@ $(OBJS) $(SHARED_LIBRARY_LIBS)
-
-ifeq ($(OS_ARCH),WINNT)
-# Import libraries are created by the rules creating shared libraries.
-# The rules to copy them to $(DIST)/lib depend on $(IMPORT_LIBRARY),
-# but make will happily consider the import library before it is refreshed
-# when rebuilding the corresponding shared library. Defining an empty recipe
-# for import libraries forces make to wait for the shared library recipe to
-# have run before considering other targets that depend on the import library.
-# See bug 795204.
-$(IMPORT_LIBRARY): $(SHARED_LIBRARY) ;
-endif
-
-ifeq ($(OS_ARCH),OS2)
-$(DEF_FILE): $(OBJS) $(SHARED_LIBRARY_LIBS)
- $(RM) $@
- echo LIBRARY $(SHARED_LIBRARY_NAME) INITINSTANCE TERMINSTANCE > $@
- echo PROTMODE >> $@
- echo CODE LOADONCALL MOVEABLE DISCARDABLE >> $@
- echo DATA PRELOAD MOVEABLE MULTIPLE NONSHARED >> $@
- echo EXPORTS >> $@
-
- $(ADD_TO_DEF_FILE)
-
-$(IMPORT_LIBRARY): $(SHARED_LIBRARY)
- $(REPORT_BUILD)
- $(RM) $@
- $(IMPLIB) $@ $^
-endif # OS/2
-
-$(HOST_LIBRARY): $(HOST_OBJS) Makefile
- $(REPORT_BUILD)
- $(RM) $@
- $(EXPAND_LIBS_EXEC) --extract -- $(HOST_AR) $(HOST_AR_FLAGS) $(HOST_OBJS)
-
-ifdef HAVE_DTRACE
-ifndef XP_MACOSX
-ifdef DTRACE_PROBE_OBJ
-ifndef DTRACE_LIB_DEPENDENT
-NON_DTRACE_OBJS := $(filter-out $(DTRACE_PROBE_OBJ),$(OBJS))
-$(DTRACE_PROBE_OBJ): $(NON_DTRACE_OBJS)
- dtrace -G -C -s $(MOZILLA_DTRACE_SRC) -o $(DTRACE_PROBE_OBJ) $(NON_DTRACE_OBJS)
-endif
-endif
-endif
-endif
-
-# On Darwin (Mac OS X), dwarf2 debugging uses debug info left in .o files,
-# so instead of deleting .o files after repacking them into a dylib, we make
-# symlinks back to the originals. The symlinks are a no-op for stabs debugging,
-# so no need to conditionalize on OS version or debugging format.
-
-$(SHARED_LIBRARY): $(OBJS) $(DEF_FILE) $(RESFILE) $(LIBRARY) $(EXTRA_DEPS) $(GLOBAL_DEPS)
- $(REPORT_BUILD)
-ifndef INCREMENTAL_LINKER
- $(RM) $@
-endif
-ifdef DTRACE_LIB_DEPENDENT
-ifndef XP_MACOSX
- dtrace -G -C -s $(MOZILLA_DTRACE_SRC) -o $(DTRACE_PROBE_OBJ) $(shell $(EXPAND_LIBS) $(MOZILLA_PROBE_LIBS))
-endif
- $(EXPAND_MKSHLIB) $(SHLIB_LDSTARTFILE) $(OBJS) $(SUB_SHLOBJS) $(DTRACE_PROBE_OBJ) $(MOZILLA_PROBE_LIBS) $(RESFILE) $(LDFLAGS) $(WRAP_LDFLAGS) $(SHARED_LIBRARY_LIBS) $(EXTRA_DSO_LDOPTS) $(MOZ_GLUE_LDFLAGS) $(OS_LIBS) $(EXTRA_LIBS) $(DEF_FILE) $(SHLIB_LDENDFILE) $(if $(LIB_IS_C_ONLY),,$(STLPORT_LIBS))
- @$(RM) $(DTRACE_PROBE_OBJ)
-else # ! DTRACE_LIB_DEPENDENT
- $(EXPAND_MKSHLIB) $(SHLIB_LDSTARTFILE) $(OBJS) $(SUB_SHLOBJS) $(RESFILE) $(LDFLAGS) $(WRAP_LDFLAGS) $(SHARED_LIBRARY_LIBS) $(EXTRA_DSO_LDOPTS) $(MOZ_GLUE_LDFLAGS) $(OS_LIBS) $(EXTRA_LIBS) $(DEF_FILE) $(SHLIB_LDENDFILE) $(if $(LIB_IS_C_ONLY),,$(STLPORT_LIBS))
-endif # DTRACE_LIB_DEPENDENT
- @$(call CHECK_STDCXX,$@)
-
-ifeq (_WINNT,$(GNU_CC)_$(OS_ARCH))
-ifdef MSMANIFEST_TOOL
-ifdef EMBED_MANIFEST_AT
- @if test -f $@.manifest; then \
- mt.exe -NOLOGO -MANIFEST $@.manifest -OUTPUTRESOURCE:$@\;$(EMBED_MANIFEST_AT); \
- rm -f $@.manifest; \
- fi
-endif # EMBED_MANIFEST_AT
-endif # MSVC with manifest tool
-ifdef MOZ_PROFILE_GENERATE
- touch -t `date +%Y%m%d%H%M.%S -d 'now+5seconds'` pgo.relink
-endif
-endif # WINNT && !GCC
- @$(RM) foodummyfilefoo $(DELETE_AFTER_LINK)
- chmod +x $@
-ifdef ENABLE_STRIP
- $(STRIP) $(STRIP_FLAGS) $@
-endif
-ifdef MOZ_POST_DSO_LIB_COMMAND
- $(MOZ_POST_DSO_LIB_COMMAND) $@
-endif
-
-ifeq ($(SOLARIS_SUNPRO_CC),1)
-_MDDEPFILE = $(MDDEPDIR)/$(@F).pp
-
-define MAKE_DEPS_AUTO_CC
-if test -d $(@D); then \
- echo 'Building deps for $< using Sun Studio cc'; \
- $(CC) $(COMPILE_CFLAGS) -xM $< >$(_MDDEPFILE) ; \
- $(PYTHON) $(topsrcdir)/build/unix/add_phony_targets.py $(_MDDEPFILE) ; \
-fi
-endef
-define MAKE_DEPS_AUTO_CXX
-if test -d $(@D); then \
- echo 'Building deps for $< using Sun Studio CC'; \
- $(CXX) $(COMPILE_CXXFLAGS) -xM $< >$(_MDDEPFILE) ; \
- $(PYTHON) $(topsrcdir)/build/unix/add_phony_targets.py $(_MDDEPFILE) ; \
-fi
-endef
-endif # Sun Studio on Solaris
-
-# The object file is in the current directory, and the source file can be any
-# relative path. This macro adds the dependency obj: src for each source file.
-# This dependency must be first for the $< flag to work correctly, and the
-# rules that have commands for these targets must not list any other
-# prerequisites, or they will override the $< variable.
-define src_objdep
-$(basename $2$(notdir $1)).$(OBJ_SUFFIX): $1 $$(call mkdir_deps,$$(MDDEPDIR))
-endef
-$(foreach f,$(CSRCS) $(SSRCS) $(CPPSRCS) $(CMSRCS) $(CMMSRCS) $(ASFILES),$(eval $(call src_objdep,$(f))))
-$(foreach f,$(HOST_CSRCS) $(HOST_CPPSRCS) $(HOST_CMSRCS) $(HOST_CMMSRCS),$(eval $(call src_objdep,$(f),host_)))
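A worked expansion of src_objdep (an editor's sketch; the file names are invented and OBJ_SUFFIX is assumed to be o): for CPPSRCS containing subdir/Foo.cpp and HOST_CSRCS containing bar.c, the two foreach/eval lines above generate dependencies equivalent to:

# Generated by $(call src_objdep,subdir/Foo.cpp):
Foo.o: subdir/Foo.cpp $(call mkdir_deps,$(MDDEPDIR))
# Generated by $(call src_objdep,bar.c,host_):
host_bar.o: bar.c $(call mkdir_deps,$(MDDEPDIR))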
-
-$(OBJS) $(HOST_OBJS) $(PROGOBJS) $(HOST_PROGOBJS): $(GLOBAL_DEPS)
-
-# Rules for building native targets must come first because of the host_ prefix
-$(HOST_COBJS):
- $(REPORT_BUILD)
- $(ELOG) $(HOST_CC) $(HOST_OUTOPTION)$@ -c $(HOST_CFLAGS) $(INCLUDES) $(NSPR_CFLAGS) $(_VPATH_SRCS)
-
-$(HOST_CPPOBJS):
- $(REPORT_BUILD)
- $(ELOG) $(HOST_CXX) $(HOST_OUTOPTION)$@ -c $(HOST_CXXFLAGS) $(INCLUDES) $(NSPR_CFLAGS) $(_VPATH_SRCS)
-
-$(HOST_CMOBJS):
- $(REPORT_BUILD)
- $(ELOG) $(HOST_CC) $(HOST_OUTOPTION)$@ -c $(HOST_CFLAGS) $(HOST_CMFLAGS) $(INCLUDES) $(NSPR_CFLAGS) $(_VPATH_SRCS)
-
-$(HOST_CMMOBJS):
- $(REPORT_BUILD)
- $(ELOG) $(HOST_CXX) $(HOST_OUTOPTION)$@ -c $(HOST_CXXFLAGS) $(HOST_CMMFLAGS) $(INCLUDES) $(NSPR_CFLAGS) $(_VPATH_SRCS)
-
-$(COBJS):
- $(REPORT_BUILD)
- @$(MAKE_DEPS_AUTO_CC)
- $(ELOG) $(CC) $(OUTOPTION)$@ -c $(COMPILE_CFLAGS) $(TARGET_LOCAL_INCLUDES) $(_VPATH_SRCS)
-
-# DEFINES and ACDEFINES are needed here to enable conditional compilation of Q_OBJECTs:
-# 'moc' only knows about #defines it gets on the command line (-D...), not in
-# included headers like mozilla-config.h
-$(filter moc_%.cpp,$(CPPSRCS)): moc_%.cpp: %.h
- $(REPORT_BUILD)
- $(ELOG) $(MOC) $(DEFINES) $(ACDEFINES) $< $(OUTOPTION)$@
-
-$(filter moc_%.cc,$(CPPSRCS)): moc_%.cc: %.cc
- $(REPORT_BUILD)
- $(ELOG) $(MOC) $(DEFINES) $(ACDEFINES) $(_VPATH_SRCS:.cc=.h) $(OUTOPTION)$@
-
-$(filter qrc_%.cpp,$(CPPSRCS)): qrc_%.cpp: %.qrc
- $(REPORT_BUILD)
- $(ELOG) $(RCC) -name $* $< $(OUTOPTION)$@
-
-ifdef ASFILES
-# The AS_DASH_C_FLAG is needed because not all assemblers (Solaris) accept
-# a '-c' flag.
-$(ASOBJS):
- $(REPORT_BUILD)
- $(AS) $(ASOUTOPTION)$@ $(ASFLAGS) $(AS_DASH_C_FLAG) $(_VPATH_SRCS)
-endif
-
-$(SOBJS):
- $(REPORT_BUILD)
- $(AS) -o $@ $(ASFLAGS) $(LOCAL_INCLUDES) $(TARGET_LOCAL_INCLUDES) -c $<
-
-$(CPPOBJS):
- $(REPORT_BUILD)
- @$(MAKE_DEPS_AUTO_CXX)
- $(ELOG) $(CCC) $(OUTOPTION)$@ -c $(COMPILE_CXXFLAGS) $(TARGET_LOCAL_INCLUDES) $(_VPATH_SRCS)
-
-$(CMMOBJS):
- $(REPORT_BUILD)
- @$(MAKE_DEPS_AUTO_CXX)
- $(ELOG) $(CCC) -o $@ -c $(COMPILE_CXXFLAGS) $(COMPILE_CMMFLAGS) $(TARGET_LOCAL_INCLUDES) $(_VPATH_SRCS)
-
-$(CMOBJS):
- $(REPORT_BUILD)
- @$(MAKE_DEPS_AUTO_CC)
- $(ELOG) $(CC) -o $@ -c $(COMPILE_CFLAGS) $(COMPILE_CMFLAGS) $(TARGET_LOCAL_INCLUDES) $(_VPATH_SRCS)
-
-$(filter %.s,$(CPPSRCS:%.cpp=%.s)): %.s: %.cpp $(call mkdir_deps,$(MDDEPDIR))
- $(REPORT_BUILD)
- $(CCC) -S $(COMPILE_CXXFLAGS) $(TARGET_LOCAL_INCLUDES) $(_VPATH_SRCS)
-
-$(filter %.s,$(CPPSRCS:%.cc=%.s)): %.s: %.cc $(call mkdir_deps,$(MDDEPDIR))
- $(REPORT_BUILD)
- $(CCC) -S $(COMPILE_CXXFLAGS) $(TARGET_LOCAL_INCLUDES) $(_VPATH_SRCS)
-
-$(filter %.s,$(CSRCS:%.c=%.s)): %.s: %.c $(call mkdir_deps,$(MDDEPDIR))
- $(REPORT_BUILD)
- $(CC) -S $(COMPILE_CFLAGS) $(TARGET_LOCAL_INCLUDES) $(_VPATH_SRCS)
-
-$(filter %.i,$(CPPSRCS:%.cpp=%.i)): %.i: %.cpp $(call mkdir_deps,$(MDDEPDIR))
- $(REPORT_BUILD)
- $(CCC) -C $(PREPROCESS_OPTION)$@ $(COMPILE_CXXFLAGS) $(TARGET_LOCAL_INCLUDES) $(_VPATH_SRCS)
-
-$(filter %.i,$(CPPSRCS:%.cc=%.i)): %.i: %.cc $(call mkdir_deps,$(MDDEPDIR))
- $(REPORT_BUILD)
- $(CCC) -C $(PREPROCESS_OPTION)$@ $(COMPILE_CXXFLAGS) $(TARGET_LOCAL_INCLUDES) $(_VPATH_SRCS)
-
-$(filter %.i,$(CSRCS:%.c=%.i)): %.i: %.c $(call mkdir_deps,$(MDDEPDIR))
- $(REPORT_BUILD)
- $(CC) -C $(PREPROCESS_OPTION)$@ $(COMPILE_CFLAGS) $(TARGET_LOCAL_INCLUDES) $(_VPATH_SRCS)
-
-$(filter %.i,$(CMMSRCS:%.mm=%.i)): %.i: %.mm $(call mkdir_deps,$(MDDEPDIR))
- $(REPORT_BUILD)
- $(CCC) -C $(PREPROCESS_OPTION)$@ $(COMPILE_CXXFLAGS) $(COMPILE_CMMFLAGS) $(TARGET_LOCAL_INCLUDES) $(_VPATH_SRCS)
-
-$(RESFILE): %.res: %.rc
- $(REPORT_BUILD)
- @echo Creating Resource file: $@
-ifeq ($(OS_ARCH),OS2)
- $(RC) $(RCFLAGS:-D%=-d %) -i $(subst /,\,$(srcdir)) -r $< $@
-else
-ifdef GNU_CC
- $(RC) $(RCFLAGS) $(filter-out -U%,$(DEFINES)) $(INCLUDES:-I%=--include-dir %) $(OUTOPTION)$@ $(_VPATH_SRCS)
-else
- $(RC) $(RCFLAGS) -r $(DEFINES) $(INCLUDES) $(OUTOPTION)$@ $(_VPATH_SRCS)
-endif
-endif
-
-# Cancel GNU make built-in implicit rules
-ifndef .PYMAKE
-MAKEFLAGS += -r
-endif
-
-ifneq (,$(filter OS2 WINNT,$(OS_ARCH)))
-SEP := ;
-else
-SEP := :
-endif
-
-EMPTY :=
-SPACE := $(EMPTY) $(EMPTY)
-
-# MSYS has its own special path form, but javac expects the source and class
-# paths to be in the DOS form (i.e. e:/builds/...). This function does the
-# appropriate conversion on Windows, but is a noop on other systems.
-ifeq ($(HOST_OS_ARCH),WINNT)
-# We use 'pwd -W' to get DOS form of the path. However, since the given path
-# could be a file or a non-existent path, we cannot call 'pwd -W' directly
-# on the path. Instead, we extract the root path (i.e. "c:/"), call 'pwd -W'
-# on it, then merge with the rest of the path.
-root-path = $(shell echo $(1) | sed -e 's|\(/[^/]*\)/\?\(.*\)|\1|')
-non-root-path = $(shell echo $(1) | sed -e 's|\(/[^/]*\)/\?\(.*\)|\2|')
-normalizepath = $(foreach p,$(1),$(if $(filter /%,$(1)),$(patsubst %/,%,$(shell cd $(call root-path,$(1)) && pwd -W))/$(call non-root-path,$(1)),$(1)))
-else
-normalizepath = $(1)
-endif
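For illustration (an editor's sketch; the path is invented): on a Windows/MSYS host, $(call normalizepath,/c/builds/obj/classes) extracts the root /c, asks pwd -W for its DOS form, and yields roughly c:/builds/obj/classes; on other hosts the argument is returned untouched.

# Hypothetical use when constructing a javac classpath:
my_classpath := $(call normalizepath,$(CURDIR)/classes)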
-
-###############################################################################
-# Java rules
-###############################################################################
-ifneq (,$(JAVAFILES)$(ANDROID_RESFILES)$(ANDROID_APKNAME)$(JAVA_JAR_TARGETS))
- include $(topsrcdir)/config/makefiles/java-build.mk
-endif
-
-###############################################################################
-# Bunch of things that extend the 'export' rule (in order):
-###############################################################################
-
-ifneq ($(XPI_NAME),)
-$(FINAL_TARGET):
- $(NSINSTALL) -D $@
-
-export:: $(FINAL_TARGET)
-endif
-
-################################################################################
-# Copy each element of PREF_JS_EXPORTS
-
-# The default location for PREF_JS_EXPORTS is the gre prefs directory.
-PREF_DIR = defaults/pref
-
-# If DIST_SUBDIR is defined it indicates that app and gre dirs are
-# different and that we are building app related resources. Hence,
-# PREF_DIR should point to the app prefs location.
-ifneq (,$(DIST_SUBDIR)$(XPI_NAME)$(LIBXUL_SDK))
-PREF_DIR = defaults/preferences
-endif
-
-# on win32, pref files need CRLF line endings... see bug 206029
-ifeq (WINNT,$(OS_ARCH))
-PREF_PPFLAGS += --line-endings=crlf
-endif
-
-ifneq ($(PREF_JS_EXPORTS),)
-ifndef NO_DIST_INSTALL
-PREF_JS_EXPORTS_PATH := $(FINAL_TARGET)/$(PREF_DIR)
-PREF_JS_EXPORTS_FLAGS := $(PREF_PPFLAGS)
-PP_TARGETS += PREF_JS_EXPORTS
-endif
-endif
-
-################################################################################
-# Copy each element of AUTOCFG_JS_EXPORTS to $(FINAL_TARGET)/defaults/autoconfig
-
-ifneq ($(AUTOCFG_JS_EXPORTS),)
-ifndef NO_DIST_INSTALL
-AUTOCFG_JS_EXPORTS_FILES := $(AUTOCFG_JS_EXPORTS)
-AUTOCFG_JS_EXPORTS_DEST := $(FINAL_TARGET)/defaults/autoconfig
-AUTOCFG_JS_EXPORTS_TARGET := export
-INSTALL_TARGETS += AUTOCFG_JS_EXPORTS
-endif
-endif
-
-################################################################################
-# Install a linked .xpt into the appropriate place.
-# This should ideally be performed by the non-recursive idl make file. Some day.
-ifdef XPT_NAME #{
-
-ifndef NO_DIST_INSTALL
-_XPT_NAME_FILES := $(DEPTH)/config/makefiles/xpidl/xpt/$(XPT_NAME)
-_XPT_NAME_DEST := $(FINAL_TARGET)/components
-INSTALL_TARGETS += _XPT_NAME
-
-ifndef NO_INTERFACES_MANIFEST
-libs:: $(call mkdir_deps,$(FINAL_TARGET)/components)
- $(call py_action,buildlist,$(FINAL_TARGET)/components/interfaces.manifest 'interfaces $(XPT_NAME)')
- $(call py_action,buildlist,$(FINAL_TARGET)/chrome.manifest 'manifest components/interfaces.manifest')
-endif
-endif
-
-endif #} XPT_NAME
-
-################################################################################
-# Copy each element of EXTRA_COMPONENTS to $(FINAL_TARGET)/components
-ifneq (,$(filter %.js,$(EXTRA_COMPONENTS) $(EXTRA_PP_COMPONENTS)))
-ifeq (,$(filter %.manifest,$(EXTRA_COMPONENTS) $(EXTRA_PP_COMPONENTS)))
-ifndef NO_JS_MANIFEST
-$(error .js component without matching .manifest. See https://developer.mozilla.org/en/XPCOM/XPCOM_changes_in_Gecko_2.0)
-endif
-endif
-endif
-
-ifdef EXTRA_COMPONENTS
-libs:: $(EXTRA_COMPONENTS)
-ifndef NO_DIST_INSTALL
-EXTRA_COMPONENTS_FILES := $(EXTRA_COMPONENTS)
-EXTRA_COMPONENTS_DEST := $(FINAL_TARGET)/components
-INSTALL_TARGETS += EXTRA_COMPONENTS
-endif
-
-endif
-
-ifdef EXTRA_PP_COMPONENTS
-ifndef NO_DIST_INSTALL
-EXTRA_PP_COMPONENTS_PATH := $(FINAL_TARGET)/components
-PP_TARGETS += EXTRA_PP_COMPONENTS
-endif
-endif
-
-EXTRA_MANIFESTS = $(filter %.manifest,$(EXTRA_COMPONENTS) $(EXTRA_PP_COMPONENTS))
-ifneq (,$(EXTRA_MANIFESTS))
-libs:: $(call mkdir_deps,$(FINAL_TARGET))
- $(call py_action,buildlist,$(FINAL_TARGET)/chrome.manifest $(patsubst %,'manifest components/%',$(notdir $(EXTRA_MANIFESTS))))
-endif
-
-################################################################################
-# Copy each element of EXTRA_JS_MODULES to
-# $(FINAL_TARGET)/$(JS_MODULES_PATH). JS_MODULES_PATH defaults to "modules"
-# if it is undefined.
-JS_MODULES_PATH ?= modules
-FINAL_JS_MODULES_PATH := $(FINAL_TARGET)/$(JS_MODULES_PATH)
-
-ifdef EXTRA_JS_MODULES
-ifndef NO_DIST_INSTALL
-EXTRA_JS_MODULES_FILES := $(EXTRA_JS_MODULES)
-EXTRA_JS_MODULES_DEST := $(FINAL_JS_MODULES_PATH)
-INSTALL_TARGETS += EXTRA_JS_MODULES
-endif
-endif
-
-ifdef EXTRA_PP_JS_MODULES
-ifndef NO_DIST_INSTALL
-EXTRA_PP_JS_MODULES_PATH := $(FINAL_JS_MODULES_PATH)
-PP_TARGETS += EXTRA_PP_JS_MODULES
-endif
-endif
-
-################################################################################
-# Copy testing-only JS modules to appropriate destination.
-#
-# For each file defined in TESTING_JS_MODULES, copy it to
-# objdir/_tests/modules/. If TESTING_JS_MODULE_DIR is defined, that path
-# will be appended to the output directory.
-
-ifdef ENABLE_TESTS
-ifdef TESTING_JS_MODULES
-testmodulesdir = $(DEPTH)/_tests/modules/$(TESTING_JS_MODULE_DIR)
-
-GENERATED_DIRS += $(testmodulesdir)
-
-ifndef NO_DIST_INSTALL
-TESTING_JS_MODULES_FILES := $(TESTING_JS_MODULES)
-TESTING_JS_MODULES_DEST := $(testmodulesdir)
-INSTALL_TARGETS += TESTING_JS_MODULES
-endif
-
-endif
-endif
-
-################################################################################
-# SDK
-
-ifneq (,$(SDK_LIBRARY))
-ifndef NO_DIST_INSTALL
-SDK_LIBRARY_FILES := $(SDK_LIBRARY)
-SDK_LIBRARY_DEST := $(SDK_LIB_DIR)
-INSTALL_TARGETS += SDK_LIBRARY
-endif
-endif # SDK_LIBRARY
-
-ifneq (,$(strip $(SDK_BINARY)))
-ifndef NO_DIST_INSTALL
-SDK_BINARY_FILES := $(SDK_BINARY)
-SDK_BINARY_DEST := $(SDK_BIN_DIR)
-INSTALL_TARGETS += SDK_BINARY
-endif
-endif # SDK_BINARY
-
-################################################################################
-# CHROME PACKAGING
-
-chrome::
- $(MAKE) realchrome
- $(LOOP_OVER_PARALLEL_DIRS)
- $(LOOP_OVER_DIRS)
- $(LOOP_OVER_TOOL_DIRS)
-
-$(FINAL_TARGET)/chrome: $(call mkdir_deps,$(FINAL_TARGET)/chrome)
-
-ifneq (,$(JAR_MANIFEST))
-ifndef NO_DIST_INSTALL
-
-ifdef XPI_NAME
-ifdef XPI_ROOT_APPID
-# For add-on packaging we may specify that an application
-# sub-dir should be added to the root chrome manifest with
-# a specific application id.
-MAKE_JARS_FLAGS += --root-manifest-entry-appid='$(XPI_ROOT_APPID)'
-endif
-
-# If DIST_SUBDIR is defined but XPI_ROOT_APPID is not, there's
-# no way langpacks will get packaged correctly, so error out.
-ifneq (,$(DIST_SUBDIR))
-ifndef XPI_ROOT_APPID
-$(error XPI_ROOT_APPID is not defined - langpacks will break.)
-endif
-endif
-endif
-
-libs realchrome:: $(FINAL_TARGET)/chrome
- $(call py_action,jar_maker,\
- $(QUIET) -j $(FINAL_TARGET)/chrome \
- $(MAKE_JARS_FLAGS) $(XULPPFLAGS) $(DEFINES) $(ACDEFINES) \
- $(JAR_MANIFEST))
-
-endif
-
-# This is a temporary check to ensure patches relying on the old behavior
-# of silently picking up jar.mn files continue to work.
-else # No JAR_MANIFEST
-ifneq (,$(wildcard $(srcdir)/jar.mn))
-$(error $(srcdir) contains a jar.mn file but this file is not declared in a JAR_MANIFESTS variable in a moz.build file)
-endif
-endif
-
-ifneq ($(DIST_FILES),)
-DIST_FILES_PATH := $(FINAL_TARGET)
-DIST_FILES_FLAGS := $(XULAPP_DEFINES)
-PP_TARGETS += DIST_FILES
-endif
-
-ifneq ($(DIST_CHROME_FILES),)
-DIST_CHROME_FILES_PATH := $(FINAL_TARGET)/chrome
-DIST_CHROME_FILES_FLAGS := $(XULAPP_DEFINES)
-PP_TARGETS += DIST_CHROME_FILES
-endif
-
-ifneq ($(XPI_PKGNAME),)
-tools realchrome::
-ifdef STRIP_XPI
-ifndef MOZ_DEBUG
- @echo 'Stripping $(XPI_PKGNAME) package directory...'
- @echo $(FINAL_TARGET)
- @cd $(FINAL_TARGET) && find . ! -type d \
- ! -name '*.js' \
- ! -name '*.xpt' \
- ! -name '*.gif' \
- ! -name '*.jpg' \
- ! -name '*.png' \
- ! -name '*.xpm' \
- ! -name '*.txt' \
- ! -name '*.rdf' \
- ! -name '*.sh' \
- ! -name '*.properties' \
- ! -name '*.dtd' \
- ! -name '*.html' \
- ! -name '*.xul' \
- ! -name '*.css' \
- ! -name '*.xml' \
- ! -name '*.jar' \
- ! -name '*.dat' \
- ! -name '*.tbl' \
- ! -name '*.src' \
- ! -name '*.reg' \
- $(PLATFORM_EXCLUDE_LIST) \
- -exec $(STRIP) $(STRIP_FLAGS) {} >/dev/null 2>&1 \;
-endif
-endif
- @echo 'Packaging $(XPI_PKGNAME).xpi...'
- cd $(FINAL_TARGET) && $(ZIP) -qr ../$(XPI_PKGNAME).xpi *
-endif
-
-ifdef INSTALL_EXTENSION_ID
-ifndef XPI_NAME
-$(error XPI_NAME must be set for INSTALL_EXTENSION_ID)
-endif
-
-tools::
- $(RM) -r '$(DIST)/bin$(DIST_SUBDIR:%=/%)/extensions/$(INSTALL_EXTENSION_ID)'
- $(NSINSTALL) -D '$(DIST)/bin$(DIST_SUBDIR:%=/%)/extensions/$(INSTALL_EXTENSION_ID)'
- $(call copy_dir,$(FINAL_TARGET),$(DIST)/bin$(DIST_SUBDIR:%=/%)/extensions/$(INSTALL_EXTENSION_ID))
-
-endif
-
-#############################################################################
-# MDDEPDIR is the subdirectory where all the dependency files are placed.
-# This uses a make rule (instead of a macro) to support parallel
-# builds (-jN). If this were done in the LOOP_OVER_DIRS macro, two
-# processes could simultaneously try to create the same directory.
-#
-# We use $(CURDIR) in the rule's target to ensure that we don't find
-# a dependency directory in the source tree via VPATH (perhaps from
-# a previous build in the source tree) and thus neglect to create a
-# dependency directory in the object directory, where we really need
-# it.
-
-ifneq (,$(filter-out all chrome default export realchrome clean clobber clobber_all distclean realclean,$(MAKECMDGOALS)))
-MDDEPEND_FILES := $(strip $(wildcard $(addprefix $(MDDEPDIR)/,$(EXTRA_MDDEPEND_FILES) $(addsuffix .pp,$(notdir $(sort $(OBJS) $(PROGOBJS) $(HOST_OBJS) $(HOST_PROGOBJS))) $(TARGETS)))))
-
-ifneq (,$(MDDEPEND_FILES))
-$(call include_deps,$(MDDEPEND_FILES))
-endif
-
-endif
-
-
-ifneq (,$(filter export,$(MAKECMDGOALS)))
-MDDEPEND_FILES := $(strip $(wildcard $(addprefix $(MDDEPDIR)/,$(EXTRA_EXPORT_MDDEPEND_FILES))))
-
-ifneq (,$(MDDEPEND_FILES))
-$(call include_deps,$(MDDEPEND_FILES))
-endif
-
-endif
-
-#############################################################################
-
--include $(topsrcdir)/$(MOZ_BUILD_APP)/app-rules.mk
--include $(MY_RULES)
-
-#
-# Generate Emacs tags in a file named TAGS if ETAGS was set in $(MY_CONFIG)
-# or in $(MY_RULES)
-#
-ifdef ETAGS
-ifneq ($(CSRCS)$(CPPSRCS)$(HEADERS),)
-all:: TAGS
-TAGS:: $(CSRCS) $(CPPSRCS) $(HEADERS)
- $(ETAGS) $(CSRCS) $(CPPSRCS) $(HEADERS)
-endif
-endif
-
-################################################################################
-# Install/copy rules
-#
-# The INSTALL_TARGETS variable contains a list of all install target
-# categories. Each category defines a list of files and executables, and an
-# install destination,
-#
-# FOO_FILES := foo bar
-# FOO_EXECUTABLES := baz
-# FOO_DEST := target_path
-# INSTALL_TARGETS += FOO
-#
-# Additionally, a FOO_TARGET variable may be added to indicate the target for
-# which the files and executables are installed. Default is "libs".
-#
-# Finally, a FOO_KEEP_PATH variable may be set to 1 to indicate the paths given
-# in FOO_FILES/FOO_EXECUTABLES are to be kept at the destination. That is,
-# if FOO_FILES is bar/baz/qux.h, and FOO_DEST is $(DIST)/include, the installed
-# file would be $(DIST)/include/bar/baz/qux.h instead of $(DIST)/include/qux.h
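To make the conventions above concrete, a hedged sketch (the category name and files are hypothetical):

# Install two headers under $(DIST)/include during the export tier,
# preserving their source-relative paths:
MY_HEADERS_FILES := public/foo.h public/nested/bar.h
MY_HEADERS_DEST := $(DIST)/include
MY_HEADERS_KEEP_PATH := 1
MY_HEADERS_TARGET := export
INSTALL_TARGETS += MY_HEADERS
# With KEEP_PATH set, the files land at $(DIST)/include/public/foo.h and
# $(DIST)/include/public/nested/bar.h rather than directly under include/.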
-
-# If we're using the binary nsinstall and it's not built yet, fall back to the Python nsinstall.
-ifneq (,$(filter $(CONFIG_TOOLS)/nsinstall$(HOST_BIN_SUFFIX),$(install_cmd)))
-ifeq (,$(wildcard $(CONFIG_TOOLS)/nsinstall$(HOST_BIN_SUFFIX)))
-nsinstall_is_usable = $(if $(wildcard $(CONFIG_TOOLS)/nsinstall$(HOST_BIN_SUFFIX)),yes)
-
-define install_cmd_override
-$(1): install_cmd = $$(if $$(nsinstall_is_usable),$$(INSTALL),$$(NSINSTALL_PY)) $$(1)
-endef
-endif
-endif
-
-install_target_tier = $(or $($(1)_TARGET),libs)
-INSTALL_TARGETS_TIERS := $(sort $(foreach category,$(INSTALL_TARGETS),$(call install_target_tier,$(category))))
-
-install_target_result = $($(1)_DEST:%/=%)/$(if $($(1)_KEEP_PATH),$(2),$(notdir $(2)))
-install_target_files = $(foreach file,$($(1)_FILES),$(call install_target_result,$(category),$(file)))
-install_target_executables = $(foreach file,$($(1)_EXECUTABLES),$(call install_target_result,$(category),$(file)))
-
-# Work around a GNU make 3.81 bug where it gives $< the wrong value.
-# See details in bug 934864.
-define create_dependency
-$(1): $(2)
-$(1): $(2)
-endef
-
-define install_target_template
-$(call install_cmd_override,$(2))
-$(call create_dependency,$(2),$(1))
-endef
-
-$(foreach category,$(INSTALL_TARGETS),\
- $(if $($(category)_DEST),,$(error Missing $(category)_DEST)) \
- $(foreach tier,$(call install_target_tier,$(category)),\
- $(eval INSTALL_TARGETS_FILES_$(tier) += $(call install_target_files,$(category))) \
- $(eval INSTALL_TARGETS_EXECUTABLES_$(tier) += $(call install_target_executables,$(category))) \
- ) \
- $(foreach file,$($(category)_FILES) $($(category)_EXECUTABLES), \
- $(eval $(call install_target_template,$(file),$(call install_target_result,$(category),$(file)))) \
- ) \
-)
-
-$(foreach tier,$(INSTALL_TARGETS_TIERS), \
- $(eval $(tier):: $(INSTALL_TARGETS_FILES_$(tier)) $(INSTALL_TARGETS_EXECUTABLES_$(tier))) \
-)
-
-install_targets_sanity = $(if $(filter-out $(notdir $@),$(notdir $(<))),$(error Looks like $@ has an unexpected dependency on $< which breaks INSTALL_TARGETS))
-
-$(sort $(foreach tier,$(INSTALL_TARGETS_TIERS),$(INSTALL_TARGETS_FILES_$(tier)))):
- $(install_targets_sanity)
- $(call install_cmd,$(IFLAGS1) '$<' '$(@D)')
-
-$(sort $(foreach tier,$(INSTALL_TARGETS_TIERS),$(INSTALL_TARGETS_EXECUTABLES_$(tier)))):
- $(install_targets_sanity)
- $(call install_cmd,$(IFLAGS2) '$<' '$(@D)')
-
-################################################################################
-# Preprocessing rules
-#
-# The PP_TARGETS variable contains a list of all preprocessing target
-# categories. Each category has associated variables listing input files, the
-# output directory, extra preprocessor flags, and so on. For example:
-#
-# FOO := input-file
-# FOO_PATH := target-directory
-# FOO_FLAGS := -Dsome_flag
-# PP_TARGETS += FOO
-#
-# If PP_TARGETS lists a category name <C> (like FOO, above), then we consult the
-# following make variables to see what to do:
-#
-# - <C> lists input files to be preprocessed with mozbuild.action.preprocessor.
-#   We search VPATH for the names given here. If an input file name ends in
-#   '.in', that suffix is omitted from the output file name.
-#
-# - <C>_PATH names the directory in which to place the preprocessed output
-#   files. We create this directory if it does not already exist. Setting
-#   this variable is optional; if unset, we install the files in $(CURDIR).
-#
-# - <C>_FLAGS lists flags to pass to mozbuild.action.preprocessor, in addition
-#   to the usual bunch. Setting this variable is optional.
-#
-# - <C>_TARGET names the 'make' target that should depend on creating the output
-#   files. Setting this variable is optional; if unset, we preprocess the
-#   files for the 'libs' target.
-#
-# - <C>_KEEP_PATH may be set to 1 to indicate the paths given in <C> are to be
-#   kept under <C>_PATH. That is, if <C> is bar/baz/qux.h.in and <C>_PATH is
-#   $(DIST)/include, the preprocessed file would be $(DIST)/include/bar/baz/qux.h
-#   instead of $(DIST)/include/qux.h.
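A brief sketch tying the pieces together (the category name and flag are hypothetical):

# Preprocess prefs.js.in into $(FINAL_TARGET)/defaults/pref/prefs.js
# (the .in suffix is dropped) during the default libs tier:
MY_PREFS := prefs.js.in
MY_PREFS_PATH := $(FINAL_TARGET)/defaults/pref
MY_PREFS_FLAGS := -DMY_FLAG
PP_TARGETS += MY_PREFS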
-
-pp_target_tier = $(or $($(1)_TARGET),libs)
-PP_TARGETS_TIERS := $(sort $(foreach category,$(PP_TARGETS),$(call pp_target_tier,$(category))))
-
-pp_target_result = $(or $($(1)_PATH:%/=%),$(CURDIR))/$(if $($(1)_KEEP_PATH),$(2:.in=),$(notdir $(2:.in=)))
-pp_target_results = $(foreach file,$($(1)),$(call pp_target_result,$(category),$(file)))
-
-$(foreach category,$(PP_TARGETS), \
- $(foreach tier,$(call pp_target_tier,$(category)), \
- $(eval PP_TARGETS_RESULTS_$(tier) += $(call pp_target_results,$(category))) \
- ) \
- $(foreach file,$($(category)), \
- $(eval $(call create_dependency,$(call pp_target_result,$(category),$(file)), \
- $(file) $(GLOBAL_DEPS))) \
- ) \
- $(eval $(call pp_target_results,$(category)): PP_TARGET_FLAGS=$($(category)_FLAGS)) \
-)
-
-$(foreach tier,$(PP_TARGETS_TIERS), \
- $(eval $(tier):: $(PP_TARGETS_RESULTS_$(tier))) \
-)
-
-PP_TARGETS_ALL_RESULTS := $(sort $(foreach tier,$(PP_TARGETS_TIERS),$(PP_TARGETS_RESULTS_$(tier))))
-$(PP_TARGETS_ALL_RESULTS):
- $(if $(filter-out $(notdir $@),$(notdir $(<:.in=))),$(error Looks like $@ has an unexpected dependency on $< which breaks PP_TARGETS))
- $(RM) '$@'
- $(call py_action,preprocessor,--depend $(MDDEPDIR)/$(@F).pp $(PP_TARGET_FLAGS) $(DEFINES) $(ACDEFINES) $(XULPPFLAGS) '$<' -o '$@')
-
-# The depfile is based on the filename, and we don't want conflicts. So check
-# there's only one occurrence of any given filename in PP_TARGETS_ALL_RESULTS.
-PP_TARGETS_ALL_RESULT_NAMES := $(notdir $(PP_TARGETS_ALL_RESULTS))
-$(foreach file,$(sort $(PP_TARGETS_ALL_RESULT_NAMES)), \
- $(if $(filter-out 1,$(words $(filter $(file),$(PP_TARGETS_ALL_RESULT_NAMES)))), \
- $(error Multiple preprocessing rules are creating a $(file) file) \
- ) \
-)
-
-ifneq (,$(filter $(PP_TARGETS_TIERS) $(PP_TARGETS_ALL_RESULTS),$(MAKECMDGOALS)))
-# If the depfile for a preprocessed file doesn't exist, add a dep to force
-# re-preprocessing.
-$(foreach file,$(PP_TARGETS_ALL_RESULTS), \
- $(if $(wildcard $(MDDEPDIR)/$(notdir $(file)).pp), \
- , \
- $(eval $(file): FORCE) \
- ) \
-)
-
-MDDEPEND_FILES := $(strip $(wildcard $(addprefix $(MDDEPDIR)/,$(addsuffix .pp,$(notdir $(PP_TARGETS_ALL_RESULTS))))))
-
-ifneq (,$(MDDEPEND_FILES))
-$(call include_deps,$(MDDEPEND_FILES))
-endif
-
-endif
-
-# Pull in non-recursive targets if this is a partial tree build.
-ifndef TOPLEVEL_BUILD
-include $(topsrcdir)/config/makefiles/nonrecursive.mk
-endif
-
-################################################################################
-# Special gmake rules.
-################################################################################
-
-
-#
-# Re-define the list of default suffixes, so gmake won't have to churn through
-# hundreds of built-in suffix rules for stuff we don't need.
-#
-.SUFFIXES:
-
-#
-# Fake targets. Always run these rules, even if a file/directory with that
-# name already exists.
-#
-.PHONY: all alltags boot checkout chrome realchrome clean clobber clobber_all export install libs makefiles realclean run_apprunner tools $(DIRS) $(TOOL_DIRS) FORCE
-
-# Used as a dependency to force targets to rebuild
-FORCE:
-
-# Delete target if error occurs when building target
-.DELETE_ON_ERROR:
-
-tags: TAGS
-
-TAGS: $(CSRCS) $(CPPSRCS) $(wildcard *.h)
- -etags $(CSRCS) $(CPPSRCS) $(wildcard *.h)
- $(LOOP_OVER_PARALLEL_DIRS)
- $(LOOP_OVER_DIRS)
-
-ifndef INCLUDED_DEBUGMAKE_MK #{
- ## Only parse when an echo* or show* target is requested
- ifneq (,$(call isTargetStem,echo,show))
- include $(topsrcdir)/config/makefiles/debugmake.mk
- endif #}
-endif #}
-
-documentation:
- @cd $(DEPTH)
- $(DOXYGEN) $(DEPTH)/config/doxygen.cfg
-
-ifdef ENABLE_TESTS
-check::
- $(LOOP_OVER_PARALLEL_DIRS)
- $(LOOP_OVER_DIRS)
- $(LOOP_OVER_TOOL_DIRS)
-endif
-
-
-FREEZE_VARIABLES = \
- CSRCS \
- CPPSRCS \
- EXPORTS \
- DIRS \
- LIBRARY \
- MODULE \
- TIERS \
- EXTRA_COMPONENTS \
- EXTRA_PP_COMPONENTS \
- MOCHITEST_FILES \
- MOCHITEST_CHROME_FILES \
- MOCHITEST_BROWSER_FILES \
- MOCHITEST_A11Y_FILES \
- MOCHITEST_METRO_FILES \
- MOCHITEST_ROBOCOP_FILES \
- $(NULL)
-
-$(foreach var,$(FREEZE_VARIABLES),$(eval $(var)_FROZEN := '$($(var))'))
-
-CHECK_FROZEN_VARIABLES = $(foreach var,$(FREEZE_VARIABLES), \
- $(if $(subst $($(var)_FROZEN),,'$($(var))'),$(error Makefile variable '$(var)' changed value after including rules.mk. Was $($(var)_FROZEN), now $($(var)).)))
-
-libs export::
- $(CHECK_FROZEN_VARIABLES)
-
-PURGECACHES_DIRS ?= $(DIST)/bin
-ifdef MOZ_WEBAPP_RUNTIME
-PURGECACHES_DIRS += $(DIST)/bin/webapprt
-endif
-
-PURGECACHES_FILES = $(addsuffix /.purgecaches,$(PURGECACHES_DIRS))
-
-default all:: $(PURGECACHES_FILES)
-
-$(PURGECACHES_FILES):
- if test -d $(@D) ; then touch $@ ; fi
-
-.DEFAULT_GOAL := $(or $(OVERRIDE_DEFAULT_GOAL),default)
-
-#############################################################################
-# Derived targets and dependencies
-
-include $(topsrcdir)/config/makefiles/autotargets.mk
-ifneq ($(NULL),$(AUTO_DEPS))
- default all libs tools export:: $(AUTO_DEPS)
-endif
-
-export:: $(GENERATED_FILES)
-
-GARBAGE += $(GENERATED_FILES)
diff --git a/js/src/config/solaris_ia32.map b/js/src/config/solaris_ia32.map
deleted file mode 100644
index be6ea3736e7..00000000000
--- a/js/src/config/solaris_ia32.map
+++ /dev/null
@@ -1,5 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-hwcap_1 = OVERRIDE;
diff --git a/js/src/config/static-checking-config.mk b/js/src/config/static-checking-config.mk
deleted file mode 100644
index 8e6a5981868..00000000000
--- a/js/src/config/static-checking-config.mk
+++ /dev/null
@@ -1,36 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-DEHYDRA_SCRIPT = $(topsrcdir)/config/static-checking.js
-
-DEHYDRA_MODULES = \
- $(NULL)
-
-TREEHYDRA_MODULES = \
- $(topsrcdir)/jsstack.js \
- $(NULL)
-
-DEHYDRA_ARG_PREFIX=-fplugin-arg-gcc_treehydra-
-
-DEHYDRA_ARGS = \
- $(DEHYDRA_ARG_PREFIX)script=$(DEHYDRA_SCRIPT) \
- $(DEHYDRA_ARG_PREFIX)topsrcdir=$(topsrcdir) \
- $(DEHYDRA_ARG_PREFIX)objdir=$(DEPTH) \
- $(DEHYDRA_ARG_PREFIX)dehydra-modules=$(subst $(NULL) ,$(COMMA),$(strip $(DEHYDRA_MODULES))) \
- $(DEHYDRA_ARG_PREFIX)treehydra-modules=$(subst $(NULL) ,$(COMMA),$(strip $(TREEHYDRA_MODULES))) \
- $(NULL)
-
-DEHYDRA_FLAGS = -fplugin=$(DEHYDRA_PATH) $(DEHYDRA_ARGS)
-
-ifdef DEHYDRA_PATH
-OS_CXXFLAGS += $(DEHYDRA_FLAGS)
-endif
-
-ifdef ENABLE_CLANG_PLUGIN
-# Load the clang plugin from the mozilla topsrcdir. This implies that the clang
-# plugin is only usable if we're building js/src under mozilla/, though.
-CLANG_PLUGIN := $(DEPTH)/../../build/clang-plugin/$(DLL_PREFIX)clang-plugin$(DLL_SUFFIX)
-OS_CXXFLAGS += -Xclang -load -Xclang $(CLANG_PLUGIN) -Xclang -add-plugin -Xclang moz-check
-OS_CFLAGS += -Xclang -load -Xclang $(CLANG_PLUGIN) -Xclang -add-plugin -Xclang moz-check
-endif
diff --git a/js/src/config/static-checking.js b/js/src/config/static-checking.js
deleted file mode 100644
index 9e59741ba11..00000000000
--- a/js/src/config/static-checking.js
+++ /dev/null
@@ -1,147 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-/**
- * A script for GCC-dehydra to analyze the Mozilla codebase and catch
- * patterns that are incorrect, but which cannot be detected by a compiler. */
-
-/**
- * Activate Treehydra outparams analysis if running in Treehydra.
- */
-
-function treehydra_enabled() {
- return this.hasOwnProperty('TREE_CODE');
-}
-
-sys.include_path.push(options.topsrcdir);
-
-include('string-format.js');
-
-let modules = [];
-
-function LoadModules(modulelist)
-{
- if (modulelist == "")
- return;
-
- let modulenames = modulelist.split(',');
- for each (let modulename in modulenames) {
- let module = { __proto__: this };
- include(modulename, module);
- modules.push(module);
- }
-}
-
-LoadModules(options['dehydra-modules']);
-if (treehydra_enabled())
- LoadModules(options['treehydra-modules']);
-
-function process_type(c)
-{
- for each (let module in modules)
- if (module.hasOwnProperty('process_type'))
- module.process_type(c);
-}
-
-function hasAttribute(c, attrname)
-{
- var attr;
-
- if (c.attributes === undefined)
- return false;
-
- for each (attr in c.attributes)
- if (attr.name == 'user' && attr.value[0] == attrname)
- return true;
-
- return false;
-}
-
-// This is useful for detecting method overrides
-function signaturesMatch(m1, m2)
-{
- if (m1.shortName != m2.shortName)
- return false;
-
- if ((!!m1.isVirtual) != (!!m2.isVirtual))
- return false;
-
- if (m1.isStatic != m2.isStatic)
- return false;
-
- let p1 = m1.type.parameters;
- let p2 = m2.type.parameters;
-
- if (p1.length != p2.length)
- return false;
-
- for (let i = 0; i < p1.length; ++i)
- if (!params_match(p1[i], p2[i]))
- return false;
-
- return true;
-}
-
-function params_match(p1, p2)
-{
- [p1, p2] = unwrap_types(p1, p2);
-
- for (let i in p1)
- if (i == "type" && !types_match(p1.type, p2.type))
- return false;
- else if (i != "type" && p1[i] !== p2[i])
- return false;
-
- for (let i in p2)
- if (!(i in p1))
- return false;
-
- return true;
-}
-
-function types_match(t1, t2)
-{
- if (!t1 || !t2)
- return false;
-
- [t1, t2] = unwrap_types(t1, t2);
-
- return t1 === t2;
-}
-
-function unwrap_types(t1, t2)
-{
- while (t1.variantOf)
- t1 = t1.variantOf;
-
- while (t2.variantOf)
- t2 = t2.variantOf;
-
- return [t1, t2];
-}
-
-const forward_functions = [
- 'process_type',
- 'process_tree_type',
- 'process_decl',
- 'process_tree_decl',
- 'process_function',
- 'process_tree',
- 'process_cp_pre_genericize',
- 'input_end'
-];
-
-function setup_forwarding(n)
-{
- this[n] = function() {
- for each (let module in modules) {
- if (module.hasOwnProperty(n)) {
- module[n].apply(this, arguments);
- }
- }
- }
-}
-
-for each (let n in forward_functions)
- setup_forwarding(n);
diff --git a/js/src/config/string-format.js b/js/src/config/string-format.js
deleted file mode 100644
index 7319eb85989..00000000000
--- a/js/src/config/string-format.js
+++ /dev/null
@@ -1,65 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-String.prototype.format = function string_format() {
- // there are two modes of operation... unnamed indices are read in order;
- // named indices using %(name)s. The two styles cannot be mixed.
- // Unnamed indices can be passed as either a single argument to this function,
- // multiple arguments to this function, or as a single array argument
- let curindex = 0;
- let d;
-
- if (arguments.length > 1) {
- d = arguments;
- }
- else
- d = arguments[0];
-
- function r(s, key, type) {
- if (type == '%')
- return '%';
-
- let v;
- if (key == "") {
- if (curindex == -1)
- throw Error("Cannot mix named and positional indices in string formatting.");
-
- if (curindex == 0 && (!(d instanceof Object) || !(0 in d))) {
- v = d;
- }
- else if (!(curindex in d))
- throw Error("Insufficient number of items in format, requesting item %i".format(curindex));
- else {
- v = d[curindex];
- }
-
- ++curindex;
- }
- else {
- key = key.slice(1, -1);
- if (curindex > 0)
- throw Error("Cannot mix named and positional indices in string formatting.");
- curindex = -1;
-
- if (!(key in d))
- throw Error("Key '%s' not present during string substitution.".format(key));
- v = d[key];
- }
- switch (type) {
- case "s":
- if (v === undefined)
- return "";
- return v.toString();
- case "r":
- return uneval(v);
- case "i":
- return parseInt(v);
- case "f":
- return Number(v);
- default:
- throw Error("Unexpected format character '%s'.".format(type));
- }
- }
- return this.replace(/%(\([^)]+\))?(.)/g, r);
-};
diff --git a/js/src/config/system-headers b/js/src/config/system-headers
deleted file mode 100644
index 8803150a755..00000000000
--- a/js/src/config/system-headers
+++ /dev/null
@@ -1,1147 +0,0 @@
-nspr.h
-plarena.h
-plarenas.h
-plbase64.h
-plerror.h
-plgetopt.h
-plhash.h
-plstr.h
-pratom.h
-prbit.h
-prclist.h
-prcmon.h
-prcountr.h
-prcpucfg.h
-prcvar.h
-prdtoa.h
-prenv.h
-prerr.h
-prerror.h
-prinet.h
-prinit.h
-prinrval.h
-prio.h
-pripcsem.h
-private
-prlink.h
-prlock.h
-prlog.h
-prlong.h
-prmem.h
-prmon.h
-prmwait.h
-prnetdb.h
-prolock.h
-prpdce.h
-prprf.h
-prproces.h
-prrng.h
-prrwlock.h
-prshm.h
-prshma.h
-prsystem.h
-prthread.h
-prtime.h
-prtpool.h
-prtrace.h
-prtypes.h
-prvrsion.h
-prwin16.h
-base64.h
-blapit.h
-cert.h
-certdb.h
-certt.h
-ciferfam.h
-cmmf.h
-cmmft.h
-cms.h
-cmsreclist.h
-cmst.h
-crmf.h
-crmft.h
-cryptohi.h
-cryptoht.h
-ecl-exp.h
-hasht.h
-jar-ds.h
-jar.h
-jarfile.h
-key.h
-keyhi.h
-keyt.h
-keythi.h
-nss.h
-nssb64.h
-nssb64t.h
-nssbase.h
-nssbaset.h
-nssck.api
-nssckbi.h
-nssckepv.h
-nssckft.h
-nssckfw.h
-nssckfwc.h
-nssckfwt.h
-nssckg.h
-nssckmdt.h
-nssckt.h
-nssilckt.h
-nssilock.h
-nsslocks.h
-nssrwlk.h
-nssrwlkt.h
-nssutil.h
-ocsp.h
-ocspt.h
-p12.h
-p12plcy.h
-p12t.h
-pk11func.h
-pk11pqg.h
-pk11priv.h
-pk11pub.h
-pk11sdr.h
-pkcs11.h
-pkcs11f.h
-pkcs11n.h
-pkcs11p.h
-pkcs11t.h
-pkcs11u.h
-pkcs12.h
-pkcs12t.h
-pkcs7t.h
-portreg.h
-preenc.h
-secasn1.h
-secasn1t.h
-seccomon.h
-secder.h
-secdert.h
-secdig.h
-secdigt.h
-secerr.h
-sechash.h
-secitem.h
-secmime.h
-secmod.h
-secmodt.h
-secoid.h
-secoidt.h
-secpkcs5.h
-secpkcs7.h
-secport.h
-shsign.h
-smime.h
-ssl.h
-sslerr.h
-sslproto.h
-sslt.h
-utilmodt.h
-utilpars.h
-utilparst.h
-utilrename.h
-A4Stuff.h
-activscp.h
-AEDataModel.h
-AEObjects.h
-AEPackObject.h
-AERegistry.h
-AEUtils.h
-afxcmn.h
-afxcoll.h
-afxcview.h
-afxdisp.h
-afxdtctl.h
-afxext.h
-afxmt.h
-afxpriv.h
-afxtempl.h
-afxwin.h
-algorithm
-Aliases.h
-all.h
-alloca.h
-alloc.h
-alsa/asoundlib.h
-android/log.h
-ansi_parms.h
-a.out.h
-app/Cursor.h
-Appearance.h
-AppFileInfo.h
-AppKit.h
-AppleEvents.h
-Application.h
-app/Message.h
-app/MessageRunner.h
-arpa/inet.h
-arpa/nameser.h
-asm/sigcontext.h
-asm/signal.h
-ASRegistry.h
-assert.h
-atk/atk.h
-atlcom.h
-atlconv.h
-atlctl.cpp
-atlctl.h
-ATLCTL.H
-atlhost.h
-atlimpl.cpp
-atlwin.cpp
-ATSTypes.h
-ATSUnicode.h
-Balloons.h
-base/pblock.h
-base/PCR_Base.h
-base/session.h
-basetyps.h
-be/app/Application.h
-Beep.h
-be/kernel/image.h
-be/kernel/OS.h
-bfd.h
-Bitmap.h
-bitset
-blapi.h
-bsd/libc.h
-bsd/syscall.h
-bstring.h
-builtin.h
-Button.h
-byteswap.h
-#if MOZ_TREE_CAIRO!=1
-#define WRAP_CAIRO_HEADERS
-#endif
-#ifdef WRAP_CAIRO_HEADERS
-pixman.h
-cairo.h
-cairo-atsui.h
-cairo-beos.h
-cairo-ft.h
-cairo-glitz.h
-cairo-os2.h
-cairo-pdf.h
-cairo-ps.h
-cairo-tee.h
-cairo-quartz.h
-cairo-win32.h
-cairo-xlib.h
-cairo-xlib-xrender.h
-cairo-directfb.h
-cairo-qpainter.h
-#endif
-dfiff.h
-exception
-ffi.h
-fusion/reactor.h
-fusion/property.h
-fusion/conf.h
-fusion/build.h
-fusion/hash.h
-fusion/shm/shm.h
-fusion/shm/shm_internal.h
-fusion/shm/pool.h
-fusion/ref.h
-fusion/fusion_internal.h
-fusion/lock.h
-fusion/types.h
-fusion/vector.h
-fusion/call.h
-fusion/shmalloc.h
-fusion/protocol.h
-fusion/fusion.h
-fusion/arena.h
-fusion/object.h
-dgiff.h
-direct/util.h
-direct/memcpy.h
-direct/interface.h
-direct/conf.h
-direct/tree.h
-direct/signals.h
-direct/build.h
-direct/interface_implementation.h
-direct/utf8.h
-direct/serial.h
-direct/hash.h
-direct/direct.h
-direct/clock.h
-direct/types.h
-direct/mem.h
-direct/thread.h
-direct/debug.h
-direct/stream.h
-direct/messages.h
-direct/trace.h
-direct/modules.h
-direct/log.h
-direct/system.h
-direct/list.h
-dfb_types.h
-directfb_strings.h
-directfb_keyboard.h
-callconv.h
-Carbon/Carbon.h
-CarbonEvents.h
-Carbon.h
-cassert
-c_asm.h
-cctype
-cderr.h
-cerrno
-CFBase.h
-CFBundle.h
-CFData.h
-CFDictionary.h
-cf.h
-CFNumber.h
-CFPlugIn.h
-CFPreferences.h
-CFString.h
-CFURL.h
-CGAffineTransform.h
-CheckBox.h
-climits
-Clipboard.h
-cmplrs/stsupport.h
-Cocoa/Cocoa.h
-CodeFragments.h
-comdef.h
-commctrl.h
-COMMCTRL.H
-commdlg.h
-compat.h
-condapi.h
-ConditionalMacros.h
-config.h
-conio.h
-console.h
-ControlDefinitions.h
-Controls.h
-CoreFoundation/CoreFoundation.h
-CoreServices/CoreServices.h
-CPalmRec.cpp
-Cpalmrec.h
-CPCatgry.cpp
-CPDbBMgr.h
-CPString.cpp
-CPString.h
-crtdbg.h
-crt_externs.h
-crypt.h
-cstddef
-cstdio
-cstdlib
-cstring
-ctime
-ctype.h
-curl/curl.h
-curl/easy.h
-curses.h
-cxxabi.h
-DateTimeUtils.h
-dbus/dbus.h
-dbus/dbus-glib.h
-dbus/dbus-glib-lowlevel.h
-ddeml.h
-Debug.h
-deque
-dem.h
-descrip.h
-Devices.h
-Dialogs.h
-direct.h
-dirent.h
-DiskInit.h
-dlfcn.h
-dlgs.h
-dl.h
-docobj.h
-dos/dosextens.h
-dos.h
-Drag.h
-DriverServices.h
-DriverSynchronization.h
-DropInPanel.h
-dvidef.h
-elf.h
-endian.h
-Entry.h
-errno.h
-Errors.h
-Events.h
-exdisp.h
-ExDisp.h
-exe386.h
-execinfo.h
-extras.h
-fabdef.h
-fcntl.h
-features.h
-fibdef.h
-File.h
-filehdr.h
-files.h
-Files.h
-FindDirectory.h
-Finder.h
-FinderRegistry.h
-FixMath.h
-float.h
-fnmatch.h
-Folders.h
-fontconfig/fontconfig.h
-fontconfig/fcfreetype.h
-Font.h
-Fonts.h
-fp.h
-fpieee.h
-frame/log.h
-frame/req.h
-freetype/freetype.h
-freetype/ftcache.h
-freetype/ftglyph.h
-freetype/ftsynth.h
-freetype/ftoutln.h
-freetype/ttnameid.h
-freetype/tttables.h
-freetype/t1tables.h
-freetype/ftlcdfil.h
-freetype/ftsizes.h
-freetype/ftadvanc.h
-freetype/ftbitmap.h
-freetype/ftxf86.h
-freetype.h
-ftcache.h
-ftglyph.h
-ftsynth.h
-ftoutln.h
-ttnameid.h
-tttables.h
-t1tables.h
-ftlcdfil.h
-ftsizes.h
-ftadvanc.h
-ftbitmap.h
-ftxf86.h
-fribidi/fribidi.h
-FSp_fopen.h
-fstream
-fstream.h
-ft2build.h
-fts.h
-gconf/gconf-client.h
-Gdiplus.h
-gdk/gdk.h
-gdk/gdkkeysyms.h
-gdk/gdkprivate.h
-gdk/gdkx.h
-gdk/gdkdirectfb.h
-gdk-pixbuf/gdk-pixbuf.h
-Gestalt.h
-getopt.h
-glibconfig.h
-glib.h
-glib-object.h
-gmodule.h
-gnome.h
-gnu/libc-version.h
-grp.h
-gssapi_generic.h
-gssapi/gssapi_generic.h
-gssapi/gssapi.h
-gssapi.h
-gtk/gtk.h
-gtk/gtkx.h
-gtk/gtkprinter.h
-gtk/gtkprintjob.h
-gtk/gtkprintunixdialog.h
-HIToolbox/HIToolbox.h
-hlink.h
-ia64/sys/inline.h
-Icons.h
-iconv.h
-ieeefp.h
-ifaddrs.h
-image.h
-imagehlp.h
-imm.h
-initguid.h
-InterfaceDefs.h
-InternetConfig.h
-IntlResources.h
-ints.h
-intshcut.h
-inttypes.h
-iodef.h
-io.h
-IOKit/IOKitLib.h
-IOKit/IOMessage.h
-IOKit/pwr_mgt/IOPMLib.h
-iomanip
-ios
-iosfwd
-iostream
-iostream.h
-iterator
-JavaControl.h
-JavaEmbedding/JavaControl.h
-JavaVM/jni.h
-JManager.h
-JNIEnvTests.h
-jni.h
-#if MOZ_NATIVE_JPEG==1
-jpeglib.h
-#endif
-JVMManagerTests.h
-Kerberos/Kerberos.h
-kernel/image.h
-kernel/OS.h
-LAction.h
-langinfo.h
-LApplication.h
-LArray.h
-LArrayIterator.h
-LAttachable.h
-LAttachment.h
-LaunchServices.h
-lber.h
-LBroadcaster.h
-LButton.h
-lcache.h
-LCaption.h
-LCheckBox.h
-LCicnButton.h
-LClipboard.h
-LCommander.h
-LComparator.h
-LControl.h
-ldap.h
-ldaplog.h
-ldappr.h
-ldap_ssl.h
-LDataStream.h
-ldfcn.h
-LDialogBox.h
-ldif.h
-LDocApplication.h
-LDocument.h
-LDragAndDrop.h
-LDragTask.h
-LEditField.h
-LEditText.h
-LEventDispatcher.h
-LFile.h
-LFileStream.h
-LFileTypeList.h
-LFocusBox.h
-LGrafPortView.h
-LHandleStream.h
-libc_r.h
-libelf.h
-libelf/libelf.h
-libgen.h
-libgnome/gnome-url.h
-libgnome/libgnome.h
-libgnomeui/gnome-icon-lookup.h
-libgnomeui/gnome-icon-theme.h
-libgnomeui/gnome-ui-init.h
-libgnomevfs/gnome-vfs-file-info.h
-libgnomevfs/gnome-vfs.h
-libgnomevfs/gnome-vfs-init.h
-libgnomevfs/gnome-vfs-mime.h
-libgnomevfs/gnome-vfs-mime-handlers.h
-libgnomevfs/gnome-vfs-mime-utils.h
-libgnomevfs/gnome-vfs-ops.h
-libgnomevfs/gnome-vfs-standard-callbacks.h
-lib$routines.h
-limits
-limits.h
-link.h
-linux/kernel.h
-linux/limits.h
-linux/rtc.h
-linux/version.h
-list
-List.h
-Lists.h
-LListBox.h
-LListener.h
-LMenuBar.h
-LMenu.h
-LModelDirector.h
-LModelObject.h
-LModelProperty.h
-loader.h
-locale
-locale.h
-LOffscreenView.h
-logkeys.h
-logstrng.h
-Looper.h
-LowMem.h
-LPane.h
-LPeriodical.h
-LPicture.h
-LPlaceHolder.h
-LPrintout.h
-LProgressBar.h
-LPushButton.h
-LRadioGroup.h
-LRadioGroupView.h
-LRunArray.h
-LScroller.h
-LSharable.h
-LSingleDoc.h
-LStaticText.h
-LStdControl.h
-LStream.h
-LString.h
-LTabGroup.h
-LTabGroupView.h
-LTableArrayStorage.h
-LTableMonoGeometry.h
-LTableSingleSelector.h
-LTableView.h
-LTextEditView.h
-LTextTableView.h
-LUndoer.h
-LVariableArray.h
-LView.h
-LWindow.h
-m68881.h
-MacErrors.h
-MacHeadersCarbon.h
-machine/ansi.h
-machine/builtins.h
-machine/clock.h
-machine/endian.h
-machine/frame.h
-machine/inline.h
-machine/limits.h
-machine/signal.h
-machine/trap.h
-mach/mach_host.h
-mach/mach_init.h
-mach/mach_interface.h
-mach/mach_port.h
-mach-o/dyld.h
-MacLocales.h
-MacMemory.h
-MacTCP.h
-MacTypes.h
-MacWindows.h
-malloc.h
-malloc_np.h
-map
-mapicode.h
-mapidefs.h
-mapiguid.h
-mapi.h
-mapitags.h
-mapiutil.h
-mapix.h
-Math64.h
-math.h
-mbstring.h
-mem.h
-memory
-memory.h
-Memory.h
-MenuBar.h
-Menu.h
-Menus.h
-Message.h
-Mime.h
-MixedMode.h
-mlang.h
-mmsystem.h
-model.h
-Movies.h
-mpw/errno.h
-mshtmhst.h
-mshtml.h
-mswsock.h
-Multiprocessing.h
-mutex.h
-Navigation.h
-ncompat.h
-ncurses.h
-netCore.h
-netdb.h
-net/if.h
-netinet/in.h
-netinet/in_systm.h
-netinet/tcp.h
-new
-newexe.h
-new.h
-nl_types.h
-NodeInfo.h
-nsswitch.h
-objbase.h
-objidl.h
-Objsafe.h
-ojiapitests.h
-ole2.h
-oleidl.h
-OpenGL/OpenGL.h
-OpenTptInternet.h
-OpenTransport.h
-os2.h
-OS.h
-osreldate.h
-ostream
-OSUtils.h
-Packages.h
-Palettes.h
-PALM_CMN.H
-pango/pango-modules.h
-pango/pangocairo.h
-pango/pangofc-decoder.h
-pango/pangofc-font.h
-pango/pangofc-fontmap.h
-pango/pango-break.h
-pango/pango-fontmap.h
-pango/pango.h
-pango/pangoxft.h
-pango/pango-utils.h
-pascal.h
-Patches.h
-Path.h
-pcfs/pc_dir.h
-Pgenerr.h
-PGenErr.h
-Ph.h
-PLStringFuncs.h
-PMApplication.h
-pmddim.h
-poll.h
-Polygon.h
-portable.h
-Power.h
-PP_ClassHeaders.cp
-PP_Constants.h
-PPCToolbox.h
-PP_DebugHeaders.cp
-PP_KeyCodes.h
-PP_Macros.h
-PP_Messages.h
-PP_Prefix.h
-PP_Resources.h
-PP_Types.h
-Printing.h
-Print/PMPrintingDialogExtensions.h
-private/qucomextra_p.h
-Processes.h
-process.h
-Process.h
-proto/dos.h
-proto/exec.h
-psap.h
-Pt.h
-pthread.h
-pthread_np.h
-pulse/pulseaudio.h
-pwd.h
-Python.h
-QDOffscreen.h
-queue
-Quickdraw.h
-QuickDraw.h
-QuickTimeComponents.h
-quipu/attr.h
-rasdlg.h
-raserror.h
-ras.h
-regex.h
-Region.h
-resolv.h
-Resources.h
-Retrace.h
-rld_interface.h
-rmsdef.h
-Roster.h
-rpc.h
-rpcproxy.h
-rpc/types.h
-sane/sane.h
-sane/sanei.h
-sane/saneopts.h
-Scrap.h
-Screen.h
-Script.h
-ScrollBar.h
-sec.h
-secrng.h
-security.h
-secutil.h
-semaphore.h
-servprov.h
-set
-setjmp.h
-SFNTLayoutTypes.h
-SFNTTypes.h
-share.h
-shellapi.h
-shlguid.h
-shlobj.h
-sigcontext.h
-signal.h
-SimpleGameSound.h
-SIOUX.h
-size_t.h
-sndio.h
-someincludefile.h
-Sound.h
-soundcard.h
-sqlite3.h
-ssdef.h
-sstream
-stack
-StandardFile.h
-starlet.h
-stat.h
-statreg.cpp
-statreg.h
-stdarg.h
-stdbool.h
-stddef.h
-stdint.h
-stdio.h
-stdlib.h
-storage/FindDirectory.h
-StorageKit.h
-string
-StringCompare.h
-string.h
-String.h
-strings.h
-Strings.h
-StringView.h
-stropts.h
-strstrea.h
-structs.h
-stsdef.h
-SupportDefs.h
-support/String.h
-support/SupportDefs.h
-support/TLS.h
-svrcore.h
-symconst.h
-sym.h
-synch.h
-syncmgr.h
-sys/atomic_op.h
-sys/bitypes.h
-sys/byteorder.h
-syscall.h
-sys/cdefs.h
-sys/cfgodm.h
-sys/elf.h
-sys/endian.h
-sys/errno.h
-sys/fault.h
-sys/fcntl.h
-sys/file.h
-sys/filio.h
-sys/frame.h
-sys/immu.h
-sys/inttypes.h
-sys/ioccom.h
-sys/ioctl.h
-sys/ipc.h
-sys/ldr.h
-sys/link.h
-sys/locking.h
-syslog.h
-sys/lwp.h
-sys/machine.h
-sys/mman.h
-sys/mmu.h
-sys/mount.h
-sys/mpctl.h
-sys/param.h
-sys/pda.h
-sys/poll.h
-sys/ppc.h
-sys/prctl.h
-sys/priv.h
-sys/procfs.h
-sys/pstat.h
-sys/ptrace.h
-sys/queue.h
-sys/quota.h
-sys/reboot.h
-sys/reg.h
-sys/regset.h
-sys/resource.h
-sys/sched.h
-sys/select.h
-sys/sem.h
-sys/sendfile.h
-sys/shm.h
-sys/siginfo.h
-sys/signal.h
-sys/socket.h
-sys/sockio.h
-sys/sparc/frame.h
-sys/stack.h
-sys/statfs.h
-sys/stat.h
-sys/statvfs.h
-sys/syscall.h
-sys/sysctl.h
-sys/sysinfo.h
-sys/sysmp.h
-sys/syssgi.h
-sys/system_properties.h
-sys/systeminfo.h
-sys/timeb.h
-sys/time.h
-sys/times.h
-sys/ttycom.h
-sys/types.h
-sys/ucontext.h
-sys/uio.h
-sys/un.h
-sys/unistd.h
-sys/utsname.h
-sys/vfs.h
-sys/wait.h
-tables.h
-TArray.h
-TArrayIterator.h
-task.h
-tchar.h
-TCHAR.H
-termios.h
-TextCommon.h
-TextEdit.h
-TextEncodingConverter.h
-TextServices.h
-TextUtils.h
-TextView.h
-th/PCR_Th.h
-thread.h
-ThreadManagerTests.h
-Threads.h
-time.h
-Timer.h
-tlhelp32.h
-ToolUtils.h
-tr1/functional
-trace.h
-Traps.h
-typeinfo
-types.h
-Types.h
-UAppleEventsMgr.h
-UAttachments.h
-ucontext.h
-uconv.h
-UCursor.h
-ucx$inetdef.h
-UDebugging.h
-UDesktop.h
-UDrawingState.h
-UDrawingUtils.h
-UEnvironment.h
-UEventMgr.h
-UException.h
-UExtractFromAEDesc.h
-UGWorld.h
-UKeyFilters.h
-ulocks.h
-ulserrno.h
-UMemoryMgr.h
-UModalDialogs.h
-UNavServicesDialogs.h
-UnicodeBlockObjects.h
-UnicodeConverter.h
-UnicodeUtilities.h
-unidef.h
-unikbd.h
-unistd.h
-unix.h
-unixio.h
-unixlib.h
-unknwn.h
-UPrinting.h
-UQuickTime.h
-UReanimator.h
-URegions.h
-URegistrar.h
-UResourceMgr.h
-utility
-urlhist.h
-urlmon.h
-UScrap.h
-UScreenPort.h
-UTCUtils.h
-UTETextAction.h
-UTEViewTextAction.h
-UTextEdit.h
-UTextTraits.h
-utility
-utime.h
-UWindows.h
-values.h
-varargs.h
-vcclr.h
-vector
-View.h
-Volume.h
-wab.h
-wait.h
-wchar.h
-wctype.h
-winbase.h
-win/compobj.h
-windef.h
-Window.h
-windows.h
-Windows.h
-windowsx.h
-Wininet.h
-winnls.h
-winperf.h
-winreg.h
-Winreg.h
-winsock2.h
-winsock.h
-winspool.h
-winsvc.h
-winuser.h
-winver.h
-wmem.h
-workbench/startup.h
-wtypes.h
-wx/image.h
-wx/listctrl.h
-wx/log.h
-wx/toolbar.h
-wx/wx.h
-wx/xrc/xmlres.h
-X11/cursorfont.h
-X11/extensions/Print.h
-X11/extensions/shape.h
-X11/extensions/scrnsaver.h
-X11/extensions/XShm.h
-X11/extensions/Xrender.h
-X11/extensions/Xdamage.h
-X11/extensions/Xcomposite.h
-X11/Intrinsic.h
-X11/keysymdef.h
-X11/keysym.h
-X11/Shell.h
-X11/StringDefs.h
-X11/Xatom.h
-X11/Xft/Xft.h
-X11/Xfuncproto.h
-X11/X.h
-X11/XKBlib.h
-X11/Xlib.h
-X11/Xlibint.h
-X11/Xlocale.h
-X11/Xos.h
-X11/Xutil.h
-xpt_struct.h
-xpt_xdr.h
-zmouse.h
-speex/speex_resampler.h
-soundtouch/SoundTouch.h
-#if MOZ_NATIVE_PNG==1
-png.h
-#endif
-#if MOZ_NATIVE_ZLIB==1
-zlib.h
-#endif
-#ifdef MOZ_ENABLE_STARTUP_NOTIFICATION
-libsn/sn.h
-libsn/sn-common.h
-libsn/sn-launchee.h
-libsn/sn-launcher.h
-libsn/sn-monitor.h
-libsn/sn-util.h
-#endif
-#if MOZ_NATIVE_HUNSPELL==1
-hunspell.hxx
-#endif
-#if MOZ_NATIVE_BZ2==1
-bzlib.h
-#endif
-#ifdef MOZ_ENABLE_GIO
-gio/gio.h
-#endif
-#if MOZ_NATIVE_LIBEVENT==1
-event.h
-#else
-sys/event.h
-#endif
-#ifdef MOZ_ENABLE_LIBPROXY
-proxy.h
-#endif
-#ifdef MOZ_ENABLE_CONTENTMANAGER
-SelectSingleContentItemPage.h
-SelectMultipleContentItemsPage.h
-QtSparql/qsparqlconnection.h
-QtSparql/qsparqlquery.h
-QtSparql/qsparqlresult.h
-#endif
-
-#if MOZ_TREE_PIXMAN!=1
-pixman.h
-#endif
-#if MOZ_ENABLE_MEEGOTOUCHSHARE
-shareuiinterface.h
-#endif
-#if MOZ_NATIVE_LIBVPX==1
-vpx/vpx_codec.h
-vpx/vpx_decoder.h
-vpx/vpx_encoder.h
-vpx/vp8cx.h
-vpx/vp8dx.h
-#endif
-#ifdef GKMEDIAS_SHARED_LIBRARY
-vpx/vpx_codec.h
-vpx/vpx_decoder.h
-vpx/vpx_encoder.h
-vpx/vp8cx.h
-vpx/vp8dx.h
-vorbis/codec.h
-theora/theoradec.h
-tremor/ivorbiscodec.h
-ogg/ogg.h
-ogg/os_types.h
-nestegg/nestegg.h
-cubeb/cubeb.h
-#endif
-gst/gst.h
-gst/app/gstappsink.h
-gst/app/gstappsrc.h
-gst/video/video.h
-sys/msg.h
-sys/ipc.h
-sys/thr.h
-sys/user.h
-kvm.h
-spawn.h
-err.h
-xlocale.h
-#ifdef MOZ_SHARED_ICU
-unicode/locid.h
-unicode/numsys.h
-unicode/ucal.h
-unicode/uclean.h
-unicode/ucol.h
-unicode/udat.h
-unicode/udatpg.h
-unicode/uenum.h
-unicode/unum.h
-unicode/ustring.h
-unicode/utypes.h
-#endif
-libutil.h
diff --git a/js/src/config/version.mk b/js/src/config/version.mk
deleted file mode 100644
index 414a1ebb456..00000000000
--- a/js/src/config/version.mk
+++ /dev/null
@@ -1,51 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-ifndef INCLUDED_VERSION_MK
-INCLUDED_VERSION_MK=1
-
-# Windows gmake build:
-# Build default .rc file if $(RESFILE) isn't defined.
-# TODO:
-# PBI : Private build info. Not used currently.
-# Guessing the best way would be to set an env var.
-# BINARY : Binary name. Not used currently.
-ifeq ($(MOZ_WIDGET_TOOLKIT),windows)
-ifndef RESFILE
-RCFILE=./module.rc
-RESFILE=./module.res
-_RC_STRING = -QUIET 1 -DEPTH $(DEPTH) -TOPSRCDIR $(topsrcdir) -OBJDIR . -SRCDIR $(srcdir) -DISPNAME $(MOZ_APP_DISPLAYNAME) -APPVERSION $(MOZ_APP_VERSION)
-ifdef MOZILLA_OFFICIAL
-_RC_STRING += -OFFICIAL 1
-endif
-ifdef MOZ_DEBUG
-_RC_STRING += -DEBUG 1
-endif
-ifdef PROGRAM
-_RC_STRING += -BINARY $(PROGRAM)
-else
-ifdef _PROGRAM
-_RC_STRING += -BINARY $(_PROGRAM)
-else
-ifdef SHARED_LIBRARY
-_RC_STRING += -BINARY $(SHARED_LIBRARY)
-endif
-endif
-endif
-ifdef RCINCLUDE
-_RC_STRING += -RCINCLUDE $(srcdir)/$(RCINCLUDE)
-endif
-
-GARBAGE += $(RESFILE) $(RCFILE)
-
-#dummy target so $(RCFILE) doesn't become the default =P
-all::
-
-$(RCFILE): $(RCINCLUDE) $(topsrcdir)/config/version_win.pl
- $(PERL) $(topsrcdir)/config/version_win.pl $(_RC_STRING)
-
-endif # RESFILE
-endif # Windows
-
-endif
diff --git a/js/src/config/version_win.pl b/js/src/config/version_win.pl
deleted file mode 100755
index d28c00ba3d6..00000000000
--- a/js/src/config/version_win.pl
+++ /dev/null
@@ -1,374 +0,0 @@
-#!/usr/bin/perl -w
-
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-#use diagnostics;
-require strict;
-my $dir = $0;
-$dir =~ s/[^\/]*$//;
-push(@INC, "$dir");
-require "Moz/Milestone.pm";
-use Getopt::Long;
-use Getopt::Std;
-use POSIX;
-
-# Calculate the number of days since Jan. 1, 2000 from a buildid string
-sub daysFromBuildID
-{
- my ($buildid,) = @_;
-
- my ($y, $m, $d, $h) = ($buildid =~ /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})$/);
- $d || die("Unrecognized buildid string.");
-
- my $secondstodays = 60 * 60 * 24;
- return sprintf("%d",
- (POSIX::mktime(00, 00, 00, $d, $m - 1, $y - 1900) -
- POSIX::mktime(00, 00, 00, 01, 00, 100)) / $secondstodays);
-}
-
-#Creates version resource file
-
-#Paramaters are passed on the command line:
-
-#Example: -MODNAME nsToolkitCompsModule -DEBUG=1
-
-# DEBUG - Mozilla's global debug variable - tells if its debug version
-# OFFICIAL - tells Mozilla is building a milestone or nightly
-# MSTONE - tells which milestone is being built;
-# OBJDIR - Holds the object directory;
-# MODNAME - tells what the name of the module is like nsBMPModule
-# DEPTH - Holds the path to the root obj dir
-# TOPSRCDIR - Holds the path to the root mozilla dir
-# SRCDIR - Holds module.ver and source
-# BINARY - Holds the name of the binary file
-# DISPNAME - Holds the display name of the built application
-# APPVERSION - Holds the version string of the built application
-# RCINCLUDE - Holds the name of the RC File to include or ""
-# QUIET - Turns off output
-
-#Description and Comment come from module.ver
-#Bug 23560
-#http://msdn.microsoft.com/library/default.asp?url=/library/en-us/winui/rc_7x2d.asp
-
-#Get next .ver file entry
-sub getNextEntry
-{
- while (<VERFILE>)
- {
- my $mline = $_;
- ($mline) = split(/#/,$mline);
- my ($entry, $value)=split(/=/,$mline,2);
- if (defined($entry))
- {
- if (defined($value))
- {
- $entry =~ s/^\s*(.*?)\s*$/$1/;
- $value =~ s/^\s*(.*?)\s*$/$1/;
- return ($entry,$value);
- }
- }
- }
- return undef;
-}
-
-my ($quiet,$objdir,$debug,$official,$milestone,$buildid,$module,$binary,$depth,$rcinclude,$srcdir,$fileversion,$productversion);
-
-GetOptions( "QUIET" => \$quiet,
- "DEBUG=s" => \$debug,
- "OFFICIAL=s" => \$official,
- "MSTONE=s" => \$milestone,
- "MODNAME=s" => \$module,
- "BINARY=s" => \$binary,
- "DISPNAME=s" => \$displayname,
- "APPVERSION=s" => \$appversion,
- "SRCDIR=s" => \$srcdir,
- "TOPSRCDIR=s" => \$topsrcdir,
- "DEPTH=s" => \$depth,
- "RCINCLUDE=s" => \$rcinclude,
- "OBJDIR=s" => \$objdir);
-if (!defined($debug)) {$debug="";}
-if (!defined($official)) {$official="";}
-if (!defined($milestone)) {$milestone="";}
-if (!defined($module)) {$module="";}
-if (!defined($binary)) {$binary="";}
-if (!defined($displayname)) {$displayname="Mozilla";}
-if (!defined($appversion)) {$appversion=$milestone;}
-if (!defined($depth)) {$depth=".";}
-if (!defined($rcinclude)) {$rcinclude="";}
-if (!defined($objdir)) {$objdir=".";}
-if (!defined($srcdir)) {$srcdir=".";}
-if (!defined($topsrcdir)) {$topsrcdir=".";}
-my $mfversion = "Personal";
-my $mpversion = "Personal";
-my @fileflags = ("0");
-my $comment="";
-my $description="";
-if (!defined($module))
-{
- $module = $binary;
- ($module) = split(/\./,$module);
-}
-
-my $bufferstr=" ";
-
-my $MILESTONE_FILE = "$topsrcdir/config/milestone.txt";
-my $BUILDID_FILE = "$depth/config/buildid";
-
-#Read module.ver file
-#Version file overrides for WIN32:
-#WIN32_MODULE_COMMENT
-#WIN32_MODULE_DESCRIPTION
-#WIN32_MODULE_FILEVERSION
-#WIN32_MODULE_COMPANYNAME
-#WIN32_MODULE_FILEVERSION_STRING
-#WIN32_MODULE_NAME
-#WIN32_MODULE_COPYRIGHT
-#WIN32_MODULE_TRADEMARKS
-#WIN32_MODULE_ORIGINAL_FILENAME
-#WIN32_MODULE_PRODUCTNAME
-#WIN32_MODULE_PRODUCTVERSION
-#WIN32_MODULE_PRODUCTVERSION_STRING
-
-#Override values obtained from the .ver file
-my $override_comment;
-my $override_description;
-my $override_fileversion;
-my $override_company;
-my $override_mfversion;
-my $override_module;
-my $override_copyright;
-my $override_trademarks;
-my $override_filename;
-my $override_productname;
-my $override_productversion;
-my $override_mpversion;
-if (open(VERFILE, "<$srcdir/module.ver"))
-{
-
- my ($a,$b) = getNextEntry();
- while (defined($a))
- {
- if ($a eq "WIN32_MODULE_COMMENT") { $override_comment = $b; }
- if ($a eq "WIN32_MODULE_DESCRIPTION") { $override_description = $b; }
- if ($a eq "WIN32_MODULE_FILEVERSION") { $override_fileversion = $b; }
- if ($a eq "WIN32_MODULE_COMPANYNAME") { $override_company = $b; }
- if ($a eq "WIN32_MODULE_FILEVERSION_STRING") { $override_mfversion = $b; }
- if ($a eq "WIN32_MODULE_NAME") { $override_module = $b; }
- if ($a eq "WIN32_MODULE_COPYRIGHT") { $override_copyright = $b; }
- if ($a eq "WIN32_MODULE_TRADEMARKS") { $override_trademarks = $b; }
- if ($a eq "WIN32_MODULE_ORIGINAL_FILENAME") { $override_filename = $b; }
- if ($a eq "WIN32_MODULE_PRODUCTNAME") { $override_productname = $b; }
- if ($a eq "WIN32_MODULE_PRODUCTVERSION") { $override_productversion = $b; }
- if ($a eq "WIN32_MODULE_PRODUCTVERSION_STRING") { $override_mpversion = $b; }
- ($a,$b) = getNextEntry();
- }
- close(VERFILE)
-}
-else
-{
- if (!$quiet || $quiet ne "1") { print "$bufferstr" . "WARNING: No module.ver file included ($module, $binary). Default values used\n"; }
-}
-#Get rid of trailing and leading whitespace
-$debug =~ s/^\s*(.*)\s*$/$1/;
-$comment =~ s/^\s*(.*)\s*$/$1/;
-$official =~ s/^\s*(.*)\s*$/$1/;
-$milestone =~ s/^\s*(.*)\s*$/$1/;
-$description =~ s/^\s*(.*)\s*$/$1/;
-$module =~ s/^\s*(.*)\s*$/$1/;
-$depth =~ s/^\s*(.*)\s*$/$1/;
-$binary =~ s/^\s*(.*)\s*$/$1/;
-$displayname =~ s/^\s*(.*)\s*$/$1/;
-
-open(BUILDID, "<", $BUILDID_FILE) || die("Couldn't open buildid file: $BUILDID_FILE");
-$buildid = <BUILDID>;
-$buildid =~ s/\s*$//;
-close BUILDID;
-
-my $daycount = daysFromBuildID($buildid);
-
-if ($milestone eq "") {
- $milestone = Moz::Milestone::getOfficialMilestone($MILESTONE_FILE);
-}
-
-$mfversion = $mpversion = $milestone;
-if ($appversion eq "") {
- $appversion = $milestone;
-}
-
-if ($debug eq "1")
-{
- push @fileflags, "VS_FF_DEBUG";
- $mpversion .= " Debug";
- $mfversion .= " Debug";
-}
-
-if ($official ne "1") {
- push @fileflags, "VS_FF_PRIVATEBUILD";
-}
-
-if ($milestone =~ /[a-z]/) {
- push @fileflags, "VS_FF_PRERELEASE";
-}
-
-my @mstone = split(/\./,$milestone);
-$mstone[1] =~s/\D.*$//;
-if (!$mstone[2]) {
- $mstone[2] = "0";
-}
-else {
- $mstone[2] =~s/\D.*$//;
-}
-$fileversion = $productversion="$mstone[0],$mstone[1],$mstone[2],$daycount";
-
-my @appver = split(/\./,$appversion);
-for ($j = 1; $j < 4; $j++)
-{
- if (!$appver[$j]) {
- $appver[$j] = "0";
- }
- else {
- $appver[$j] =~s/\D.*$//;
- }
-}
-my $winappversion = "$appver[0],$appver[1],$appver[2],$appver[3]";
-
-my $copyright = "License: MPL 2";
-my $company = "Mozilla Foundation";
-my $trademarks = "Mozilla";
-my $productname = $displayname;
-
-
-if (defined($override_comment)){$override_comment =~ s/\@MOZ_APP_DISPLAYNAME\@/$displayname/g; $comment=$override_comment;}
-if (defined($override_description)){$override_description =~ s/\@MOZ_APP_DISPLAYNAME\@/$displayname/g; $description=$override_description;}
-if (defined($override_fileversion)){$override_fileversion =~ s/\@MOZ_APP_WINVERSION\@/$winappversion/g; $fileversion=$override_fileversion;}
-if (defined($override_mfversion)){$override_mfversion =~ s/\@MOZ_APP_VERSION\@/$appversion/g; $mfversion=$override_mfversion;}
-if (defined($override_company)){$company=$override_company;}
-if (defined($override_module)){$override_module =~ s/\@MOZ_APP_DISPLAYNAME\@/$displayname/g; $module=$override_module;}
-if (defined($override_copyright)){$override_copyright =~ s/\@MOZ_APP_DISPLAYNAME\@/$displayname/g; $copyright=$override_copyright;}
-if (defined($override_trademarks)){$override_trademarks =~ s/\@MOZ_APP_DISPLAYNAME\@/$displayname/g; $trademarks=$override_trademarks;}
-if (defined($override_filename)){$binary=$override_filename;}
-if (defined($override_productname)){$override_productname =~ s/\@MOZ_APP_DISPLAYNAME\@/$displayname/g; $productname=$override_productname;}
-if (defined($override_productversion)){$override_productversion =~ s/\@MOZ_APP_WINVERSION\@/$winappversion/g; $productversion=$override_productversion;}
-if (defined($override_mpversion)){$override_mpversion =~ s/\@MOZ_APP_VERSION\@/$appversion/g; $mpversion=$override_mpversion;}
-
-
-#Override section
-
-open(RCFILE, ">$objdir/module.rc") || die("Can't edit module.rc - It must be locked.\n");
-print RCFILE qq{
-// This Source Code Form is subject to the terms of the Mozilla Public
-// License, v. 2.0. If a copy of the MPL was not distributed with this
-// file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-#include <winver.h>
-
-// Note: if you contain versioning information in an included
-// RC script, it will be discarded
-// Use module.ver to explicitly set these values
-
-// Do not edit this file. Changes won't affect the build.
-
-};
-
-my $versionlevel=0;
-my $insideversion=0;
-if (open(RCINCLUDE, "<$rcinclude"))
-{
- print RCFILE "// From included resource $rcinclude\n";
-# my $mstring="";
- while (<RCINCLUDE>)
- {
- $_ =~ s/\@MOZ_APP_DISPLAYNAME\@/$displayname/g;
- print RCFILE $_;
-# my $instr=$_;
-# chomp($instr);
-# $mstring .= "$instr\;";
- }
- close(RCINCLUDE);
-# $mstring =~ s/\/\*.*\*\///g;
-# my @mlines = split(/\;/,$mstring);
-# for(@mlines)
-# {
-# my ($nocomment)=split(/\/\//,$_);
-# if (defined($nocomment) && $nocomment ne "")
-# {
-# my ($firststring,$secondstring) = split(/\s+/,$nocomment);
-# if (!defined($firststring)) {$firststring="";}
-# if (!defined($secondstring)) {$secondstring="";}
-# if ($secondstring eq "VERSIONINFO")
-# {
-#if (!$quiet || $quiet ne "1") {
-# print "$bufferstr" . "WARNING: Included RC file ($rcinclude, $module, $binary)\n";
-# print "$bufferstr" . "WARNING: contains versioning information that will be discarded\n";
-# print "$bufferstr" . "WARNING: Remove it and use relevant overrides (in module.ver)\n";
-#}
-# $versionlevel = 0;
-# $insideversion = 1;
-# }
-# if ($firststring eq "BEGIN") { $versionlevel++; }
-# if ($secondstring eq "END")
-# {
-# $versionlevel--;
-# if ($insideversion==1 && $versionlevel==0) {$versionlevel=0;}
-# }
-# my $includecheck = $firststring . $secondstring;
-# $includecheck =~ s/<|>/"/g;
-# $includecheck = lc($includecheck);
-# if ($includecheck ne "#include\"winver.h\"")
-# {
-# if ($insideversion == 0 && $versionlevel == 0)
-# {
-# print RCFILE "$nocomment\n";
-# }
-# }
-# }
-# }
-
-}
-
-my $fileflags = join(' | ', @fileflags);
-
-print RCFILE qq{
-
-
-/////////////////////////////////////////////////////////////////////////////
-//
-// Version
-//
-
-1 VERSIONINFO
- FILEVERSION $fileversion
- PRODUCTVERSION $productversion
- FILEFLAGSMASK 0x3fL
- FILEFLAGS $fileflags
- FILEOS VOS__WINDOWS32
- FILETYPE VFT_DLL
- FILESUBTYPE 0x0L
-BEGIN
- BLOCK "StringFileInfo"
- BEGIN
- BLOCK "000004b0"
- BEGIN
- VALUE "Comments", "$comment"
- VALUE "LegalCopyright", "$copyright"
- VALUE "CompanyName", "$company"
- VALUE "FileDescription", "$description"
- VALUE "FileVersion", "$mfversion"
- VALUE "ProductVersion", "$mpversion"
- VALUE "InternalName", "$module"
- VALUE "LegalTrademarks", "$trademarks"
- VALUE "OriginalFilename", "$binary"
- VALUE "ProductName", "$productname"
- VALUE "BuildID", "$buildid"
- END
- END
- BLOCK "VarFileInfo"
- BEGIN
- VALUE "Translation", 0x0, 1200
- END
-END
-
-};
-close(RCFILE);
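
For context on the deleted script above: daysFromBuildID turns the leading YYYYMMDD of the buildid into the number of days since 2000-01-01, which then becomes the last component of FILEVERSION/PRODUCTVERSION. A minimal C++ sketch of the same arithmetic (the function name and the sample buildid are illustrative, not part of the tree):

    // Rough C++ analogue of the deleted Perl daysFromBuildID helper: take a
    // YYYYMMDDhhmmss buildid and return whole days elapsed since 2000-01-01.
    // Uses std::mktime on local midnights, mirroring POSIX::mktime in the script.
    #include <cstdio>
    #include <ctime>

    static long daysFromBuildID(const char *buildid)
    {
        int y, m, d;
        if (std::sscanf(buildid, "%4d%2d%2d", &y, &m, &d) != 3)
            return -1;                     // unrecognized buildid string

        std::tm build = {};                // midnight on the build date
        build.tm_year = y - 1900;
        build.tm_mon  = m - 1;
        build.tm_mday = d;

        std::tm epoch = {};                // midnight on 2000-01-01
        epoch.tm_year = 100;
        epoch.tm_mon  = 0;
        epoch.tm_mday = 1;

        const long secondsPerDay = 60 * 60 * 24;
        return static_cast<long>(std::difftime(std::mktime(&build),
                                               std::mktime(&epoch))) / secondsPerDay;
    }

    int main()
    {
        std::printf("%ld\n", daysFromBuildID("20140101030405")); // e.g. 5114
    }
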
diff --git a/js/src/configure.in b/js/src/configure.in
index ba0d2a74732..90f34a78898 100644
--- a/js/src/configure.in
+++ b/js/src/configure.in
@@ -8,8 +8,8 @@ dnl Process this file with autoconf to produce a configure script.
dnl ========================================================
AC_PREREQ(2.13)
-AC_INIT(jsapi.h)
-AC_CONFIG_AUX_DIR(${srcdir}/../../build/autoconf)
+AC_INIT(js/src/jsapi.h)
+AC_CONFIG_AUX_DIR(${srcdir}/build/autoconf)
AC_CANONICAL_SYSTEM
TARGET_CPU="${target_cpu}"
TARGET_VENDOR="${target_vendor}"
@@ -73,7 +73,7 @@ _PTHREAD_LDFLAGS=""
dnl Do not allow a separate objdir build if a srcdir build exists.
dnl ==============================================================
-_topsrcdir=`cd \`dirname $0\`; pwd`
+_topsrcdir=`cd $srcdir; pwd`
_objdir=`pwd`
if test "$_topsrcdir" != "$_objdir"
@@ -165,6 +165,8 @@ else
AC_DEFINE(JS_STANDALONE)
fi
AC_SUBST(JS_STANDALONE)
+BUILDING_JS=1
+AC_SUBST(BUILDING_JS)
MOZ_ARG_WITH_STRING(gonk,
[ --with-gonk=DIR
@@ -1157,7 +1159,7 @@ if test "$GNU_CC"; then
esac
fi
- _DEFINES_CFLAGS='-include $(DEPTH)/js-confdefs.h -DMOZILLA_CLIENT'
+ _DEFINES_CFLAGS='-include $(DEPTH)/js/src/js-confdefs.h -DMOZILLA_CLIENT'
_USE_CPP_INCLUDE_FLAG=1
elif test "$SOLARIS_SUNPRO_CC"; then
@@ -1219,7 +1221,7 @@ if test "$GNU_CXX"; then
esac
fi
- _DEFINES_CXXFLAGS='-DMOZILLA_CLIENT -include $(DEPTH)/js-confdefs.h'
+ _DEFINES_CXXFLAGS='-DMOZILLA_CLIENT -include $(DEPTH)/js/src/js-confdefs.h'
_USE_CPP_INCLUDE_FLAG=1
# Recent clang and gcc support C++11 deleted functions without warnings if
@@ -1639,8 +1641,8 @@ ia64*-hpux*)
MKSHLIB_UNFORCE_ALL=
DSO_LDOPTS=-SUBSYSTEM:WINDOWS
_USE_CPP_INCLUDE_FLAG=1
- _DEFINES_CFLAGS='-FI $(DEPTH)/js-confdefs.h -DMOZILLA_CLIENT'
- _DEFINES_CXXFLAGS='-FI $(DEPTH)/js-confdefs.h -DMOZILLA_CLIENT'
+ _DEFINES_CFLAGS='-FI $(DEPTH)/js/src/js-confdefs.h -DMOZILLA_CLIENT'
+ _DEFINES_CXXFLAGS='-FI $(DEPTH)/js/src/js-confdefs.h -DMOZILLA_CLIENT'
CFLAGS="$CFLAGS -W3 -Gy -Fd\$(COMPILE_PDBFILE)"
CXXFLAGS="$CXXFLAGS -W3 -Gy -Fd\$(COMPILE_PDBFILE)"
if test "$_CC_SUITE" -ge "12"; then
@@ -2252,7 +2254,7 @@ EOF
])
if test "$ac_cv_have_visibility_builtin_bug" = "no" -a \
"$ac_cv_have_visibility_class_bug" = "no"; then
- VISIBILITY_FLAGS='-I$(DIST)/system_wrappers_js -include $(topsrcdir)/config/gcc_hidden.h'
+ VISIBILITY_FLAGS='-I$(DIST)/system_wrappers -include $(topsrcdir)/config/gcc_hidden.h'
WRAP_SYSTEM_INCLUDES=1
else
VISIBILITY_FLAGS='-fvisibility=hidden'
@@ -3866,7 +3868,7 @@ if test -z "$SKIP_LIBRARY_CHECKS" -a -z "$NO_EDITLINE"; then
else
dnl By default, we use editline
JS_NATIVE_EDITLINE=1
- EDITLINE_LIBS='$(DEPTH)/editline/$(LIB_PREFIX)editline.$(LIB_SUFFIX)'
+ EDITLINE_LIBS='$(DEPTH)/js/src/editline/$(LIB_PREFIX)editline.$(LIB_SUFFIX)'
fi
dnl Either way, we want to build with line editing support.
@@ -4277,7 +4279,7 @@ if test -n "$ENABLE_INTL_API" -a -z "$MOZ_NATIVE_ICU"; then
ICU_CROSS_BUILD_OPT=""
ICU_SRCDIR=""
if test "$HOST_OS_ARCH" = "WINNT"; then
- ICU_SRCDIR="--srcdir=$(cd $srcdir/../../intl/icu/source; pwd -W)"
+ ICU_SRCDIR="--srcdir=$(cd $srcdir/intl/icu/source; pwd -W)"
fi
if test "$CROSS_COMPILE"; then
@@ -4326,7 +4328,7 @@ if test -n "$ENABLE_INTL_API" -a -z "$MOZ_NATIVE_ICU"; then
CPPFLAGS="$ICU_CPPFLAGS" \
CXXFLAGS="$HOST_ICU_CXXFLAGS $HOST_OPTIMIZE_FLAGS" \
LDFLAGS="$HOST_LDFLAGS" \
- $SHELL $abs_srcdir/../../intl/icu/source/runConfigureICU \
+ $SHELL $abs_srcdir/intl/icu/source/runConfigureICU \
$HOST_ICU_BUILD_OPTS \
$ICU_TARGET \
dnl Shell quoting is fun.
@@ -4436,7 +4438,7 @@ if test -n "$ENABLE_INTL_API" -a -z "$MOZ_NATIVE_ICU"; then
CFLAGS="$ICU_CFLAGS" \
CXXFLAGS="$ICU_CXXFLAGS" \
LDFLAGS="$ICU_LDFLAGS $LDFLAGS" \
- $SHELL $_topsrcdir/../../intl/icu/source/configure \
+ $SHELL $_topsrcdir/intl/icu/source/configure \
$ICU_BUILD_OPTS \
$ICU_CROSS_BUILD_OPT \
$ICU_LINK_OPTS \
@@ -4468,7 +4470,7 @@ AC_OUTPUT()
# Produce the js-config script at configure time; see the comments for
# 'js*-config' in Makefile.in.
AC_MSG_RESULT(invoking $MAKE to create $JS_CONFIG_NAME script)
-$MAKE $JS_CONFIG_NAME
+$MAKE -C js/src $JS_CONFIG_NAME
# Build jsctypes if it's enabled.
if test "$JS_HAS_CTYPES" -a -z "$MOZ_NATIVE_FFI"; then
@@ -4494,10 +4496,10 @@ if test "$JS_HAS_CTYPES" -a -z "$MOZ_NATIVE_FFI"; then
case "${target_cpu}" in
x86_64)
# Need target since MSYS tools into mozilla-build may be 32bit
- ac_configure_args="$ac_configure_args CC=\"$_topsrcdir/ctypes/libffi/msvcc.sh -m64\" --build=$build --host=$target"
+ ac_configure_args="$ac_configure_args CC=\"$_topsrcdir/js/src/ctypes/libffi/msvcc.sh -m64\" --build=$build --host=$target"
;;
*)
- ac_configure_args="$ac_configure_args CC=$_topsrcdir/ctypes/libffi/msvcc.sh"
+ ac_configure_args="$ac_configure_args CC=$_topsrcdir/js/src/ctypes/libffi/msvcc.sh"
;;
esac
fi
@@ -4523,12 +4525,12 @@ if test "$JS_HAS_CTYPES" -a -z "$MOZ_NATIVE_FFI"; then
# Use a separate cache file for libffi, since it does things differently
# from our configure.
- mkdir -p $_objdir/ctypes/libffi
+ mkdir -p $_objdir/js/src/ctypes/libffi
old_cache_file=$cache_file
- cache_file=$_objdir/ctypes/libffi/config.cache
+ cache_file=$_objdir/js/src/ctypes/libffi/config.cache
old_config_files=$CONFIG_FILES
unset CONFIG_FILES
- AC_OUTPUT_SUBDIRS(ctypes/libffi)
+ AC_OUTPUT_SUBDIRS(js/src/ctypes/libffi)
cache_file=$old_cache_file
ac_configure_args="$_SUBDIR_CONFIG_ARGS"
CONFIG_FILES=$old_config_files
diff --git a/js/src/gdb/Makefile.in b/js/src/gdb/Makefile.in
index 86a15f5d9f9..dcde600909a 100644
--- a/js/src/gdb/Makefile.in
+++ b/js/src/gdb/Makefile.in
@@ -4,9 +4,9 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-LIBS = $(DEPTH)/$(LIB_PREFIX)js_static.$(LIB_SUFFIX) $(NSPR_LIBS) $(MOZ_ZLIB_LIBS)
+LIBS = ../$(LIB_PREFIX)js_static.$(LIB_SUFFIX) $(NSPR_LIBS) $(MOZ_ZLIB_LIBS)
-LOCAL_INCLUDES += -I$(topsrcdir) -I..
+LOCAL_INCLUDES += -I$(srcdir)/.. -I..
ifdef MOZ_SHARED_ICU
EXTRA_LIBS += $(MOZ_ICU_LIBS)
@@ -18,7 +18,7 @@ EXTRA_LIBS += $(MOZ_FFI_LIBS)
# in the build directory and in the dist/bin directory.
PP_TARGETS += GDB_AUTOLOAD
GDB_AUTOLOAD := gdb-tests-gdb.py.in
-GDB_AUTOLOAD_FLAGS := -Dtopsrcdir=$(abspath $(topsrcdir))
+GDB_AUTOLOAD_FLAGS := -Dtopsrcdir=$(abspath $(srcdir)/..)
INSTALL_TARGETS += GDB_INSTALL_AUTOLOAD
GDB_INSTALL_AUTOLOAD_FILES := $(CURDIR)/gdb-tests-gdb.py
diff --git a/js/src/jsapi-tests/Makefile.in b/js/src/jsapi-tests/Makefile.in
index d6b8dfefa65..31d9720ad15 100644
--- a/js/src/jsapi-tests/Makefile.in
+++ b/js/src/jsapi-tests/Makefile.in
@@ -4,9 +4,9 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-LIBS = $(DEPTH)/$(LIB_PREFIX)js_static.$(LIB_SUFFIX) $(NSPR_LIBS) $(MOZ_ZLIB_LIBS)
+LIBS = ../$(LIB_PREFIX)js_static.$(LIB_SUFFIX) $(NSPR_LIBS) $(MOZ_ZLIB_LIBS)
-LOCAL_INCLUDES += -I$(topsrcdir) -I..
+LOCAL_INCLUDES += -I$(srcdir)/.. -I..
ifdef MOZ_SHARED_ICU
EXTRA_LIBS += $(MOZ_ICU_LIBS)
@@ -22,7 +22,7 @@ endif
# the build directory.
PP_TARGETS += JSAPI_TESTS_AUTOLOAD
JSAPI_TESTS_AUTOLOAD := jsapi-tests-gdb.py.in
-JSAPI_TESTS_AUTOLOAD_FLAGS := -Dtopsrcdir=$(abspath $(topsrcdir))
+JSAPI_TESTS_AUTOLOAD_FLAGS := -Dtopsrcdir=$(abspath $(srcdir)/..)
include $(topsrcdir)/config/rules.mk
diff --git a/js/src/jsapi.cpp b/js/src/jsapi.cpp
index 527108a3197..0adef45a61e 100644
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -3289,10 +3289,23 @@ JS_DefineProperties(JSContext *cx, JSObject *objArg, const JSPropertySpec *ps)
RootedObject obj(cx, objArg);
bool ok;
for (ok = true; ps->name; ps++) {
- if (ps->selfHostedGetter) {
+ if (ps->flags & JSPROP_NATIVE_ACCESSORS) {
+ // If you declare native accessors, then you should have a native
+ // getter.
+ JS_ASSERT(ps->getter.propertyOp.op);
+ // If you do not have a self-hosted getter, you should not have a
+ // self-hosted setter. This is the closest approximation to that
+ // assertion we can have with our setup.
+ JS_ASSERT_IF(ps->setter.propertyOp.info, ps->setter.propertyOp.op);
+
+ ok = DefineProperty(cx, obj, ps->name, UndefinedValue(),
+ ps->getter.propertyOp, ps->setter.propertyOp,
+ ps->flags, Shape::HAS_SHORTID, ps->tinyid);
+ } else {
// If you have self-hosted getter/setter, you can't have a
// native one.
- JS_ASSERT(!ps->getter.op && !ps->setter.op);
+ JS_ASSERT(!ps->getter.propertyOp.op && !ps->setter.propertyOp.op);
+ JS_ASSERT(ps->flags & JSPROP_GETTER);
/*
* During creation of the self-hosting global, we ignore all
* self-hosted properties, as that means we're currently setting up
@@ -3304,18 +3317,10 @@ JS_DefineProperties(JSContext *cx, JSObject *objArg, const JSPropertySpec *ps)
continue;
ok = DefineSelfHostedProperty(cx, obj, ps->name,
- ps->selfHostedGetter,
- ps->selfHostedSetter,
+ ps->getter.selfHosted.funname,
+ ps->setter.selfHosted.funname,
ps->flags, Shape::HAS_SHORTID,
ps->tinyid);
- } else {
- // If you do not have a self-hosted getter, you should
- // have a native getter; and you should not have a
- // self-hosted setter.
- JS_ASSERT(ps->getter.op && !ps->selfHostedSetter);
-
- ok = DefineProperty(cx, obj, ps->name, UndefinedValue(), ps->getter, ps->setter,
- ps->flags, Shape::HAS_SHORTID, ps->tinyid);
}
if (!ok)
break;
diff --git a/js/src/jsapi.h b/js/src/jsapi.h
index e9d058517fc..0173cea004d 100644
--- a/js/src/jsapi.h
+++ b/js/src/jsapi.h
@@ -2390,7 +2390,7 @@ typedef struct JSNativeWrapper {
* Macro static initializers which make it easy to pass no JSJitInfo as part of a
* JSPropertySpec or JSFunctionSpec.
*/
-#define JSOP_WRAPPER(op) {op, nullptr}
+#define JSOP_WRAPPER(op) { {op, nullptr} }
#define JSOP_NULLWRAPPER JSOP_WRAPPER(nullptr)
/*
@@ -2399,13 +2399,30 @@ typedef struct JSNativeWrapper {
* JSPROP_INDEX bit in flags.
*/
struct JSPropertySpec {
+ struct SelfHostedWrapper {
+ void *unused;
+ const char *funname;
+ };
+
const char *name;
int8_t tinyid;
uint8_t flags;
- JSPropertyOpWrapper getter;
- JSStrictPropertyOpWrapper setter;
- const char *selfHostedGetter;
- const char *selfHostedSetter;
+ union {
+ JSPropertyOpWrapper propertyOp;
+ SelfHostedWrapper selfHosted;
+ } getter;
+ union {
+ JSStrictPropertyOpWrapper propertyOp;
+ SelfHostedWrapper selfHosted;
+ } setter;
+
+private:
+ void StaticAsserts() {
+ JS_STATIC_ASSERT(sizeof(SelfHostedWrapper) == sizeof(JSPropertyOpWrapper));
+ JS_STATIC_ASSERT(sizeof(SelfHostedWrapper) == sizeof(JSStrictPropertyOpWrapper));
+ JS_STATIC_ASSERT(offsetof(SelfHostedWrapper, funname) ==
+ offsetof(JSPropertyOpWrapper, info));
+ }
};
namespace JS {
@@ -2414,6 +2431,11 @@ namespace detail {
/* NEVER DEFINED, DON'T USE. For use by JS_CAST_NATIVE_TO only. */
inline int CheckIsNative(JSNative native);
+/* NEVER DEFINED, DON'T USE. For use by JS_CAST_STRING_TO only. */
+template <size_t N>
+inline int
+CheckIsCharacterLiteral(const char (&arr)[N]);
+
} // namespace detail
} // namespace JS
@@ -2421,6 +2443,10 @@ inline int CheckIsNative(JSNative native);
 (static_cast<void>(sizeof(JS::detail::CheckIsNative(v))), \
 reinterpret_cast<To>(v))
+#define JS_CAST_STRING_TO(s, To) \
+ (static_cast<void>(sizeof(JS::detail::CheckIsCharacterLiteral(s))), \
+ reinterpret_cast<To>(s))
+
#define JS_CHECK_ACCESSOR_FLAGS(flags) \
(static_cast::Type>(0), \
(flags))
@@ -2436,23 +2462,23 @@ inline int CheckIsNative(JSNative native);
{name, 0, \
uint8_t(JS_CHECK_ACCESSOR_FLAGS(flags) | JSPROP_SHARED | JSPROP_NATIVE_ACCESSORS), \
JSOP_WRAPPER(JS_CAST_NATIVE_TO(getter, JSPropertyOp)), \
- JSOP_NULLWRAPPER, nullptr, nullptr}
+ JSOP_NULLWRAPPER}
#define JS_PSGS(name, getter, setter, flags) \
{name, 0, \
uint8_t(JS_CHECK_ACCESSOR_FLAGS(flags) | JSPROP_SHARED | JSPROP_NATIVE_ACCESSORS), \
JSOP_WRAPPER(JS_CAST_NATIVE_TO(getter, JSPropertyOp)), \
- JSOP_WRAPPER(JS_CAST_NATIVE_TO(setter, JSStrictPropertyOp)), \
- nullptr, nullptr}
+ JSOP_WRAPPER(JS_CAST_NATIVE_TO(setter, JSStrictPropertyOp))}
#define JS_SELF_HOSTED_GET(name, getterName, flags) \
{name, 0, \
uint8_t(JS_CHECK_ACCESSOR_FLAGS(flags) | JSPROP_SHARED | JSPROP_GETTER), \
- JSOP_NULLWRAPPER, JSOP_NULLWRAPPER, getterName, nullptr}
+ { nullptr, JS_CAST_STRING_TO(getterName, const JSJitInfo *) }, \
+ JSOP_NULLWRAPPER }
#define JS_SELF_HOSTED_GETSET(name, getterName, setterName, flags) \
{name, 0, \
uint8_t(JS_CHECK_ACCESSOR_FLAGS(flags) | JSPROP_SHARED | JSPROP_GETTER | JSPROP_SETTER), \
- JSOP_NULLWRAPPER, JSOP_NULLWRAPPER, getterName, setterName}
-#define JS_PS_END {0, 0, 0, JSOP_NULLWRAPPER, JSOP_NULLWRAPPER, \
- nullptr, nullptr}
+ { nullptr, JS_CAST_STRING_TO(getterName, const JSJitInfo *) }, \
+ { nullptr, JS_CAST_STRING_TO(setterName, const JSJitInfo *) } }
+#define JS_PS_END {0, 0, 0, JSOP_NULLWRAPPER, JSOP_NULLWRAPPER }
/*
* To define a native function, set call to a JSNativeWrapper. To define a
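
The jsapi.h hunk above replaces the two trailing selfHostedGetter/selfHostedSetter strings with unions, so a self-hosted property name is stored in the slot that otherwise holds the JSJitInfo pointer; the private StaticAsserts keep the two layouts in sync. A minimal, self-contained sketch of that overlay (simplified stand-in types, hypothetical names, not the real SpiderMonkey declarations):

    // Standalone sketch of the union-overlay trick used by the new JSPropertySpec:
    // a string (the self-hosted function name) sits in the slot that normally
    // holds a jit-info pointer, and static_asserts guarantee the layouts line up
    // (as the StaticAsserts in the hunk do), on common 64-bit/32-bit platforms.
    #include <cassert>
    #include <cstddef>
    #include <cstdio>
    #include <cstring>

    struct JitInfo;                       // stand-in for JSJitInfo

    struct OpWrapper {                    // stand-in for JSPropertyOpWrapper
        void (*op)();
        const JitInfo *info;
    };

    struct SelfHostedWrapper {            // mirrors JSPropertySpec::SelfHostedWrapper
        void *unused;
        const char *funname;
    };

    union Getter {
        OpWrapper propertyOp;
        SelfHostedWrapper selfHosted;
    };

    static_assert(sizeof(SelfHostedWrapper) == sizeof(OpWrapper),
                  "overlay must not change the spec's size");
    static_assert(offsetof(SelfHostedWrapper, funname) == offsetof(OpWrapper, info),
                  "funname must alias the info slot");

    int main()
    {
        Getter g;
        g.selfHosted = SelfHostedWrapper{nullptr, "ArrayValues"};
        // A consumer that knows the spec is self-hosted reads the name back out
        // of the slot it would otherwise treat as a jit-info pointer.
        std::printf("self-hosted getter: %s\n", g.selfHosted.funname);
        assert(std::strcmp(g.selfHosted.funname, "ArrayValues") == 0);
    }
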
diff --git a/js/src/make-source-package.sh b/js/src/make-source-package.sh
index 2d3108ba94e..b5cb9f56cd4 100755
--- a/js/src/make-source-package.sh
+++ b/js/src/make-source-package.sh
@@ -39,9 +39,17 @@ case $cmd in
${MKDIR} -p ${tgtpath}/intl
cp -t ${tgtpath}/intl -dRp ${SRCDIR}/../../intl/icu
- # copy autoconf config directory.
+ # copy main moz.build and Makefile.in
+ cp -t ${tgtpath} -dRp ${SRCDIR}/../../Makefile.in ${SRCDIR}/../../moz.build
+
+ # copy a nspr file used by the build system
+ ${MKDIR} -p ${tgtpath}/nsprpub/config
+ cp -t ${tgtpath}/nsprpub/config -dRp \
+ ${SRCDIR}/../../nsprpub/config/make-system-wrappers.pl
+
+ # copy build and config directory.
${MKDIR} -p ${tgtpath}/build
- cp -t ${tgtpath}/build -dRp ${SRCDIR}/../../build/autoconf
+ cp -t ${tgtpath} -dRp ${SRCDIR}/../../build ${SRCDIR}/../../config
# put in js itself
cp -t ${tgtpath} -dRp ${SRCDIR}/../../mfbt
@@ -54,22 +62,17 @@ case $cmd in
${MAKE} -C ${tgtpath}/js/src distclean
fi
- # put in the virtualenv and supporting files if it doesnt already exist
- if [ ! -e ${SRCDIR}/build/virtualenv_packages.txt ]; then
- cp -t ${tgtpath}/js/src/build -dRp \
- ${SRCDIR}/../../build/virtualenv_packages.txt \
- ${SRCDIR}/../../build/buildconfig.py
- fi
- if [ ! -e ${SRCDIR}/python ]; then
- cp -t ${tgtpath}/js/src -dRp \
- ${SRCDIR}/../../python
- fi
- if [ ! -e ${SRCDIR}/testing ]; then
- ${MKDIR} -p ${tgtpath}/js/src/testing
- cp -t ${tgtpath}/js/src/testing -dRp \
- ${SRCDIR}/../../testing/mozbase
- fi
- # end of virtualenv injection
+ cp -t ${tgtpath} -dRp \
+ ${SRCDIR}/../../python
+ ${MKDIR} -p ${tgtpath}/dom/bindings
+ cp -t ${tgtpath}/dom/bindings -dRp \
+ ${SRCDIR}/../../dom/bindings/mozwebidlcodegen
+ ${MKDIR} -p ${tgtpath}/media/webrtc/trunk/tools
+ cp -t ${tgtpath}/media/webrtc/trunk/tools -dRp \
+ ${SRCDIR}/../../media/webrtc/trunk/tools/gyp
+ ${MKDIR} -p ${tgtpath}/testing
+ cp -t ${tgtpath}/testing -dRp \
+ ${SRCDIR}/../../testing/mozbase
# remove *.pyc and *.pyo files if any
find ${tgtpath} -type f -name "*.pyc" -o -name "*.pyo" |xargs rm -f
diff --git a/js/src/moz.build b/js/src/moz.build
index 8c20db5dc8e..a9075e07602 100644
--- a/js/src/moz.build
+++ b/js/src/moz.build
@@ -4,8 +4,6 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-DIRS += ['config']
-
if CONFIG['DEHYDRA_PATH']:
DIRS += ['analysis-tests']
diff --git a/js/src/shell/Makefile.in b/js/src/shell/Makefile.in
index a9c30fa83f5..0261f61aa19 100644
--- a/js/src/shell/Makefile.in
+++ b/js/src/shell/Makefile.in
@@ -4,7 +4,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-LIBS = $(NSPR_LIBS) $(EDITLINE_LIBS) $(DEPTH)/$(LIB_PREFIX)js_static.$(LIB_SUFFIX) $(MOZ_ZLIB_LIBS)
+LIBS = $(NSPR_LIBS) $(EDITLINE_LIBS) ../$(LIB_PREFIX)js_static.$(LIB_SUFFIX) $(MOZ_ZLIB_LIBS)
ifdef MOZ_NATIVE_FFI
EXTRA_LIBS += $(MOZ_FFI_LIBS)
endif
@@ -12,7 +12,7 @@ ifdef MOZ_SHARED_ICU
EXTRA_LIBS += $(MOZ_ICU_LIBS)
endif
-LOCAL_INCLUDES += -I$(topsrcdir) -I..
+LOCAL_INCLUDES += -I$(srcdir)/.. -I..
ifeq ($(OS_ARCH),Darwin)
ifeq ($(TARGET_CPU),x86_64)
@@ -28,7 +28,7 @@ endif
# the build directory and in the dist/bin directory.
PP_TARGETS += SHELL_AUTOLOAD
SHELL_AUTOLOAD := js-gdb.py.in
-SHELL_AUTOLOAD_FLAGS := -Dtopsrcdir=$(abspath $(topsrcdir))
+SHELL_AUTOLOAD_FLAGS := -Dtopsrcdir=$(abspath $(srcdir)/..)
INSTALL_TARGETS += SHELL_INSTALL_AUTOLOAD
SHELL_INSTALL_AUTOLOAD_FILES := $(CURDIR)/js-gdb.py
@@ -42,9 +42,9 @@ include $(topsrcdir)/config/rules.mk
# People expect the js shell to wind up in the top-level JS dir.
libs::
- $(INSTALL) $(IFLAGS2) $(PROGRAM) $(DEPTH)
+ $(INSTALL) $(IFLAGS2) $(PROGRAM) ..
-GARBAGE += $(DEPTH)/$(PROGRAM)
+GARBAGE += ../$(PROGRAM)
install:: $(PROGRAM)
$(SYSINSTALL) $^ $(DESTDIR)$(bindir)
diff --git a/js/src/tests/ecma_6/TypedObject/simd/bug953270.js b/js/src/tests/ecma_6/TypedObject/simd/bug953270.js
new file mode 100644
index 00000000000..74ac44dc92b
--- /dev/null
+++ b/js/src/tests/ecma_6/TypedObject/simd/bug953270.js
@@ -0,0 +1,27 @@
+// |reftest| skip-if(!this.hasOwnProperty("SIMD"))
+
+/*
+ * Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/licenses/publicdomain/
+ */
+
+var BUGNUMBER = 953270;
+var summary = 'Handles';
+
+// Check that NaN normalization is applied when extracting the x lane
+// out, after bit conversion has occurred.
+
+var int32x4 = SIMD.int32x4;
+var a = int32x4((4294967295), 200, 300, 400);
+var c = SIMD.int32x4.bitsToFloat32x4(a);
+
+// NaN canonicalization occurs when extracting out x lane:
+assertEq(c.x, NaN);
+
+// but underlying bits are faithfully transmitted
+// (though reinterpreted as a signed integer):
+var d = SIMD.float32x4.bitsToInt32x4(c);
+assertEq(d.x, -1);
+
+reportCompare(true, true);
+print("Tests complete");
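
The new test relies on two IEEE-754 facts: the int32 bit pattern 0xFFFFFFFF is a NaN when viewed as a float (so lane extraction canonicalizes it to NaN), yet the underlying bits survive the round trip back to int32. A small C++ illustration of the same bit reinterpretation (not part of the patch):

    // Why the test expects NaN: the 32-bit pattern 0xFFFFFFFF (int32 -1), viewed
    // as an IEEE-754 float, has all exponent bits set and a non-zero mantissa,
    // i.e. it is a NaN. Reinterpreting via memcpy avoids undefined behaviour.
    #include <cmath>
    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    int main()
    {
        int32_t bits = -1;                // 0xFFFFFFFF, same value the test stores
        float f;
        std::memcpy(&f, &bits, sizeof f); // bit-for-bit reinterpretation
        std::printf("isnan: %d\n", std::isnan(f));   // prints 1

        int32_t back;
        std::memcpy(&back, &f, sizeof back);
        std::printf("round-trip bits: %d\n", back);  // still -1, bits preserved
    }
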
diff --git a/layout/generic/nsFrame.cpp b/layout/generic/nsFrame.cpp
index 3b5d5a0fa1c..8decf170488 100644
--- a/layout/generic/nsFrame.cpp
+++ b/layout/generic/nsFrame.cpp
@@ -5374,6 +5374,12 @@ nsIFrame::DumpFrameTree()
RootFrameList(PresContext(), stdout, 0);
}
+void
+nsIFrame::DumpFrameTreeLimited()
+{
+ List(stdout, 0);
+}
+
void
nsIFrame::RootFrameList(nsPresContext* aPresContext, FILE* out, int32_t aIndent)
{
diff --git a/layout/generic/nsIFrame.h b/layout/generic/nsIFrame.h
index 060d9c5c1a6..53dbfedd524 100644
--- a/layout/generic/nsIFrame.h
+++ b/layout/generic/nsIFrame.h
@@ -3261,6 +3261,7 @@ public:
static void RootFrameList(nsPresContext* aPresContext,
FILE* out, int32_t aIndent);
virtual void DumpFrameTree();
+ void DumpFrameTreeLimited();
NS_IMETHOD GetFrameName(nsAString& aResult) const = 0;
#endif
diff --git a/moz.build b/moz.build
index 544509a820c..48a96dba85e 100644
--- a/moz.build
+++ b/moz.build
@@ -5,30 +5,40 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
CONFIGURE_SUBST_FILES += [
- 'mozilla-config.h',
- 'tools/update-packaging/Makefile',
+ 'config/autoconf.mk',
+ 'config/emptyvars.mk',
]
-if CONFIG['ENABLE_CLANG_PLUGIN']:
- add_tier_dir('base', 'build/clang-plugin', external=True)
+if CONFIG['BUILDING_JS']:
+ if CONFIG['JS_STANDALONE']:
+ DIRS += ['config']
+ DIRS += ['js/src']
+else:
+ CONFIGURE_SUBST_FILES += [
+ 'mozilla-config.h',
+ 'tools/update-packaging/Makefile',
+ ]
-add_tier_dir('base', ['config', 'build', 'probes', 'python'])
+ if CONFIG['ENABLE_CLANG_PLUGIN']:
+ add_tier_dir('base', 'build/clang-plugin', external=True)
-if not CONFIG['LIBXUL_SDK']:
- add_tier_dir('base', ['mfbt'])
+ add_tier_dir('base', ['config', 'build', 'probes', 'python'])
- if CONFIG['MOZ_WIDGET_TOOLKIT'] in ('android', 'gonk'):
- add_tier_dir('base', ['other-licenses/android'])
+ if not CONFIG['LIBXUL_SDK']:
+ add_tier_dir('base', ['mfbt'])
- if CONFIG['MOZ_MEMORY']:
- add_tier_dir('base', ['memory'])
+ if CONFIG['MOZ_WIDGET_TOOLKIT'] in ('android', 'gonk'):
+ add_tier_dir('base', ['other-licenses/android'])
- if not CONFIG['MOZ_NATIVE_ZLIB']:
- add_tier_dir('base', ['modules/zlib'])
+ if CONFIG['MOZ_MEMORY']:
+ add_tier_dir('base', ['memory'])
- add_tier_dir('base', ['mozglue', 'memory/mozalloc'])
+ if not CONFIG['MOZ_NATIVE_ZLIB']:
+ add_tier_dir('base', ['modules/zlib'])
-add_tier_dir('precompile', 'xpcom/xpidl')
+ add_tier_dir('base', ['mozglue', 'memory/mozalloc'])
-# Bring in the configuration for the configured application.
-include('/' + CONFIG['MOZ_BUILD_APP'] + '/app.mozbuild')
+ add_tier_dir('precompile', 'xpcom/xpidl')
+
+ # Bring in the configuration for the configured application.
+ include('/' + CONFIG['MOZ_BUILD_APP'] + '/app.mozbuild')
diff --git a/netwerk/protocol/http/Http2Compression.cpp b/netwerk/protocol/http/Http2Compression.cpp
index f0065a5ae6e..08f5c9ede25 100644
--- a/netwerk/protocol/http/Http2Compression.cpp
+++ b/netwerk/protocol/http/Http2Compression.cpp
@@ -81,6 +81,7 @@ InitializeStaticHeaders()
AddStaticElement(NS_LITERAL_CSTRING("expect"));
AddStaticElement(NS_LITERAL_CSTRING("expires"));
AddStaticElement(NS_LITERAL_CSTRING("from"));
+ AddStaticElement(NS_LITERAL_CSTRING("host"));
AddStaticElement(NS_LITERAL_CSTRING("if-match"));
AddStaticElement(NS_LITERAL_CSTRING("if-modified-since"));
AddStaticElement(NS_LITERAL_CSTRING("if-none-match"));
@@ -434,7 +435,22 @@ Http2Decompressor::OutputHeader(const nsACString &name, const nsACString &value)
mOutput->Append(name);
mOutput->Append(NS_LITERAL_CSTRING(": "));
- mOutput->Append(value);
+ // Special handling for set-cookie according to the spec
+ bool isSetCookie = name.Equals(NS_LITERAL_CSTRING("set-cookie"));
+ int32_t valueLen = value.Length();
+ for (int32_t i = 0; i < valueLen; ++i) {
+ if (value[i] == '\0') {
+ if (isSetCookie) {
+ mOutput->Append(NS_LITERAL_CSTRING("\r\n"));
+ mOutput->Append(name);
+ mOutput->Append(NS_LITERAL_CSTRING(": "));
+ } else {
+ mOutput->Append(NS_LITERAL_CSTRING(", "));
+ }
+ } else {
+ mOutput->Append(value[i]);
+ }
+ }
mOutput->Append(NS_LITERAL_CSTRING("\r\n"));
return NS_OK;
}
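
The OutputHeader change above rebuilds HTTP/1-style header text from HTTP/2 values that use NUL delimiters: set-cookie values become separate header lines, everything else is re-joined with ", ". A minimal Python sketch of that expansion (a hypothetical helper, not part of the patch):

```python
def output_header(name, value):
    """Expand a NUL-delimited HTTP/2 header value into HTTP/1-style text.

    set-cookie values are emitted as separate header lines; all other
    headers are re-joined into a single comma-separated line.
    """
    parts = value.split("\0")
    if name.lower() == "set-cookie":
        return "".join("%s: %s\r\n" % (name, p) for p in parts)
    return "%s: %s\r\n" % (name, ", ".join(parts))

# output_header("set-cookie", "a=1\0b=2")
#   -> "set-cookie: a=1\r\nset-cookie: b=2\r\n"
# output_header("accept", "text/html\0text/plain")
#   -> "accept: text/html, text/plain\r\n"
```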
@@ -688,6 +704,14 @@ Http2Decompressor::DoIndexed()
LOG3(("HTTP decompressor indexed entry %u\n", index));
+ if (index == 0) {
+    // Index 0 is a special case - it clears out the reference set
+ mReferenceSet.Clear();
+ mAlternateReferenceSet.Clear();
+ return NS_OK;
+ }
+ index--; // Internally, we 0-index everything, since this is, y'know, C++
+
// Toggle this in the reference set..
// if its not currently in the reference set then add it and
// also emit it. If it is currently in the reference set then just
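
The DoIndexed hunk treats a wire index of 0 as the "empty the reference set" opcode and shifts every other index down by one, since the table is 0-indexed internally. A rough Python model of that toggle logic, assuming a simple list-based table and a set of 0-based indexes (not the real classes):

```python
def do_indexed(wire_index, table, reference_set):
    """Handle an HPACK indexed representation.

    wire_index: 1-based index from the wire; 0 clears the reference set.
    table: list of (name, value) pairs; reference_set: set of 0-based indexes.
    Returns the (name, value) pairs emitted by this representation.
    """
    if wire_index == 0:
        reference_set.clear()
        return []
    index = wire_index - 1            # internal indexes are 0-based
    if index in reference_set:
        reference_set.discard(index)  # toggle off: nothing is emitted
        return []
    reference_set.add(index)          # toggle on: emit the entry
    return [table[index]]
```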
@@ -922,7 +946,25 @@ Http2Compressor::EncodeHeaderBlock(const nsCString &nvInput,
mParsedContentLength = len;
}
- ProcessHeader(nvPair(name, value));
+ if (name.Equals("cookie")) {
+ // cookie crumbling
+ bool haveMoreCookies = true;
+ int32_t nextCookie = valueIndex;
+ while (haveMoreCookies) {
+ int32_t semiSpaceIndex = nvInput.Find("; ", false, nextCookie,
+ crlfIndex - nextCookie);
+ if (semiSpaceIndex == -1) {
+ haveMoreCookies = false;
+ semiSpaceIndex = crlfIndex;
+ }
+ nsDependentCSubstring cookie = Substring(beginBuffer + nextCookie,
+ beginBuffer + semiSpaceIndex);
+ ProcessHeader(nvPair(name, cookie));
+ nextCookie = semiSpaceIndex + 2;
+ }
+ } else {
+ ProcessHeader(nvPair(name, value));
+ }
}
// iterate mreference set and if !alternate.contains(old[i])
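
The "cookie crumbling" loop above splits the Cookie header on "; " and compresses each cookie-pair as its own header, which lets HPACK index repeated crumbs individually. An illustrative Python version of the split (not the actual nsCString scanning code):

```python
def crumble_cookie(value):
    """Split a Cookie header value into individual cookie-pairs."""
    crumbs = []
    start = 0
    while True:
        semi = value.find("; ", start)
        if semi == -1:
            crumbs.append(value[start:])   # last (or only) crumb
            return crumbs
        crumbs.append(value[start:semi])
        start = semi + 2                   # skip the "; " delimiter

# crumble_cookie("a=1; b=2; c=3") == ["a=1", "b=2", "c=3"]
```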
@@ -957,6 +999,8 @@ Http2Compressor::DoOutput(Http2Compressor::outputCode code,
LOG3(("HTTP compressor %p noindex literal with name reference %u %s: %s\n",
this, index, pair->mName.get(), pair->mValue.get()));
+ // In this case, the index will have already been adjusted to be 1-based
+ // instead of 0-based.
EncodeInteger(6, index); // 01 2 bit prefix
startByte = reinterpret_cast<uint8_t *>(mOutput->BeginWriting()) + offset;
*startByte = (*startByte & 0x3f) | 0x40;
@@ -972,6 +1016,8 @@ Http2Compressor::DoOutput(Http2Compressor::outputCode code,
LOG3(("HTTP compressor %p literal with name reference %u %s: %s\n",
this, index, pair->mName.get(), pair->mValue.get()));
+ // In this case, the index will have already been adjusted to be 1-based
+ // instead of 0-based.
EncodeInteger(6, index); // 00 2 bit prefix
startByte = reinterpret_cast<uint8_t *>(mOutput->BeginWriting()) + offset;
*startByte = *startByte & 0x3f;
@@ -988,7 +1034,9 @@ Http2Compressor::DoOutput(Http2Compressor::outputCode code,
LOG3(("HTTP compressor %p toggle %s index %u %s\n",
this, (code == kToggleOff) ? "off" : "on",
index, pair->mName.get()));
- EncodeInteger(7, index);
+ // In this case, we are passed the raw 0-based C index, and need to
+ // increment to make it 1-based and comply with the spec
+ EncodeInteger(7, index + 1);
startByte = reinterpret_cast<uint8_t *>(mOutput->BeginWriting()) + offset;
*startByte = *startByte | 0x80; // 1 1 bit prefix
break;
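
The toggle case now encodes index + 1 because the draft's indexes are 1-based on the wire while the table is 0-based internally; EncodeInteger packs the value into an N-bit prefix with 7-bit continuation bytes. A worked Python sketch of HPACK prefix-integer encoding under those assumptions (not the actual EncodeInteger implementation):

```python
def encode_integer(prefix_bits, value):
    """Encode value as an HPACK N-bit prefix integer.

    Values below 2**prefix_bits - 1 fit in the prefix; larger values
    saturate the prefix and continue in 7-bit groups with the high bit
    set on all but the last byte.
    """
    limit = (1 << prefix_bits) - 1
    if value < limit:
        return bytes([value])
    out = [limit]
    value -= limit
    while value >= 0x80:
        out.append((value & 0x7f) | 0x80)
        value >>= 7
    out.append(value)
    return bytes(out)

# Toggling 0-based table entry 2 is sent as wire index 3:
# encode_integer(7, 2 + 1) == b"\x03"; the caller then ORs in the 0x80
# prefix bit, as DoOutput does with *startByte | 0x80.
```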
diff --git a/netwerk/protocol/http/Http2Session.cpp b/netwerk/protocol/http/Http2Session.cpp
index 7183e4216a5..b0617b7e8ee 100644
--- a/netwerk/protocol/http/Http2Session.cpp
+++ b/netwerk/protocol/http/Http2Session.cpp
@@ -896,7 +896,10 @@ Http2Session::CleanupStream(Http2Stream *aStream, nsresult aResult,
{
MOZ_ASSERT(PR_GetCurrentThread() == gSocketThread);
LOG3(("Http2Session::CleanupStream %p %p 0x%X %X\n",
- this, aStream, aStream->StreamID(), aResult));
+ this, aStream, aStream ? aStream->StreamID() : 0, aResult));
+ if (!aStream) {
+ return;
+ }
Http2PushedStream *pushSource = nullptr;
diff --git a/netwerk/protocol/http/SpdySession3.cpp b/netwerk/protocol/http/SpdySession3.cpp
index 291e229c992..ebcab0fe08c 100644
--- a/netwerk/protocol/http/SpdySession3.cpp
+++ b/netwerk/protocol/http/SpdySession3.cpp
@@ -837,7 +837,10 @@ SpdySession3::CleanupStream(SpdyStream3 *aStream, nsresult aResult,
{
MOZ_ASSERT(PR_GetCurrentThread() == gSocketThread);
LOG3(("SpdySession3::CleanupStream %p %p 0x%X %X\n",
- this, aStream, aStream->StreamID(), aResult));
+ this, aStream, aStream ? aStream->StreamID() : 0, aResult));
+ if (!aStream) {
+ return;
+ }
SpdyPushedStream3 *pushSource = nullptr;
diff --git a/netwerk/protocol/http/SpdySession31.cpp b/netwerk/protocol/http/SpdySession31.cpp
index 9240d8c1d3e..0943145e2d2 100644
--- a/netwerk/protocol/http/SpdySession31.cpp
+++ b/netwerk/protocol/http/SpdySession31.cpp
@@ -874,7 +874,10 @@ SpdySession31::CleanupStream(SpdyStream31 *aStream, nsresult aResult,
{
MOZ_ASSERT(PR_GetCurrentThread() == gSocketThread);
LOG3(("SpdySession31::CleanupStream %p %p 0x%X %X\n",
- this, aStream, aStream->StreamID(), aResult));
+ this, aStream, aStream ? aStream->StreamID() : 0, aResult));
+ if (!aStream) {
+ return;
+ }
SpdyPushedStream31 *pushSource = nullptr;
diff --git a/netwerk/protocol/http/nsHttp.h b/netwerk/protocol/http/nsHttp.h
index f3ba845ff0c..e071fed5594 100644
--- a/netwerk/protocol/http/nsHttp.h
+++ b/netwerk/protocol/http/nsHttp.h
@@ -31,13 +31,14 @@ namespace net {
// leave room for official versions. telem goes to 48
// 24 was an internal spdy/3.1
// 25 was spdy/4a2
- HTTP2_VERSION_DRAFT08 = 26
+ // 26 was http/2-draft08 and http/2-draft07 (they were the same)
+ HTTP2_VERSION_DRAFT09 = 27
};
typedef uint8_t nsHttpVersion;
-#define NS_HTTP2_DRAFT_VERSION HTTP2_VERSION_DRAFT08
-#define NS_HTTP2_DRAFT_TOKEN "HTTP-draft-08/2.0"
+#define NS_HTTP2_DRAFT_VERSION HTTP2_VERSION_DRAFT09
+#define NS_HTTP2_DRAFT_TOKEN "HTTP-draft-09/2.0"
//-----------------------------------------------------------------------------
// http connection capabilities
diff --git a/netwerk/protocol/http/nsHttpChannel.cpp b/netwerk/protocol/http/nsHttpChannel.cpp
index dc977ae1334..67e5e31731c 100644
--- a/netwerk/protocol/http/nsHttpChannel.cpp
+++ b/netwerk/protocol/http/nsHttpChannel.cpp
@@ -5296,13 +5296,42 @@ nsHttpChannel::OnDataAvailable(nsIRequest *request, nsISupports *ctxt,
if (!mLogicalOffset)
MOZ_EVENT_TRACER_EXEC(this, "net::http::channel");
+ int64_t offsetBefore = 0;
+  nsCOMPtr<nsISeekableStream> seekable = do_QueryInterface(input);
+ if (seekable && NS_FAILED(seekable->Tell(&offsetBefore))) {
+ seekable = nullptr;
+ }
+
nsresult rv = mListener->OnDataAvailable(this,
mListenerContext,
input,
mLogicalOffset,
count);
- if (NS_SUCCEEDED(rv))
- mLogicalOffset = progress;
+ if (NS_SUCCEEDED(rv)) {
+ // by contract mListener must read all of "count" bytes, but
+ // nsInputStreamPump is tolerant to seekable streams that violate that
+ // and it will redeliver incompletely read data. So we need to do
+ // the same thing when updating the progress counter to stay in sync.
+ int64_t offsetAfter, delta;
+ if (seekable && NS_SUCCEEDED(seekable->Tell(&offsetAfter))) {
+ delta = offsetAfter - offsetBefore;
+ if (delta != count) {
+ count = delta;
+
+ NS_WARNING("Listener OnDataAvailable contract violation");
+        nsCOMPtr<nsIConsoleService> consoleService =
+ do_GetService(NS_CONSOLESERVICE_CONTRACTID);
+ nsAutoString message
+ (NS_LITERAL_STRING(
+ "http channel Listener OnDataAvailable contract violation"));
+ if (consoleService) {
+ consoleService->LogStringMessage(message.get());
+ }
+ }
+ }
+ mLogicalOffset += count;
+ }
+
return rv;
}
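
The OnDataAvailable change measures how much the listener actually consumed by calling Tell() on a seekable input stream before and after the call, and advances mLogicalOffset by that delta rather than by the nominal count. A small Python sketch of the bookkeeping, with made-up stream/listener objects standing in for the XPCOM interfaces:

```python
def deliver(listener, stream, logical_offset, count):
    """Forward `count` bytes to a listener and return the new logical offset.

    With a seekable stream, a listener that reads fewer than `count`
    bytes gets the remainder redelivered later, so the offset advances
    by what was actually consumed instead of by `count`.
    """
    before = stream.tell() if hasattr(stream, "tell") else None
    listener.on_data_available(stream, logical_offset, count)
    if before is not None:
        consumed = stream.tell() - before
        if consumed != count:
            # Stands in for the NS_WARNING / console-service message.
            print("warning: listener OnDataAvailable contract violation")
            count = consumed
    return logical_offset + count
```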
diff --git a/netwerk/test/unit/test_http2.js b/netwerk/test/unit/test_http2.js
index 3c3e9f9da8b..56c361114a4 100644
--- a/netwerk/test/unit/test_http2.js
+++ b/netwerk/test/unit/test_http2.js
@@ -25,7 +25,7 @@ var bigListenerMD5 = '8f607cfdd2c87d6a7eedb657dafbd836';
function checkIsHttp2(request) {
try {
- if (request.getResponseHeader("X-Firefox-Spdy") == "HTTP-draft-08/2.0") {
+ if (request.getResponseHeader("X-Firefox-Spdy") == "HTTP-draft-09/2.0") {
if (request.getResponseHeader("X-Connection-Http2") == "yes") {
return true;
}
diff --git a/python/lldbutils/README.txt b/python/lldbutils/README.txt
new file mode 100644
index 00000000000..8957b3a9ced
--- /dev/null
+++ b/python/lldbutils/README.txt
@@ -0,0 +1,12 @@
+lldb debugging functionality for Gecko
+--------------------------------------
+
+This directory contains a module, lldbutils, which is imported by the
+in-tree .lldbinit file. The lldbutil modules define some lldb commands
+that are handy for debugging Gecko.
+
+If you want to add a new command or Python-implemented type summary, either add
+it to one of the existing broad area Python files (such as lldbutils/layout.py
+for layout-related commands) or create a new file if none of the existing files
+is appropriate. If you add a new file, make sure you add it to __all__ in
+lldbutils/__init__.py.
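
As the README describes, a new command lives either in an existing area file or in a new module that is listed in __all__ and exposes an init(debugger) hook. A hypothetical netwerk.py following that pattern (the command name and the expression it runs are made up for illustration):

```python
# python/lldbutils/lldbutils/netwerk.py (hypothetical example)
import lldb

def dumphttpchannel(debugger, command, result, dict):
    """Dumps state of the given HTTP channel pointer (illustrative only)."""
    debugger.HandleCommand('expr (' + command + ')->mURI.get()')

def init(debugger):
    debugger.HandleCommand(
        'command script add -f lldbutils.netwerk.dumphttpchannel dumphttpchannel')
```

With this file in place, __all__ in lldbutils/__init__.py would become ['layout', 'netwerk'] so the module is imported and initialized.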
diff --git a/python/lldbutils/lldbutils/__init__.py b/python/lldbutils/lldbutils/__init__.py
new file mode 100644
index 00000000000..1552d5f0bb4
--- /dev/null
+++ b/python/lldbutils/lldbutils/__init__.py
@@ -0,0 +1,7 @@
+import lldb
+
+__all__ = ['layout']
+
+def init():
+ for name in __all__:
+ __import__('lldbutils.' + name, globals(), locals(), ['init']).init(lldb.debugger)
diff --git a/python/lldbutils/lldbutils/layout.py b/python/lldbutils/lldbutils/layout.py
new file mode 100644
index 00000000000..97b4218a2ac
--- /dev/null
+++ b/python/lldbutils/lldbutils/layout.py
@@ -0,0 +1,15 @@
+import lldb
+
+def frametree(debugger, command, result, dict):
+ """Dumps the frame tree containing the given nsIFrame*."""
+ debugger.HandleCommand('expr (' + command + ')->DumpFrameTree()')
+
+def frametreelimited(debugger, command, result, dict):
+ """Dumps the subtree of a frame tree rooted at the given nsIFrame*."""
+ debugger.HandleCommand('expr (' + command + ')->DumpFrameTreeLimited()')
+
+def init(debugger):
+ debugger.HandleCommand('command script add -f lldbutils.layout.frametree frametree')
+    debugger.HandleCommand('command script add -f lldbutils.layout.frametreelimited frametreelimited')
+ debugger.HandleCommand('command alias ft frametree')
+    debugger.HandleCommand('command alias ftl frametreelimited')
diff --git a/python/mozbuild/mozbuild/backend/recursivemake.py b/python/mozbuild/mozbuild/backend/recursivemake.py
index 500b6eb3c59..005f3e251a1 100644
--- a/python/mozbuild/mozbuild/backend/recursivemake.py
+++ b/python/mozbuild/mozbuild/backend/recursivemake.py
@@ -554,7 +554,8 @@ class RecursiveMakeBackend(CommonBackend):
if dirs:
# For build systems without tiers (js/src), output a list
# of directories for each tier.
- root_mk.add_statement('%s_dirs := %s' % (tier, ' '.join(dirs)))
+ all_dirs = self._traversal.traverse('', filter)
+ root_mk.add_statement('%s_dirs := %s' % (tier, ' '.join(all_dirs)))
continue
if subtiers:
# Output the list of filtered subtiers for the given tier.
diff --git a/testing/testsuite-targets.mk b/testing/testsuite-targets.mk
index cfad7aea76c..bc6fcb07db9 100644
--- a/testing/testsuite-targets.mk
+++ b/testing/testsuite-targets.mk
@@ -479,7 +479,7 @@ stage-xpcshell: make-stage-dir
$(MAKE) -C $(DEPTH)/testing/xpcshell stage-package
stage-jstests: make-stage-dir
- $(MAKE) -C $(DEPTH)/js/src/tests stage-package
+ $(MAKE) -C $(DEPTH)/js/src/js/src/tests stage-package
stage-android: make-stage-dir
ifdef MOZ_ENABLE_SZIP
diff --git a/testing/xpcshell/node-http2/HISTORY.md b/testing/xpcshell/node-http2/HISTORY.md
index a2acdd21d10..987217d7112 100644
--- a/testing/xpcshell/node-http2/HISTORY.md
+++ b/testing/xpcshell/node-http2/HISTORY.md
@@ -1,6 +1,18 @@
Version history
===============
+### 2.2.0 (2013-12-25) ###
+
+* Upgrade to the latest draft: [draft-ietf-httpbis-http2-09]
+* [Tarball](https://github.com/molnarg/node-http2/archive/node-http2-2.2.0.tar.gz)
+
+[draft-ietf-httpbis-http2-09]: http://tools.ietf.org/html/draft-ietf-httpbis-http2-09
+
+### 2.1.1 (2013-12-21) ###
+
+* Minor bugfix
+* [Tarball](https://github.com/molnarg/node-http2/archive/node-http2-2.1.1.tar.gz)
+
### 2.1.0 (2013-11-10) ###
* Upgrade to the latest draft: [draft-ietf-httpbis-http2-07][draft-07]
diff --git a/testing/xpcshell/node-http2/README.md b/testing/xpcshell/node-http2/README.md
index 6b837fae434..36b9ab8e8de 100644
--- a/testing/xpcshell/node-http2/README.md
+++ b/testing/xpcshell/node-http2/README.md
@@ -1,7 +1,7 @@
node-http2
==========
-An HTTP/2 ([draft-ietf-httpbis-http2-07](http://tools.ietf.org/html/draft-ietf-httpbis-http2-07))
+An HTTP/2 ([draft-ietf-httpbis-http2-09](http://tools.ietf.org/html/draft-ietf-httpbis-http2-09))
client and server implementation for node.js.
Installation
diff --git a/testing/xpcshell/node-http2/doc/http.html b/testing/xpcshell/node-http2/doc/http.html
index fdf4436cf05..3bf7a129463 100644
--- a/testing/xpcshell/node-http2/doc/http.html
+++ b/testing/xpcshell/node-http2/doc/http.html
@@ -211,7 +211,7 @@ but will function normally when falling back to using HTTP/1.1.
Event: 'timeout'
response.writeContinue()
response.writeHead(statusCode, [reasonPhrase], [headers]) : reasonPhrase will always be
-ignored since it's not supported in HTTP/2
+ignored since it's not supported in HTTP/2
response.setTimeout(timeout, [callback])
@@ -288,11 +288,11 @@ exports.OutgoingMessage = OutgoingMessage;
- The implemented version of the HTTP/2 specification is draft 04 .
+ The implemented version of the HTTP/2 specification is draft 09 .
- var implementedVersion = 'HTTP-draft-07/2.0' ;
+ var implementedVersion = 'HTTP-draft-09/2.0' ;
@@ -478,7 +478,7 @@ IncomingMessage.prototype = Object.create(PassThrough.prototype, { constructor:
- Request Header Fields
+
Request Header Fields
* headers
argument: HTTP/2.0 request and response header fields carry information as a series
of key-value pairs. This includes the target URI for the request, the status code for the
response, as well as HTTP header fields.
@@ -849,6 +849,7 @@ Server.prototype = Object.create(EventEmitter.prototype, { constructor: { value:
});
endpoint.on('error' , this .emit.bind(this , 'clientError' ));
+ socket.on('error' , this .emit.bind(this , 'clientError' ));
this .emit('connection' , socket, endpoint);
};
@@ -1011,7 +1012,7 @@ IncomingRequest.prototype = Object.create(IncomingMessage.prototype, { construct
- Request Header Fields
+
Request Header Fields
* headers
argument: HTTP/2.0 request and response header fields carry information as a series
of key-value pairs. This includes the target URI for the request, the status code for the
response, as well as HTTP header fields.
@@ -1737,7 +1738,7 @@ IncomingResponse.prototype = Object.create(IncomingMessage.prototype, { construc
- Response Header Fields
+
Response Header Fields
* headers
argument: HTTP/2.0 request and response header fields carry information as a series
of key-value pairs. This includes the target URI for the request, the status code for the
response, as well as HTTP header fields.
diff --git a/testing/xpcshell/node-http2/doc/index.html b/testing/xpcshell/node-http2/doc/index.html
index 724ada150cc..0a75c8c8049 100644
--- a/testing/xpcshell/node-http2/doc/index.html
+++ b/testing/xpcshell/node-http2/doc/index.html
@@ -48,7 +48,7 @@
- node-http2 is an HTTP/2 (draft 06) implementation for node.js .
+ node-http2 is an HTTP/2 (draft 09) implementation for node.js .
The core of the protocol is implemented by the http2-protocol module. This module provides
two important features on top of http2-protocol:
diff --git a/testing/xpcshell/node-http2/lib/http.js b/testing/xpcshell/node-http2/lib/http.js
index 2fbfbf0cf3f..f65408d1d0c 100644
--- a/testing/xpcshell/node-http2/lib/http.js
+++ b/testing/xpcshell/node-http2/lib/http.js
@@ -121,7 +121,7 @@
//
// [1]: http://nodejs.org/api/https.html
// [2]: http://nodejs.org/api/http.html
-// [3]: http://tools.ietf.org/html/draft-ietf-httpbis-http2-04#section-8.1.3
+// [3]: http://tools.ietf.org/html/draft-ietf-httpbis-http2-09#section-8.1.3.2
// [expect-continue]: https://github.com/http2/http2-spec/issues/18
// [connect]: https://github.com/http2/http2-spec/issues/230
@@ -153,9 +153,9 @@ var deprecatedHeaders = [
'upgrade'
];
-// The implemented version of the HTTP/2 specification is [draft 04][1].
-// [1]: http://tools.ietf.org/html/draft-ietf-httpbis-http2-04
-var implementedVersion = 'HTTP-draft-08/2.0';
+// The implemented version of the HTTP/2 specification is [draft 09][1].
+// [1]: http://tools.ietf.org/html/draft-ietf-httpbis-http2-09
+var implementedVersion = 'HTTP-draft-09/2.0';
// When doing NPN/ALPN negotiation, HTTP/1.1 is used as fallback
var supportedProtocols = [implementedVersion, 'http/1.1', 'http/1.0'];
@@ -207,7 +207,7 @@ function IncomingMessage(stream) {
}
IncomingMessage.prototype = Object.create(PassThrough.prototype, { constructor: { value: IncomingMessage } });
-// [Request Header Fields](http://tools.ietf.org/html/draft-ietf-httpbis-http2-05#section-8.1.2.1)
+// [Request Header Fields](http://tools.ietf.org/html/draft-ietf-httpbis-http2-09#section-8.1.3.1)
// * `headers` argument: HTTP/2.0 request and response header fields carry information as a series
// of key-value pairs. This includes the target URI for the request, the status code for the
// response, as well as HTTP header fields.
@@ -403,6 +403,7 @@ Server.prototype._start = function _start(socket) {
});
endpoint.on('error', this.emit.bind(this, 'clientError'));
+ socket.on('error', this.emit.bind(this, 'clientError'));
this.emit('connection', socket, endpoint);
};
@@ -499,7 +500,7 @@ function IncomingRequest(stream) {
}
IncomingRequest.prototype = Object.create(IncomingMessage.prototype, { constructor: { value: IncomingRequest } });
-// [Request Header Fields](http://tools.ietf.org/html/draft-ietf-httpbis-http2-05#section-8.1.2.1)
+// [Request Header Fields](http://tools.ietf.org/html/draft-ietf-httpbis-http2-09#section-8.1.3.1)
// * `headers` argument: HTTP/2.0 request and response header fields carry information as a series
// of key-value pairs. This includes the target URI for the request, the status code for the
// response, as well as HTTP header fields.
@@ -930,7 +931,7 @@ function IncomingResponse(stream) {
}
IncomingResponse.prototype = Object.create(IncomingMessage.prototype, { constructor: { value: IncomingResponse } });
-// [Response Header Fields](http://tools.ietf.org/html/draft-ietf-httpbis-http2-05#section-8.1.2.2)
+// [Response Header Fields](http://tools.ietf.org/html/draft-ietf-httpbis-http2-09#section-8.1.3.2)
// * `headers` argument: HTTP/2.0 request and response header fields carry information as a series
// of key-value pairs. This includes the target URI for the request, the status code for the
// response, as well as HTTP header fields.
diff --git a/testing/xpcshell/node-http2/lib/index.js b/testing/xpcshell/node-http2/lib/index.js
index b05f2fb6d2d..48050c16270 100644
--- a/testing/xpcshell/node-http2/lib/index.js
+++ b/testing/xpcshell/node-http2/lib/index.js
@@ -1,4 +1,4 @@
-// [node-http2][homepage] is an [HTTP/2 (draft 06)][http2] implementation for [node.js][node].
+// [node-http2][homepage] is an [HTTP/2 (draft 09)][http2] implementation for [node.js][node].
//
// The core of the protocol is implemented by the [http2-protocol] module. This module provides
// two important features on top of http2-protocol:
@@ -11,7 +11,7 @@
//
// [homepage]: https://github.com/molnarg/node-http2
// [http2-protocol]: https://github.com/molnarg/node-http2-protocol
-// [http2]: http://tools.ietf.org/html/draft-ietf-httpbis-http2-06
+// [http2]: http://tools.ietf.org/html/draft-ietf-httpbis-http2-09
// [node]: http://nodejs.org/
// [node-https]: http://nodejs.org/api/https.html
// [node-http]: http://nodejs.org/api/http.html
diff --git a/testing/xpcshell/node-http2/node_modules/http2-protocol/HISTORY.md b/testing/xpcshell/node-http2/node_modules/http2-protocol/HISTORY.md
index 81d55d0e8e6..a45bc54c9ff 100644
--- a/testing/xpcshell/node-http2/node_modules/http2-protocol/HISTORY.md
+++ b/testing/xpcshell/node-http2/node_modules/http2-protocol/HISTORY.md
@@ -1,6 +1,13 @@
Version history
===============
+### 0.9.0 (2013-12-25) ###
+
+* Upgrade to the latest draft: [draft-ietf-httpbis-http2-09][draft-09]
+* [Tarball](https://github.com/molnarg/node-http2-protocol/archive/node-http2-protocol-0.9.0.tar.gz)
+
+[draft-09]: http://tools.ietf.org/html/draft-ietf-httpbis-http2-09
+
### 0.7.0 (2013-11-10) ###
* Upgrade to the latest draft: [draft-ietf-httpbis-http2-07][draft-07]
diff --git a/testing/xpcshell/node-http2/node_modules/http2-protocol/README.md b/testing/xpcshell/node-http2/node_modules/http2-protocol/README.md
index 94d7b0e2867..24a884edec7 100644
--- a/testing/xpcshell/node-http2/node_modules/http2-protocol/README.md
+++ b/testing/xpcshell/node-http2/node_modules/http2-protocol/README.md
@@ -1,7 +1,7 @@
node-http2-protocol
===================
-An HTTP/2 ([draft-ietf-httpbis-http2-07](http://tools.ietf.org/html/draft-ietf-httpbis-http2-07))
+An HTTP/2 ([draft-ietf-httpbis-http2-09](http://tools.ietf.org/html/draft-ietf-httpbis-http2-09))
framing layer implementation for node.js.
Installation
@@ -40,7 +40,7 @@ For pretty printing logs, you will also need a global install of bunyan (`npm in
The developer documentation is located in the `doc` directory. The docs are usually updated only
before releasing a new version. To regenerate them manually, run `npm run-script prepublish`.
-There's a hosted version which is located [here](http://molnarg.github.io/node-http2/doc/).
+There's a hosted version which is located [here](http://molnarg.github.io/node-http2-protocol/doc/).
### Running the tests ###
@@ -52,10 +52,10 @@ point to understand the code.
To generate a code coverage report, run `npm test --coverage` (it may be slow, be patient).
Code coverage summary as of version 0.6.0:
```
-Statements : 92.39% ( 1165/1261 )
-Branches : 86.57% ( 477/551 )
-Functions : 91.22% ( 135/148 )
-Lines : 92.35% ( 1159/1255 )
+Statements : 92.43% ( 1257/1360 )
+Branches : 86.36% ( 500/579 )
+Functions : 90.12% ( 146/162 )
+Lines : 92.39% ( 1251/1354 )
```
There's a hosted version of the detailed (line-by-line) coverage report
diff --git a/testing/xpcshell/node-http2/node_modules/http2-protocol/doc/compressor.html b/testing/xpcshell/node-http2/node_modules/http2-protocol/doc/compressor.html
index 05120cdb94e..bebbe5ed358 100644
--- a/testing/xpcshell/node-http2/node_modules/http2-protocol/doc/compressor.html
+++ b/testing/xpcshell/node-http2/node_modules/http2-protocol/doc/compressor.html
@@ -73,7 +73,7 @@
- The implementation of the HTTP/2 Header Compression spec is separated from
+
The implementation of the HTTP/2 Header Compression spec is separated from
the 'integration' part which handles HEADERS and PUSH_PROMISE frames. The compression itself is
implemented in the first part of the file, and consists of three classes: HeaderTable
,
HeaderSetDecompressor
and HeaderSetCompressor
. The two latter classes are
@@ -133,9 +133,9 @@ exports.Decompressor = Decompressor;
- The Header Table is a component used to associate headers to index values. It is basically an
+
The Header Table is a component used to associate headers to index values. It is basically an
ordered list of [name, value]
pairs, so it's implemented as a subclass of Array
.
-In this implementation, the Header Table and the Static Table are handled as a single table.
+In this implementation, the Header Table and the Static Table are handled as a single table.
@@ -163,7 +163,7 @@ In this implementation, the Header Table and the Reference Set : contains a group of headers used as a reference for the
+Reference Set : contains a group of headers used as a reference for the
differential encoding of a new set of headers. (reference
flag)
Emitted headers: the headers that are already emitted as part of the current decompression
process (not part of the spec, emitted
flag)
@@ -231,7 +231,7 @@ length in bytes, of its value's length in bytes and of 32 bytes.
- The add(index, entry)
can be used to manage the header table :
+ The add(index, entry)
can be used to manage the header table :
it pushes the new entry
at the beginning of the table
before doing such a modification, it has to be ensured that the header table size will stay
@@ -302,7 +302,7 @@ HeaderTable.prototype.add = functio
-
+
@@ -357,6 +357,7 @@ HeaderTable.prototype.add = functio
[ 'expect' , '' ],
[ 'expires' , '' ],
[ 'from' , '' ],
+ [ 'host' , '' ],
[ 'if-match' , '' ],
[ 'if-modified-since' , '' ],
[ 'if-none-match' , '' ],
@@ -450,16 +451,18 @@ TransformStream class. It collects the data chunks for later processing.
- execute(rep)
executes the given header representation .
+ execute(rep)
executes the given header representation .
The JavaScript object representation of a header representation:
{
name: String || Integer, // string literal or index
value: String || Integer, // string literal or index
index: Boolean // with or without indexing
}
+Important: to ease the indexing of the header table, indexes start at 0 instead of 1.
Examples:
Indexed:
{ name: 2 , value: 2 , index: false }
+{ name: -1 , value: -1 , index: false } // reference set emptying
Literal:
{ name: 2 , value: 'X', index: false } // without indexing
{ name: 2 , value: 'Y', index: true } // with indexing
@@ -483,6 +486,11 @@ Literal:
¶
+An indexed representation with an index value of 0 (in our representation, it means -1)
+entails the following actions:
+The reference set is emptied.
+
+
An indexed representation corresponding to an entry present in the reference set
entails the following actions:
The entry is removed from the reference set.
@@ -512,9 +520,17 @@ this new entry didn't fit in the header table)
var index = rep.value;
entry = this ._table[index];
- if (entry.reference) {
+ if (index == -1 ) {
+ for (var i = 0 ; i < this ._table.length; i++) {
+ this ._table[i].reference = false ;
+ }
+ }
+
+ else if (entry.reference) {
entry.reference = false ;
- } else {
+ }
+
+ else {
pair = entry.slice();
this .push(pair);
@@ -619,7 +635,7 @@ the input stream is over.
¶
@@ -776,7 +792,7 @@ in the reference set (that means don't remove at the end of the encoding pro
if (fullMatch !== -1 ) {
- rep = { name: fullMatch, value: fullMatch, index: -1 };
+ rep = { name: fullMatch, value: fullMatch, index: false };
if (!entry.reference) {
if (fullMatch >= this ._table.length - this ._table._staticLength) {
@@ -881,7 +897,7 @@ TransformStream class. It gets called when there's no more header to compres
for (var index = 0 ; index < this ._table.length; index++) {
var entry = this ._table[index];
if (entry.reference && !entry.keep && !entry.emitted) {
- this .send({ name: index, value: index, index: -1 });
+ this .send({ name: index, value: index, index: false });
entry.reference = false ;
}
entry.keep = false ;
@@ -900,7 +916,7 @@ TransformStream class. It gets called when there's no more header to compres
-
+
@@ -1807,16 +1823,16 @@ HeaderSetCompressor.header =
functi
}
if (representation === representations.indexed) {
- buffers.push(HeaderSetCompressor.integer(header.value, representation.prefix));
+ buffers.push(HeaderSetCompressor.integer(header.value + 1 , representation.prefix));
+ }
- } else {
+ else {
if (typeof header.name === 'number' ) {
buffers.push(HeaderSetCompressor.integer(header.name + 1 , representation.prefix));
} else {
buffers.push(HeaderSetCompressor.integer(0 , representation.prefix));
buffers.push(HeaderSetCompressor.string(header.name, huffmanTable));
}
-
buffers.push(HeaderSetCompressor.string(header.value, huffmanTable));
}
@@ -1838,17 +1854,16 @@ HeaderSetDecompressor.header = func
}
if (representation === representations.indexed) {
- header.value = header.name = HeaderSetDecompressor.integer(buffer, representation.prefix);
+ header.value = header.name = HeaderSetDecompressor.integer(buffer, representation.prefix) - 1 ;
header.index = false ;
+ }
- } else {
+ else {
header.name = HeaderSetDecompressor.integer(buffer, representation.prefix) - 1 ;
if (header.name === -1 ) {
header.name = HeaderSetDecompressor.string(buffer, huffmanTable);
}
-
header.value = HeaderSetDecompressor.string(buffer, huffmanTable);
-
header.index = (representation === representations.literalIncremental);
}
@@ -1972,12 +1987,63 @@ but the API becomes simpler.
var compressor = new HeaderSetCompressor(this ._log, this ._table, this ._huffmanTable);
for (var name in headers) {
var value = headers[name];
+ name = String(name).toLowerCase();
+
+
+
+
+
+
+
+
+
+To allow for better compression efficiency, the Cookie header field MAY be split into
+separate header fields, each with one or more cookie-pairs.
+
+
+
+
+ if (name == 'cookie' ) {
+ if (!(value instanceof Array)) {
+ value = [value]
+ }
+ value = Array.prototype.concat.apply([], value.map(function (cookie) {
+ return String(cookie).split(';' ).map(trim)
+ }));
+ }
+
+
+
+
+
+
+
+
+
+To preserve the order of a comma-separated list, the ordered values for a single header
+field name appearing in different header fields are concatenated into a single value.
+A zero-valued octet (0x0) is used to delimit multiple values.
+Header fields containing multiple values MUST be concatenated into a single value unless
+the ordering of that header field is known to be not significant.
+Currently, only the Cookie header is considered to be order-insensitive.
+
+
+
+
+ if ((value instanceof Array) && (name !== 'cookie' )) {
+ value = value.join('\0' );
+ }
+
if (value instanceof Array) {
- for (var i = 0 ; i< value.length; i++) {
- compressor.write([String(name), String(value[i])]);
+ for (var i = 0 ; i < value.length; i++) {
+ compressor.write([name, String(value[i])]);
}
} else {
- compressor.write([String(name), String(value)]);
+ compressor.write([name, String(value)]);
}
}
compressor.end();
@@ -1992,11 +2058,11 @@ but the API becomes simpler.
-
+
When a frame
arrives
@@ -2007,11 +2073,11 @@ but the API becomes simpler.
-
+
and it is a HEADERS or PUSH_PROMISE frame
@@ -2057,11 +2123,11 @@ marks the last frame and the END_STREAM flag which is always false before the en
-
+
otherwise, the frame is forwarded without taking any action
@@ -2079,11 +2145,11 @@ marks the last frame and the END_STREAM flag which is always false before the en
-
+
The Decompressor class
@@ -2092,11 +2158,11 @@ marks the last frame and the END_STREAM flag which is always false before the en
-
+
The Decompressor is a stateful transform stream, since it has to collect multiple frames first,
and the decoding comes after unifying the payload of those frames.
@@ -2124,11 +2190,11 @@ and
this._stream
respectively.
-
+
Changing the header table size
@@ -2141,11 +2207,11 @@ and
this._stream
respectively.
-
+
decompress
takes a full header block, and decompresses it using a new HeaderSetDecompressor
stream instance. This means that from now on, the advantages of streaming header decoding are
@@ -2160,17 +2226,58 @@ lost, but the API becomes simpler.
var headers = {};
var pair;
while (pair = decompressor.read()) {
-
var name = pair[
0 ];
-
var value = pair[
1 ];
-
if (name
in headers) {
-
if (headers[name]
instanceof Array) {
- headers[name].push(value);
+
var name = pair[
0 ];
+
+
+
+
+
+
+
+
+
+After decompression, header fields that have values containing zero octets (0x0) MUST be
+split into multiple header fields before being processed.
+
+
+
+
+ var values = pair[1 ].split('\0' );
+ for (var i = 0 ; i < values.length; i++) {
+ var value = values[i];
+ if (name in headers) {
+ if (headers[name] instanceof Array) {
+ headers[name].push(value);
+ } else {
+ headers[name] = [headers[name], value];
+ }
} else {
- headers[name] = [headers[name], value];
+ headers[name] = value;
}
- } else {
- headers[name] = value;
}
+ }
+
+
+
+
+
+
+
+
+
+If there are multiple Cookie header fields after decompression, these MUST be concatenated
+into a single octet string using the two octet delimiter of 0x3B, 0x20 (the ASCII
+string "; ").
+
+
+
+
+ if (('cookie' in headers) && (headers['cookie' ] instanceof Array)) {
+ headers['cookie' ] = headers['cookie' ].join('; ' )
}
return headers;
@@ -2179,11 +2286,11 @@ lost, but the API becomes simpler.
-
+
When a frame
arrives
@@ -2194,11 +2301,11 @@ lost, but the API becomes simpler.
-
+
and the collection process is already _inProgress
, the frame is simply stored, except if
@@ -2219,11 +2326,11 @@ it's an illegal frame
-
+
and the collection process is not _inProgress
, but the new frame's type is HEADERS or
@@ -2234,18 +2341,18 @@ PUSH_PROMISE, a new collection process begins
else if ((frame.type === 'HEADERS' ) || (frame.type === 'PUSH_PROMISE' )) {
this ._inProgress = true ;
- this ._base = frame;
+ this ._base = util._extend({}, frame);
this ._frames = [frame];
}
-
+
otherwise, the frame is forwarded without taking any action
@@ -2260,11 +2367,11 @@ PUSH_PROMISE, a new collection process begins
-
+
When the frame signals that it's the last in the series, the header block chunks are
@@ -2285,7 +2392,7 @@ decompressed headers.
this .emit('error' , 'COMPRESSION_ERROR' );
return ;
}
- this .push(util._extend({ headers: headers }, this ._base));
+ this .push(util._extend(this ._base, { headers: headers }));
this ._inProgress = false ;
}
@@ -2295,11 +2402,11 @@ decompressed headers.
-
+
Helper functions
@@ -2308,11 +2415,11 @@ decompressed headers.
-
+
Concatenate an array of buffers into a new buffer
@@ -2335,11 +2442,11 @@ decompressed headers.
-
+
Cut buffer
into chunks not larger than size
@@ -2354,6 +2461,10 @@ decompressed headers.
cursor += chunkSize;
}
while (cursor < buffer.length);
return chunks;
+}
+
+
function trim (string) {
+
return string.trim()
}
diff --git a/testing/xpcshell/node-http2/node_modules/http2-protocol/doc/flow.html b/testing/xpcshell/node-http2/node_modules/http2-protocol/doc/flow.html
index 1efe43b9905..b1e6fdfbcef 100644
--- a/testing/xpcshell/node-http2/node_modules/http2-protocol/doc/flow.html
+++ b/testing/xpcshell/node-http2/node_modules/http2-protocol/doc/flow.html
@@ -135,7 +135,7 @@ exports.Flow = Flow;
Event: 'error' (type) : signals an error
setInitialWindow(size) : the initial flow control window size can be changed any time
-(as described in the standard ) using this method
+(as described in the standard ) using this method
disableRemoteFlowControl() : sends a WINDOW_UPDATE signaling that we don't want flow control
diff --git a/testing/xpcshell/node-http2/node_modules/http2-protocol/doc/framer.html b/testing/xpcshell/node-http2/node_modules/http2-protocol/doc/framer.html
index 56ed094de89..b4506f74786 100644
--- a/testing/xpcshell/node-http2/node_modules/http2-protocol/doc/framer.html
+++ b/testing/xpcshell/node-http2/node_modules/http2-protocol/doc/framer.html
@@ -340,7 +340,7 @@ will also run.
-
+
@@ -484,7 +484,7 @@ Deserializer.commonHeader = functio
typeSpecificAttributes
: a register of frame specific frame object attributes (used by
logging code and also serves as documentation for frame objects)
-
+
@@ -533,7 +533,7 @@ Deserializer.DATA = function
-
+
@@ -618,7 +618,7 @@ Deserializer.HEADERS = function
¶
-
+
@@ -679,7 +679,7 @@ Deserializer.PRIORITY =
function
¶
-
+
@@ -743,7 +743,7 @@ Deserializer.RST_STREAM = function<
-
+
@@ -957,7 +957,7 @@ bits are reserved.
-
+
@@ -1034,7 +1034,7 @@ Deserializer.PUSH_PROMISE = functio
-
+
@@ -1096,7 +1096,7 @@ Deserializer.PING = function
-
+
@@ -1172,7 +1172,7 @@ Deserializer.GOAWAY = function
¶
-
+
@@ -1235,7 +1235,7 @@ Deserializer.WINDOW_UPDATE = functi
-
+
@@ -1281,7 +1281,7 @@ Deserializer.CONTINUATION = functio
-
+
diff --git a/testing/xpcshell/node-http2/node_modules/http2-protocol/doc/index.html b/testing/xpcshell/node-http2/node_modules/http2-protocol/doc/index.html
index 90190ab9312..8d0a58c948f 100644
--- a/testing/xpcshell/node-http2/node_modules/http2-protocol/doc/index.html
+++ b/testing/xpcshell/node-http2/node_modules/http2-protocol/doc/index.html
@@ -73,14 +73,14 @@
- node-http2-protocol is an implementation of the HTTP/2 (draft 06)
+
node-http2-protocol is an implementation of the HTTP/2 (draft 09)
framing layer for node.js .
The main building blocks are node.js streams that are connected through pipes.
The main components are:
Endpoint : represents an HTTP/2 endpoint (client or server). It's
responsible for the the first part of the handshake process (sending/receiving the
-connection header ) and manages other components (framer, compressor,
+connection header ) and manages other components (framer, compressor,
connection, streams) that make up a client or server.
Connection : multiplexes the active HTTP/2 streams, manages connection
@@ -88,7 +88,7 @@ lifecycle and settings, and responsible for enforcing the connection level limit
control, initiated stream limit)
Stream : implementation of the HTTP/2 stream concept .
-Implements the stream state machine defined by the standard, provides
+Implements the stream state machine defined by the standard, provides
management methods and events for using the stream (sending/receiving headers, data, etc.),
and enforces stream level constraints (flow control, sending only legal frames).
diff --git a/testing/xpcshell/node-http2/node_modules/http2-protocol/doc/stream.html b/testing/xpcshell/node-http2/node_modules/http2-protocol/doc/stream.html
index e713f200dda..2054f95c6c6 100644
--- a/testing/xpcshell/node-http2/node_modules/http2-protocol/doc/stream.html
+++ b/testing/xpcshell/node-http2/node_modules/http2-protocol/doc/stream.html
@@ -769,7 +769,7 @@ Stream.prototype._finishing = funct
-
+
diff --git a/testing/xpcshell/node-http2/node_modules/http2-protocol/lib/compressor.js b/testing/xpcshell/node-http2/node_modules/http2-protocol/lib/compressor.js
index 5c3c774658f..0c04b9c164b 100644
--- a/testing/xpcshell/node-http2/node_modules/http2-protocol/lib/compressor.js
+++ b/testing/xpcshell/node-http2/node_modules/http2-protocol/lib/compressor.js
@@ -13,7 +13,7 @@
//
// [node-transform]: http://nodejs.org/api/stream.html#stream_class_stream_transform
// [node-objectmode]: http://nodejs.org/api/stream.html#stream_new_stream_readable_options
-// [http2-compression]: http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-04
+// [http2-compression]: http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-05
exports.HeaderTable = HeaderTable;
exports.HuffmanTable = HuffmanTable;
@@ -35,8 +35,8 @@ var util = require('util');
// The [Header Table] is a component used to associate headers to index values. It is basically an
// ordered list of `[name, value]` pairs, so it's implemented as a subclass of `Array`.
// In this implementation, the Header Table and the [Static Table] are handled as a single table.
-// [Header Table]: http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-04#section-3.1.2
-// [Static Table]: http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-04#appendix-B
+// [Header Table]: http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-05#section-3.1.2
+// [Static Table]: http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-05#appendix-B
function HeaderTable(log, limit) {
var self = HeaderTable.staticTable.map(entryFromPair);
self._log = log;
@@ -59,7 +59,7 @@ function HeaderTable(log, limit) {
// * Headers to be kept: headers that should not be removed as the last step of the encoding process
// (not part of the spec, `keep` flag)
//
-// [referenceset]: http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-04#section-3.1.3
+// [referenceset]: http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-05#section-3.1.3
//
// Relations of the sets:
//
@@ -99,7 +99,7 @@ function size(entry) {
}
// The `add(index, entry)` can be used to [manage the header table][tablemgmt]:
-// [tablemgmt]: http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-04#section-3.3
+// [tablemgmt]: http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-05#section-3.3
//
// * it pushes the new `entry` at the beginning of the table
// * before doing such a modification, it has to be ensured that the header table size will stay
@@ -146,9 +146,9 @@ HeaderTable.prototype.setSizeLimit = function setSizeLimit(limit) {
this._enforceLimit(this._limit);
};
-// [The Static Table](http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-04#appendix-B)
+// [The Static Table](http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-05#appendix-B)
// ------------------
-// [statictable]:http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-04#appendix-B
+// [statictable]:http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-05#appendix-B
// The table is generated with feeding the table from the spec to the following sed command:
//
@@ -191,6 +191,7 @@ HeaderTable.staticTable = [
[ 'expect' , '' ],
[ 'expires' , '' ],
[ 'from' , '' ],
+ [ 'host' , '' ],
[ 'if-match' , '' ],
[ 'if-modified-since' , '' ],
[ 'if-none-match' , '' ],
@@ -245,7 +246,7 @@ HeaderSetDecompressor.prototype._transform = function _transform(chunk, encoding
};
// `execute(rep)` executes the given [header representation][representation].
-// [representation]: http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-04#section-3.1.4
+// [representation]: http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-05#section-3.1.4
// The *JavaScript object representation* of a header representation:
//
@@ -255,10 +256,13 @@ HeaderSetDecompressor.prototype._transform = function _transform(chunk, encoding
// index: Boolean // with or without indexing
// }
//
+// *Important:* to ease the indexing of the header table, indexes start at 0 instead of 1.
+//
// Examples:
//
// Indexed:
// { name: 2 , value: 2 , index: false }
+// { name: -1 , value: -1 , index: false } // reference set emptying
// Literal:
// { name: 2 , value: 'X', index: false } // without indexing
// { name: 2 , value: 'Y', index: true } // with indexing
@@ -269,6 +273,9 @@ HeaderSetDecompressor.prototype._execute = function _execute(rep) {
var entry, pair;
+ // * An _indexed representation_ with an index value of 0 (in our representation, it means -1)
+ // entails the following actions:
+ // * The reference set is emptied.
// * An _indexed representation_ corresponding to an entry _present_ in the reference set
// entails the following actions:
// * The entry is removed from the reference set.
@@ -286,9 +293,17 @@ HeaderSetDecompressor.prototype._execute = function _execute(rep) {
var index = rep.value;
entry = this._table[index];
- if (entry.reference) {
+ if (index == -1) {
+ for (var i = 0; i < this._table.length; i++) {
+ this._table[i].reference = false;
+ }
+ }
+
+ else if (entry.reference) {
entry.reference = false;
- } else {
+ }
+
+ else {
pair = entry.slice();
this.push(pair);
@@ -340,7 +355,7 @@ HeaderSetDecompressor.prototype._flush = function _flush(callback) {
this._execute(HeaderSetDecompressor.header(buffer, this._huffmanTable));
}
- // * [emits the reference set](http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-04#section-3.2.2)
+ // * [emits the reference set](http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-05#section-3.2.2)
for (var index = 0; index < this._table.length; index++) {
var entry = this._table[index];
if (entry.reference && !entry.emitted) {
@@ -431,7 +446,7 @@ HeaderSetCompressor.prototype._transform = function _transform(pair, encoding, c
// header is common with the previous header set, and is still untouched. We mark it to keep
// in the reference set (that means don't remove at the end of the encoding process).
if (fullMatch !== -1) {
- rep = { name: fullMatch, value: fullMatch, index: -1 };
+ rep = { name: fullMatch, value: fullMatch, index: false };
if (!entry.reference) {
if (fullMatch >= this._table.length - this._table._staticLength) {
@@ -496,7 +511,7 @@ HeaderSetCompressor.prototype._flush = function _flush(callback) {
for (var index = 0; index < this._table.length; index++) {
var entry = this._table[index];
if (entry.reference && !entry.keep && !entry.emitted) {
- this.send({ name: index, value: index, index: -1 });
+ this.send({ name: index, value: index, index: false });
entry.reference = false;
}
entry.keep = false;
@@ -506,7 +521,7 @@ HeaderSetCompressor.prototype._flush = function _flush(callback) {
callback();
};
-// [Detailed Format](http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-04#section-4)
+// [Detailed Format](http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-05#section-4)
// -----------------
// ### Integer representation ###
@@ -1349,16 +1364,16 @@ HeaderSetCompressor.header = function writeHeader(header, huffmanTable) {
}
if (representation === representations.indexed) {
- buffers.push(HeaderSetCompressor.integer(header.value, representation.prefix));
+ buffers.push(HeaderSetCompressor.integer(header.value + 1, representation.prefix));
+ }
- } else {
+ else {
if (typeof header.name === 'number') {
buffers.push(HeaderSetCompressor.integer(header.name + 1, representation.prefix));
} else {
buffers.push(HeaderSetCompressor.integer(0, representation.prefix));
buffers.push(HeaderSetCompressor.string(header.name, huffmanTable));
}
-
buffers.push(HeaderSetCompressor.string(header.value, huffmanTable));
}
@@ -1380,17 +1395,16 @@ HeaderSetDecompressor.header = function readHeader(buffer, huffmanTable) {
}
if (representation === representations.indexed) {
- header.value = header.name = HeaderSetDecompressor.integer(buffer, representation.prefix);
+ header.value = header.name = HeaderSetDecompressor.integer(buffer, representation.prefix) - 1;
header.index = false;
+ }
- } else {
+ else {
header.name = HeaderSetDecompressor.integer(buffer, representation.prefix) - 1;
if (header.name === -1) {
header.name = HeaderSetDecompressor.string(buffer, huffmanTable);
}
-
header.value = HeaderSetDecompressor.string(buffer, huffmanTable);
-
header.index = (representation === representations.literalIncremental);
}
@@ -1449,12 +1463,35 @@ Compressor.prototype.compress = function compress(headers) {
var compressor = new HeaderSetCompressor(this._log, this._table, this._huffmanTable);
for (var name in headers) {
var value = headers[name];
+ name = String(name).toLowerCase();
+
+ // * To allow for better compression efficiency, the Cookie header field MAY be split into
+ // separate header fields, each with one or more cookie-pairs.
+ if (name == 'cookie') {
+ if (!(value instanceof Array)) {
+ value = [value]
+ }
+ value = Array.prototype.concat.apply([], value.map(function(cookie) {
+ return String(cookie).split(';').map(trim)
+ }));
+ }
+
+ // * To preserve the order of a comma-separated list, the ordered values for a single header
+ // field name appearing in different header fields are concatenated into a single value.
+ // A zero-valued octet (0x0) is used to delimit multiple values.
+ // * Header fields containing multiple values MUST be concatenated into a single value unless
+ // the ordering of that header field is known to be not significant.
+ // * Currently, only the Cookie header is considered to be order-insensitive.
+ if ((value instanceof Array) && (name !== 'cookie')) {
+ value = value.join('\0');
+ }
+
if (value instanceof Array) {
- for (var i = 0; i< value.length; i++) {
- compressor.write([String(name), String(value[i])]);
+ for (var i = 0; i < value.length; i++) {
+ compressor.write([name, String(value[i])]);
}
} else {
- compressor.write([String(name), String(value)]);
+ compressor.write([name, String(value)]);
}
}
compressor.end();
@@ -1550,18 +1587,30 @@ Decompressor.prototype.decompress = function decompress(block) {
var pair;
while (pair = decompressor.read()) {
var name = pair[0];
- var value = pair[1];
- if (name in headers) {
- if (headers[name] instanceof Array) {
- headers[name].push(value);
+ // * After decompression, header fields that have values containing zero octets (0x0) MUST be
+ // split into multiple header fields before being processed.
+ var values = pair[1].split('\0');
+ for (var i = 0; i < values.length; i++) {
+ var value = values[i];
+ if (name in headers) {
+ if (headers[name] instanceof Array) {
+ headers[name].push(value);
+ } else {
+ headers[name] = [headers[name], value];
+ }
} else {
- headers[name] = [headers[name], value];
+ headers[name] = value;
}
- } else {
- headers[name] = value;
}
}
+ // * If there are multiple Cookie header fields after decompression, these MUST be concatenated
+ // into a single octet string using the two octet delimiter of 0x3B, 0x20 (the ASCII
+ // string "; ").
+ if (('cookie' in headers) && (headers['cookie'] instanceof Array)) {
+ headers['cookie'] = headers['cookie'].join('; ')
+ }
+
return headers;
};
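
On the decompression side the library first splits any value containing zero octets back into multiple header fields and then re-joins multiple Cookie fields with "; ", as the spec requires. A compact Python rendering of those two post-processing rules (not the node-http2 API):

```python
def postprocess(pairs):
    """Rebuild a header dict from decompressed (name, value) pairs."""
    headers = {}
    for name, raw in pairs:
        # Values containing 0x0 delimiters become multiple header fields.
        for value in raw.split("\0"):
            if name in headers:
                if isinstance(headers[name], list):
                    headers[name].append(value)
                else:
                    headers[name] = [headers[name], value]
            else:
                headers[name] = value
    # Multiple Cookie fields are re-joined with the "; " delimiter.
    if isinstance(headers.get("cookie"), list):
        headers["cookie"] = "; ".join(headers["cookie"])
    return headers

# postprocess([("cookie", "a=1"), ("cookie", "b=2")]) == {"cookie": "a=1; b=2"}
```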
@@ -1582,7 +1631,7 @@ Decompressor.prototype._transform = function _transform(frame, encoding, done) {
// PUSH_PROMISE, a new collection process begins
else if ((frame.type === 'HEADERS') || (frame.type === 'PUSH_PROMISE')) {
this._inProgress = true;
- this._base = frame;
+ this._base = util._extend({}, frame);
this._frames = [frame];
}
@@ -1605,7 +1654,7 @@ Decompressor.prototype._transform = function _transform(frame, encoding, done) {
this.emit('error', 'COMPRESSION_ERROR');
return;
}
- this.push(util._extend({ headers: headers }, this._base));
+ this.push(util._extend(this._base, { headers: headers }));
this._inProgress = false;
}
@@ -1641,3 +1690,7 @@ function cut(buffer, size) {
} while(cursor < buffer.length);
return chunks;
}
+
+function trim(string) {
+ return string.trim()
+}
diff --git a/testing/xpcshell/node-http2/node_modules/http2-protocol/lib/flow.js b/testing/xpcshell/node-http2/node_modules/http2-protocol/lib/flow.js
index f5b19af62b6..108c840613d 100644
--- a/testing/xpcshell/node-http2/node_modules/http2-protocol/lib/flow.js
+++ b/testing/xpcshell/node-http2/node_modules/http2-protocol/lib/flow.js
@@ -23,7 +23,7 @@ exports.Flow = Flow;
//
// * **disableLocalFlowControl()**: disables flow control for outgoing frames
//
-// [1]: http://tools.ietf.org/html/draft-ietf-httpbis-http2-04#section-6.9.2
+// [1]: http://tools.ietf.org/html/draft-ietf-httpbis-http2-09#section-6.9.2
// API for child classes
// ---------------------
diff --git a/testing/xpcshell/node-http2/node_modules/http2-protocol/lib/framer.js b/testing/xpcshell/node-http2/node_modules/http2-protocol/lib/framer.js
index acecf9ef22b..b2f00f123b0 100644
--- a/testing/xpcshell/node-http2/node_modules/http2-protocol/lib/framer.js
+++ b/testing/xpcshell/node-http2/node_modules/http2-protocol/lib/framer.js
@@ -143,7 +143,7 @@ Deserializer.prototype._transform = function _transform(chunk, encoding, done) {
done();
};
-// [Frame Header](http://http2.github.io/http2-spec/#FrameHeader)
+// [Frame Header](http://tools.ietf.org/html/draft-ietf-httpbis-http2-09#section-4.1)
// --------------------------------------------------------------
//
// HTTP/2.0 frames share a common base format consisting of an 8-byte header followed by 0 to 65535
@@ -259,7 +259,7 @@ Deserializer.commonHeader = function readCommonHeader(buffer, frame) {
// * `typeSpecificAttributes`: a register of frame specific frame object attributes (used by
// logging code and also serves as documentation for frame objects)
-// [DATA Frames](http://http2.github.io/http2-spec/#DataFrames)
+// [DATA Frames](http://tools.ietf.org/html/draft-ietf-httpbis-http2-09#section-6.1)
// ------------------------------------------------------------
//
// DATA frames (type=0x0) convey arbitrary, variable-length sequences of octets associated with a
@@ -287,7 +287,7 @@ Deserializer.DATA = function readData(buffer, frame) {
frame.data = buffer;
};
-// [HEADERS](http://http2.github.io/http2-spec/#HEADERS)
+// [HEADERS](http://tools.ietf.org/html/draft-ietf-httpbis-http2-09#section-6.2)
// --------------------------------------------------------------
//
// The HEADERS frame (type=0x1) allows the sender to create a stream.
@@ -341,7 +341,7 @@ Deserializer.HEADERS = function readHeadersPriority(buffer, frame) {
}
};
-// [PRIORITY](http://http2.github.io/http2-spec/#PRIORITY)
+// [PRIORITY](http://tools.ietf.org/html/draft-ietf-httpbis-http2-09#section-6.3)
// -------------------------------------------------------
//
// The PRIORITY frame (type=0x2) specifies the sender-advised priority of a stream.
@@ -372,7 +372,7 @@ Deserializer.PRIORITY = function readPriority(buffer, frame) {
frame.priority = buffer.readUInt32BE(0);
};
-// [RST_STREAM](http://http2.github.io/http2-spec/#RST_STREAM)
+// [RST_STREAM](http://tools.ietf.org/html/draft-ietf-httpbis-http2-09#section-6.4)
// -----------------------------------------------------------
//
// The RST_STREAM frame (type=0x3) allows for abnormal termination of a stream.
@@ -406,7 +406,7 @@ Deserializer.RST_STREAM = function readRstStream(buffer, frame) {
frame.error = errorCodes[buffer.readUInt32BE(0)];
};
-// [SETTINGS](http://http2.github.io/http2-spec/#SETTINGS)
+// [SETTINGS](http://tools.ietf.org/html/draft-ietf-httpbis-http2-09#section-6.5)
// -------------------------------------------------------
//
// The SETTINGS frame (type=0x4) conveys configuration parameters that affect how endpoints
@@ -507,7 +507,7 @@ definedSettings[7] = { name: 'SETTINGS_INITIAL_WINDOW_SIZE', flag: false };
// bits are reserved.
definedSettings[10] = { name: 'SETTINGS_FLOW_CONTROL_OPTIONS', flag: true };
-// [PUSH_PROMISE](http://http2.github.io/http2-spec/#PUSH_PROMISE)
+// [PUSH_PROMISE](http://tools.ietf.org/html/draft-ietf-httpbis-http2-09#section-6.6)
// ---------------------------------------------------------------
//
// The PUSH_PROMISE frame (type=0x5) is used to notify the peer endpoint in advance of streams the
@@ -553,7 +553,7 @@ Deserializer.PUSH_PROMISE = function readPushPromise(buffer, frame) {
frame.data = buffer.slice(4);
};
-// [PING](http://http2.github.io/http2-spec/#PING)
+// [PING](http://tools.ietf.org/html/draft-ietf-httpbis-http2-09#section-6.7)
// -----------------------------------------------
//
// The PING frame (type=0x6) is a mechanism for measuring a minimal round-trip time from the
@@ -583,7 +583,7 @@ Deserializer.PING = function readPing(buffer, frame) {
frame.data = buffer;
};
-// [GOAWAY](http://http2.github.io/http2-spec/#GOAWAY)
+// [GOAWAY](http://tools.ietf.org/html/draft-ietf-httpbis-http2-09#section-6.8)
// ---------------------------------------------------
//
// The GOAWAY frame (type=0x7) informs the remote peer to stop creating streams on this connection.
@@ -630,7 +630,7 @@ Deserializer.GOAWAY = function readGoaway(buffer, frame) {
frame.error = errorCodes[buffer.readUInt32BE(4)];
};
-// [WINDOW_UPDATE](http://http2.github.io/http2-spec/#WINDOW_UPDATE)
+// [WINDOW_UPDATE](http://tools.ietf.org/html/draft-ietf-httpbis-http2-09#section-6.9)
// -----------------------------------------------------------------
//
// The WINDOW_UPDATE frame (type=0x9) is used to implement flow control.
@@ -662,7 +662,7 @@ Deserializer.WINDOW_UPDATE = function readWindowUpdate(buffer, frame) {
frame.window_size = buffer.readUInt32BE(0) & 0x7fffffff;
};
-// [CONTINUATION](http://http2.github.io/http2-spec/#CONTINUATION)
+// [CONTINUATION](http://tools.ietf.org/html/draft-ietf-httpbis-http2-09#section-6.10)
// ------------------------------------------------------------
//
// The CONTINUATION frame (type=0xA) is used to continue a sequence of header block fragments.
@@ -687,7 +687,7 @@ Deserializer.CONTINUATION = function readContinuation(buffer, frame) {
frame.data = buffer;
};
-// [Error Codes](http://http2.github.io/http2-spec/#ErrorCodes)
+// [Error Codes](http://tools.ietf.org/html/draft-ietf-httpbis-http2-09#section-7)
// ------------------------------------------------------------
var errorCodes = [
diff --git a/testing/xpcshell/node-http2/node_modules/http2-protocol/lib/index.js b/testing/xpcshell/node-http2/node_modules/http2-protocol/lib/index.js
index d3be5cfc6c4..f4892f47750 100644
--- a/testing/xpcshell/node-http2/node_modules/http2-protocol/lib/index.js
+++ b/testing/xpcshell/node-http2/node_modules/http2-protocol/lib/index.js
@@ -1,4 +1,4 @@
-// [node-http2-protocol][homepage] is an implementation of the [HTTP/2 (draft 06)][http2]
+// [node-http2-protocol][homepage] is an implementation of the [HTTP/2 (draft 09)][http2]
// framing layer for [node.js][node].
//
// The main building blocks are [node.js streams][node-stream] that are connected through pipes.
@@ -28,10 +28,10 @@
// between the binary and the JavaScript object representation of HTTP/2 frames
//
// [homepage]: https://github.com/molnarg/node-http2
-// [http2]: http://tools.ietf.org/html/draft-ietf-httpbis-http2-06
-// [http2-connheader]: http://tools.ietf.org/html/draft-ietf-httpbis-http2-06#section-3.5
-// [http2-stream]: http://tools.ietf.org/html/draft-ietf-httpbis-http2-06#section-5
-// [http2-streamstate]: http://tools.ietf.org/html/draft-ietf-httpbis-http2-06#section-5.1
+// [http2]: http://tools.ietf.org/html/draft-ietf-httpbis-http2-09
+// [http2-connheader]: http://tools.ietf.org/html/draft-ietf-httpbis-http2-09#section-3.5
+// [http2-stream]: http://tools.ietf.org/html/draft-ietf-httpbis-http2-09#section-5
+// [http2-streamstate]: http://tools.ietf.org/html/draft-ietf-httpbis-http2-09#section-5.1
// [node]: http://nodejs.org/
// [node-stream]: http://nodejs.org/api/stream.html
// [node-https]: http://nodejs.org/api/https.html
diff --git a/testing/xpcshell/node-http2/node_modules/http2-protocol/lib/stream.js b/testing/xpcshell/node-http2/node_modules/http2-protocol/lib/stream.js
index e923274b733..1e92ca24d54 100644
--- a/testing/xpcshell/node-http2/node_modules/http2-protocol/lib/stream.js
+++ b/testing/xpcshell/node-http2/node_modules/http2-protocol/lib/stream.js
@@ -322,7 +322,7 @@ Stream.prototype._finishing = function _finishing() {
}
};
-// [Stream States](http://tools.ietf.org/id/draft-unicorn-httpbis-http2-01.html#StreamStates)
+// [Stream States](http://tools.ietf.org/html/draft-ietf-httpbis-http2-09#section-5.1)
// ----------------
//
// +--------+
diff --git a/testing/xpcshell/node-http2/node_modules/http2-protocol/package.json b/testing/xpcshell/node-http2/node_modules/http2-protocol/package.json
index 2580cf83448..42cde6ce49f 100644
--- a/testing/xpcshell/node-http2/node_modules/http2-protocol/package.json
+++ b/testing/xpcshell/node-http2/node_modules/http2-protocol/package.json
@@ -1,6 +1,6 @@
{
"name": "http2-protocol",
- "version": "0.7.0",
+ "version": "0.9.0",
"description": "A JavaScript implementation of the HTTP/2 framing layer",
"main": "lib/index.js",
"engines": {
@@ -46,7 +46,7 @@
],
"license": "MIT",
"readmeFilename": "README.md",
- "readme": "node-http2-protocol\n===================\n\nAn HTTP/2 ([draft-ietf-httpbis-http2-07](http://tools.ietf.org/html/draft-ietf-httpbis-http2-07))\nframing layer implementaion for node.js.\n\nInstallation\n------------\n\n```\nnpm install http2-protocol\n```\n\nExamples\n--------\n\nAPI\n---\n\nDevelopment\n-----------\n\n### Development dependencies ###\n\nThere's a few library you will need to have installed to do anything described in the following\nsections. After installing/cloning node-http2, run `npm install` in its directory to install\ndevelopment dependencies.\n\nUsed libraries:\n\n* [mocha](http://visionmedia.github.io/mocha/) for tests\n* [chai](http://chaijs.com/) for assertions\n* [istanbul](https://github.com/gotwarlost/istanbul) for code coverage analysis\n* [docco](http://jashkenas.github.io/docco/) for developer documentation\n* [bunyan](https://github.com/trentm/node-bunyan) for logging\n\nFor pretty printing logs, you will also need a global install of bunyan (`npm install -g bunyan`).\n\n### Developer documentation ###\n\nThe developer documentation is located in the `doc` directory. The docs are usually updated only\nbefore releasing a new version. To regenerate them manually, run `npm run-script prepublish`.\nThere's a hosted version which is located [here](http://molnarg.github.io/node-http2/doc/).\n\n### Running the tests ###\n\nIt's easy, just run `npm test`. The tests are written in BDD style, so they are a good starting\npoint to understand the code.\n\n### Test coverage ###\n\nTo generate a code coverage report, run `npm test --coverage` (it may be slow, be patient).\nCode coverage summary as of version 0.6.0:\n```\nStatements : 92.39% ( 1165/1261 )\nBranches : 86.57% ( 477/551 )\nFunctions : 91.22% ( 135/148 )\nLines : 92.35% ( 1159/1255 )\n```\n\nThere's a hosted version of the detailed (line-by-line) coverage report\n[here](http://molnarg.github.io/node-http2-protocol/coverage/lcov-report/lib/).\n\n### Logging ###\n\nContributors\n------------\n\nCode contributions are always welcome! People who contributed to node-http2 so far:\n\n* Nick Hurley\n* Mike Belshe\n\nSpecial thanks to Google for financing the development of this module as part of their [Summer of\nCode program](https://developers.google.com/open-source/soc/) (project: [HTTP/2 prototype server\nimplementation](https://google-melange.appspot.com/gsoc/project/google/gsoc2013/molnarg/5001)), and\nNick Hurley of Mozilla, my GSoC mentor, who helped with regular code review and technical advices.\n\nLicense\n-------\n\nThe MIT License\n\nCopyright (C) 2013 Gábor Molnár \n",
- "_id": "http2-protocol@0.7.0",
- "_from": "http2-protocol@0.7.x"
+ "readme": "node-http2-protocol\n===================\n\nAn HTTP/2 ([draft-ietf-httpbis-http2-09](http://tools.ietf.org/html/draft-ietf-httpbis-http2-09))\nframing layer implementaion for node.js.\n\nInstallation\n------------\n\n```\nnpm install http2-protocol\n```\n\nExamples\n--------\n\nAPI\n---\n\nDevelopment\n-----------\n\n### Development dependencies ###\n\nThere's a few library you will need to have installed to do anything described in the following\nsections. After installing/cloning node-http2, run `npm install` in its directory to install\ndevelopment dependencies.\n\nUsed libraries:\n\n* [mocha](http://visionmedia.github.io/mocha/) for tests\n* [chai](http://chaijs.com/) for assertions\n* [istanbul](https://github.com/gotwarlost/istanbul) for code coverage analysis\n* [docco](http://jashkenas.github.io/docco/) for developer documentation\n* [bunyan](https://github.com/trentm/node-bunyan) for logging\n\nFor pretty printing logs, you will also need a global install of bunyan (`npm install -g bunyan`).\n\n### Developer documentation ###\n\nThe developer documentation is located in the `doc` directory. The docs are usually updated only\nbefore releasing a new version. To regenerate them manually, run `npm run-script prepublish`.\nThere's a hosted version which is located [here](http://molnarg.github.io/node-http2-protocol/doc/).\n\n### Running the tests ###\n\nIt's easy, just run `npm test`. The tests are written in BDD style, so they are a good starting\npoint to understand the code.\n\n### Test coverage ###\n\nTo generate a code coverage report, run `npm test --coverage` (it may be slow, be patient).\nCode coverage summary as of version 0.6.0:\n```\nStatements : 92.43% ( 1257/1360 )\nBranches : 86.36% ( 500/579 )\nFunctions : 90.12% ( 146/162 )\nLines : 92.39% ( 1251/1354 )\n```\n\nThere's a hosted version of the detailed (line-by-line) coverage report\n[here](http://molnarg.github.io/node-http2-protocol/coverage/lcov-report/lib/).\n\n### Logging ###\n\nContributors\n------------\n\nCode contributions are always welcome! People who contributed to node-http2 so far:\n\n* Nick Hurley\n* Mike Belshe\n\nSpecial thanks to Google for financing the development of this module as part of their [Summer of\nCode program](https://developers.google.com/open-source/soc/) (project: [HTTP/2 prototype server\nimplementation](https://google-melange.appspot.com/gsoc/project/google/gsoc2013/molnarg/5001)), and\nNick Hurley of Mozilla, my GSoC mentor, who helped with regular code review and technical advices.\n\nLicense\n-------\n\nThe MIT License\n\nCopyright (C) 2013 Gábor Molnár \n",
+ "_id": "http2-protocol@0.9.0",
+ "_from": "http2-protocol@0.9.x"
}
diff --git a/testing/xpcshell/node-http2/node_modules/http2-protocol/test/compressor.js b/testing/xpcshell/node-http2/node_modules/http2-protocol/test/compressor.js
index da3bcfbf0ca..e9f7f48481f 100644
--- a/testing/xpcshell/node-http2/node_modules/http2-protocol/test/compressor.js
+++ b/testing/xpcshell/node-http2/node_modules/http2-protocol/test/compressor.js
@@ -158,14 +158,14 @@ var test_headers = [{
value: 3,
index: false
},
- buffer: new Buffer('83', 'hex')
+ buffer: new Buffer('84', 'hex')
}, {
header: {
name: 5,
value: 5,
index: false
},
- buffer: new Buffer('85', 'hex')
+ buffer: new Buffer('86', 'hex')
}, {
header: {
name: 4,
@@ -186,14 +186,21 @@ var test_headers = [{
value: 2,
index: false
},
- buffer: new Buffer('82', 'hex')
+ buffer: new Buffer('83', 'hex')
}, {
header: {
name: 6,
value: 6,
index: false
},
- buffer: new Buffer('86', 'hex')
+ buffer: new Buffer('87', 'hex')
+}, {
+ header: {
+ name: -1,
+ value: -1,
+ index: false
+ },
+ buffer: new Buffer('80', 'hex')
}];
var test_header_sets = [{
@@ -231,11 +238,15 @@ var test_header_sets = [{
'custom-key': 'custom-value'
},
buffer: test_headers[3].buffer
+}, {
+ headers: {},
+ buffer: test_headers[13].buffer
}, {
headers: {
':status': '200',
'user-agent': 'my-user-agent',
- 'cookie': ['first', 'second', 'third', 'third'],
+ 'cookie': 'first; second; third; third; fourth',
+ 'multiple': ['first', 'second', 'third', 'third; fourth'],
'verylong': (new Buffer(9000)).toString('hex')
}
}];
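Annotation: the fixture change above carries 'cookie' as a single '; '-joined string while the unrelated 'multiple' header keeps its array form, which suggests that cookie crumbs are re-joined into one header value after decompression in this draft. A hypothetical sketch of that post-processing step follows; joinCookieCrumbs is an illustrative helper name, not part of the module's API.

```javascript
// Hypothetical post-processing step implied by the fixture above: cookie
// values decoded as separate crumbs are re-joined with '; ', while any
// other repeated header keeps its array representation.
function joinCookieCrumbs(headers) {
  if (Array.isArray(headers['cookie'])) {
    headers['cookie'] = headers['cookie'].join('; ');
  }
  return headers;
}

console.log(joinCookieCrumbs({
  'cookie': ['first', 'second', 'third'],
  'multiple': ['first', 'second']
}));
// { cookie: 'first; second; third', multiple: [ 'first', 'second' ] }
```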
@@ -343,7 +354,7 @@ describe('compressor.js', function() {
describe('method decompress(buffer)', function() {
it('should return the parsed header set in { name1: value1, name2: [value2, value3], ... } format', function() {
var decompressor = new Decompressor(util.log, 'REQUEST');
- for (var i = 0; i < 4; i++) {
+ for (var i = 0; i < 5; i++) {
var header_set = test_header_sets[i];
expect(decompressor.decompress(header_set.buffer)).to.deep.equal(header_set.headers);
}
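Annotation: the updated expectations in this file shift every indexed-representation byte up by one (index 3 now encodes to 0x84 instead of 0x83, index 6 to 0x87) and add a case where 0x80 round-trips to an empty header set. That reads as a move to one-based table indexes on the wire, with 0 reserved for the special case the new fixture exercises. Below is a rough sketch of that byte-level encoding under this reading; encodeIndexedHeader is a hypothetical helper and only covers indexes small enough to fit the 7-bit prefix.

```javascript
// Hypothetical sketch of the one-based indexed representation implied by the
// test buffers above: high bit set, table index + 1 in the low seven bits.
// Indexes that do not fit the 7-bit prefix would need the multi-byte integer
// encoding and are not handled here.
function encodeIndexedHeader(index) {
  if (index + 1 > 0x7f) {
    throw new Error('index too large for this sketch');
  }
  return new Buffer([0x80 | (index + 1)]);
}

console.log(encodeIndexedHeader(3)); // <Buffer 84>
console.log(encodeIndexedHeader(6)); // <Buffer 87>
```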
diff --git a/testing/xpcshell/node-http2/package.json b/testing/xpcshell/node-http2/package.json
index 5ac6620c20b..bed96b428e6 100644
--- a/testing/xpcshell/node-http2/package.json
+++ b/testing/xpcshell/node-http2/package.json
@@ -1,13 +1,13 @@
{
"name": "http2",
- "version": "2.1.0",
+ "version": "2.2.0",
"description": "An HTTP/2 client and server implementation",
"main": "lib/index.js",
"engines": {
"node": ">=0.10.19"
},
"dependencies": {
- "http2-protocol": "0.7.x"
+ "http2-protocol": "0.9.x"
},
"devDependencies": {
"istanbul": "*",
@@ -49,7 +49,7 @@
],
"license": "MIT",
"readmeFilename": "README.md",
- "readme": "node-http2\n==========\n\nAn HTTP/2 ([draft-ietf-httpbis-http2-07](http://tools.ietf.org/html/draft-ietf-httpbis-http2-76))\nclient and server implementation for node.js.\n\nInstallation\n------------\n\n```\nnpm install http2\n```\n\nAPI\n---\n\nThe API is very similar to the [standard node.js HTTPS API](http://nodejs.org/api/https.html). The\ngoal is the perfect API compatibility, with additional HTTP2 related extensions (like server push).\n\nDetailed API documentation is primarily maintained in the `lib/http.js` file and is [available in\nthe wiki](https://github.com/molnarg/node-http2/wiki/Public-API) as well.\n\nExamples\n--------\n\n### Using as a server ###\n\n```javascript\nvar options = {\n key: fs.readFileSync('./example/localhost.key'),\n cert: fs.readFileSync('./example/localhost.crt')\n};\n\nrequire('http2').createServer(options, function(request, response) {\n response.end('Hello world!');\n}).listen(8080);\n```\n\n### Using as a client ###\n\n```javascript\nprocess.env.NODE_TLS_REJECT_UNAUTHORIZED = \"0\";\n\nrequire('http2').get('https://localhost:8080/', function(response) {\n response.pipe(process.stdout);\n});\n```\n\n### Simple static file server ###\n\nAn simple static file server serving up content from its own directory is available in the `example`\ndirectory. Running the server:\n\n```bash\n$ node ./example/server.js\n```\n\n### Simple command line client ###\n\nAn example client is also available. Downloading the server's own source code from the server:\n\n```bash\n$ node ./example/client.js 'https://localhost:8080/server.js' >/tmp/server.js\n```\n\n### Server push ###\n\nFor a server push example, see the source code of the example\n[server](https://github.com/molnarg/node-http2/blob/master/example/server.js) and\n[client](https://github.com/molnarg/node-http2/blob/master/example/client.js).\n\nStatus\n------\n\n* ALPN is not yet supported in node.js (see\n [this issue](https://github.com/joyent/node/issues/5945)). For ALPN support, you will have to use\n [Shigeki Ohtsu's node.js fork](https://github.com/shigeki/node/tree/alpn_support) until this code\n gets merged upstream.\n* Upgrade mechanism to start HTTP/2 over unencrypted channel is not implemented yet\n (issue [#4](https://github.com/molnarg/node-http2/issues/4))\n* Other minor features found in\n [this list](https://github.com/molnarg/node-http2/issues?labels=feature) are not implemented yet\n\nDevelopment\n-----------\n\n### Development dependencies ###\n\nThere's a few library you will need to have installed to do anything described in the following\nsections. After installing/cloning node-http2, run `npm install` in its directory to install\ndevelopment dependencies.\n\nUsed libraries:\n\n* [mocha](http://visionmedia.github.io/mocha/) for tests\n* [chai](http://chaijs.com/) for assertions\n* [istanbul](https://github.com/gotwarlost/istanbul) for code coverage analysis\n* [docco](http://jashkenas.github.io/docco/) for developer documentation\n* [bunyan](https://github.com/trentm/node-bunyan) for logging\n\nFor pretty printing logs, you will also need a global install of bunyan (`npm install -g bunyan`).\n\n### Developer documentation ###\n\nThe developer documentation is located in the `doc` directory. The docs are usually updated only\nbefore releasing a new version. To regenerate them manually, run `npm run-script prepublish`.\nThere's a hosted version which is located [here](http://molnarg.github.io/node-http2/doc/).\n\n### Running the tests ###\n\nIt's easy, just run `npm test`. 
The tests are written in BDD style, so they are a good starting\npoint to understand the code.\n\n### Test coverage ###\n\nTo generate a code coverage report, run `npm test --coverage` (which runs very slowly, be patient).\nCode coverage summary as of version 1.0.1:\n```\nStatements : 93.26% ( 1563/1676 )\nBranches : 84.85% ( 605/713 )\nFunctions : 94.81% ( 201/212 )\nLines : 93.23% ( 1557/1670 )\n```\n\nThere's a hosted version of the detailed (line-by-line) coverage report\n[here](http://molnarg.github.io/node-http2/coverage/lcov-report/lib/).\n\n### Logging ###\n\nLogging is turned off by default. You can turn it on by passing a bunyan logger as `log` option when\ncreating a server or agent.\n\nWhen using the example server or client, it's very easy to turn logging on: set the `HTTP2_LOG`\nenvironment variable to `fatal`, `error`, `warn`, `info`, `debug` or `trace` (the logging level).\nTo log every single incoming and outgoing data chunk, use `HTTP2_LOG_DATA=1` besides\n`HTTP2_LOG=trace`. Log output goes to the standard error output. If the standard error is redirected\ninto a file, then the log output is in bunyan's JSON format for easier post-mortem analysis.\n\nRunning the example server and client with `info` level logging output:\n\n```bash\n$ HTTP2_LOG=info node ./example/server.js\n```\n\n```bash\n$ HTTP2_LOG=info node ./example/client.js 'http://localhost:8080/server.js' >/dev/null\n```\n\nContributors\n------------\n\nCode contributions are always welcome! People who contributed to node-http2 so far:\n\n* Nick Hurley\n* Mike Belshe\n\nSpecial thanks to Google for financing the development of this module as part of their [Summer of\nCode program](https://developers.google.com/open-source/soc/) (project: [HTTP/2 prototype server\nimplementation](https://google-melange.appspot.com/gsoc/project/google/gsoc2013/molnarg/5001)), and\nNick Hurley of Mozilla, my GSoC mentor, who helped with regular code review and technical advices.\n\nLicense\n-------\n\nThe MIT License\n\nCopyright (C) 2013 Gábor Molnár \n",
- "_id": "http2@2.1.0",
+ "readme": "node-http2\n==========\n\nAn HTTP/2 ([draft-ietf-httpbis-http2-09](http://tools.ietf.org/html/draft-ietf-httpbis-http2-9))\nclient and server implementation for node.js.\n\nInstallation\n------------\n\n```\nnpm install http2\n```\n\nAPI\n---\n\nThe API is very similar to the [standard node.js HTTPS API](http://nodejs.org/api/https.html). The\ngoal is the perfect API compatibility, with additional HTTP2 related extensions (like server push).\n\nDetailed API documentation is primarily maintained in the `lib/http.js` file and is [available in\nthe wiki](https://github.com/molnarg/node-http2/wiki/Public-API) as well.\n\nExamples\n--------\n\n### Using as a server ###\n\n```javascript\nvar options = {\n key: fs.readFileSync('./example/localhost.key'),\n cert: fs.readFileSync('./example/localhost.crt')\n};\n\nrequire('http2').createServer(options, function(request, response) {\n response.end('Hello world!');\n}).listen(8080);\n```\n\n### Using as a client ###\n\n```javascript\nprocess.env.NODE_TLS_REJECT_UNAUTHORIZED = \"0\";\n\nrequire('http2').get('https://localhost:8080/', function(response) {\n response.pipe(process.stdout);\n});\n```\n\n### Simple static file server ###\n\nAn simple static file server serving up content from its own directory is available in the `example`\ndirectory. Running the server:\n\n```bash\n$ node ./example/server.js\n```\n\n### Simple command line client ###\n\nAn example client is also available. Downloading the server's own source code from the server:\n\n```bash\n$ node ./example/client.js 'https://localhost:8080/server.js' >/tmp/server.js\n```\n\n### Server push ###\n\nFor a server push example, see the source code of the example\n[server](https://github.com/molnarg/node-http2/blob/master/example/server.js) and\n[client](https://github.com/molnarg/node-http2/blob/master/example/client.js).\n\nStatus\n------\n\n* ALPN is not yet supported in node.js (see\n [this issue](https://github.com/joyent/node/issues/5945)). For ALPN support, you will have to use\n [Shigeki Ohtsu's node.js fork](https://github.com/shigeki/node/tree/alpn_support) until this code\n gets merged upstream.\n* Upgrade mechanism to start HTTP/2 over unencrypted channel is not implemented yet\n (issue [#4](https://github.com/molnarg/node-http2/issues/4))\n* Other minor features found in\n [this list](https://github.com/molnarg/node-http2/issues?labels=feature) are not implemented yet\n\nDevelopment\n-----------\n\n### Development dependencies ###\n\nThere's a few library you will need to have installed to do anything described in the following\nsections. After installing/cloning node-http2, run `npm install` in its directory to install\ndevelopment dependencies.\n\nUsed libraries:\n\n* [mocha](http://visionmedia.github.io/mocha/) for tests\n* [chai](http://chaijs.com/) for assertions\n* [istanbul](https://github.com/gotwarlost/istanbul) for code coverage analysis\n* [docco](http://jashkenas.github.io/docco/) for developer documentation\n* [bunyan](https://github.com/trentm/node-bunyan) for logging\n\nFor pretty printing logs, you will also need a global install of bunyan (`npm install -g bunyan`).\n\n### Developer documentation ###\n\nThe developer documentation is located in the `doc` directory. The docs are usually updated only\nbefore releasing a new version. To regenerate them manually, run `npm run-script prepublish`.\nThere's a hosted version which is located [here](http://molnarg.github.io/node-http2/doc/).\n\n### Running the tests ###\n\nIt's easy, just run `npm test`. 
The tests are written in BDD style, so they are a good starting\npoint to understand the code.\n\n### Test coverage ###\n\nTo generate a code coverage report, run `npm test --coverage` (which runs very slowly, be patient).\nCode coverage summary as of version 1.0.1:\n```\nStatements : 93.26% ( 1563/1676 )\nBranches : 84.85% ( 605/713 )\nFunctions : 94.81% ( 201/212 )\nLines : 93.23% ( 1557/1670 )\n```\n\nThere's a hosted version of the detailed (line-by-line) coverage report\n[here](http://molnarg.github.io/node-http2/coverage/lcov-report/lib/).\n\n### Logging ###\n\nLogging is turned off by default. You can turn it on by passing a bunyan logger as `log` option when\ncreating a server or agent.\n\nWhen using the example server or client, it's very easy to turn logging on: set the `HTTP2_LOG`\nenvironment variable to `fatal`, `error`, `warn`, `info`, `debug` or `trace` (the logging level).\nTo log every single incoming and outgoing data chunk, use `HTTP2_LOG_DATA=1` besides\n`HTTP2_LOG=trace`. Log output goes to the standard error output. If the standard error is redirected\ninto a file, then the log output is in bunyan's JSON format for easier post-mortem analysis.\n\nRunning the example server and client with `info` level logging output:\n\n```bash\n$ HTTP2_LOG=info node ./example/server.js\n```\n\n```bash\n$ HTTP2_LOG=info node ./example/client.js 'http://localhost:8080/server.js' >/dev/null\n```\n\nContributors\n------------\n\nCode contributions are always welcome! People who contributed to node-http2 so far:\n\n* Nick Hurley\n* Mike Belshe\n\nSpecial thanks to Google for financing the development of this module as part of their [Summer of\nCode program](https://developers.google.com/open-source/soc/) (project: [HTTP/2 prototype server\nimplementation](https://google-melange.appspot.com/gsoc/project/google/gsoc2013/molnarg/5001)), and\nNick Hurley of Mozilla, my GSoC mentor, who helped with regular code review and technical advices.\n\nLicense\n-------\n\nThe MIT License\n\nCopyright (C) 2013 Gábor Molnár \n",
+ "_id": "http2@2.2.0",
"_from": "http2@"
}
diff --git a/toolkit/mozapps/installer/packager.mk b/toolkit/mozapps/installer/packager.mk
index 41029934582..2fb75855ed5 100644
--- a/toolkit/mozapps/installer/packager.mk
+++ b/toolkit/mozapps/installer/packager.mk
@@ -381,8 +381,11 @@ RELEASE_SIGN_ANDROID_APK = \
$(RM) $(2)-unaligned.apk
ROBOCOP_PATH = $(abspath $(_ABS_DIST)/../build/mobile/robocop)
+# Normally, $(NSINSTALL) would be used instead of cp, but INNER_ROBOCOP_PACKAGE
+# is used in a series of commands that run under a "cd something", while
+# $(NSINSTALL) is relative.
INNER_ROBOCOP_PACKAGE= \
- $(NSINSTALL) $(GECKO_APP_AP_PATH)/fennec_ids.txt $(_ABS_DIST) && \
+ cp $(GECKO_APP_AP_PATH)/fennec_ids.txt $(_ABS_DIST) && \
$(call RELEASE_SIGN_ANDROID_APK,$(ROBOCOP_PATH)/robocop-debug-unsigned-unaligned.apk,$(_ABS_DIST)/robocop.apk)
BACKGROUND_TESTS_PATH = $(abspath $(_ABS_DIST)/../mobile/android/tests/background/junit3)
diff --git a/widget/cocoa/nsMenuBarX.h b/widget/cocoa/nsMenuBarX.h
index c482b646988..f1563f3023b 100644
--- a/widget/cocoa/nsMenuBarX.h
+++ b/widget/cocoa/nsMenuBarX.h
@@ -16,6 +16,7 @@
#include "nsString.h"
class nsMenuX;
+class nsMenuBarX;
class nsMenuItemX;
class nsIWidget;
class nsIContent;
@@ -32,11 +33,26 @@ public:
NS_IMETHOD CreateNativeMenuBar(nsIWidget* aParent, nsIContent* aMenuBarNode);
};
+@interface NSMenu (Undocumented)
+// Undocumented method, present unchanged since OS X 10.6, used to temporarily
+// highlight a top-level menu item when an appropriate Cmd+key combination is
+// pressed.
+- (void)_performActionWithHighlightingForItemAtIndex:(NSInteger)index;
+@end
+
// Objective-C class used to allow us to intervene with keyboard event handling.
// We allow mouse actions to work normally.
@interface GeckoNSMenu : NSMenu
{
+@private
+ nsMenuBarX *mMenuBarOwner; // Weak -- if non-null it owns us
+ bool mDelayResignMainMenu;
}
+- (id)initWithTitle:(NSString *)aTitle andMenuBarOwner:(nsMenuBarX *)aMenuBarOwner;
+- (void)resetMenuBarOwner;
+- (bool)delayResignMainMenu;
+- (void)setDelayResignMainMenu:(bool)aShouldDelay;
+- (void)delayedPaintMenuBar:(id)unused;
@end
// Objective-C class used as action target for menu items
@@ -80,6 +96,7 @@ public:
static NativeMenuItemTarget* sNativeEventTarget;
static nsMenuBarX* sLastGeckoMenuBarPainted;
+ static nsMenuBarX* sCurrentPaintDelayedMenuBar;
// The following content nodes have been removed from the menu system.
// We save them here for use in command handling.
@@ -103,7 +120,9 @@ public:
nsMenuX* GetMenuAt(uint32_t aIndex);
nsMenuX* GetXULHelpMenu();
void SetSystemHelpMenu();
- nsresult Paint();
+ nsresult Paint(bool aDelayed = false);
+ void PaintMenuBarAfterDelay();
+ void ResetAwaitingDelayedPaint() { mAwaitingDelayedPaint = false; }
void ForceUpdateNativeMenuAt(const nsAString& indexString);
void ForceNativeMenuReload(); // used for testing
static char GetLocalizedAccelKey(const char *shortcutID);
@@ -121,6 +140,8 @@ protected:
nsTArray< nsAutoPtr<nsMenuX> > mMenuArray;
nsIWidget* mParentWindow; // [weak]
GeckoNSMenu* mNativeMenu; // root menu, representing entire menu bar
+
+ bool mAwaitingDelayedPaint;
};
#endif // nsMenuBarX_h_
diff --git a/widget/cocoa/nsMenuBarX.mm b/widget/cocoa/nsMenuBarX.mm
index 250448587f1..eb9330d2f61 100644
--- a/widget/cocoa/nsMenuBarX.mm
+++ b/widget/cocoa/nsMenuBarX.mm
@@ -28,7 +28,8 @@
#include "nsIDOMElement.h"
NativeMenuItemTarget* nsMenuBarX::sNativeEventTarget = nil;
-nsMenuBarX* nsMenuBarX::sLastGeckoMenuBarPainted = nullptr;
+nsMenuBarX* nsMenuBarX::sLastGeckoMenuBarPainted = nullptr; // Weak
+nsMenuBarX* nsMenuBarX::sCurrentPaintDelayedMenuBar = nullptr; // Weak
NSMenu* sApplicationMenu = nil;
BOOL gSomeMenuBarPainted = NO;
@@ -55,11 +56,11 @@ NS_IMETHODIMP nsNativeMenuServiceX::CreateNativeMenuBar(nsIWidget* aParent, nsIC
}
nsMenuBarX::nsMenuBarX()
-: nsMenuGroupOwnerX(), mParentWindow(nullptr)
+: nsMenuGroupOwnerX(), mParentWindow(nullptr), mAwaitingDelayedPaint(false)
{
NS_OBJC_BEGIN_TRY_ABORT_BLOCK;
- mNativeMenu = [[GeckoNSMenu alloc] initWithTitle:@"MainMenuBar"];
+ mNativeMenu = [[GeckoNSMenu alloc] initWithTitle:@"MainMenuBar" andMenuBarOwner:this];
NS_OBJC_END_TRY_ABORT_BLOCK;
}
@@ -91,6 +92,7 @@ nsMenuBarX::~nsMenuBarX()
// before the registration hash table is destroyed.
mMenuArray.Clear();
+ [mNativeMenu resetMenuBarOwner];
[mNativeMenu release];
NS_OBJC_END_TRY_ABORT_BLOCK;
@@ -339,10 +341,15 @@ void nsMenuBarX::SetSystemHelpMenu()
}
}
-nsresult nsMenuBarX::Paint()
+nsresult nsMenuBarX::Paint(bool aDelayed)
{
NS_OBJC_BEGIN_TRY_ABORT_BLOCK_NSRESULT;
+ if (!aDelayed && mAwaitingDelayedPaint) {
+ return NS_OK;
+ }
+ mAwaitingDelayedPaint = false;
+
// Don't try to optimize anything in this painting by checking
// sLastGeckoMenuBarPainted because the menubar can be manipulated by
// native dialogs and sheet code and other things besides this paint method.
@@ -352,13 +359,36 @@ nsresult nsMenuBarX::Paint()
NSMenu* outgoingMenu = [NSApp mainMenu];
NS_ASSERTION([outgoingMenu numberOfItems] > 0, "Main menu does not have any items, something is terribly wrong!");
- NSMenuItem* appMenuItem = [[outgoingMenu itemAtIndex:0] retain];
- [outgoingMenu removeItemAtIndex:0];
- [mNativeMenu insertItem:appMenuItem atIndex:0];
- [appMenuItem release];
+ // To work around bug 722676, we sometimes need to delay making mNativeMenu
+ // the main menu. This is an Apple bug that sometimes causes a top-level
+ // menu item to remain highlighted after pressing a Cmd+key combination that
+ // opens a new window, then closing the window. The OS temporarily
+ // highlights the appropriate top-level menu item whenever you press the
+ // Cmd+key combination for one of its submenus. (It does this by setting a
+ // "pressed" attribute on it.) The OS then uses a timer to remove this
+ // "pressed" attribute. But without our workaround we sometimes change the
+ // main menu before the timer has fired, so when it fires the menu item it
+ // was intended to unhighlight is no longer present in the main menu. This
+ // causes the item to remain semi-permanently highlighted (until you quit
+ // Firefox or navigate the main menu by hand).
+ if ((outgoingMenu != mNativeMenu) &&
+ [outgoingMenu isKindOfClass:[GeckoNSMenu class]]) {
+ if (aDelayed) {
+ [(GeckoNSMenu *)outgoingMenu setDelayResignMainMenu:false];
+ } else if ([(GeckoNSMenu *)outgoingMenu delayResignMainMenu]) {
+ PaintMenuBarAfterDelay();
+ return NS_OK;
+ }
+ }
- // Set menu bar and event target.
- [NSApp setMainMenu:mNativeMenu];
+ if (outgoingMenu != mNativeMenu) {
+ NSMenuItem* appMenuItem = [[outgoingMenu itemAtIndex:0] retain];
+ [outgoingMenu removeItemAtIndex:0];
+ [mNativeMenu insertItem:appMenuItem atIndex:0];
+ [appMenuItem release];
+ // Set menu bar and event target.
+ [NSApp setMainMenu:mNativeMenu];
+ }
SetSystemHelpMenu();
nsMenuBarX::sLastGeckoMenuBarPainted = this;
@@ -369,6 +399,19 @@ nsresult nsMenuBarX::Paint()
NS_OBJC_END_TRY_ABORT_BLOCK_NSRESULT;
}
+// Used to delay a call to nsMenuBarX::Paint(). Needed to work around
+// bug 722676.
+void nsMenuBarX::PaintMenuBarAfterDelay()
+{
+ mAwaitingDelayedPaint = true;
+ nsMenuBarX::sCurrentPaintDelayedMenuBar = this;
+ [mNativeMenu retain];
+ // The delay for Apple's unhighlight timer is 0.1f, so we make ours a bit longer.
+ [mNativeMenu performSelector:@selector(delayedPaintMenuBar:)
+ withObject:nil
+ afterDelay:0.15f];
+}
+
// Returns the 'key' attribute of the 'shortcutID' object (if any) in the
// currently active menubar's DOM document. 'shortcutID' should be the id
// (i.e. the name) of a component that defines a commonly used (and
@@ -727,6 +770,66 @@ static BOOL gMenuItemsExecuteCommands = YES;
@implementation GeckoNSMenu
+- (id)initWithTitle:(NSString *)aTitle
+{
+ if (self = [super initWithTitle:aTitle]) {
+ mMenuBarOwner = nullptr;
+ mDelayResignMainMenu = false;
+ }
+ return self;
+}
+
+- (id)initWithTitle:(NSString *)aTitle andMenuBarOwner:(nsMenuBarX *)aMenuBarOwner
+{
+ if (self = [super initWithTitle:aTitle]) {
+ mMenuBarOwner = aMenuBarOwner;
+ mDelayResignMainMenu = false;
+ }
+ return self;
+}
+
+- (void)resetMenuBarOwner
+{
+ mMenuBarOwner = nil;
+}
+
+- (bool)delayResignMainMenu
+{
+ return mDelayResignMainMenu;
+}
+
+- (void)setDelayResignMainMenu:(bool)aShouldDelay
+{
+ mDelayResignMainMenu = aShouldDelay;
+}
+
+// Used to delay a call to nsMenuBarX::Paint(). Needed to work around
+// bug 722676.
+- (void)delayedPaintMenuBar:(id)unused
+{
+ if (mMenuBarOwner) {
+ if (mMenuBarOwner == nsMenuBarX::sCurrentPaintDelayedMenuBar) {
+ mMenuBarOwner->Paint(true);
+ nsMenuBarX::sCurrentPaintDelayedMenuBar = nullptr;
+ } else {
+ mMenuBarOwner->ResetAwaitingDelayedPaint();
+ }
+ }
+ [self release];
+}
+
+// Undocumented method, present unchanged since OS X 10.6, used to temporarily
+// highlight a top-level menu item when an appropriate Cmd+key combination is
+// pressed.
+- (void)_performActionWithHighlightingForItemAtIndex:(NSInteger)index;
+{
+ NSMenu *mainMenu = [NSApp mainMenu];
+ if ([mainMenu isKindOfClass:[GeckoNSMenu class]]) {
+ [(GeckoNSMenu *)mainMenu setDelayResignMainMenu:true];
+ }
+ [super _performActionWithHighlightingForItemAtIndex:index];
+}
+
// Keyboard commands should not cause menu items to invoke their
// commands when there is a key window because we'd rather send
// the keyboard command to the window. We still have the menus
diff --git a/widget/windows/nsWindow.cpp b/widget/windows/nsWindow.cpp
index fd4c2006cbf..f2cd1b66721 100644
--- a/widget/windows/nsWindow.cpp
+++ b/widget/windows/nsWindow.cpp
@@ -7390,25 +7390,39 @@ nsWindow::DealWithPopups(HWND aWnd, UINT aMessage,
::PostMessageW(aWnd, MOZ_WM_REACTIVATE, aWParam, aLParam);
return true;
}
- } else if (sPendingNCACTIVATE && LOWORD(aWParam) == WA_INACTIVE &&
- NeedsToHandleNCActivateDelayed(aWnd)) {
- // If focus moves to non-popup widget or focusable popup, the window
- // needs to update its nonclient area.
+ // Don't rollup the popup when focus moves back to the parent window
+ // from a popup because such case is caused by strange mouse drivers.
+ nsWindow* prevWindow =
+ WinUtils::GetNSWindowPtr(reinterpret_cast<HWND>(aLParam));
+ if (prevWindow && prevWindow->IsPopup()) {
+ return false;
+ }
+ } else if (LOWORD(aWParam) == WA_INACTIVE) {
nsWindow* activeWindow =
WinUtils::GetNSWindowPtr(reinterpret_cast<HWND>(aLParam));
- if (!activeWindow || !activeWindow->IsPopup()) {
- sSendingNCACTIVATE = true;
- ::SendMessageW(aWnd, WM_NCACTIVATE, false, 0);
- sSendingNCACTIVATE = false;
+ if (sPendingNCACTIVATE && NeedsToHandleNCActivateDelayed(aWnd)) {
+ // If focus moves to non-popup widget or focusable popup, the window
+ // needs to update its nonclient area.
+ if (!activeWindow || !activeWindow->IsPopup()) {
+ sSendingNCACTIVATE = true;
+ ::SendMessageW(aWnd, WM_NCACTIVATE, false, 0);
+ sSendingNCACTIVATE = false;
+ }
+ sPendingNCACTIVATE = false;
+ }
+ // If focus moves from/to popup, we don't need to rollup the popup
+ // because such case is caused by strange mouse drivers.
+ if (activeWindow) {
+ if (activeWindow->IsPopup()) {
+ return false;
+ }
+ nsWindow* deactiveWindow = WinUtils::GetNSWindowPtr(aWnd);
+ if (deactiveWindow && deactiveWindow->IsPopup()) {
+ return false;
+ }
}
- sPendingNCACTIVATE = false;
}
- // XXX Why do we need to check the message pos?
- if (!EventIsInsideWindow(popupWindow) &&
- GetPopupsToRollup(rollupListener, &popupsToRollup)) {
- break;
- }
- return false;
+ break;
case MOZ_WM_REACTIVATE:
// The previous active window should take back focus.
@@ -7432,7 +7446,6 @@ nsWindow::DealWithPopups(HWND aWnd, UINT aMessage,
return false;
case WM_MOUSEACTIVATE:
- // XXX Why do we need to check the message pos?
if (!EventIsInsideWindow(popupWindow) &&
GetPopupsToRollup(rollupListener, &popupsToRollup)) {
// WM_MOUSEACTIVATE may be caused by moving the mouse (e.g., X-mouse
@@ -7456,23 +7469,14 @@ nsWindow::DealWithPopups(HWND aWnd, UINT aMessage,
// If focus moves to other window created in different process/thread,
// e.g., a plugin window, popups should be rolled up.
if (IsDifferentThreadWindow(reinterpret_cast<HWND>(aWParam))) {
- // XXX Why do we need to check the message pos?
- if (!EventIsInsideWindow(popupWindow) &&
- GetPopupsToRollup(rollupListener, &popupsToRollup)) {
- break;
- }
+ break;
}
return false;
case WM_MOVING:
case WM_SIZING:
case WM_MENUSELECT:
- // XXX Why do we need to check the message pos?
- if (!EventIsInsideWindow(popupWindow) &&
- GetPopupsToRollup(rollupListener, &popupsToRollup)) {
- break;
- }
- return false;
+ break;
default:
return false;