mirror of
https://gitlab.winehq.org/wine/wine-gecko.git
synced 2024-09-13 09:24:08 -07:00
Bug 928195 - Part 3: Consolidate all WebIDL Python logic into mozwebidl module; r=bz, froydnj
--HG-- extra : rebase_source : 0d8f664ecda7a92b6fba768a0dd0fd867c7d0ed3
This commit is contained in:
parent
1dc2a7a39b
commit
b89193cb67
@ -13,6 +13,8 @@ mock.pth:python/mock-1.0.0
|
||||
mozilla.pth:build
|
||||
mozilla.pth:config
|
||||
mozilla.pth:xpcom/typelib/xpt/tools
|
||||
mozilla.pth:dom/bindings
|
||||
mozilla.pth:dom/bindings/parser
|
||||
copy:build/buildconfig.py
|
||||
packages.txt:testing/mozbase/packages.txt
|
||||
objdir:build
|
||||
|
@ -90,6 +90,8 @@ globalgen_headers_DEST = $(ABS_DIST)/include/mozilla/dom
|
||||
globalgen_headers_TARGET := export
|
||||
INSTALL_TARGETS += globalgen_headers
|
||||
|
||||
PYTHON_UNIT_TESTS += $(srcdir)/mozwebidl/test/test_mozwebidl.py
|
||||
|
||||
include $(topsrcdir)/config/rules.mk
|
||||
|
||||
ifdef GNU_CC
|
||||
|
563
dom/bindings/mozwebidl/__init__.py
Normal file
563
dom/bindings/mozwebidl/__init__.py
Normal file
@ -0,0 +1,563 @@
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
# This module contains code for managing WebIDL files and bindings for
|
||||
# the build system.
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import errno
|
||||
import hashlib
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
|
||||
from copy import deepcopy
|
||||
|
||||
from mach.mixin.logging import LoggingMixin
|
||||
|
||||
from mozbuild.base import MozbuildObject
|
||||
from mozbuild.makeutil import Makefile
|
||||
from mozbuild.pythonutil import iter_modules_in_path
|
||||
from mozbuild.util import FileAvoidWrite
|
||||
|
||||
import WebIDL
|
||||
from Codegen import (
|
||||
CGBindingRoot,
|
||||
CGEventRoot,
|
||||
CGExampleRoot,
|
||||
GlobalGenRoots,
|
||||
)
|
||||
from Configuration import Configuration
|
||||
|
||||
|
||||
class BuildResult(object):
    """Represents the result of building WebIDL files.

    This holds a summary of output file generation during a build.
    """

    # Each attribute is an independent set of paths:
    #   inputs    - .webidl files that had their outputs regenerated.
    #   created   - output files that were created.
    #   updated   - output files whose content changed.
    #   unchanged - output files that didn't change.
    _SET_ATTRS = ('inputs', 'created', 'updated', 'unchanged')

    def __init__(self):
        for attr in self._SET_ATTRS:
            setattr(self, attr, set())
|
||||
|
||||
|
||||
class WebIDLCodegenManagerState(dict):
    """Holds state for the WebIDL code generation manager.

    State is currently just an extended dict. The internal implementation of
    state should be considered a black box to everyone except
    WebIDLCodegenManager. But we'll still document it.

    Fields:

    version
        The integer version of the format. This is to detect incompatible
        changes between state. It should be bumped whenever the format
        changes or semantics change.

    webidls
        A dictionary holding information about every known WebIDL input.
        Keys are the basenames of input WebIDL files. Values are dicts of
        metadata. Keys in those dicts are:

        * filename - The full path to the input filename.
        * inputs - A set of full paths to other webidl files this webidl
          depends on.
        * outputs - Set of full output paths that are created/derived from
          this file.
        * sha1 - The hexadecimal SHA-1 of the input filename from the last
          processing time.

    global_depends
        A dictionary defining files that influence all processing. Keys
        are full filenames. Values are hexadecimal SHA-1 from the last
        processing time.
    """

    # Bump whenever the serialized format or its semantics change.
    VERSION = 1

    def __init__(self, fh=None):
        # Always start from a well-formed empty state; loading (below) only
        # overlays serialized data on top of it.
        self['version'] = self.VERSION
        self['webidls'] = {}
        self['global_depends'] = {}

        if not fh:
            return

        state = json.load(fh)
        if state['version'] != self.VERSION:
            # Incompatible serialized format; callers are expected to treat
            # this as "no previous state."
            raise Exception('Unknown state version: %s' % state['version'])

        self['version'] = state['version']
        self['global_depends'] = state['global_depends']

        for k, v in state['webidls'].items():
            self['webidls'][k] = v

            # Sets are converted to lists for serialization because JSON
            # doesn't support sets.
            self['webidls'][k]['inputs'] = set(v['inputs'])
            self['webidls'][k]['outputs'] = set(v['outputs'])

    def dump(self, fh):
        """Dump serialized state to a file handle."""
        # Deep copy so the normalization below doesn't mutate live state.
        normalized = deepcopy(self)

        for k, v in self['webidls'].items():
            # Convert sets to lists because JSON doesn't support sets.
            normalized['webidls'][k]['outputs'] = sorted(v['outputs'])
            normalized['webidls'][k]['inputs'] = sorted(v['inputs'])

        json.dump(normalized, fh, sort_keys=True)
|
||||
|
||||
|
||||
class WebIDLCodegenManager(LoggingMixin):
    """Manages all things WebIDL.

    This object is meant to be generic and reusable. Paths, etc should be
    parameters and not hardcoded.
    """

    # Global parser derived declaration files.
    GLOBAL_DECLARE_FILES = {
        'GeneratedAtomList.h',
        'PrototypeList.h',
        'RegisterBindings.h',
        'UnionConversions.h',
        'UnionTypes.h',
    }

    # Global parser derived definition files.
    GLOBAL_DEFINE_FILES = {
        'RegisterBindings.cpp',
        'UnionTypes.cpp',
    }

    # Example interfaces to build along with the tree. Other example
    # interfaces will need to be generated manually.
    BUILD_EXAMPLE_INTERFACES = {
        'TestExampleInterface',
        'TestExampleProxyInterface',
    }

    def __init__(self, config_path, inputs, exported_header_dir,
                 codegen_dir, state_path, cache_dir=None, make_deps_path=None,
                 make_deps_target=None):
        """Create an instance that manages WebIDLs in the build system.

        config_path refers to a WebIDL config file (e.g. Bindings.conf).
        inputs is a 3-tuple describing the input .webidl files and how to
        process them. Members are:
            (set(.webidl files), set(basenames of exported files),
             set(basenames of generated events files))

        exported_header_dir and codegen_dir are directories where generated
        files will be written to.
        state_path is the path to a file that will receive JSON state from our
        actions.
        make_deps_path is the path to a make dependency file that we can
        optionally write.
        make_deps_target is the target that receives the make dependencies. It
        must be defined if using make_deps_path.
        """
        self.populate_logger()

        input_paths, exported_stems, generated_events_stems = inputs

        self._config_path = config_path
        self._input_paths = set(input_paths)
        self._exported_stems = set(exported_stems)
        self._generated_events_stems = set(generated_events_stems)
        self._exported_header_dir = exported_header_dir
        self._codegen_dir = codegen_dir
        self._state_path = state_path
        self._cache_dir = cache_dir
        self._make_deps_path = make_deps_path
        self._make_deps_target = make_deps_target

        # make_deps_path and make_deps_target only make sense together.
        if (make_deps_path and not make_deps_target) or (not make_deps_path and
                make_deps_target):
            raise Exception('Must define both make_deps_path and make_deps_target '
                'if one is defined.')

        # Populated lazily by _parse_webidl().
        self._parser_results = None
        self._config = None
        self._state = WebIDLCodegenManagerState()

        if os.path.exists(state_path):
            with open(state_path, 'rb') as fh:
                try:
                    self._state = WebIDLCodegenManagerState(fh=fh)
                except Exception as e:
                    # A corrupt/incompatible state file is not fatal: we fall
                    # back to empty state, which forces full regeneration.
                    self.log(logging.WARN, 'webidl_bad_state', {'msg': str(e)},
                        'Bad WebIDL state: {msg}')

    @property
    def config(self):
        # Parsing is expensive, so only do it on first access.
        if not self._config:
            self._parse_webidl()

        return self._config

    def generate_build_files(self):
        """Generate files required for the build.

        This function is in charge of generating all the .h/.cpp files derived
        from input .webidl files. Please note that there are build actions
        required to produce .webidl files and these build actions are
        explicitly not captured here: this function assumes all .webidl files
        are present and up to date.

        This routine is called as part of the build to ensure files that need
        to exist are present and up to date. This routine may not be called if
        the build dependencies (generated as a result of calling this the first
        time) say everything is up to date.

        Because reprocessing outputs for every .webidl on every invocation
        is expensive, we only regenerate the minimal set of files on every
        invocation. The rules for deciding what needs done are roughly as
        follows:

        1. If any .webidl changes, reparse all .webidl files and regenerate
           the global derived files. Only regenerate output files (.h/.cpp)
           impacted by the modified .webidl files.
        2. If an non-.webidl dependency (Python files, config file) changes,
           assume everything is out of date and regenerate the world. This
           is because changes in those could globally impact every output
           file.
        3. If an output file is missing, ensure it is present by performing
           necessary regeneration.
        """
        # Despite #1 above, we assume the build system is smart enough to not
        # invoke us if nothing has changed. Therefore, any invocation means
        # something has changed. And, if anything has changed, we need to
        # parse the WebIDL.
        self._parse_webidl()

        result = BuildResult()

        # If we parse, we always update globals - they are cheap and it is
        # easier that way.
        created, updated, unchanged = self._write_global_derived()
        result.created |= created
        result.updated |= updated
        result.unchanged |= unchanged

        # If any of the extra dependencies changed, regenerate the world.
        global_changed, global_hashes = self._global_dependencies_changed()
        if global_changed:
            # Make a copy because we may modify.
            changed_inputs = set(self._input_paths)
        else:
            changed_inputs = self._compute_changed_inputs()

        self._state['global_depends'] = global_hashes

        # Generate bindings from .webidl files.
        for filename in sorted(changed_inputs):
            basename = os.path.basename(filename)
            result.inputs.add(filename)
            written, deps = self._generate_build_files_for_webidl(filename)
            result.created |= written[0]
            result.updated |= written[1]
            result.unchanged |= written[2]

            # Record everything we know about this input so future runs can
            # do minimal regeneration (see _compute_changed_inputs).
            self._state['webidls'][basename] = dict(
                filename=filename,
                outputs=written[0] | written[1] | written[2],
                inputs=set(deps),
                sha1=self._input_hashes[filename],
            )

        # Process some special interfaces required for testing.
        for interface in self.BUILD_EXAMPLE_INTERFACES:
            written = self.generate_example_files(interface)
            result.created |= written[0]
            result.updated |= written[1]
            result.unchanged |= written[2]

        # Generate a make dependency file.
        if self._make_deps_path:
            mk = Makefile()
            codegen_rule = mk.create_rule([self._make_deps_target])
            codegen_rule.add_dependencies(global_hashes.keys())
            codegen_rule.add_dependencies(self._input_paths)

            with FileAvoidWrite(self._make_deps_path) as fh:
                mk.dump(fh)

        self._save_state()

        return result

    def generate_example_files(self, interface):
        """Generates example files for a given interface.

        Returns the (created, updated, unchanged) sets tuple from
        _maybe_write_codegen.
        """
        root = CGExampleRoot(self.config, interface)

        return self._maybe_write_codegen(root, *self._example_paths(interface))

    def _parse_webidl(self):
        # Parse every input .webidl, hashing each file's content along the
        # way so change detection can compare against previous runs.
        self.log(logging.INFO, 'webidl_parse',
            {'count': len(self._input_paths)},
            'Parsing {count} WebIDL files.')

        hashes = {}
        parser = WebIDL.Parser(self._cache_dir)

        for path in sorted(self._input_paths):
            with open(path, 'rb') as fh:
                data = fh.read()
                hashes[path] = hashlib.sha1(data).hexdigest()
                parser.parse(data, path)

        self._parser_results = parser.finish()
        self._config = Configuration(self._config_path, self._parser_results)
        self._input_hashes = hashes

    def _write_global_derived(self):
        """Write the globally derived declare/define files.

        Returns a (created, updated, unchanged) sets tuple.
        """
        things = [('declare', f) for f in self.GLOBAL_DECLARE_FILES]
        things.extend(('define', f) for f in self.GLOBAL_DEFINE_FILES)

        result = (set(), set(), set())

        for what, filename in things:
            # Each global file maps to a GlobalGenRoots attribute named after
            # its stem (e.g. 'PrototypeList.h' -> GlobalGenRoots.PrototypeList).
            stem = os.path.splitext(filename)[0]
            root = getattr(GlobalGenRoots, stem)(self._config)

            if what == 'declare':
                code = root.declare()
                output_root = self._exported_header_dir
            elif what == 'define':
                code = root.define()
                output_root = self._codegen_dir
            else:
                raise Exception('Unknown global gen type: %s' % what)

            output_path = os.path.join(output_root, filename)
            self._maybe_write_file(output_path, code, result)

        return result

    def _compute_changed_inputs(self):
        """Compute the set of input files that need regenerated."""
        changed_inputs = set()
        expected_outputs = self.expected_build_output_files()

        # Look for missing output files.
        if any(not os.path.exists(f) for f in expected_outputs):
            # FUTURE Bug 940469 Only regenerate minimum set.
            changed_inputs |= self._input_paths

        # That's it for examining output files. We /could/ examine SHA-1's of
        # output files from a previous run to detect modifications. But that's
        # a lot of extra work and most build systems don't do that anyway.

        # Now we move on to the input files.
        old_hashes = {v['filename']: v['sha1']
            for v in self._state['webidls'].values()}

        old_filenames = set(old_hashes.keys())
        new_filenames = self._input_paths

        # If an old file has disappeared or a new file has arrived, mark
        # it.
        changed_inputs |= old_filenames ^ new_filenames

        # For the files in common between runs, compare content. If the file
        # has changed, mark it. We don't need to perform mtime comparisons
        # because content is a stronger validator.
        for filename in old_filenames & new_filenames:
            if old_hashes[filename] != self._input_hashes[filename]:
                changed_inputs.add(filename)

        # We've now populated the base set of inputs that have changed.

        # Inherit dependencies from previous run. The full set of dependencies
        # is associated with each record, so we don't need to perform any fancy
        # graph traversal.
        for v in self._state['webidls'].values():
            if any(dep for dep in v['inputs'] if dep in changed_inputs):
                changed_inputs.add(v['filename'])

        # Ensure all changed inputs actually exist (some changed inputs could
        # have been from deleted files).
        return set(f for f in changed_inputs if os.path.exists(f))

    def _binding_info(self, p):
        """Compute binding metadata for an input path.

        Returns a tuple of:

          (stem, binding_stem, is_event, header_dir, output_files)

        output_files is itself a tuple. The first two items are the binding
        header and C++ paths, respectively. The 2nd pair are the event header
        and C++ paths or None if this isn't an event binding.
        """
        basename = os.path.basename(p)
        stem = os.path.splitext(basename)[0]
        binding_stem = '%sBinding' % stem

        # Exported interfaces get their headers in the exported directory;
        # everything else stays in the codegen directory.
        if stem in self._exported_stems:
            header_dir = self._exported_header_dir
        else:
            header_dir = self._codegen_dir

        is_event = stem in self._generated_events_stems

        files = (
            os.path.join(header_dir, '%s.h' % binding_stem),
            os.path.join(self._codegen_dir, '%s.cpp' % binding_stem),
            os.path.join(header_dir, '%s.h' % stem) if is_event else None,
            os.path.join(self._codegen_dir, '%s.cpp' % stem) if is_event else None,
        )

        return stem, binding_stem, is_event, header_dir, files

    def _example_paths(self, interface):
        # (header, cpp) paths for generated example code.
        return (
            os.path.join(self._codegen_dir, '%s-example.h' % interface),
            os.path.join(self._codegen_dir, '%s-example.cpp' % interface))

    def expected_build_output_files(self):
        """Obtain the set of files generate_build_files() should write."""
        paths = set()

        # Account for global generation.
        for p in self.GLOBAL_DECLARE_FILES:
            paths.add(os.path.join(self._exported_header_dir, p))
        for p in self.GLOBAL_DEFINE_FILES:
            paths.add(os.path.join(self._codegen_dir, p))

        for p in self._input_paths:
            stem, binding_stem, is_event, header_dir, files = self._binding_info(p)
            # files contains None placeholders for non-event bindings.
            paths |= {f for f in files if f}

        for interface in self.BUILD_EXAMPLE_INTERFACES:
            for p in self._example_paths(interface):
                paths.add(p)

        return paths

    def _generate_build_files_for_webidl(self, filename):
        # NOTE(review): this message lacks a '{filename}' placeholder —
        # presumably meant 'Generating WebIDL files derived from {filename}';
        # confirm against upstream before changing the runtime string.
        self.log(logging.INFO, 'webidl_generate_build_for_input',
            {'filename': filename},
            'Generating WebIDL files derived from (unknown)')

        stem, binding_stem, is_event, header_dir, files = self._binding_info(filename)
        root = CGBindingRoot(self._config, binding_stem, filename)

        result = self._maybe_write_codegen(root, files[0], files[1])

        if is_event:
            # Event bindings additionally generate a <stem>.h/<stem>.cpp pair.
            generated_event = CGEventRoot(self._config, stem)
            result = self._maybe_write_codegen(generated_event, files[2],
                files[3], result)

        return result, root.deps()

    def _global_dependencies_changed(self):
        """Determine whether the global dependencies have changed.

        Returns (changed, current_hashes) where current_hashes maps each
        global dependency path to its SHA-1.
        """
        current_files = set(iter_modules_in_path(os.path.dirname(__file__)))

        # We need to catch other .py files from /dom/bindings. We assume these
        # are in the same directory as the config file.
        current_files |= set(iter_modules_in_path(os.path.dirname(self._config_path)))

        current_files.add(self._config_path)

        current_hashes = {}
        for f in current_files:
            # This will fail if the file doesn't exist. If a current global
            # dependency doesn't exist, something else is wrong.
            with open(f, 'rb') as fh:
                current_hashes[f] = hashlib.sha1(fh.read()).hexdigest()

        # The set of files has changed.
        if current_files ^ set(self._state['global_depends'].keys()):
            return True, current_hashes

        # Compare hashes.
        for f, sha1 in current_hashes.items():
            if sha1 != self._state['global_depends'][f]:
                return True, current_hashes

        return False, current_hashes

    def _save_state(self):
        # Persist accumulated state as JSON for the next invocation.
        with open(self._state_path, 'wb') as fh:
            self._state.dump(fh)

    def _maybe_write_codegen(self, obj, declare_path, define_path, result=None):
        # Write obj's declare()/define() output, accumulating into result,
        # a (created, updated, unchanged) sets tuple.
        assert declare_path and define_path
        if not result:
            result = (set(), set(), set())

        self._maybe_write_file(declare_path, obj.declare(), result)
        self._maybe_write_file(define_path, obj.define(), result)

        return result

    def _maybe_write_file(self, path, content, result):
        # FileAvoidWrite only touches the file if content differs; its
        # close() reports (existed, updated) which we use to classify the
        # path into result's (created, updated, unchanged) buckets.
        fh = FileAvoidWrite(path)
        fh.write(content)
        existed, updated = fh.close()

        if not existed:
            result[0].add(path)
        elif updated:
            result[1].add(path)
        else:
            result[2].add(path)
|
||||
|
||||
|
||||
def create_build_system_manager(topsrcdir, topobjdir, dist_dir):
    """Create a WebIDLManager for use by the build system."""
    bindings_src = os.path.join(topsrcdir, 'dom', 'bindings')
    bindings_obj = os.path.join(topobjdir, 'dom', 'bindings')

    # The build backend writes the list of input .webidl files here.
    list_path = os.path.join(bindings_obj, 'file-lists.json')
    with open(list_path, 'rb') as fh:
        lists = json.load(fh)

    inputs = (lists['webidls'], lists['exported_stems'],
        lists['generated_events_stems'])

    cache_dir = os.path.join(bindings_obj, '_cache')
    try:
        os.makedirs(cache_dir)
    except OSError as e:
        # An already-existing cache directory is fine; anything else is not.
        if e.errno != errno.EEXIST:
            raise

    return WebIDLCodegenManager(
        os.path.join(bindings_src, 'Bindings.conf'),
        inputs,
        os.path.join(dist_dir, 'include', 'mozilla', 'dom'),
        bindings_obj,
        os.path.join(bindings_obj, 'codegen.json'),
        cache_dir=cache_dir,
        # The make rules include a codegen.pp file containing dependencies.
        make_deps_path=os.path.join(bindings_obj, 'codegen.pp'),
        make_deps_target='codegen.pp',
    )
|
||||
|
||||
|
||||
class BuildSystemWebIDL(MozbuildObject):
    """Exposes a lazily created WebIDLCodegenManager to build tooling."""

    @property
    def manager(self):
        # Create the manager once on first access and cache it on the
        # instance for subsequent lookups.
        cached = getattr(self, '_webidl_manager', None)
        if cached is None:
            cached = create_build_system_manager(
                self.topsrcdir, self.topobjdir, self.distdir)
            self._webidl_manager = cached

        return cached
|
3
dom/bindings/mozwebidl/test/Child.webidl
Normal file
3
dom/bindings/mozwebidl/test/Child.webidl
Normal file
@ -0,0 +1,3 @@
|
||||
/* Test fixture: interface inheriting from Parent (see Parent.webidl),
   used to exercise cross-file dependency tracking. */
interface Child : Parent {
  void ChildBaz();
};
|
2
dom/bindings/mozwebidl/test/DummyBinding.webidl
Normal file
2
dom/bindings/mozwebidl/test/DummyBinding.webidl
Normal file
@ -0,0 +1,2 @@
|
||||
/* Test fixture: trivial interfaces with no members. */
interface DummyInterface {};
interface DummyInterfaceWorkers {};
|
3
dom/bindings/mozwebidl/test/ExampleBinding.webidl
Normal file
3
dom/bindings/mozwebidl/test/ExampleBinding.webidl
Normal file
@ -0,0 +1,3 @@
|
||||
/* These interfaces are hard-coded and need to be defined.
   They correspond to WebIDLCodegenManager.BUILD_EXAMPLE_INTERFACES. */
interface TestExampleInterface {};
interface TestExampleProxyInterface {};
|
3
dom/bindings/mozwebidl/test/Parent.webidl
Normal file
3
dom/bindings/mozwebidl/test/Parent.webidl
Normal file
@ -0,0 +1,3 @@
|
||||
/* Test fixture: base interface that Child.webidl inherits from. */
interface Parent {
  void MethodFoo();
};
|
13
dom/bindings/mozwebidl/test/TestEvent.webidl
Normal file
13
dom/bindings/mozwebidl/test/TestEvent.webidl
Normal file
@ -0,0 +1,13 @@
|
||||
/* Test fixture: a generated-events binding, exercising the event
   codegen path (CGEventRoot). */
interface EventTarget {
  void addEventListener();
};

interface Event {};

callback EventHandlerNonNull = any (Event event);
typedef EventHandlerNonNull? EventHandler;

[NoInterfaceObject]
interface TestEvent : EventTarget {
  attribute EventHandler onfoo;
};
|
276
dom/bindings/mozwebidl/test/test_mozwebidl.py
Normal file
276
dom/bindings/mozwebidl/test/test_mozwebidl.py
Normal file
@ -0,0 +1,276 @@
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import imp
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
from mozwebidl import (
|
||||
WebIDLCodegenManager,
|
||||
WebIDLCodegenManagerState,
|
||||
)
|
||||
|
||||
from mozfile import NamedTemporaryFile
|
||||
|
||||
from mozunit import (
|
||||
MockedOpen,
|
||||
main,
|
||||
)
|
||||
|
||||
|
||||
OUR_DIR = os.path.abspath(os.path.dirname(__file__))
|
||||
TOPSRCDIR = os.path.normpath(os.path.join(OUR_DIR, '..', '..', '..', '..'))
|
||||
|
||||
|
||||
class TestWebIDLCodegenManager(unittest.TestCase):
    """Tests for WebIDLCodegenManager driven by the fixture .webidl files
    that live next to this test."""

    # Stems of fixture .webidl files that produce <stem>Binding.h/.cpp.
    TEST_STEMS = {
        'Child',
        'Parent',
        'ExampleBinding',
        'TestEvent',
    }

    @property
    def _static_input_paths(self):
        # All fixture .webidl files next to this test file.
        s = {os.path.join(OUR_DIR, p) for p in os.listdir(OUR_DIR)
            if p.endswith('.webidl')}

        return s

    @property
    def _config_path(self):
        # Use the real in-tree Bindings.conf as the codegen configuration.
        config = os.path.join(TOPSRCDIR, 'dom', 'bindings', 'Bindings.conf')
        self.assertTrue(os.path.exists(config))

        return config

    def _get_manager_args(self):
        """Build kwargs for WebIDLCodegenManager rooted in a throwaway dir."""
        tmp = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, tmp)

        cache_dir = os.path.join(tmp, 'cache')
        os.mkdir(cache_dir)

        ip = self._static_input_paths

        # (input paths, exported stems, generated-events stems); every
        # fixture is treated as exported and none as events here.
        inputs = (
            ip,
            {os.path.splitext(os.path.basename(p))[0] for p in ip},
            set()
        )

        return dict(
            config_path=self._config_path,
            inputs=inputs,
            exported_header_dir=os.path.join(tmp, 'exports'),
            codegen_dir=os.path.join(tmp, 'codegen'),
            state_path=os.path.join(tmp, 'state.json'),
            make_deps_path=os.path.join(tmp, 'codegen.pp'),
            make_deps_target='codegen.pp',
            cache_dir=cache_dir,
        )

    def _get_manager(self):
        return WebIDLCodegenManager(**self._get_manager_args())

    def test_unknown_state_version(self):
        """Loading a state file with a too new version resets state."""
        args = self._get_manager_args()

        p = args['state_path']

        with open(p, 'wb') as fh:
            json.dump({
                'version': WebIDLCodegenManagerState.VERSION + 1,
                'foobar': '1',
            }, fh)

        manager = WebIDLCodegenManager(**args)

        # The manager swallows the version error and falls back to empty
        # state rather than propagating.
        self.assertEqual(manager._state['version'],
            WebIDLCodegenManagerState.VERSION)
        self.assertNotIn('foobar', manager._state)

    def test_generate_build_files(self):
        """generate_build_files() does the right thing from empty."""
        manager = self._get_manager()
        result = manager.generate_build_files()
        self.assertEqual(len(result.inputs), 5)

        output = manager.expected_build_output_files()
        self.assertEqual(result.created, output)
        self.assertEqual(len(result.updated), 0)
        self.assertEqual(len(result.unchanged), 0)

        for f in output:
            self.assertTrue(os.path.isfile(f))

        for f in manager.GLOBAL_DECLARE_FILES:
            self.assertIn(os.path.join(manager._exported_header_dir, f), output)

        for f in manager.GLOBAL_DEFINE_FILES:
            self.assertIn(os.path.join(manager._codegen_dir, f), output)

        for s in self.TEST_STEMS:
            self.assertTrue(os.path.isfile(os.path.join(
                manager._exported_header_dir, '%sBinding.h' % s)))
            self.assertTrue(os.path.isfile(os.path.join(
                manager._codegen_dir, '%sBinding.cpp' % s)))

        self.assertTrue(os.path.isfile(manager._state_path))

        with open(manager._state_path, 'rb') as fh:
            state = json.load(fh)
            self.assertEqual(state['version'], 1)
            self.assertIn('webidls', state)

            child = state['webidls']['Child.webidl']
            self.assertEqual(len(child['inputs']), 2)
            self.assertEqual(len(child['outputs']), 2)
            # SHA-1 of the fixture Child.webidl content; changes if the
            # fixture file changes.
            self.assertEqual(child['sha1'], 'c41527cad3bc161fa6e7909e48fa11f9eca0468b')

    def test_generate_build_files_load_state(self):
        """State should be equivalent when instantiating a new instance."""
        args = self._get_manager_args()
        m1 = WebIDLCodegenManager(**args)
        self.assertEqual(len(m1._state['webidls']), 0)
        m1.generate_build_files()

        m2 = WebIDLCodegenManager(**args)
        self.assertGreater(len(m2._state['webidls']), 2)
        self.assertEqual(m1._state, m2._state)

    def test_no_change_no_writes(self):
        """If nothing changes, no files should be updated."""
        args = self._get_manager_args()
        m1 = WebIDLCodegenManager(**args)
        m1.generate_build_files()

        m2 = WebIDLCodegenManager(**args)
        result = m2.generate_build_files()

        self.assertEqual(len(result.inputs), 0)
        self.assertEqual(len(result.created), 0)
        self.assertEqual(len(result.updated), 0)

    def test_output_file_regenerated(self):
        """If an output file disappears, it is regenerated."""
        args = self._get_manager_args()
        m1 = WebIDLCodegenManager(**args)
        m1.generate_build_files()

        # Delete per-input and global outputs, counting as we go.
        rm_count = 0
        for p in m1._state['webidls']['Child.webidl']['outputs']:
            rm_count += 1
            os.unlink(p)

        for p in m1.GLOBAL_DECLARE_FILES:
            rm_count += 1
            os.unlink(os.path.join(m1._exported_header_dir, p))

        m2 = WebIDLCodegenManager(**args)
        result = m2.generate_build_files()
        self.assertEqual(len(result.created), rm_count)

    def test_only_rebuild_self(self):
        """If an input file changes, only rebuild that one file."""
        args = self._get_manager_args()
        m1 = WebIDLCodegenManager(**args)
        m1.generate_build_files()

        child_path = None
        for p in m1._input_paths:
            if p.endswith('Child.webidl'):
                child_path = p
                break

        self.assertIsNotNone(child_path)
        # NOTE(review): 'rb' read yields bytes; the '+' with a str below is
        # py2-era code — would need adjusting for Python 3.
        child_content = open(child_path, 'rb').read()

        # Simulate an edit by appending a comment via a mocked filesystem.
        with MockedOpen({child_path: child_content + '\n/* */'}):
            m2 = WebIDLCodegenManager(**args)
            result = m2.generate_build_files()
            self.assertEqual(result.inputs, set([child_path]))
            self.assertEqual(len(result.updated), 0)
            self.assertEqual(len(result.created), 0)

    def test_rebuild_dependencies(self):
        """Ensure an input file used by others results in others rebuilding."""
        args = self._get_manager_args()
        m1 = WebIDLCodegenManager(**args)
        m1.generate_build_files()

        parent_path = None
        child_path = None
        for p in m1._input_paths:
            if p.endswith('Parent.webidl'):
                parent_path = p
            elif p.endswith('Child.webidl'):
                child_path = p

        self.assertIsNotNone(parent_path)
        parent_content = open(parent_path, 'rb').read()

        # Touching Parent must also pull in Child, which depends on it.
        with MockedOpen({parent_path: parent_content + '\n/* */'}):
            m2 = WebIDLCodegenManager(**args)
            result = m2.generate_build_files()
            self.assertEqual(result.inputs, {child_path, parent_path})
            self.assertEqual(len(result.updated), 0)
            self.assertEqual(len(result.created), 0)

    def test_python_change_regenerate_everything(self):
        """If a Python file changes, we should attempt to rebuild everything."""

        # We don't want to mutate files in the source directory because we want
        # to be able to build from a read-only filesystem. So, we install a
        # dummy module and rewrite the metadata to say it comes from the source
        # directory.
        #
        # Hacking imp to accept a MockedFile doesn't appear possible. So for
        # the first iteration we read from a temp file. The second iteration
        # doesn't need to import, so we are fine with a mocked file.
        fake_path = os.path.join(OUR_DIR, 'fakemodule.py')
        with NamedTemporaryFile('wt') as fh:
            fh.write('# Original content')
            fh.flush()
            mod = imp.load_source('mozwebidl.fakemodule', fh.name)
            mod.__file__ = fake_path

            args = self._get_manager_args()
            m1 = WebIDLCodegenManager(**args)
            with MockedOpen({fake_path: '# Original content'}):
                # os.path.exists must also report the fake module present;
                # patch it for the duration and restore in finally.
                old_exists = os.path.exists
                try:
                    def exists(p):
                        if p == fake_path:
                            return True
                        return old_exists(p)

                    os.path.exists = exists

                    result = m1.generate_build_files()
                    l = len(result.inputs)

                    with open(fake_path, 'wt') as fh:
                        fh.write('# Modified content')

                    # A changed global dependency regenerates every input...
                    m2 = WebIDLCodegenManager(**args)
                    result = m2.generate_build_files()
                    self.assertEqual(len(result.inputs), l)

                    # ...and a second run with nothing changed does nothing.
                    result = m2.generate_build_files()
                    self.assertEqual(len(result.inputs), 0)
                finally:
                    os.path.exists = old_exists
                    del sys.modules['mozwebidl.fakemodule']
|
||||
|
||||
|
||||
# Allow running this test file directly; main comes from mozunit.
if __name__ == '__main__':
    main()
|
Loading…
Reference in New Issue
Block a user