Imported Upstream version 5.0.0.42

Former-commit-id: fd56571888259555122d8a0f58c68838229cea2b
Xamarin Public Jenkins (auto-signing)
2017-04-10 11:41:01 +00:00
parent 1190d13a04
commit 6bdd276d05
19939 changed files with 3099680 additions and 93811 deletions

27
external/bockbuild/.gitignore vendored Normal file

@@ -0,0 +1,27 @@
profiles/*/build-root
profiles/*/package-root
profiles/*/stage-root
profiles/*/toolchain-root
profiles/*/artifacts
profiles/*/logs
profiles/*/bundle.glick
profiles/*/global.env
profiles/*/*_env.sh
solitary/Options.cs
solitary/Solitary.exe*
*.pyc
*.pyo
*.pkg
*.sh
*~
.DS_Store
cache
/artifacts/
/builds/
/stage/
/toolchain/
/logs/
/scratch/
/.idea/
/distribution/
/last-successful-build.env

18
external/bockbuild/COPYING vendored Normal file

@@ -0,0 +1,18 @@
Copyright 2009-2010 Novell, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

25
external/bockbuild/README.md vendored Normal file

@@ -0,0 +1,25 @@
Note: This is the actively maintained version of Bockbuild, used to put together the Mono SDK package for macOS. The legacy versions (used for Banshee and older Mono versions) are available here: https://github.com/mono/bockbuild/tree/legacy
The Mono macOS SDK
------------------
Bockbuild is already provided as a submodule of Mono. To build a functional distribution in Bockbuild's 'stage' directory, begin from a Mono checkout:
$ git clone git@github.com:mono/mono
$ cd mono
$ make mac-sdk-package
To get a shell that uses your custom-built distribution (e.g. for testing, or to build and run MonoDevelop against it):
$ ./external/bockbuild/bb MacSDK --shell
Finally, to create a package of the distribution that installs on the "system Mono" path (/Library/Frameworks/Mono.framework/Versions/...):
$ ./external/bockbuild/bb MacSDK --package
Xamarin Releases
----------------
Release packages are built with the following:
$ ./external/bockbuild/bb MacSDKRelease --package
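The bb entry point added below also accepts individual package names after the profile name, together with flags such as --verbose (see its option parser). A hypothetical invocation — not from the README — that rebuilds only the cairo package with full build output would look like:
$ ./external/bockbuild/bb MacSDK --build --verbose cairo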

1
external/bockbuild/VERSION vendored Normal file

@@ -0,0 +1 @@
2.0

392
external/bockbuild/bb vendored Executable file

@@ -0,0 +1,392 @@
#!/usr/bin/python -u -OO
import os
from optparse import OptionParser
from bockbuild.util.util import *
from bockbuild.util.csproj import *
from bockbuild.environment import Environment
from bockbuild.package import *
from bockbuild.profile import Profile
import collections
import hashlib
import itertools
import traceback
from collections import namedtuple
ProfileDesc = namedtuple ('Profile', 'name description path modes')
global active_profile, bockbuild
active_profile = None
bockbuild = None
def find_profiles (base_path):
assert Profile.loaded == None
search_path = first_existing(['%s/bockbuild' % base_path, '%s/packaging' % base_path])
sys.path.append(search_path)
profiles = []
resolved_names = []
while True:
progress_made = False
for path in iterate_dir (search_path, with_dirs=True):
file = '%s/profile.py' % path
if os.path.isdir (path) and os.path.isfile (file):
name = os.path.basename (path)
if name in resolved_names:
continue
fail = None
profile = None
try:
execfile(file, globals())
if not Profile.loaded:
fail = 'No profile loaded'
profile = Profile.loaded
except Exception as e:
fail = e
finally:
Profile.loaded = None
if not fail:
profile = Profile.loaded
Profile.loaded = None
progress_made = True
description = ""
if hasattr(profile.__class__, 'description'):
description = profile.__class__.description
profiles.append (ProfileDesc (name = name, description = description, path = path, modes = ""))
resolved_names.append(name)
else:
warn(fail)
if not progress_made:
break
assert Profile.loaded == None
return profiles
class Bockbuild:
def run(self):
self.name = 'bockbuild'
self.root = os.path.dirname (os.path.realpath(__file__)) # Bockbuild system root
config.protected_git_repos.append (self.root)
self.execution_root = os.getcwd()
config.absolute_root = os.path.commonprefix([self.root, self.execution_root])
self.resources = set([os.path.realpath(
os.path.join(self.root, 'packages'))]) # list of paths on where to look for packages, patches, etc.
self.build_root = os.path.join(self.root, 'builds')
self.staged_prefix = os.path.join(self.root, 'stage')
self.toolchain_root = os.path.join(self.root, 'toolchain')
self.artifact_root = os.path.join(self.root, 'artifacts')
self.package_root = os.path.join(self.root, 'distribution')
self.scratch = os.path.join(self.root, 'scratch')
self.logs = os.path.join(self.root, 'logs')
self.env_file = os.path.join(self.root, 'last-successful-build.env')
self.source_cache = os.getenv('BOCKBUILD_SOURCE_CACHE') or os.path.realpath(
os.path.join(self.root, 'cache'))
self.cpu_count = get_cpu_count()
self.host = get_host()
self.uname = backtick('uname -a')
self.full_rebuild = False
self.toolchain = []
find_git(self)
self.bockbuild_rev = git_get_revision(self, self.root)
self.profile_root = git_rootdir (self, self.execution_root)
self.profiles = find_profiles (self.profile_root)
for profile in self.profiles:
self.resources.add(profile.path)
loginit('bockbuild (%s)' % (git_shortid(self, self.root)))
info('cmd: %s' % ' '.join(sys.argv))
if len (sys.argv) < 2:
info ('Profiles in %s --' % self.git ('config --get remote.origin.url', self.profile_root)[0])
info(map (lambda x: '\t%s: %s' % (x.name, x.description), self.profiles))
finish()
global active_profile
Package.profile = active_profile = self.load_profile (sys.argv[1])
self.parser = self.init_parser()
self.cmd_options, self.cmd_args = self.parser.parse_args(sys.argv[2:])
self.packages_to_build = self.cmd_args or active_profile.packages
active_profile.setup()
self.verbose = self.cmd_options.verbose
config.verbose = self.cmd_options.verbose
self.arch = self.cmd_options.arch
self.unsafe = self.cmd_options.unsafe
config.trace = self.cmd_options.trace
self.tracked_env = []
ensure_dir(self.source_cache, purge=False)
ensure_dir(self.artifact_root, purge=False)
ensure_dir(self.build_root, purge=False)
ensure_dir(self.scratch, purge=True)
ensure_dir(self.logs, purge=False)
self.build()
def init_parser(self):
parser = OptionParser(
usage='usage: %prog [options] [package_names...]')
parser.add_option('--build',
action='store_true', dest='do_build', default=True,
help='build the profile')
parser.add_option('--package',
action='store_true', dest='do_package', default=False,
help='package the profile')
parser.add_option('--verbose',
action='store_true', dest='verbose', default=False,
help='show all build output (e.g. configure, make)')
parser.add_option('-d', '--debug', default=False,
action='store_true', dest='debug',
help='Build with debug flags enabled')
parser.add_option('-e', '--environment', default=False,
action='store_true', dest='dump_environment',
help='Dump the profile environment as a shell-sourceable list of exports ')
parser.add_option('-r', '--release', default=False,
action='store_true', dest='release_build',
help='Whether or not this build is a release build')
parser.add_option('', '--csproj-env', default=False,
action='store_true', dest='dump_environment_csproj',
help='Dump the profile environment XML-formatted for use in .csproj files')
parser.add_option('', '--csproj-insert', default=None,
action='store', dest='csproj_file',
help='Inserts the profile environment variables into VS/MonoDevelop .csproj files')
parser.add_option('', '--arch', default='default',
action='store', dest='arch',
help='Select the target architecture(s) for the package')
parser.add_option('', '--shell', default=False,
action='store_true', dest='shell',
help='Get a shell with the package environment')
parser.add_option('', '--unsafe', default=False,
action='store_true', dest='unsafe',
help='Prevents full rebuilds when a build environment change is detected. Useful for debugging.')
parser.add_option('', '--trace', default=False,
action='store_true', dest='trace',
help='Enable tracing (for diagnosing bockbuild problems)')
return parser
def build_distribution(self, packages, dest, stage, arch):
# TODO: full relocation means that we shouldn't need dest at this stage
build_list = []
stage_invalidated = False  # if anything is dirty we flush the staging path and fill it again
progress('Fetching packages')
for package in packages.values():
package.build_artifact = os.path.join(
self.artifact_root, '%s-%s' % (package.name, arch))
package.buildstring_file = package.build_artifact + '.buildstring'
package.log = os.path.join(self.logs, package.name + '.log')
if os.path.exists(package.log):
delete(package.log)
package.source_dir_name = expand_macros(package.source_dir_name, package)
dest = os.path.join(self.build_root, package.source_dir_name)
package.fetch(dest)
if self.full_rebuild:
package.request_build('Full rebuild')
elif not os.path.exists(package.build_artifact):
package.request_build('No artifact')
elif is_changed(package.buildstring, package.buildstring_file):
package.request_build('Updated')
if package.needs_build:
build_list.append(package)
stage_invalidated = True
verbose('%d packages need building:' % len(build_list))
verbose(['%s (%s)' % (x.name, x.needs_build) for x in build_list])
if stage_invalidated:
ensure_dir (stage, purge = True)
for package in packages.values():
package.deploy_requests.append (stage)
for package in packages.values():
package.start_build(arch, stage, stage)
# make artifact in scratch
# delete artifact + buildstring
with open(package.buildstring_file, 'w') as output:
output.write('\n'.join(package.buildstring))
def build(self):
profile = active_profile
env = profile.env
if self.cmd_options.dump_environment:
env.compile()
env.dump()
sys.exit(0)
if self.cmd_options.dump_environment_csproj:
# specify to use our GAC, else MonoDevelop would
# use its own
env.set('MONO_GAC_PREFIX', self.staged_prefix)
env.compile()
env.dump_csproj()
sys.exit(0)
if self.cmd_options.csproj_file is not None:
env.set('MONO_GAC_PREFIX', self.staged_prefix)
env.compile()
env.write_csproj(self.cmd_options.csproj_file)
sys.exit(0)
profile.toolchain_packages = collections.OrderedDict()
for source in self.toolchain:
package = self.load_package(source)
profile.toolchain_packages[package.name] = package
profile.release_packages = collections.OrderedDict()
for source in self.packages_to_build:
package = self.load_package(source)
profile.release_packages[package.name] = package
profile.setup_release()
if self.track_env():
if self.unsafe:
warn('Build environment changed, but overriding full rebuild!')
else:
info('Build environment changed, full rebuild triggered')
self.full_rebuild = True
ensure_dir(self.build_root, purge=True)
if self.cmd_options.shell:
title('Shell')
self.shell()
if self.cmd_options.do_build:
title('Building toolchain')
self.build_distribution(
profile.toolchain_packages, self.toolchain_root, self.toolchain_root, arch='toolchain')
title('Building release')
self.build_distribution(
profile.release_packages, profile.prefix, self.staged_prefix, arch=self.arch)
# update env
with open(self.env_file, 'w') as output:
output.write('\n'.join(self.tracked_env))
if self.cmd_options.do_package:
title('Packaging')
protect_dir(self.staged_prefix)
ensure_dir(self.package_root, True)
run_shell('rsync -aPq %s/* %s' %
(self.staged_prefix, self.package_root), False)
unprotect_dir(self.package_root)
profile.process_release(self.package_root)
profile.package()
def track_env(self):
env = active_profile.env
env.compile()
env.export()
self.env_script = os.path.join(
self.root, self.profile_name) + '_env.sh'
env.write_source_script(self.env_script)
if not os.path.exists (self.env_file):
return False
self.tracked_env.extend(env.serialize())
return is_changed(self.tracked_env, self.env_file)
def load_package(self, source):
if isinstance(source, Package): # package can already be loaded in the source list
return source
fullpath = None
for i in self.resources:
candidate_fullpath = os.path.join(i, source + '.py')
trace (candidate_fullpath)
if os.path.exists(candidate_fullpath):
if fullpath is not None:
error ('Package "%s" resolved in multiple locations (search paths: %s)' % (source, self.resources))
fullpath = candidate_fullpath
if not fullpath:
error("Package '%s' not found ('search paths: %s')" % (source, self.resources))
Package.last_instance = None
execfile(fullpath, globals())
if Package.last_instance is None:
error('%s does not provide a valid package.' % source)
new_package = Package.last_instance
new_package._path = fullpath
return new_package
def load_profile(self, source):
if Profile.loaded:
error ('A profile is already loaded: %s' % Profile.loaded)
path = None
for profile in self.profiles:
if profile.name == source:
path = profile.path
if path == None:
if isinstance(source, Profile): # a profile instance can be passed directly in the source list
Profile.loaded = source
else:
error("Profile '%s' not found" % source)
fullpath = os.path.join(path, 'profile.py')
if not os.path.exists(fullpath):
error("Profile '%s' not found" % source)
sys.path.append (path)
self.resources.add (path)
execfile(fullpath, globals())
Profile.loaded.attach (self)
if Profile.loaded is None:
error('%s does not provide a valid profile (developers: ensure Profile.attach() is called.)' % source)
if Profile.loaded.bockbuild is None:
error ('Profile init is invalid: Failed to attach to bockbuild object')
new_profile = Profile.loaded
new_profile._path = fullpath
new_profile.directory = path
new_profile.git_root = git_rootdir (self, os.path.dirname (path))
config.protected_git_repos.append (new_profile.git_root)
self.profile_name = source
return new_profile
if __name__ == "__main__":
try:
bockbuild = Bockbuild()
bockbuild.run()
except Exception as e:
exc_type, exc_value, exc_traceback = sys.exc_info()
error('%s (%s)' % (e, exc_type.__name__), more_output=True)
error(('%s:%s @%s\t\t"%s"' % p for p in traceback.extract_tb(
exc_traceback)[-5:]), more_output=True)
except KeyboardInterrupt:
error('Interrupted.')
finally:
finish()

392
external/bockbuild/bockbuild.py vendored Executable file

@@ -0,0 +1,392 @@
#!/usr/bin/python -u -OO
import os
from optparse import OptionParser
from bockbuild.util.util import *
from bockbuild.util.csproj import *
from bockbuild.environment import Environment
from bockbuild.package import *
from bockbuild.profile import Profile
import collections
import hashlib
import itertools
import traceback
from collections import namedtuple
ProfileDesc = namedtuple ('Profile', 'name description path modes')
global active_profile, bockbuild
active_profile = None
bockbuild = None
def find_profiles (base_path):
assert Profile.loaded == None
search_path = first_existing(['%s/bockbuild' % base_path, '%s/packaging' % base_path])
sys.path.append(search_path)
profiles = []
resolved_names = []
while True:
progress_made = False
for path in iterate_dir (search_path, with_dirs=True):
file = '%s/profile.py' % path
if os.path.isdir (path) and os.path.isfile (file):
name = os.path.basename (path)
if name in resolved_names:
continue
fail = None
profile = None
try:
execfile(file, globals())
if not Profile.loaded:
fail = 'No profile loaded'
profile = Profile.loaded
except Exception as e:
fail = e
finally:
Profile.loaded = None
if not fail:
profile = Profile.loaded
Profile.loaded = None
progress_made = True
description = ""
if hasattr(profile.__class__, 'description'):
description = profile.__class__.description
profiles.append (ProfileDesc (name = name, description = description, path = path, modes = ""))
resolved_names.append(name)
else:
warn(fail)
if not progress_made:
break
assert Profile.loaded == None
return profiles
class Bockbuild:
def run(self):
self.name = 'bockbuild'
self.root = os.path.dirname (os.path.realpath(__file__)) # Bockbuild system root
config.protected_git_repos.append (self.root)
self.execution_root = os.getcwd()
config.absolute_root = os.path.commonprefix([self.root, self.execution_root])
self.resources = set([os.path.realpath(
os.path.join(self.root, 'packages'))]) # list of paths on where to look for packages, patches, etc.
self.build_root = os.path.join(self.root, 'builds')
self.staged_prefix = os.path.join(self.root, 'stage')
self.toolchain_root = os.path.join(self.root, 'toolchain')
self.artifact_root = os.path.join(self.root, 'artifacts')
self.package_root = os.path.join(self.root, 'distribution')
self.scratch = os.path.join(self.root, 'scratch')
self.logs = os.path.join(self.root, 'logs')
self.env_file = os.path.join(self.root, 'last-successful-build.env')
self.source_cache = os.getenv('BOCKBUILD_SOURCE_CACHE') or os.path.realpath(
os.path.join(self.root, 'cache'))
self.cpu_count = get_cpu_count()
self.host = get_host()
self.uname = backtick('uname -a')
self.full_rebuild = False
self.toolchain = []
find_git(self)
self.bockbuild_rev = git_get_revision(self, self.root)
self.profile_root = git_rootdir (self, self.execution_root)
self.profiles = find_profiles (self.profile_root)
for profile in self.profiles:
self.resources.add(profile.path)
loginit('bockbuild (%s)' % (git_shortid(self, self.root)))
info('cmd: %s' % ' '.join(sys.argv))
if len (sys.argv) < 2:
info ('Profiles in %s --' % self.git ('config --get remote.origin.url', self.profile_root)[0])
info(map (lambda x: '\t%s: %s' % (x.name, x.description), self.profiles))
finish()
global active_profile
Package.profile = active_profile = self.load_profile (sys.argv[1])
self.parser = self.init_parser()
self.cmd_options, self.cmd_args = self.parser.parse_args(sys.argv[2:])
self.packages_to_build = self.cmd_args or active_profile.packages
active_profile.setup()
self.verbose = self.cmd_options.verbose
config.verbose = self.cmd_options.verbose
self.arch = self.cmd_options.arch
self.unsafe = self.cmd_options.unsafe
config.trace = self.cmd_options.trace
self.tracked_env = []
ensure_dir(self.source_cache, purge=False)
ensure_dir(self.artifact_root, purge=False)
ensure_dir(self.build_root, purge=False)
ensure_dir(self.scratch, purge=True)
ensure_dir(self.logs, purge=False)
self.build()
def init_parser(self):
parser = OptionParser(
usage='usage: %prog [options] [package_names...]')
parser.add_option('--build',
action='store_true', dest='do_build', default=True,
help='build the profile')
parser.add_option('--package',
action='store_true', dest='do_package', default=False,
help='package the profile')
parser.add_option('--verbose',
action='store_true', dest='verbose', default=False,
help='show all build output (e.g. configure, make)')
parser.add_option('-d', '--debug', default=False,
action='store_true', dest='debug',
help='Build with debug flags enabled')
parser.add_option('-e', '--environment', default=False,
action='store_true', dest='dump_environment',
help='Dump the profile environment as a shell-sourceable list of exports ')
parser.add_option('-r', '--release', default=False,
action='store_true', dest='release_build',
help='Whether or not this build is a release build')
parser.add_option('', '--csproj-env', default=False,
action='store_true', dest='dump_environment_csproj',
help='Dump the profile environment XML-formatted for use in .csproj files')
parser.add_option('', '--csproj-insert', default=None,
action='store', dest='csproj_file',
help='Inserts the profile environment variables into VS/MonoDevelop .csproj files')
parser.add_option('', '--arch', default='default',
action='store', dest='arch',
help='Select the target architecture(s) for the package')
parser.add_option('', '--shell', default=False,
action='store_true', dest='shell',
help='Get a shell with the package environment')
parser.add_option('', '--unsafe', default=False,
action='store_true', dest='unsafe',
help='Prevents full rebuilds when a build environment change is detected. Useful for debugging.')
parser.add_option('', '--trace', default=False,
action='store_true', dest='trace',
help='Enable tracing (for diagnosing bockbuild problems)')
return parser
def build_distribution(self, packages, dest, stage, arch):
# TODO: full relocation means that we shouldn't need dest at this stage
build_list = []
stage_invalidated = False  # if anything is dirty we flush the staging path and fill it again
progress('Fetching packages')
for package in packages.values():
package.build_artifact = os.path.join(
self.artifact_root, '%s-%s' % (package.name, arch))
package.buildstring_file = package.build_artifact + '.buildstring'
package.log = os.path.join(self.logs, package.name + '.log')
if os.path.exists(package.log):
delete(package.log)
package.source_dir_name = expand_macros(package.source_dir_name, package)
dest = os.path.join(self.build_root, package.source_dir_name)
package.fetch(dest)
if self.full_rebuild:
package.request_build('Full rebuild')
elif not os.path.exists(package.build_artifact):
package.request_build('No artifact')
elif is_changed(package.buildstring, package.buildstring_file):
package.request_build('Updated')
if package.needs_build:
build_list.append(package)
stage_invalidated = True
verbose('%d packages need building:' % len(build_list))
verbose(['%s (%s)' % (x.name, x.needs_build) for x in build_list])
if stage_invalidated:
ensure_dir (stage, purge = True)
for package in packages.values():
package.deploy_requests.append (stage)
for package in packages.values():
package.start_build(arch, stage, stage)
# make artifact in scratch
# delete artifact + buildstring
with open(package.buildstring_file, 'w') as output:
output.write('\n'.join(package.buildstring))
def build(self):
profile = active_profile
env = profile.env
if self.cmd_options.dump_environment:
env.compile()
env.dump()
sys.exit(0)
if self.cmd_options.dump_environment_csproj:
# specify to use our GAC, else MonoDevelop would
# use its own
env.set('MONO_GAC_PREFIX', self.staged_prefix)
env.compile()
env.dump_csproj()
sys.exit(0)
if self.cmd_options.csproj_file is not None:
env.set('MONO_GAC_PREFIX', self.staged_prefix)
env.compile()
env.write_csproj(self.cmd_options.csproj_file)
sys.exit(0)
profile.toolchain_packages = collections.OrderedDict()
for source in self.toolchain:
package = self.load_package(source)
profile.toolchain_packages[package.name] = package
profile.release_packages = collections.OrderedDict()
for source in self.packages_to_build:
package = self.load_package(source)
profile.release_packages[package.name] = package
profile.setup_release()
if self.track_env():
if self.unsafe:
warn('Build environment changed, but overriding full rebuild!')
else:
info('Build environment changed, full rebuild triggered')
self.full_rebuild = True
ensure_dir(self.build_root, purge=True)
if self.cmd_options.shell:
title('Shell')
self.shell()
if self.cmd_options.do_build:
title('Building toolchain')
self.build_distribution(
profile.toolchain_packages, self.toolchain_root, self.toolchain_root, arch='toolchain')
title('Building release')
self.build_distribution(
profile.release_packages, profile.prefix, self.staged_prefix, arch=self.arch)
# update env
with open(self.env_file, 'w') as output:
output.write('\n'.join(self.tracked_env))
if self.cmd_options.do_package:
title('Packaging')
protect_dir(self.staged_prefix)
ensure_dir(self.package_root, True)
run_shell('rsync -aPq %s/* %s' %
(self.staged_prefix, self.package_root), False)
unprotect_dir(self.package_root)
profile.process_release(self.package_root)
profile.package()
def track_env(self):
env = active_profile.env
env.compile()
env.export()
self.env_script = os.path.join(
self.root, self.profile_name) + '_env.sh'
env.write_source_script(self.env_script)
if not os.path.exists (self.env_file):
return False
self.tracked_env.extend(env.serialize())
return is_changed(self.tracked_env, self.env_file)
def load_package(self, source):
if isinstance(source, Package): # package can already be loaded in the source list
return source
fullpath = None
for i in self.resources:
candidate_fullpath = os.path.join(i, source + '.py')
trace (candidate_fullpath)
if os.path.exists(candidate_fullpath):
if fullpath is not None:
error ('Package "%s" resolved in multiple locations (search paths: %s)' % (source, self.resources))
fullpath = candidate_fullpath
if not fullpath:
error("Package '%s' not found ('search paths: %s')" % (source, self.resources))
Package.last_instance = None
execfile(fullpath, globals())
if Package.last_instance is None:
error('%s does not provide a valid package.' % source)
new_package = Package.last_instance
new_package._path = fullpath
return new_package
def load_profile(self, source):
if Profile.loaded:
error ('A profile is already loaded: %s' % Profile.loaded)
path = None
for profile in self.profiles:
if profile.name == source:
path = profile.path
if path == None:
if isinstance(source, Profile): # a profile instance can be passed directly in the source list
Profile.loaded = source
else:
error("Profile '%s' not found" % source)
fullpath = os.path.join(path, 'profile.py')
if not os.path.exists(fullpath):
error("Profile '%s' not found" % source)
sys.path.append (path)
self.resources.add (path)
execfile(fullpath, globals())
Profile.loaded.attach (self)
if Profile.loaded is None:
error('%s does not provide a valid profile (developers: ensure Profile.attach() is called.)' % source)
if Profile.loaded.bockbuild is None:
error ('Profile init is invalid: Failed to attach to bockbuild object')
new_profile = Profile.loaded
new_profile._path = fullpath
new_profile.directory = path
new_profile.git_root = git_rootdir (self, os.path.dirname (path))
config.protected_git_repos.append (new_profile.git_root)
self.profile_name = source
return new_profile
if __name__ == "__main__":
try:
bockbuild = Bockbuild()
bockbuild.run()
except Exception as e:
exc_type, exc_value, exc_traceback = sys.exc_info()
error('%s (%s)' % (e, exc_type.__name__), more_output=True)
error(('%s:%s @%s\t\t"%s"' % p for p in traceback.extract_tb(
exc_traceback)[-5:]), more_output=True)
except KeyboardInterrupt:
error('Interrupted.')
finally:
finish()

@@ -0,0 +1,327 @@
import os
import shutil
from plistlib import Plist
from util.util import *
from unixprofile import UnixProfile
from profile import Profile
import stat
# staging helper functions
def match_stageable_text(path, filetype):
if os.path.islink(path) or os.path.isdir(path):
return False
return path.endswith('.pc') or 'libtool library file' in filetype or 'text executable' in filetype
def match_text(path, filetype):
if os.path.islink(path) or os.path.isdir(path):
return False
return match_stageable_text(path, filetype) or 'text' in filetype
def match_stageable_binary(path, filetype):
if os.path.islink(path) or os.path.isdir(path):
return False
return 'Mach-O' in filetype and not path.endswith('.a') and not 'dSYM' in path
def match_symlinks(path, filetype):
return os.path.islink(path)
def match_real_files(path, filetype):
return (not os.path.islink(path) and not os.path.isdir(path))
class DarwinProfile (UnixProfile):
# package order is very important.
# autoconf and automake don't depend on CC
# ccache uses a different CC since it's not installed yet
# everything after ccache needs a working ccache
default_toolchain = [
'autoconf',
'automake',
'ccache',
'libtool',
'xz',
'tar',
'gtk-osx-docbook',
'gtk-doc',
# needed to autogen gtk+
'gtk-osx-docbook',
'gtk-doc'
]
def attach (self, bockbuild):
UnixProfile.attach (self, bockbuild)
bockbuild.toolchain = list (DarwinProfile.default_toolchain)
self.name = 'darwin'
xcode_version = backtick('xcodebuild -version')[0]
self.env.set('xcode_version', xcode_version)
osx_sdk = backtick('xcrun --show-sdk-path')[0]
self.env.set('osx_sdk', osx_sdk)
if not os.path.exists(osx_sdk):
error('Mac OS X SDK not found under %s' % osx_sdk)
info('%s, %s' % (xcode_version, os.path.basename(osx_sdk)))
self.gcc_flags.extend([
'-D_XOPEN_SOURCE',
'-isysroot %s' % osx_sdk,
# needed to ensure install_name_tool can succeed when staging binaries
'-Wl,-headerpad_max_install_names'
])
self.ld_flags.extend([
# needed to ensure install_name_tool can succeed when staging binaries
'-headerpad_max_install_names'
])
def setup (self):
if self.min_version:
self.target_osx = '10.%s' % self.min_version
self.gcc_flags.extend(
['-mmacosx-version-min=%s' % self.target_osx])
self.env.set('MACOSX_DEPLOYMENT_TARGET', self.target_osx)
if Profile.bockbuild.cmd_options.debug is True:
self.gcc_flags.extend(['-O0', '-ggdb3'])
if os.getenv('BOCKBUILD_USE_CCACHE') is None:
self.env.set('CC', 'xcrun gcc')
self.env.set('CXX', 'xcrun g++')
else:
self.env.set('CC', 'ccache xcrun gcc')
self.env.set('CXX', 'ccache xcrun g++')
self.debug_info = []
if self.bockbuild.cmd_options.arch == 'default':
self.bockbuild.cmd_options.arch = 'darwin-32'
def arch_build(self, arch, package):
if arch == 'darwin-universal':
package.local_ld_flags = ['-arch i386', '-arch x86_64']
package.local_gcc_flags = ['-arch i386', '-arch x86_64']
elif arch == 'darwin-32':
package.local_ld_flags = ['-arch i386', '-m32']
package.local_gcc_flags = ['-arch i386', '-m32']
package.local_configure_flags = [
'--build=i386-apple-darwin11.2.0', '--disable-dependency-tracking']
elif arch == 'darwin-64':
package.local_ld_flags = ['-arch x86_64 -m64']
package.local_gcc_flags = ['-arch x86_64 -m64']
package.local_configure_flags = ['--disable-dependency-tracking']
else:
error('Unknown arch %s' % arch)
configure_cache = '%s/%s-%s.cache' % (self.bockbuild.build_root, package.name, arch)
package.aux_files.append (configure_cache)
package.local_configure_flags.extend(
['--cache-file=%s' % configure_cache])
if package.name in self.debug_info:
package.local_gcc_flags.extend(['-g'])
def process_package(self, package):
failure_count = 0
def staging_harness(path, func, failure_count=failure_count):
def relocate_to_profile(token):
if token.find(package.staged_prefix) == -1 and token.find(package.staged_profile) == -1:
newtoken = token.replace(
package.package_prefix, package.staged_profile)
else:
newtoken = token.replace(
package.staged_prefix, package.staged_profile)
if newtoken != token:
package.trace('%s:\n\t%s\t->\t%s' %
(os.path.basename(path), token, newtoken))
return newtoken
if (path.endswith('.release')):
error("Staging backup exists in dir we're trying to stage: %s" % path)
backup = path + '.release'
shutil.copy2(path, backup)
try:
trace('Staging %s' % path)
func(path, relocate_to_profile)
if os.path.exists(path + '.stage'):
os.remove(path)
shutil.move(path + '.stage', path)
shutil.copystat(backup, path)
except CommandException as e:
package.rm_if_exists(path)
shutil.copy2(backup, path)
package.rm(backup)
warn('Staging failed for %s' % os.path.basename(path))
error(str(e))
failure_count = failure_count + 1
if failure_count > 10:
error('Possible staging issue, >10 staging failures')
extra_files = [os.path.join(package.staged_prefix, expand_macros(file, package))
for file in package.extra_stage_files]
procs = []
if package.name in self.debug_info:
procs.append(self.generate_dsyms())
procs.append(self.stage_textfiles(harness=staging_harness,
match=match_stageable_text, extra_files=extra_files))
procs.append(self.stage_binaries(
harness=staging_harness, match=match_stageable_binary))
Profile.postprocess(self, procs, package.staged_prefix)
def process_release(self, directory):
# validate staged install
# TODO: move to end of '--build' instead of at the beginning of '--package'
# unprotect_dir (self.staged_prefix, recursive = True)
# Profile.postprocess (self, [self.validate_rpaths (match = self.match_stageable_binary),
# self.validate_symlinks (match = self.match_symlinks)],
# self.staged_prefix)
unprotect_dir(directory, recursive=True)
def destaging_harness(backup, func):
path = backup[0:-len('.release')]
trace(path)
def relocate_for_release(token):
newtoken = token.replace(self.staged_prefix, self.prefix).replace(directory, self.prefix)
if newtoken != token:
trace('%s:\n\t%s\t->\t%s' %
(os.path.basename(path), token, newtoken))
return newtoken
try:
trace('Destaging %s' % path)
func(path, relocate_for_release)
if os.path.exists(path + '.stage'):
os.remove(path)
shutil.move(path + '.stage', path)
shutil.copystat(backup, path)
os.remove(backup)
except Exception as e:
warn ("Critical: Destaging failed for '%s'" % path)
raise
procs = [self.stage_textfiles(harness=destaging_harness, match=match_text),
self.stage_binaries(harness=destaging_harness, match=match_stageable_binary)]
Profile.postprocess(self, procs, directory,
lambda l: l.endswith('.release'))
class validate_text_staging (Profile.FileProcessor):
problem_files = []
def __init__(self, package):
self.package = package
Profile.FileProcessor.__init__(self)
def process(self, path):
with open(path) as text:
stage_name = os.path.basename(self.package.stage_root)
for line in text:
if stage_name in line:
warn("String '%s' was found in %s" %
(stage_name, self.relpath(path)))
self.problem_files.append(self.relpath(path))
def end(self):
if len(self.problem_files) > 0:
error('Problematic staging files:\n' +
'\n'.join(self.problem_files))
class validate_symlinks (Profile.FileProcessor):
problem_links = []
def process(self, path):
if path.endswith('.release'):
# get rid of these symlinks
os.remove(path)
return
target = os.path.realpath(path)
if not os.path.exists(target) or not target.startswith(self.root):
self.problem_links.append(
'%s -> %s' % (self.relpath(path), target))
def end(self):
if len(self.problem_links) > 0:
# TODO: Turn into error when we handle them
warn('Broken symlinks:\n' + '\n'.join(self.problem_links))
class generate_dsyms (Profile.FileProcessor):
def __init__(self):
Profile.FileProcessor.__init__(self, match=match_stageable_binary)
def process(self, path):
run_shell('dsymutil -t 2 "%s" >/dev/null' % path)
run_shell('strip -S "%s" > /dev/null' % path)
class validate_rpaths (Profile.FileProcessor):
def process(self, path):
if path.endswith('.release'):
return
libs = backtick('otool -L %s' % path)
for line in libs:
# parse 'otool -L'
if not line.startswith('\t'):
continue
rpath = line.strip().split(' ')[0]
if not os.path.exists(rpath):
error('%s contains reference to non-existing file %s' %
(self.relpath(path), rpath))
# if rpath.startswith (self.package.profile.MONO_ROOT):
# error ('%s is linking to external distribution %s' % (path, rpath))
class stage_textfiles (Profile.FileProcessor):
def process(self, path, fixup_func):
with open(path) as text:
output = open(path + '.stage', 'w')
for line in text:
tokens = line.split(" ")
for idx, token in enumerate(tokens):
remap = fixup_func(token)
tokens[idx] = remap
output.write(" ".join(tokens))
output.close ()
class stage_binaries (Profile.FileProcessor):
def process(self, path, fixup_func):
staged_path = fixup_func(path)
run_shell('install_name_tool -id %s %s' %
(staged_path, path), False)
libs = backtick('otool -L %s' % path)
for line in libs:
# parse 'otool -L'
if not line.startswith('\t'):
continue
rpath = line.strip().split(' ')[0]
remap = fixup_func(rpath)
if remap != rpath:
run_shell('install_name_tool -change %s %s %s' %
(rpath, remap, path), False)

@@ -0,0 +1,83 @@
import os
from util.util import *
from util.csproj import *
from collections import deque
class EnvironmentItem:
def __init__(self, name, joinchar, values):
self.name = name
self.joinchar = joinchar
self.values = values
def __str__(self):
return self.joinchar.join(self.values)
class Environment:
def __init__(self, profile):
self._profile = profile
def set(self, *argv):
args = deque(argv)
name = args.popleft()
joinchar = args.popleft()
if len(args) == 0:
values = list(self.iter_flatten(joinchar))
joinchar = ''
else:
values = list(self.iter_flatten(list(args)))
self.__dict__[name] = EnvironmentItem(name, joinchar, values)
return self.__dict__[name]
def compile(self):
expand_macros(self, self._profile)
def write_source_script(self, filename):
envscript = '#!/bin/sh\n'
for k in self.get_names():
envscript = envscript + 'export %s="%s"\n' % (k, self.__dict__[k])
with open(filename, 'w') as f:
f.write(envscript)
os.chmod(filename, 0o755)
def serialize(self):
names = sorted(self.get_names())
for k in names:
yield '%s = "%s"' % (k, self.__dict__[k])
def dump_csproj(self):
for k in self.get_names():
print '<Variable name="%s" value="%s" />' % (k, self.__dict__[k])
def write_csproj(self, file):
writer = csproj_writer(file, self)
writer.write()
def export(self):
for k in self.get_names():
os.environ[k] = str(self.__dict__[k])
def get_names(self):
for k in self.__dict__.keys():
if not k.startswith('_'):
yield k
def iter_flatten(self, iterable):
if not isinstance(iterable, (list, tuple)):
yield iterable
return
it = iter(iterable)
for e in it:
if isinstance(e, (list, tuple)):
for f in self.iter_flatten(e):
yield f
else:
yield e
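A minimal usage sketch of the Environment.set() convention above (illustrative only, not part of the commit; assumes Python 2 with the external/bockbuild directory on sys.path so that bockbuild.environment imports): a call with a single trailing value stores it verbatim, while the first of several trailing arguments acts as the join character, as unixprofile.py does for PATH.
from bockbuild.environment import Environment
env = Environment(profile=None)           # profile is only consulted for %{...} macro expansion in compile()
env.set('CC', 'xcrun gcc')                # single value: stored as-is
env.set('PATH', ':', '/usr/bin', '/bin')  # ':' joins the remaining values
print(str(env.PATH))                      # -> /usr/bin:/bin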

988
external/bockbuild/bockbuild/package.py vendored Normal file

File diff suppressed because it is too large.

76
external/bockbuild/bockbuild/profile.py vendored Normal file

@@ -0,0 +1,76 @@
import os
from optparse import OptionParser
from util.util import *
from util.csproj import *
from environment import Environment
from bockbuild.package import *
import collections
import hashlib
import itertools
class Profile:
def __init__ (self):
Profile.loaded = self
def attach (self, bockbuild):
Profile.bockbuild = bockbuild
class FileProcessor (object):
def __init__(self, harness=None, match=None, process=None, extra_files=None):
self.harness = harness
self.match = match
self.files = list(extra_files) if extra_files else list()
self.root = None
def relpath(self, path):
return os.path.relpath(path, self.root)
def run(self):
for path in self.files:
self.harness(path, self.process)
def end(self):
return
def postprocess(self, processors, directory, filefilter=None):
def simple_harness(path, func):
if not os.path.lexists(path):
return # file removed by previous processor function
# TODO: Fix so that it works on symlinks
# hash = hashlib.sha1(open(path).read()).hexdigest()
func(path)
if not os.path.lexists(path):
trace('Removed file: %s' % path)
# if hash != hashlib.sha1(open(path).read()).hexdigest():
# warn ('Changed file: %s' % path)
for proc in processors:
proc.root = directory
if proc.harness is None:
proc.harness = simple_harness
if proc.match is None:
error('proc %s has no match function' %
proc.__class__.__name__)
for path in filter(filefilter, iterate_dir(directory, with_dirs=True, with_links=True)):
filetype = get_filetype(path)
for proc in processors:
if proc.match(path, filetype) == True:
trace('%s matched %s / %s' % (proc.__class__.__name__,
os.path.basename(path), filetype))
proc.files.append(path)
for proc in processors:
verbose('%s: %s items' %
(proc.__class__.__name__, len(proc.files)))
proc.run()
for proc in processors:
proc.end()
proc.harness = None
proc.files = []
Profile.loaded = None
Profile.bockbuild = None
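For illustration only (a hypothetical processor, not part of the commit): Profile.postprocess() above drives each FileProcessor by calling match(path, filetype) to collect candidate files, then process() per file and end() once at the finish. A minimal subclass following that shape, assuming match_real_files and warn from the other modules in this diff, could look like:
import os
class report_large_files (Profile.FileProcessor):
    def __init__(self):
        # collect every regular file; match_real_files is defined in darwinprofile.py
        Profile.FileProcessor.__init__(self, match=match_real_files)
        self.large = []
    def process(self, path):
        if os.path.getsize(path) > 10 * 1024 * 1024:
            self.large.append(self.relpath(path))
    def end(self):
        if self.large:
            warn('Files larger than 10 MB:\n' + '\n'.join(self.large))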

@@ -0,0 +1,39 @@
from profile import Profile
from bockbuild.environment import Environment
class UnixProfile (Profile):
def attach (self, bockbuild):
Profile.attach (self, bockbuild)
self.name = 'unix'
self.env = Environment(self)
self.staged_prefix = bockbuild.staged_prefix
self.toolchain_root = bockbuild.toolchain_root
self.gcc_flags = ['-I%s/include' % self.staged_prefix]
self.ld_flags = ['-L%s/lib' % self.staged_prefix]
self.env.set('BUILD_PREFIX', '%{prefix}')
self.env.set('PATH', ':',
'%{toolchain_root}/bin',
'%{staged_prefix}/bin',
'/usr/bin',
'/bin',
'/usr/local/git/bin')
self.env.set('C_INCLUDE_PATH', '%{staged_prefix}/include')
#self.env.set ('LD_LIBRARY_PATH', '%{staged_prefix}/lib')
self.env.set('ACLOCAL_FLAGS', '-I%{staged_prefix}/share/aclocal')
self.env.set('PKG_CONFIG_PATH', ':',
'%{staged_prefix}/lib/pkgconfig',
'%{staged_prefix}/share/pkgconfig',
'%{toolchain_root}/lib/pkgconfig',
'%{toolchain_root}/share/pkgconfig')
self.env.set('XDG_CONFIG_DIRS', '%{staged_prefix}/etc/xdg')
self.env.set('XDG_DATA_DIRS', '%{staged_prefix}/share')
self.env.set('XDG_CONFIG_HOME', '$HOME/.config')

@@ -0,0 +1,93 @@
#!/usr/bin/env python
# coding: utf8
import shutil
import io
from xml.etree import ElementTree
from xml.etree.ElementTree import Element
from xml.etree.ElementTree import Comment
class csproj_writer:
def __init__(self, file, vars, condition="Debug|AnyCPU"):
self.vars = vars
self.file = file
self.tree = ElementTree.parse(file)
# get the namespace from the root Project element
self.ns = self.tree.getroot().tag[1:].split("}")[0]
# the python elementtree always puts a prefix,
# hardcode it so we can substitute it later
ElementTree.register_namespace("csproj0", self.ns)
# find PropertyGroup
group = self.tree.findall(".//{%s}PropertyGroup[@Condition]" % self.ns)
for node in group:
# only insert into Debug|AnyCPU
if condition in (node.get("Condition")):
self.insert_env_if_missing(node)
self.substitute_env_var(node)
def substitute_env_var(self, propertygroup):
node = propertygroup.find(
"./{%s}EnvironmentVariables/{%s}EnvironmentVariables" % (self.ns, self.ns))
for name in self.vars.get_names():
value = self.vars.__dict__[name]
# check if variable is already set
var = node.find("{%s}Variable[@name='%s']" % (self.ns, name))
if var is not None:
# update its value
var.set('value', "%s" % value)
else:
# insert new node
el = Element("{%s}Variable" % self.ns)
el.set('name', name)
el.set('value', "%s" % value)
self.insert(node, 0, el)
def insert_env_if_missing(self, node):
# test if environment variable is present - it is usually double
# wrapped
outer = node.find("{%s}EnvironmentVariables" % self.ns)
if outer is not None:
# see if the inner node is present, too
inner = node.find("{%s}EnvironmentVariables" % self.ns)
if inner is None:
inner = Element("{%s}EnvironmentVariables" % self.ns)
self.insert(outer, 0, inner)
else:
outer = Element("{%s}EnvironmentVariables" % self.ns)
inner = Element("{%s}EnvironmentVariables" % self.ns)
#self.insert (outer, 0, Comment ("AUTO GENERATED VARIABLES - DO NOT INCLUDE IN ANY GIT COMMITS!"))
self.insert(node, 1, outer)
self.insert(outer, 0, inner)
# wrapper around Element.insert that appends a linebreak comment
# HACK etree xml library can not pretty print
def insert(self, node, pos, element):
brcomment = Comment("REPLACE_WITH_LINEBREAK")
node.insert(pos, brcomment)
node.insert(pos, element)
def write(self):
xml = ElementTree.tostring(self.tree.getroot(), encoding="utf-8")
# HACK the python xml library is nuts - manually remove the forced namespace
# prefix and re-establish a minimal form of pretty printing
xml = xml.replace("csproj0:", "")
xml = xml.replace("xmlns:csproj0", "xmlns")
xml = xml.replace("<!--REPLACE_WITH_LINEBREAK-->", "\n")
f = open(self.file, 'w')
f.write(xml)

File diff suppressed because it is too large.

1
external/bockbuild/packages/atk.py vendored Normal file

@@ -0,0 +1 @@
GnomeXzPackage('atk', version_major='2.8', version_minor='0')

27
external/bockbuild/packages/autoconf.py vendored Normal file

@@ -0,0 +1,27 @@
class Autoconf (GnuPackage):
def __init__(self):
GnuPackage.__init__(self, 'autoconf', '2.69', override_properties={
'build_dependency': True})
self.extra_stage_files = ['share/autoconf/autom4te.cfg']
def install(self):
Package.install(self)
aclocal_dir = os.path.join(self.staged_prefix, "share", "aclocal")
if not os.path.exists(aclocal_dir):
os.makedirs(aclocal_dir)
def arch_build(self, arch):
if arch == 'darwin-universal':
self.local_ld_flags = ['-arch i386', '-arch x86_64']
self.local_gcc_flags = ['-arch i386', '-arch x86_64']
elif arch == 'darwin-32':
self.local_ld_flags = ['-arch i386', '-m32']
self.local_gcc_flags = ['-arch i386', '-m32']
self.local_configure_flags = ['--build=i386-apple-darwin11.2.0']
elif arch == 'darwin-64':
self.local_ld_flags = ['-arch x86_64 -m64']
self.local_gcc_flags = ['-arch x86_64 -m64']
Autoconf()

20
external/bockbuild/packages/automake.py vendored Normal file

@@ -0,0 +1,20 @@
class Automake (GnuPackage):
def __init__(self):
GnuPackage.__init__(self, 'automake', '1.13', override_properties={
'build_dependency': True})
self.extra_stage_files = [
'share/automake-%{version}/Automake/Config.pm']
def arch_build(self, arch):
if arch == 'darwin-universal':
self.local_ld_flags = ['-arch i386', '-arch x86_64']
self.local_gcc_flags = ['-arch i386', '-arch x86_64']
elif arch == 'darwin-32':
self.local_ld_flags = ['-arch i386', '-m32']
self.local_gcc_flags = ['-arch i386', '-m32']
self.local_configure_flags = ['--build=i386-apple-darwin11.2.0']
elif arch == 'darwin-64':
self.local_ld_flags = ['-arch x86_64 -m64']
self.local_gcc_flags = ['-arch x86_64 -m64']
Automake()

40
external/bockbuild/packages/cairo.py vendored Normal file

@@ -0,0 +1,40 @@
class CairoPackage (CairoGraphicsXzPackage):
def __init__(self):
CairoGraphicsXzPackage.__init__(self, 'cairo', '1.12.14')
self.sources.extend([
'patches/cairo-quartz-crash.patch',
'patches/cairo-fix-color-bitmap-fonts.patch',
'patches/cairo-fix-CGFontGetGlyphPath-deprecation.patch',
# 'patches/cairo-cglayer.patch',
])
def prep(self):
Package.prep(self)
if Package.profile.name == 'darwin':
for p in range(1, len(self.local_sources)):
self.sh('patch -p1 < "%{local_sources[' + str(p) + ']}"')
def build(self):
self.configure_flags = [
'--enable-pdf',
]
if Package.profile.name == 'darwin':
self.configure_flags.extend([
'--enable-quartz',
'--enable-quartz-font',
'--enable-quartz-image',
'--disable-xlib',
'--without-x'
])
elif Package.profile.name == 'linux':
self.configure_flags.extend([
'--disable-quartz',
'--with-x'
])
Package.build(self)
CairoPackage()
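For reference (a hypothetical package, not part of the commit): package files like the four above are exec'd by load_package() in bb, so the package base classes are already in scope, and a new definition only needs a subclass plus a trailing instantiation. The name and version here ('foo', '1.0') are placeholders, following the GnuPackage pattern used by autoconf.py and automake.py.
class FooPackage (GnuPackage):
    def __init__(self):
        GnuPackage.__init__(self, 'foo', '1.0')  # placeholder name and version
    def build(self):
        # mirror cairo.py: set configure flags, then defer to the base build
        self.configure_flags = ['--disable-dependency-tracking']
        Package.build(self)
FooPackage()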

Some files were not shown because too many files have changed in this diff.