initial import of the packaging package in the standard library

Tarek Ziade
2011-05-19 13:07:25 +02:00
parent 566f8a646e
commit 1231a4e097
193 changed files with 30376 additions and 149 deletions

Lib/packaging/__init__.py Normal file

@@ -0,0 +1,17 @@
"""Support for packaging, distribution and installation of Python projects.
Third-party tools can use parts of packaging as building blocks
without causing the other modules to be imported:
import packaging.version
import packaging.metadata
import packaging.pypi.simple
import packaging.tests.pypi_server
"""
from logging import getLogger
__all__ = ['__version__', 'logger']
__version__ = "1.0a3"
logger = getLogger('packaging')
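
For illustration (not part of this commit), the building-block usage described in the docstring looks like this; it assumes only the NormalizedVersion class that packaging.version provides:

from packaging.version import NormalizedVersion

# Compare two PEP 386 version strings without importing the rest of packaging.
v1 = NormalizedVersion('1.0a3')
v2 = NormalizedVersion('1.0')
assert v1 < v2   # the alpha pre-release sorts before the final release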

Lib/packaging/_trove.py Normal file

File diff suppressed because it is too large.

Lib/packaging/command/__init__.py Normal file

@@ -0,0 +1,56 @@
"""Subpackage containing all standard commands."""
from packaging.errors import PackagingModuleError
from packaging.util import resolve_name
__all__ = ['get_command_names', 'set_command', 'get_command_class',
'STANDARD_COMMANDS']
_COMMANDS = {
'check': 'packaging.command.check.check',
'test': 'packaging.command.test.test',
'build': 'packaging.command.build.build',
'build_py': 'packaging.command.build_py.build_py',
'build_ext': 'packaging.command.build_ext.build_ext',
'build_clib': 'packaging.command.build_clib.build_clib',
'build_scripts': 'packaging.command.build_scripts.build_scripts',
'clean': 'packaging.command.clean.clean',
'install_dist': 'packaging.command.install_dist.install_dist',
'install_lib': 'packaging.command.install_lib.install_lib',
'install_headers': 'packaging.command.install_headers.install_headers',
'install_scripts': 'packaging.command.install_scripts.install_scripts',
'install_data': 'packaging.command.install_data.install_data',
'install_distinfo':
'packaging.command.install_distinfo.install_distinfo',
'sdist': 'packaging.command.sdist.sdist',
'bdist': 'packaging.command.bdist.bdist',
'bdist_dumb': 'packaging.command.bdist_dumb.bdist_dumb',
'bdist_wininst': 'packaging.command.bdist_wininst.bdist_wininst',
'register': 'packaging.command.register.register',
'upload': 'packaging.command.upload.upload',
'upload_docs': 'packaging.command.upload_docs.upload_docs'}
STANDARD_COMMANDS = set(_COMMANDS)
def get_command_names():
"""Return registered commands"""
return sorted(_COMMANDS)
def set_command(location):
cls = resolve_name(location)
# XXX we want to do the duck-type checking here
_COMMANDS[cls.get_command_name()] = cls
def get_command_class(name):
"""Return the registered command"""
try:
cls = _COMMANDS[name]
if isinstance(cls, str):
cls = resolve_name(cls)
_COMMANDS[name] = cls
return cls
except KeyError:
raise PackagingModuleError("Invalid command %s" % name)
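
For illustration (not part of this commit), the registry can be exercised as follows; the commented-out set_command() call uses a hypothetical third-party location:

from packaging.command import get_command_names, get_command_class

print(get_command_names())           # sorted names from _COMMANDS
cls = get_command_class('build_py')  # resolves the dotted path on first use
print(cls.__module__, cls.__name__)  # the class is now cached in _COMMANDS

# set_command('myproject.commands.my_cmd')   # hypothetical: resolves the name
#                                            # and registers the class under
#                                            # cls.get_command_name()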

Lib/packaging/command/bdist.py Normal file

@@ -0,0 +1,141 @@
"""Create a built (binary) distribution.
If a --formats option was given on the command line, this command will
call the corresponding bdist_* commands; if the option was absent, a
bdist_* command depending on the current platform will be called.
"""
import os
from packaging import util
from packaging.command.cmd import Command
from packaging.errors import PackagingPlatformError, PackagingOptionError
def show_formats():
"""Print list of available formats (arguments to "--format" option).
"""
from packaging.fancy_getopt import FancyGetopt
formats = []
for format in bdist.format_commands:
formats.append(("formats=" + format, None,
bdist.format_command[format][1]))
pretty_printer = FancyGetopt(formats)
pretty_printer.print_help("List of available distribution formats:")
class bdist(Command):
description = "create a built (binary) distribution"
user_options = [('bdist-base=', 'b',
"temporary directory for creating built distributions"),
('plat-name=', 'p',
"platform name to embed in generated filenames "
"(default: %s)" % util.get_platform()),
('formats=', None,
"formats for distribution (comma-separated list)"),
('dist-dir=', 'd',
"directory to put final built distributions in "
"[default: dist]"),
('skip-build', None,
"skip rebuilding everything (for testing/debugging)"),
('owner=', 'u',
"Owner name used when creating a tar file"
" [default: current user]"),
('group=', 'g',
"Group name used when creating a tar file"
" [default: current group]"),
]
boolean_options = ['skip-build']
help_options = [
('help-formats', None,
"lists available distribution formats", show_formats),
]
# This is of course very simplistic. The various UNIX family operating
# systems have their specific formats, but they are out of scope for us;
# bdist_dumb is, well, dumb; it's more a building block for other
# packaging tools than a real end-user binary format.
default_format = {'posix': 'gztar',
'nt': 'zip',
'os2': 'zip'}
# Establish the preferred order (for the --help-formats option).
format_commands = ['gztar', 'bztar', 'ztar', 'tar',
'wininst', 'zip', 'msi']
# And the real information.
format_command = {'gztar': ('bdist_dumb', "gzip'ed tar file"),
'bztar': ('bdist_dumb', "bzip2'ed tar file"),
'ztar': ('bdist_dumb', "compressed tar file"),
'tar': ('bdist_dumb', "tar file"),
'wininst': ('bdist_wininst',
"Windows executable installer"),
'zip': ('bdist_dumb', "ZIP file"),
'msi': ('bdist_msi', "Microsoft Installer")
}
def initialize_options(self):
self.bdist_base = None
self.plat_name = None
self.formats = None
self.dist_dir = None
self.skip_build = False
self.group = None
self.owner = None
def finalize_options(self):
# have to finalize 'plat_name' before 'bdist_base'
if self.plat_name is None:
if self.skip_build:
self.plat_name = util.get_platform()
else:
self.plat_name = self.get_finalized_command('build').plat_name
# 'bdist_base' -- parent of per-built-distribution-format
# temporary directories (eg. we'll probably have
# "build/bdist.<plat>/dumb", etc.)
if self.bdist_base is None:
build_base = self.get_finalized_command('build').build_base
self.bdist_base = os.path.join(build_base,
'bdist.' + self.plat_name)
self.ensure_string_list('formats')
if self.formats is None:
try:
self.formats = [self.default_format[os.name]]
except KeyError:
raise PackagingPlatformError("don't know how to create built distributions " + \
"on platform %s" % os.name)
if self.dist_dir is None:
self.dist_dir = "dist"
def run(self):
# Figure out which sub-commands we need to run.
commands = []
for format in self.formats:
try:
commands.append(self.format_command[format][0])
except KeyError:
raise PackagingOptionError("invalid format '%s'" % format)
# Reinitialize and run each command.
for i in range(len(self.formats)):
cmd_name = commands[i]
sub_cmd = self.get_reinitialized_command(cmd_name)
# passing the owner and group names for tar archiving
if cmd_name == 'bdist_dumb':
sub_cmd.owner = self.owner
sub_cmd.group = self.group
# If we're going to need to run this command again, tell it to
# keep its temporary files around so subsequent runs go faster.
if cmd_name in commands[i+1:]:
sub_cmd.keep_temp = True
self.run_command(cmd_name)
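
For illustration (not part of this commit), the format-to-command dispatch used by run() can be inspected directly:

from packaging.command.bdist import bdist

for fmt in bdist.format_commands:
    cmd_name, description = bdist.format_command[fmt]
    print("%-8s -> %-14s %s" % (fmt, cmd_name, description))
# gztar, bztar, ztar, tar and zip all dispatch to bdist_dumb;
# wininst and msi go to their dedicated commands.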

Lib/packaging/command/bdist_dumb.py Normal file

@@ -0,0 +1,137 @@
"""Create a "dumb" built distribution.
A dumb distribution is just an archive meant to be unpacked under
sys.prefix or sys.exec_prefix.
"""
import os
from shutil import rmtree
from sysconfig import get_python_version
from packaging.util import get_platform
from packaging.command.cmd import Command
from packaging.errors import PackagingPlatformError
from packaging import logger
class bdist_dumb(Command):
description = 'create a "dumb" built distribution'
user_options = [('bdist-dir=', 'd',
"temporary directory for creating the distribution"),
('plat-name=', 'p',
"platform name to embed in generated filenames "
"(default: %s)" % get_platform()),
('format=', 'f',
"archive format to create (tar, ztar, gztar, zip)"),
('keep-temp', 'k',
"keep the pseudo-installation tree around after " +
"creating the distribution archive"),
('dist-dir=', 'd',
"directory to put final built distributions in"),
('skip-build', None,
"skip rebuilding everything (for testing/debugging)"),
('relative', None,
"build the archive using relative paths"
"(default: false)"),
('owner=', 'u',
"Owner name used when creating a tar file"
" [default: current user]"),
('group=', 'g',
"Group name used when creating a tar file"
" [default: current group]"),
]
boolean_options = ['keep-temp', 'skip-build', 'relative']
default_format = { 'posix': 'gztar',
'nt': 'zip',
'os2': 'zip' }
def initialize_options(self):
self.bdist_dir = None
self.plat_name = None
self.format = None
self.keep_temp = False
self.dist_dir = None
self.skip_build = False
self.relative = False
self.owner = None
self.group = None
def finalize_options(self):
if self.bdist_dir is None:
bdist_base = self.get_finalized_command('bdist').bdist_base
self.bdist_dir = os.path.join(bdist_base, 'dumb')
if self.format is None:
try:
self.format = self.default_format[os.name]
except KeyError:
raise PackagingPlatformError(("don't know how to create dumb built distributions " +
"on platform %s") % os.name)
self.set_undefined_options('bdist', 'dist_dir', 'plat_name')
def run(self):
if not self.skip_build:
self.run_command('build')
install = self.get_reinitialized_command('install_dist',
reinit_subcommands=True)
install.root = self.bdist_dir
install.skip_build = self.skip_build
install.warn_dir = False
logger.info("installing to %s", self.bdist_dir)
self.run_command('install_dist')
# And make an archive relative to the root of the
# pseudo-installation tree.
archive_basename = "%s.%s" % (self.distribution.get_fullname(),
self.plat_name)
# OS/2 objects to any ":" characters in a filename (such as when
# a timestamp is used in a version) so change them to hyphens.
if os.name == "os2":
archive_basename = archive_basename.replace(":", "-")
pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
if not self.relative:
archive_root = self.bdist_dir
else:
if (self.distribution.has_ext_modules() and
(install.install_base != install.install_platbase)):
raise PackagingPlatformError(
"can't make a dumb built distribution where base and "
"platbase are different (%r, %r)" %
(install.install_base, install.install_platbase))
else:
archive_root = os.path.join(
self.bdist_dir,
self._ensure_relative(install.install_base))
# Make the archive
filename = self.make_archive(pseudoinstall_root,
self.format, root_dir=archive_root,
owner=self.owner, group=self.group)
if self.distribution.has_ext_modules():
pyversion = get_python_version()
else:
pyversion = 'any'
self.distribution.dist_files.append(('bdist_dumb', pyversion,
filename))
if not self.keep_temp:
if self.dry_run:
logger.info('removing %s', self.bdist_dir)
else:
rmtree(self.bdist_dir)
def _ensure_relative(self, path):
# copied from dir_util, deleted
drive, path = os.path.splitdrive(path)
if path[0:1] == os.sep:
path = drive + path[1:]
return path
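
For illustration (not part of this commit), a standalone sketch of what _ensure_relative() does to an absolute install base:

import os

def ensure_relative(path):
    # same logic as bdist_dumb._ensure_relative: drop a leading os.sep so the
    # result can be appended to bdist_dir with os.path.join
    drive, path = os.path.splitdrive(path)
    if path[0:1] == os.sep:
        path = drive + path[1:]
    return path

print(ensure_relative('/usr/local/lib'))   # 'usr/local/lib' on POSIX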

File diff suppressed because it is too large.

Lib/packaging/command/bdist_wininst.py Normal file

@@ -0,0 +1,342 @@
"""Create an executable installer for Windows."""
# FIXME synchronize bytes/str use with same file in distutils
import sys
import os
from shutil import rmtree
from sysconfig import get_python_version
from packaging.command.cmd import Command
from packaging.errors import PackagingOptionError, PackagingPlatformError
from packaging import logger
from packaging.util import get_platform
class bdist_wininst(Command):
description = "create an executable installer for Windows"
user_options = [('bdist-dir=', None,
"temporary directory for creating the distribution"),
('plat-name=', 'p',
"platform name to embed in generated filenames "
"(default: %s)" % get_platform()),
('keep-temp', 'k',
"keep the pseudo-installation tree around after " +
"creating the distribution archive"),
('target-version=', None,
"require a specific python version" +
" on the target system"),
('no-target-compile', 'c',
"do not compile .py to .pyc on the target system"),
('no-target-optimize', 'o',
"do not compile .py to .pyo (optimized)"
"on the target system"),
('dist-dir=', 'd',
"directory to put final built distributions in"),
('bitmap=', 'b',
"bitmap to use for the installer instead of python-powered logo"),
('title=', 't',
"title to display on the installer background instead of default"),
('skip-build', None,
"skip rebuilding everything (for testing/debugging)"),
('install-script=', None,
"basename of installation script to be run after"
"installation or before deinstallation"),
('pre-install-script=', None,
"Fully qualified filename of a script to be run before "
"any files are installed. This script need not be in the "
"distribution"),
('user-access-control=', None,
"specify Vista's UAC handling - 'none'/default=no "
"handling, 'auto'=use UAC if target Python installed for "
"all users, 'force'=always use UAC"),
]
boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
'skip-build']
def initialize_options(self):
self.bdist_dir = None
self.plat_name = None
self.keep_temp = False
self.no_target_compile = False
self.no_target_optimize = False
self.target_version = None
self.dist_dir = None
self.bitmap = None
self.title = None
self.skip_build = False
self.install_script = None
self.pre_install_script = None
self.user_access_control = None
def finalize_options(self):
if self.bdist_dir is None:
if self.skip_build and self.plat_name:
# If build is skipped and plat_name is overridden, bdist will
# not see the correct 'plat_name' - so set that up manually.
bdist = self.distribution.get_command_obj('bdist')
bdist.plat_name = self.plat_name
# next the command will be initialized using that name
bdist_base = self.get_finalized_command('bdist').bdist_base
self.bdist_dir = os.path.join(bdist_base, 'wininst')
if not self.target_version:
self.target_version = ""
if not self.skip_build and self.distribution.has_ext_modules():
short_version = get_python_version()
if self.target_version and self.target_version != short_version:
raise PackagingOptionError("target version can only be %s, or the '--skip-build'" \
" option must be specified" % (short_version,))
self.target_version = short_version
self.set_undefined_options('bdist', 'dist_dir', 'plat_name')
if self.install_script:
for script in self.distribution.scripts:
if self.install_script == os.path.basename(script):
break
else:
raise PackagingOptionError("install_script '%s' not found in scripts" % \
self.install_script)
def run(self):
if (sys.platform != "win32" and
(self.distribution.has_ext_modules() or
self.distribution.has_c_libraries())):
raise PackagingPlatformError \
("distribution contains extensions and/or C libraries; "
"must be compiled on a Windows 32 platform")
if not self.skip_build:
self.run_command('build')
install = self.get_reinitialized_command('install',
reinit_subcommands=True)
install.root = self.bdist_dir
install.skip_build = self.skip_build
install.warn_dir = False
install.plat_name = self.plat_name
install_lib = self.get_reinitialized_command('install_lib')
# we do not want to include pyc or pyo files
install_lib.compile = False
install_lib.optimize = 0
if self.distribution.has_ext_modules():
# If we are building an installer for a Python version other
# than the one we are currently running, then we need to ensure
# our build_lib reflects the other Python version rather than ours.
# Note that for target_version!=sys.version, we must have skipped the
# build step, so there is no issue with enforcing the build of this
# version.
target_version = self.target_version
if not target_version:
assert self.skip_build, "Should have already checked this"
target_version = sys.version[0:3]
plat_specifier = ".%s-%s" % (self.plat_name, target_version)
build = self.get_finalized_command('build')
build.build_lib = os.path.join(build.build_base,
'lib' + plat_specifier)
# Use a custom scheme for the zip-file, because we have to decide
# at installation time which scheme to use.
for key in ('purelib', 'platlib', 'headers', 'scripts', 'data'):
value = key.upper()
if key == 'headers':
value = value + '/Include/$dist_name'
setattr(install,
'install_' + key,
value)
logger.info("installing to %s", self.bdist_dir)
install.ensure_finalized()
# avoid warning of 'install_lib' about installing
# into a directory not in sys.path
sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))
install.run()
del sys.path[0]
# And make an archive relative to the root of the
# pseudo-installation tree.
from tempfile import NamedTemporaryFile
archive_basename = NamedTemporaryFile().name
fullname = self.distribution.get_fullname()
arcname = self.make_archive(archive_basename, "zip",
root_dir=self.bdist_dir)
# create an exe containing the zip-file
self.create_exe(arcname, fullname, self.bitmap)
if self.distribution.has_ext_modules():
pyversion = get_python_version()
else:
pyversion = 'any'
self.distribution.dist_files.append(('bdist_wininst', pyversion,
self.get_installer_filename(fullname)))
# remove the zip-file again
logger.debug("removing temporary file '%s'", arcname)
os.remove(arcname)
if not self.keep_temp:
if self.dry_run:
logger.info('removing %s', self.bdist_dir)
else:
rmtree(self.bdist_dir)
def get_inidata(self):
# Return data describing the installation.
lines = []
metadata = self.distribution.metadata
# Write the [metadata] section.
lines.append("[metadata]")
# 'info' will be displayed in the installer's dialog box,
# describing the items to be installed.
info = (metadata.long_description or '') + '\n'
# Escape newline characters
def escape(s):
return s.replace("\n", "\\n")
for name in ["author", "author_email", "description", "maintainer",
"maintainer_email", "name", "url", "version"]:
data = getattr(metadata, name, "")
if data:
info = info + ("\n %s: %s" % \
(name.capitalize(), escape(data)))
lines.append("%s=%s" % (name, escape(data)))
# The [setup] section contains entries controlling
# the installer runtime.
lines.append("\n[Setup]")
if self.install_script:
lines.append("install_script=%s" % self.install_script)
lines.append("info=%s" % escape(info))
lines.append("target_compile=%d" % (not self.no_target_compile))
lines.append("target_optimize=%d" % (not self.no_target_optimize))
if self.target_version:
lines.append("target_version=%s" % self.target_version)
if self.user_access_control:
lines.append("user_access_control=%s" % self.user_access_control)
title = self.title or self.distribution.get_fullname()
lines.append("title=%s" % escape(title))
import time
import packaging
build_info = "Built %s with packaging-%s" % \
(time.ctime(time.time()), packaging.__version__)
lines.append("build_info=%s" % build_info)
return "\n".join(lines)
def create_exe(self, arcname, fullname, bitmap=None):
import struct
self.mkpath(self.dist_dir)
cfgdata = self.get_inidata()
installer_name = self.get_installer_filename(fullname)
logger.info("creating %s", installer_name)
if bitmap:
with open(bitmap, "rb") as fp:
bitmapdata = fp.read()
bitmaplen = len(bitmapdata)
else:
bitmaplen = 0
with open(installer_name, "wb") as file:
file.write(self.get_exe_bytes())
if bitmap:
file.write(bitmapdata)
# Convert cfgdata from unicode to ascii, mbcs encoded
if isinstance(cfgdata, str):
cfgdata = cfgdata.encode("mbcs")
# Append the pre-install script
cfgdata = cfgdata + "\0"
if self.pre_install_script:
with open(self.pre_install_script) as fp:
script_data = fp.read()
cfgdata = cfgdata + script_data + "\n\0"
else:
# empty pre-install script
cfgdata = cfgdata + "\0"
file.write(cfgdata)
# The 'magic number' 0x1234567B is used to make sure that the
# binary layout of 'cfgdata' is what the wininst.exe binary
# expects. If the layout changes, increment that number, make
# the corresponding changes to the wininst.exe sources, and
# recompile them.
header = struct.pack("<iii",
0x1234567B, # tag
len(cfgdata), # length
bitmaplen, # number of bytes in bitmap
)
file.write(header)
with open(arcname, "rb") as fp:
file.write(fp.read())
def get_installer_filename(self, fullname):
# Factored out to allow overriding in subclasses
if self.target_version:
# if we create an installer for a specific python version,
# it's better to include this in the name
installer_name = os.path.join(self.dist_dir,
"%s.%s-py%s.exe" %
(fullname, self.plat_name, self.target_version))
else:
installer_name = os.path.join(self.dist_dir,
"%s.%s.exe" % (fullname, self.plat_name))
return installer_name
def get_exe_bytes(self):
from packaging.compiler.msvccompiler import get_build_version
# If a target-version other than the current version has been
# specified, then using the MSVC version from *this* build is no good.
# Without actually finding and executing the target version and parsing
# its sys.version, we just hard-code our knowledge of old versions.
# NOTE: Possible alternative is to allow "--target-version" to
# specify a Python executable rather than a simple version string.
# We can then execute this program to obtain any info we need, such
# as the real sys.version string for the build.
cur_version = get_python_version()
if self.target_version and self.target_version != cur_version:
# If the target version is *later* than us, then we assume they
# use what we use
# string compares seem wrong, but are what sysconfig.py itself uses
if self.target_version > cur_version:
bv = get_build_version()
else:
if self.target_version < "2.4":
bv = 6.0
else:
bv = 7.1
else:
# for current version - use authoritative check.
bv = get_build_version()
# wininst-x.y.exe is in the same directory as this file
directory = os.path.dirname(__file__)
# we must use a wininst-x.y.exe built with the same C compiler
# used for python. XXX What about mingw, borland, and so on?
# if plat_name starts with "win" but is not "win32"
# we want to strip "win" and leave the rest (e.g. -amd64)
# for all other cases, we don't want any suffix
if self.plat_name != 'win32' and self.plat_name[:3] == 'win':
sfix = self.plat_name[3:]
else:
sfix = ''
filename = os.path.join(directory, "wininst-%.1f%s.exe" % (bv, sfix))
with open(filename, "rb") as fp:
return fp.read()
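
For illustration (not part of this commit): create_exe() above writes the stub exe bytes, the optional bitmap, the encoded cfgdata, a 12-byte header, and finally the zip archive. A sketch of packing and unpacking that header, mirroring the struct.pack call:

import struct

MAGIC = 0x1234567B                   # must match the tag wininst.exe expects

header = struct.pack("<iii", MAGIC, 1024, 0)   # 1024 bytes of cfgdata, no bitmap
tag, cfg_len, bitmap_len = struct.unpack("<iii", header)
assert (tag, cfg_len, bitmap_len) == (MAGIC, 1024, 0)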

Lib/packaging/command/build.py Normal file

@@ -0,0 +1,151 @@
"""Main build command, which calls the other build_* commands."""
import sys
import os
from packaging.util import get_platform
from packaging.command.cmd import Command
from packaging.errors import PackagingOptionError
from packaging.compiler import show_compilers
class build(Command):
description = "build everything needed to install"
user_options = [
('build-base=', 'b',
"base directory for build library"),
('build-purelib=', None,
"build directory for platform-neutral distributions"),
('build-platlib=', None,
"build directory for platform-specific distributions"),
('build-lib=', None,
"build directory for all distribution (defaults to either " +
"build-purelib or build-platlib"),
('build-scripts=', None,
"build directory for scripts"),
('build-temp=', 't',
"temporary build directory"),
('plat-name=', 'p',
"platform name to build for, if supported "
"(default: %s)" % get_platform()),
('compiler=', 'c',
"specify the compiler type"),
('debug', 'g',
"compile extensions and libraries with debugging information"),
('force', 'f',
"forcibly build everything (ignore file timestamps)"),
('executable=', 'e',
"specify final destination interpreter path (build.py)"),
('use-2to3', None,
"use 2to3 to make source python 3.x compatible"),
('convert-2to3-doctests', None,
"use 2to3 to convert doctests in seperate text files"),
('use-2to3-fixers', None,
"list additional fixers opted for during 2to3 conversion"),
]
boolean_options = ['debug', 'force']
help_options = [
('help-compiler', None,
"list available compilers", show_compilers),
]
def initialize_options(self):
self.build_base = 'build'
# these are decided only after 'build_base' has its final value
# (unless overridden by the user or client)
self.build_purelib = None
self.build_platlib = None
self.build_lib = None
self.build_temp = None
self.build_scripts = None
self.compiler = None
self.plat_name = None
self.debug = None
self.force = False
self.executable = None
self.use_2to3 = False
self.convert_2to3_doctests = None
self.use_2to3_fixers = None
def finalize_options(self):
if self.plat_name is None:
self.plat_name = get_platform()
else:
# plat-name only supported for windows (other platforms are
# supported via ./configure flags, if at all). Avoid misleading
# other platforms.
if os.name != 'nt':
raise PackagingOptionError(
"--plat-name only supported on Windows (try "
"using './configure --help' on your platform)")
plat_specifier = ".%s-%s" % (self.plat_name, sys.version[0:3])
# Make it so Python 2.x and Python 2.x with --with-pydebug don't
# share the same build directories. Doing so confuses the build
# process for C modules
if hasattr(sys, 'gettotalrefcount'):
plat_specifier += '-pydebug'
# 'build_purelib' and 'build_platlib' just default to 'lib' and
# 'lib.<plat>' under the base build directory. We only use one of
# them for a given distribution, though --
if self.build_purelib is None:
self.build_purelib = os.path.join(self.build_base, 'lib')
if self.build_platlib is None:
self.build_platlib = os.path.join(self.build_base,
'lib' + plat_specifier)
# 'build_lib' is the actual directory that we will use for this
# particular module distribution -- if user didn't supply it, pick
# one of 'build_purelib' or 'build_platlib'.
if self.build_lib is None:
if self.distribution.ext_modules:
self.build_lib = self.build_platlib
else:
self.build_lib = self.build_purelib
# 'build_temp' -- temporary directory for compiler turds,
# "build/temp.<plat>"
if self.build_temp is None:
self.build_temp = os.path.join(self.build_base,
'temp' + plat_specifier)
if self.build_scripts is None:
self.build_scripts = os.path.join(self.build_base,
'scripts-' + sys.version[0:3])
if self.executable is None:
self.executable = os.path.normpath(sys.executable)
def run(self):
# Run all relevant sub-commands. This will be some subset of:
# - build_py - pure Python modules
# - build_clib - standalone C libraries
# - build_ext - Python extension modules
# - build_scripts - Python scripts
for cmd_name in self.get_sub_commands():
self.run_command(cmd_name)
# -- Predicates for the sub-command list ---------------------------
def has_pure_modules(self):
return self.distribution.has_pure_modules()
def has_c_libraries(self):
return self.distribution.has_c_libraries()
def has_ext_modules(self):
return self.distribution.has_ext_modules()
def has_scripts(self):
return self.distribution.has_scripts()
sub_commands = [('build_py', has_pure_modules),
('build_clib', has_c_libraries),
('build_ext', has_ext_modules),
('build_scripts', has_scripts),
]
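
For illustration (not part of this commit), the sub_commands/predicate pattern above boils down to the following filtering, which get_sub_commands() (inherited from Command) performs on the parent command:

class FakeBuild:
    # stand-ins for the distribution state queried by the real predicates
    has_pure = True
    has_ext = False

    sub_commands = [('build_py', lambda self: self.has_pure),
                    ('build_ext', lambda self: self.has_ext)]

    def get_sub_commands(self):
        return [name for name, predicate in self.sub_commands
                if predicate is None or predicate(self)]

print(FakeBuild().get_sub_commands())   # ['build_py']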

Lib/packaging/command/build_clib.py Normal file

@@ -0,0 +1,198 @@
"""Build C/C++ libraries.
This command is useful to build libraries that are included in the
distribution and needed by extension modules.
"""
# XXX this module has *lots* of code ripped-off quite transparently from
# build_ext.py -- not surprisingly really, as the work required to build
# a static library from a collection of C source files is not really all
# that different from what's required to build a shared object file from
# a collection of C source files. Nevertheless, I haven't done the
# necessary refactoring to account for the overlap in code between the
# two modules, mainly because a number of subtle details changed in the
# cut 'n paste. Sigh.
import os
from packaging.command.cmd import Command
from packaging.errors import PackagingSetupError
from packaging.compiler import customize_compiler
from packaging import logger
def show_compilers():
from packaging.compiler import show_compilers
show_compilers()
class build_clib(Command):
description = "build C/C++ libraries used by extension modules"
user_options = [
('build-clib=', 'b',
"directory to build C/C++ libraries to"),
('build-temp=', 't',
"directory to put temporary build by-products"),
('debug', 'g',
"compile with debugging information"),
('force', 'f',
"forcibly build everything (ignore file timestamps)"),
('compiler=', 'c',
"specify the compiler type"),
]
boolean_options = ['debug', 'force']
help_options = [
('help-compiler', None,
"list available compilers", show_compilers),
]
def initialize_options(self):
self.build_clib = None
self.build_temp = None
# List of libraries to build
self.libraries = None
# Compilation options for all libraries
self.include_dirs = None
self.define = None
self.undef = None
self.debug = None
self.force = False
self.compiler = None
def finalize_options(self):
# This might be confusing: both build-clib and build-temp default
# to build-temp as defined by the "build" command. This is because
# I think that C libraries are really just temporary build
# by-products, at least from the point of view of building Python
# extensions -- but I want to keep my options open.
self.set_undefined_options('build',
('build_temp', 'build_clib'),
('build_temp', 'build_temp'),
'compiler', 'debug', 'force')
self.libraries = self.distribution.libraries
if self.libraries:
self.check_library_list(self.libraries)
if self.include_dirs is None:
self.include_dirs = self.distribution.include_dirs or []
if isinstance(self.include_dirs, str):
self.include_dirs = self.include_dirs.split(os.pathsep)
# XXX same as for build_ext -- what about 'self.define' and
# 'self.undef' ?
def run(self):
if not self.libraries:
return
# Yech -- this is cut 'n pasted from build_ext.py!
from packaging.compiler import new_compiler
self.compiler = new_compiler(compiler=self.compiler,
dry_run=self.dry_run,
force=self.force)
customize_compiler(self.compiler)
if self.include_dirs is not None:
self.compiler.set_include_dirs(self.include_dirs)
if self.define is not None:
# 'define' option is a list of (name,value) tuples
for name, value in self.define:
self.compiler.define_macro(name, value)
if self.undef is not None:
for macro in self.undef:
self.compiler.undefine_macro(macro)
self.build_libraries(self.libraries)
def check_library_list(self, libraries):
"""Ensure that the list of libraries is valid.
`library` is presumably provided as a command option 'libraries'.
This method checks that it is a list of 2-tuples, where the tuples
are (library_name, build_info_dict).
Raise PackagingSetupError if the structure is invalid anywhere;
just returns otherwise.
"""
if not isinstance(libraries, list):
raise PackagingSetupError("'libraries' option must be a list of tuples")
for lib in libraries:
if not isinstance(lib, tuple) or len(lib) != 2:
raise PackagingSetupError("each element of 'libraries' must be a 2-tuple")
name, build_info = lib
if not isinstance(name, str):
raise PackagingSetupError("first element of each tuple in 'libraries' " + \
"must be a string (the library name)")
if '/' in name or (os.sep != '/' and os.sep in name):
raise PackagingSetupError(("bad library name '%s': " +
"may not contain directory separators") % \
lib[0])
if not isinstance(build_info, dict):
raise PackagingSetupError("second element of each tuple in 'libraries' " + \
"must be a dictionary (build info)")
def get_library_names(self):
# Assume the library list is valid -- 'check_library_list()' is
# called from 'finalize_options()', so it should be!
if not self.libraries:
return None
lib_names = []
for lib_name, build_info in self.libraries:
lib_names.append(lib_name)
return lib_names
def get_source_files(self):
self.check_library_list(self.libraries)
filenames = []
for lib_name, build_info in self.libraries:
sources = build_info.get('sources')
if sources is None or not isinstance(sources, (list, tuple)):
raise PackagingSetupError(("in 'libraries' option (library '%s'), "
"'sources' must be present and must be "
"a list of source filenames") % lib_name)
filenames.extend(sources)
return filenames
def build_libraries(self, libraries):
for lib_name, build_info in libraries:
sources = build_info.get('sources')
if sources is None or not isinstance(sources, (list, tuple)):
raise PackagingSetupError(("in 'libraries' option (library '%s'), " +
"'sources' must be present and must be " +
"a list of source filenames") % lib_name)
sources = list(sources)
logger.info("building '%s' library", lib_name)
# First, compile the source code to object files in the library
# directory. (This should probably change to putting object
# files in a temporary build directory.)
macros = build_info.get('macros')
include_dirs = build_info.get('include_dirs')
objects = self.compiler.compile(sources,
output_dir=self.build_temp,
macros=macros,
include_dirs=include_dirs,
debug=self.debug)
# Now "link" the object files together into a static library.
# (On Unix at least, this isn't really linking -- it just
# builds an archive. Whatever.)
self.compiler.create_static_lib(objects, lib_name,
output_dir=self.build_clib,
debug=self.debug)
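
For illustration (not part of this commit), a 'libraries' value that satisfies check_library_list() and build_libraries(); the names and paths are hypothetical:

libraries = [
    ('sample', {                              # (library_name, build_info)
        'sources': ['src/alpha.c', 'src/beta.c'],
        'macros': [('NDEBUG', '1')],          # optional (name, value) tuples
        'include_dirs': ['include'],          # optional extra include paths
    }),
]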

File diff suppressed because it is too large.

Lib/packaging/command/build_py.py Normal file

@@ -0,0 +1,410 @@
"""Build pure Python modules (just copy to build directory)."""
import os
import sys
from glob import glob
from packaging import logger
from packaging.command.cmd import Command
from packaging.errors import PackagingOptionError, PackagingFileError
from packaging.util import convert_path
from packaging.compat import Mixin2to3
# marking public APIs
__all__ = ['build_py']
class build_py(Command, Mixin2to3):
description = "build pure Python modules (copy to build directory)"
user_options = [
('build-lib=', 'd', "directory to build (copy) to"),
('compile', 'c', "compile .py to .pyc"),
('no-compile', None, "don't compile .py files [default]"),
('optimize=', 'O',
"also compile with optimization: -O1 for \"python -O\", "
"-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
('force', 'f', "forcibly build everything (ignore file timestamps)"),
('use-2to3', None,
"use 2to3 to make source python 3.x compatible"),
('convert-2to3-doctests', None,
"use 2to3 to convert doctests in seperate text files"),
('use-2to3-fixers', None,
"list additional fixers opted for during 2to3 conversion"),
]
boolean_options = ['compile', 'force']
negative_opt = {'no-compile' : 'compile'}
def initialize_options(self):
self.build_lib = None
self.py_modules = None
self.package = None
self.package_data = None
self.package_dir = None
self.compile = False
self.optimize = 0
self.force = None
self._updated_files = []
self._doctests_2to3 = []
self.use_2to3 = False
self.convert_2to3_doctests = None
self.use_2to3_fixers = None
def finalize_options(self):
self.set_undefined_options('build',
'use_2to3', 'use_2to3_fixers',
'convert_2to3_doctests', 'build_lib',
'force')
# Get the distribution options that are aliases for build_py
# options -- list of packages and list of modules.
self.packages = self.distribution.packages
self.py_modules = self.distribution.py_modules
self.package_data = self.distribution.package_data
self.package_dir = None
if self.distribution.package_dir is not None:
self.package_dir = convert_path(self.distribution.package_dir)
self.data_files = self.get_data_files()
# Ick, copied straight from install_lib.py (fancy_getopt needs a
# type system! Hell, *everything* needs a type system!!!)
if not isinstance(self.optimize, int):
try:
self.optimize = int(self.optimize)
assert 0 <= self.optimize <= 2
except (ValueError, AssertionError):
raise PackagingOptionError("optimize must be 0, 1, or 2")
def run(self):
# XXX copy_file by default preserves atime and mtime. IMHO this is
# the right thing to do, but perhaps it should be an option -- in
# particular, a site administrator might want installed files to
# reflect the time of installation rather than the last
# modification time before the installed release.
# XXX copy_file by default preserves mode, which appears to be the
# wrong thing to do: if a file is read-only in the working
# directory, we want it to be installed read/write so that the next
# installation of the same module distribution can overwrite it
# without problems. (This might be a Unix-specific issue.) Thus
# we turn off 'preserve_mode' when copying to the build directory,
# since the build directory is supposed to be exactly what the
# installation will look like (ie. we preserve mode when
# installing).
# Two options control which modules will be installed: 'packages'
# and 'py_modules'. The former lets us work with whole packages, not
# specifying individual modules at all; the latter is for
# specifying modules one-at-a-time.
if self.py_modules:
self.build_modules()
if self.packages:
self.build_packages()
self.build_package_data()
if self.use_2to3 and self._updated_files:
self.run_2to3(self._updated_files, self._doctests_2to3,
self.use_2to3_fixers)
self.byte_compile(self.get_outputs(include_bytecode=False))
# -- Top-level worker functions ------------------------------------
def get_data_files(self):
"""Generate list of '(package,src_dir,build_dir,filenames)' tuples.
Helper function for `finalize_options()`.
"""
data = []
if not self.packages:
return data
for package in self.packages:
# Locate package source directory
src_dir = self.get_package_dir(package)
# Compute package build directory
build_dir = os.path.join(*([self.build_lib] + package.split('.')))
# Length of path to strip from found files
plen = 0
if src_dir:
plen = len(src_dir)+1
# Strip directory from globbed filenames
filenames = [
file[plen:] for file in self.find_data_files(package, src_dir)
]
data.append((package, src_dir, build_dir, filenames))
return data
def find_data_files(self, package, src_dir):
"""Return filenames for package's data files in 'src_dir'.
Helper function for `get_data_files()`.
"""
globs = (self.package_data.get('', [])
+ self.package_data.get(package, []))
files = []
for pattern in globs:
# Each pattern has to be converted to a platform-specific path
filelist = glob(os.path.join(src_dir, convert_path(pattern)))
# Files that match more than one pattern are only added once
files.extend(fn for fn in filelist if fn not in files)
return files
def build_package_data(self):
"""Copy data files into build directory.
Helper function for `run()`.
"""
# FIXME add tests for this method
for package, src_dir, build_dir, filenames in self.data_files:
for filename in filenames:
target = os.path.join(build_dir, filename)
srcfile = os.path.join(src_dir, filename)
self.mkpath(os.path.dirname(target))
outf, copied = self.copy_file(srcfile,
target, preserve_mode=False)
if copied and srcfile in self.distribution.convert_2to3_doctests:
self._doctests_2to3.append(outf)
# XXX - this should be moved to the Distribution class as it is not
# only needed for build_py. It also has no dependencies on this class.
def get_package_dir(self, package):
"""Return the directory, relative to the top of the source
distribution, where package 'package' should be found
(at least according to the 'package_dir' option, if any)."""
path = package.split('.')
if self.package_dir is not None:
path.insert(0, self.package_dir)
if len(path) > 0:
return os.path.join(*path)
return ''
def check_package(self, package, package_dir):
"""Helper function for `find_package_modules()` and `find_modules()'.
"""
# Empty dir name means current directory, which we can probably
# assume exists. Also, os.path.exists and isdir don't know about
# my "empty string means current dir" convention, so we have to
# circumvent them.
if package_dir != "":
if not os.path.exists(package_dir):
raise PackagingFileError(
"package directory '%s' does not exist" % package_dir)
if not os.path.isdir(package_dir):
raise PackagingFileError(
"supposed package directory '%s' exists, "
"but is not a directory" % package_dir)
# Require __init__.py for all but the "root package"
if package:
init_py = os.path.join(package_dir, "__init__.py")
if os.path.isfile(init_py):
return init_py
else:
logger.warning(("package init file '%s' not found " +
"(or not a regular file)"), init_py)
# Either not in a package at all (__init__.py not expected), or
# __init__.py doesn't exist -- so don't return the filename.
return None
def check_module(self, module, module_file):
if not os.path.isfile(module_file):
logger.warning("file %s (for module %s) not found",
module_file, module)
return False
else:
return True
def find_package_modules(self, package, package_dir):
self.check_package(package, package_dir)
module_files = glob(os.path.join(package_dir, "*.py"))
modules = []
if self.distribution.script_name is not None:
setup_script = os.path.abspath(self.distribution.script_name)
else:
setup_script = None
for f in module_files:
abs_f = os.path.abspath(f)
if abs_f != setup_script:
module = os.path.splitext(os.path.basename(f))[0]
modules.append((package, module, f))
else:
logger.debug("excluding %s", setup_script)
return modules
def find_modules(self):
"""Finds individually-specified Python modules, ie. those listed by
module name in 'self.py_modules'. Returns a list of tuples (package,
module_base, filename): 'package' is a tuple of the path through
package-space to the module; 'module_base' is the bare (no
packages, no dots) module name, and 'filename' is the path to the
".py" file (relative to the distribution root) that implements the
module.
"""
# Map package names to tuples of useful info about the package:
# (package_dir, checked)
# package_dir - the directory where we'll find source files for
# this package
# checked - true if we have checked that the package directory
# is valid (exists, contains __init__.py, ... ?)
packages = {}
# List of (package, module, filename) tuples to return
modules = []
# We treat modules-in-packages almost the same as toplevel modules,
# just the "package" for a toplevel is empty (either an empty
# string or empty list, depending on context). Differences:
# - don't check for __init__.py in directory for empty package
for module in self.py_modules:
path = module.split('.')
package = '.'.join(path[0:-1])
module_base = path[-1]
try:
package_dir, checked = packages[package]
except KeyError:
package_dir = self.get_package_dir(package)
checked = False
if not checked:
init_py = self.check_package(package, package_dir)
packages[package] = (package_dir, 1)
if init_py:
modules.append((package, "__init__", init_py))
# XXX perhaps we should also check for just .pyc files
# (so greedy closed-source bastards can distribute Python
# modules too)
module_file = os.path.join(package_dir, module_base + ".py")
if not self.check_module(module, module_file):
continue
modules.append((package, module_base, module_file))
return modules
def find_all_modules(self):
"""Compute the list of all modules that will be built, whether
they are specified one-module-at-a-time ('self.py_modules') or
by whole packages ('self.packages'). Return a list of tuples
(package, module, module_file), just like 'find_modules()' and
'find_package_modules()' do."""
modules = []
if self.py_modules:
modules.extend(self.find_modules())
if self.packages:
for package in self.packages:
package_dir = self.get_package_dir(package)
m = self.find_package_modules(package, package_dir)
modules.extend(m)
return modules
def get_source_files(self):
sources = [module[-1] for module in self.find_all_modules()]
sources += [
os.path.join(src_dir, filename)
for package, src_dir, build_dir, filenames in self.data_files
for filename in filenames]
return sources
def get_module_outfile(self, build_dir, package, module):
outfile_path = [build_dir] + list(package) + [module + ".py"]
return os.path.join(*outfile_path)
def get_outputs(self, include_bytecode=True):
modules = self.find_all_modules()
outputs = []
for package, module, module_file in modules:
package = package.split('.')
filename = self.get_module_outfile(self.build_lib, package, module)
outputs.append(filename)
if include_bytecode:
if self.compile:
outputs.append(filename + "c")
if self.optimize > 0:
outputs.append(filename + "o")
outputs += [
os.path.join(build_dir, filename)
for package, src_dir, build_dir, filenames in self.data_files
for filename in filenames]
return outputs
def build_module(self, module, module_file, package):
if isinstance(package, str):
package = package.split('.')
elif not isinstance(package, (list, tuple)):
raise TypeError(
"'package' must be a string (dot-separated), list, or tuple")
# Now put the module source file into the "build" area -- this is
# easy, we just copy it somewhere under self.build_lib (the build
# directory for Python source).
outfile = self.get_module_outfile(self.build_lib, package, module)
dir = os.path.dirname(outfile)
self.mkpath(dir)
return self.copy_file(module_file, outfile, preserve_mode=False)
def build_modules(self):
modules = self.find_modules()
for package, module, module_file in modules:
# Now "build" the module -- ie. copy the source file to
# self.build_lib (the build directory for Python source).
# (Actually, it gets copied to the directory for this package
# under self.build_lib.)
self.build_module(module, module_file, package)
def build_packages(self):
for package in self.packages:
# Get list of (package, module, module_file) tuples based on
# scanning the package directory. 'package' is only included
# in the tuple so that 'find_modules()' and
# 'find_package_tuples()' have a consistent interface; it's
# ignored here (apart from a sanity check). Also, 'module' is
# the *unqualified* module name (ie. no dots, no package -- we
# already know its package!), and 'module_file' is the path to
# the .py file, relative to the current directory
# (ie. including 'package_dir').
package_dir = self.get_package_dir(package)
modules = self.find_package_modules(package, package_dir)
# Now loop over the modules we found, "building" each one (just
# copy it to self.build_lib).
for package_, module, module_file in modules:
assert package == package_
self.build_module(module, module_file, package)
def byte_compile(self, files):
if hasattr(sys, 'dont_write_bytecode') and sys.dont_write_bytecode:
logger.warning('%s: byte-compiling is disabled, skipping.',
self.get_command_name())
return
from packaging.util import byte_compile
prefix = self.build_lib
if prefix[-1] != os.sep:
prefix = prefix + os.sep
# XXX this code is essentially the same as the 'byte_compile()
# method of the "install_lib" command, except for the determination
# of the 'prefix' string. Hmmm.
if self.compile:
byte_compile(files, optimize=0,
force=self.force, prefix=prefix, dry_run=self.dry_run)
if self.optimize > 0:
byte_compile(files, optimize=self.optimize,
force=self.force, prefix=prefix, dry_run=self.dry_run)
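
For illustration (not part of this commit), the package_data mapping consumed by find_data_files() above; patterns under the empty key apply to every package, and the names are hypothetical:

package_data = {
    '': ['*.cfg'],                        # globbed in every package directory
    'myproject.templates': ['*.tmpl'],    # globbed only for this package
}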

Lib/packaging/command/build_scripts.py Normal file

@@ -0,0 +1,132 @@
"""Build scripts (copy to build dir and fix up shebang line)."""
import os
import re
import sysconfig
from packaging.command.cmd import Command
from packaging.util import convert_path, newer
from packaging import logger
from packaging.compat import Mixin2to3
# check if Python is called on the first line with this expression
first_line_re = re.compile('^#!.*python[0-9.]*([ \t].*)?$')
class build_scripts(Command, Mixin2to3):
description = "build scripts (copy and fix up shebang line)"
user_options = [
('build-dir=', 'd', "directory to build (copy) to"),
('force', 'f', "forcibly build everything (ignore file timestamps"),
('executable=', 'e', "specify final destination interpreter path"),
]
boolean_options = ['force']
def initialize_options(self):
self.build_dir = None
self.scripts = None
self.force = None
self.executable = None
self.outfiles = None
self.use_2to3 = False
self.convert_2to3_doctests = None
self.use_2to3_fixers = None
def finalize_options(self):
self.set_undefined_options('build',
('build_scripts', 'build_dir'),
'use_2to3', 'use_2to3_fixers',
'convert_2to3_doctests', 'force',
'executable')
self.scripts = self.distribution.scripts
def get_source_files(self):
return self.scripts
def run(self):
if not self.scripts:
return
copied_files = self.copy_scripts()
if self.use_2to3 and copied_files:
self._run_2to3(copied_files, fixers=self.use_2to3_fixers)
def copy_scripts(self):
"""Copy each script listed in 'self.scripts'; if it's marked as a
Python script in the Unix way (first line matches 'first_line_re',
i.e. starts with "#!" and contains "python"), then adjust the first
line to refer to the current Python interpreter as we copy.
"""
self.mkpath(self.build_dir)
outfiles = []
for script in self.scripts:
adjust = False
script = convert_path(script)
outfile = os.path.join(self.build_dir, os.path.basename(script))
outfiles.append(outfile)
if not self.force and not newer(script, outfile):
logger.debug("not copying %s (up-to-date)", script)
continue
# Always open the file, but ignore failures in dry-run mode --
# that way, we'll get accurate feedback if we can read the
# script.
try:
f = open(script, "r")
except IOError:
if not self.dry_run:
raise
f = None
else:
first_line = f.readline()
if not first_line:
logger.warning('%s: %s is an empty file (skipping)',
self.get_command_name(), script)
continue
match = first_line_re.match(first_line)
if match:
adjust = True
post_interp = match.group(1) or ''
if adjust:
logger.info("copying and adjusting %s -> %s", script,
self.build_dir)
if not self.dry_run:
outf = open(outfile, "w")
if not sysconfig.is_python_build():
outf.write("#!%s%s\n" %
(self.executable,
post_interp))
else:
outf.write("#!%s%s\n" %
(os.path.join(
sysconfig.get_config_var("BINDIR"),
"python%s%s" % (sysconfig.get_config_var("VERSION"),
sysconfig.get_config_var("EXE"))),
post_interp))
outf.writelines(f.readlines())
outf.close()
if f:
f.close()
else:
if f:
f.close()
self.copy_file(script, outfile)
if os.name == 'posix':
for file in outfiles:
if self.dry_run:
logger.info("changing mode of %s", file)
else:
oldmode = os.stat(file).st_mode & 0o7777
newmode = (oldmode | 0o555) & 0o7777
if newmode != oldmode:
logger.info("changing mode of %s from %o to %o",
file, oldmode, newmode)
os.chmod(file, newmode)
return outfiles
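
For illustration (not part of this commit), how first_line_re drives the shebang rewrite in copy_scripts(); the interpreter path is hypothetical:

import re

first_line_re = re.compile('^#!.*python[0-9.]*([ \t].*)?$')

first_line = '#!/usr/bin/env python3 -u\n'
match = first_line_re.match(first_line)
if match:
    post_interp = match.group(1) or ''          # flags after the interpreter
    print('#!%s%s' % ('/opt/python/bin/python3', post_interp))
    # -> #!/opt/python/bin/python3 -u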

Lib/packaging/command/check.py Normal file

@@ -0,0 +1,88 @@
"""Check PEP compliance of metadata."""
from packaging import logger
from packaging.command.cmd import Command
from packaging.errors import PackagingSetupError
from packaging.util import resolve_name
class check(Command):
description = "check PEP compliance of metadata"
user_options = [('metadata', 'm', 'Verify metadata'),
('all', 'a',
('runs extended set of checks')),
('strict', 's',
'Will exit with an error if a check fails')]
boolean_options = ['metadata', 'all', 'strict']
def initialize_options(self):
"""Sets default values for options."""
self.all = False
self.metadata = True
self.strict = False
self._warnings = []
def finalize_options(self):
pass
def warn(self, msg, *args):
"""Wrapper around logging that also remembers messages."""
# XXX we could use a special handler for this, but would need to test
# if it works even if the logger has a too high level
self._warnings.append((msg, args))
return logger.warning(self.get_command_name() + ': ' + msg, *args)
def run(self):
"""Runs the command."""
# perform the various tests
if self.metadata:
self.check_metadata()
if self.all:
self.check_restructuredtext()
self.check_hooks_resolvable()
# let's raise an error in strict mode, if we have at least
# one warning
if self.strict and len(self._warnings) > 0:
msg = '\n'.join(msg % args for msg, args in self._warnings)
raise PackagingSetupError(msg)
def check_metadata(self):
"""Ensures that all required elements of metadata are supplied.
name, version, URL, author
Warns if any are missing.
"""
missing, warnings = self.distribution.metadata.check(strict=True)
if missing != []:
self.warn('missing required metadata: %s', ', '.join(missing))
for warning in warnings:
self.warn(warning)
def check_restructuredtext(self):
"""Checks if the long string fields are reST-compliant."""
missing, warnings = self.distribution.metadata.check(restructuredtext=True)
if self.distribution.metadata.docutils_support:
for warning in warnings:
line = warning[-1].get('line')
if line is None:
warning = warning[1]
else:
warning = '%s (line %s)' % (warning[1], line)
self.warn(warning)
elif self.strict:
raise PackagingSetupError('The docutils package is needed.')
def check_hooks_resolvable(self):
for options in self.distribution.command_options.values():
for hook_kind in ("pre_hook", "post_hook"):
if hook_kind not in options:
break
for hook_name in options[hook_kind][1].values():
try:
resolve_name(hook_name)
except ImportError:
self.warn('name %r cannot be resolved', hook_name)
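
For illustration (not part of this commit), the warn()/strict interaction above in isolation: warnings accumulate and the command only fails at the end when strict is set. The hook name used here is hypothetical:

warnings = []

def warn(msg, *args):
    # mirrors check.warn(): remember the message so strict mode can report it
    warnings.append((msg, args))

warn('missing required metadata: %s', 'name, version')
warn('name %r cannot be resolved', 'myproject.hooks.pre_build')

strict = True
if strict and warnings:
    combined = '\n'.join(msg % args for msg, args in warnings)
    print(combined)   # the real command raises PackagingSetupError(combined)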

Lib/packaging/command/clean.py Normal file

@@ -0,0 +1,76 @@
"""Clean up temporary files created by the build command."""
# Contributed by Bastian Kleineidam <calvin@cs.uni-sb.de>
import os
from shutil import rmtree
from packaging.command.cmd import Command
from packaging import logger
class clean(Command):
description = "clean up temporary files from 'build' command"
user_options = [
('build-base=', 'b',
"base build directory (default: 'build.build-base')"),
('build-lib=', None,
"build directory for all modules (default: 'build.build-lib')"),
('build-temp=', 't',
"temporary build directory (default: 'build.build-temp')"),
('build-scripts=', None,
"build directory for scripts (default: 'build.build-scripts')"),
('bdist-base=', None,
"temporary directory for built distributions"),
('all', 'a',
"remove all build output, not just temporary by-products")
]
boolean_options = ['all']
def initialize_options(self):
self.build_base = None
self.build_lib = None
self.build_temp = None
self.build_scripts = None
self.bdist_base = None
self.all = None
def finalize_options(self):
self.set_undefined_options('build', 'build_base', 'build_lib',
'build_scripts', 'build_temp')
self.set_undefined_options('bdist', 'bdist_base')
def run(self):
# remove the build/temp.<plat> directory (unless it's already
# gone)
if os.path.exists(self.build_temp):
if self.dry_run:
logger.info('removing %s', self.build_temp)
else:
rmtree(self.build_temp)
else:
logger.debug("'%s' does not exist -- can't clean it",
self.build_temp)
if self.all:
# remove build directories
for directory in (self.build_lib,
self.bdist_base,
self.build_scripts):
if os.path.exists(directory):
if self.dry_run:
logger.info('removing %s', directory)
else:
rmtree(directory)
else:
logger.warning("'%s' does not exist -- can't clean it",
directory)
# just for the heck of it, try to remove the base build directory:
# we might have emptied it right now, but if not we don't care
if not self.dry_run:
try:
os.rmdir(self.build_base)
logger.info("removing '%s'", self.build_base)
except OSError:
pass

Lib/packaging/command/cmd.py Normal file

@@ -0,0 +1,440 @@
"""Base class for commands."""
import os
import re
from shutil import copyfile, move, make_archive
from packaging import util
from packaging import logger
from packaging.errors import PackagingOptionError
class Command:
"""Abstract base class for defining command classes, the "worker bees"
of the Packaging. A useful analogy for command classes is to think of
them as subroutines with local variables called "options". The options
are "declared" in 'initialize_options()' and "defined" (given their
final values, aka "finalized") in 'finalize_options()', both of which
must be defined by every command class. The distinction between the
two is necessary because option values might come from the outside
world (command line, config file, ...), and any options dependent on
other options must be computed *after* these outside influences have
been processed -- hence 'finalize_options()'. The "body" of the
subroutine, where it does all its work based on the values of its
options, is the 'run()' method, which must also be implemented by every
command class.
"""
# 'sub_commands' formalizes the notion of a "family" of commands,
# eg. "install_dist" as the parent with sub-commands "install_lib",
# "install_headers", etc. The parent of a family of commands
# defines 'sub_commands' as a class attribute; it's a list of
# (command_name : string, predicate : unbound_method | string | None)
# tuples, where 'predicate' is a method of the parent command that
# determines whether the corresponding command is applicable in the
# current situation. (E.g. "install_headers" is only applicable if
# we have any C header files to install.) If 'predicate' is None,
# that command is always applicable.
#
# 'sub_commands' is usually defined at the *end* of a class, because
# predicates can be unbound methods, so they must already have been
# defined. The canonical example is the "install_dist" command.
sub_commands = []
# Pre and post command hooks are run just before or just after the command
# itself. They are simple functions that receive the command instance. They
# are specified as callable objects or dotted strings (for lazy loading).
pre_hook = None
post_hook = None
# -- Creation/initialization methods -------------------------------
def __init__(self, dist):
"""Create and initialize a new Command object. Most importantly,
invokes the 'initialize_options()' method, which is the real
initializer and depends on the actual command being instantiated.
"""
# late import because of mutual dependence between these classes
from packaging.dist import Distribution
if not isinstance(dist, Distribution):
raise TypeError("dist must be a Distribution instance")
if self.__class__ is Command:
raise RuntimeError("Command is an abstract class")
self.distribution = dist
self.initialize_options()
# Per-command versions of the global flags, so that the user can
# customize Packaging's behaviour command-by-command and let some
# commands fall back on the Distribution's behaviour. None means
# "not defined, check self.distribution's copy", while 0 or 1 mean
# false and true (duh). Note that this means figuring out the real
# value of each flag is a touch complicated -- hence "self._dry_run"
# will be handled by a property, below.
# XXX This needs to be fixed. [I changed it to a property--does that
# "fix" it?]
self._dry_run = None
# Some commands define a 'self.force' option to ignore file
# timestamps, but methods defined *here* assume that
# 'self.force' exists for all commands. So define it here
# just to be safe.
self.force = None
# The 'help' flag is just used for command line parsing, so
# none of that complicated bureaucracy is needed.
self.help = False
# 'finalized' records whether or not 'finalize_options()' has been
# called. 'finalize_options()' itself should not pay attention to
# this flag: it is the business of 'ensure_finalized()', which
# always calls 'finalize_options()', to respect/update it.
self.finalized = False
# XXX A more explicit way to customize dry_run would be better.
@property
def dry_run(self):
if self._dry_run is None:
return getattr(self.distribution, 'dry_run')
else:
return self._dry_run
def ensure_finalized(self):
if not self.finalized:
self.finalize_options()
self.finalized = True
# Subclasses must define:
# initialize_options()
# provide default values for all options; may be customized by
# setup script, by options from config file(s), or by command-line
# options
# finalize_options()
# decide on the final values for all options; this is called
# after all possible intervention from the outside world
# (command line, option file, etc.) has been processed
# run()
# run the command: do whatever it is we're here to do,
# controlled by the command's various option values
def initialize_options(self):
"""Set default values for all the options that this command
supports. Note that these defaults may be overridden by other
commands, by the setup script, by config files, or by the
command line. Thus, this is not the place to code dependencies
between options; generally, 'initialize_options()' implementations
are just a bunch of "self.foo = None" assignments.
This method must be implemented by all command classes.
"""
raise RuntimeError(
"abstract method -- subclass %s must override" % self.__class__)
def finalize_options(self):
"""Set final values for all the options that this command supports.
This is always called as late as possible, ie. after any option
assignments from the command line or from other commands have been
done. Thus, this is the place to code option dependencies: if
'foo' depends on 'bar', then it is safe to set 'foo' from 'bar' as
long as 'foo' still has the same value it was assigned in
'initialize_options()'.
This method must be implemented by all command classes.
"""
raise RuntimeError(
"abstract method -- subclass %s must override" % self.__class__)
def dump_options(self, header=None, indent=""):
if header is None:
header = "command options for '%s':" % self.get_command_name()
logger.info(indent + header)
indent = indent + " "
negative_opt = getattr(self, 'negative_opt', ())
for option, _, _ in self.user_options:
if option in negative_opt:
continue
option = option.replace('-', '_')
if option[-1] == "=":
option = option[:-1]
value = getattr(self, option)
logger.info(indent + "%s = %s", option, value)
def run(self):
"""A command's raison d'etre: carry out the action it exists to
perform, controlled by the options initialized in
'initialize_options()', customized by other commands, the setup
script, the command line and config files, and finalized in
'finalize_options()'. All terminal output and filesystem
interaction should be done by 'run()'.
This method must be implemented by all command classes.
"""
raise RuntimeError(
"abstract method -- subclass %s must override" % self.__class__)
# -- External interface --------------------------------------------
# (called by outsiders)
def get_source_files(self):
"""Return the list of files that are used as inputs to this command,
i.e. the files used to generate the output files. The result is used
by the `sdist` command in determining the set of default files.
Command classes should implement this method if they operate on files
from the source tree.
"""
return []
def get_outputs(self):
"""Return the list of files that would be produced if this command
were actually run. Not affected by the "dry-run" flag or whether
any other commands have been run.
Command classes should implement this method if they produce any
output files that get consumed by another command. e.g., `build_ext`
returns the list of built extension modules, but not any temporary
files used in the compilation process.
"""
return []
# -- Option validation methods -------------------------------------
# (these are very handy in writing the 'finalize_options()' method)
#
# NB. the general philosophy here is to ensure that a particular option
# value meets certain type and value constraints. If not, we try to
# force it into conformance (eg. if we expect a list but have a string,
# split the string on comma and/or whitespace). If we can't force the
# option into conformance, raise PackagingOptionError. Thus, command
# classes need do nothing more than (eg.)
# self.ensure_string_list('foo')
# and they can be guaranteed that thereafter, self.foo will be
# a list of strings.
def _ensure_stringlike(self, option, what, default=None):
val = getattr(self, option)
if val is None:
setattr(self, option, default)
return default
elif not isinstance(val, str):
raise PackagingOptionError("'%s' must be a %s (got `%s`)" %
(option, what, val))
return val
def ensure_string(self, option, default=None):
"""Ensure that 'option' is a string; if not defined, set it to
'default'.
"""
self._ensure_stringlike(option, "string", default)
def ensure_string_list(self, option):
r"""Ensure that 'option' is a list of strings. If 'option' is
currently a string, we split it either on /,\s*/ or /\s+/, so
"foo bar baz", "foo,bar,baz", and "foo, bar baz" all become
["foo", "bar", "baz"].
"""
val = getattr(self, option)
if val is None:
return
elif isinstance(val, str):
setattr(self, option, re.split(r',\s*|\s+', val))
else:
if isinstance(val, list):
# checks if all elements are str
ok = True
for element in val:
if not isinstance(element, str):
ok = False
break
else:
ok = False
if not ok:
raise PackagingOptionError(
"'%s' must be a list of strings (got %r)" % (option, val))
def _ensure_tested_string(self, option, tester,
what, error_fmt, default=None):
val = self._ensure_stringlike(option, what, default)
if val is not None and not tester(val):
raise PackagingOptionError(
("error in '%s' option: " + error_fmt) % (option, val))
def ensure_filename(self, option):
"""Ensure that 'option' is the name of an existing file."""
self._ensure_tested_string(option, os.path.isfile,
"filename",
"'%s' does not exist or is not a file")
def ensure_dirname(self, option):
self._ensure_tested_string(option, os.path.isdir,
"directory name",
"'%s' does not exist or is not a directory")
# -- Convenience methods for commands ------------------------------
@classmethod
def get_command_name(cls):
if hasattr(cls, 'command_name'):
return cls.command_name
else:
return cls.__name__
def set_undefined_options(self, src_cmd, *options):
"""Set values of undefined options from another command.
Undefined options are options set to None, which is the convention
used to indicate that an option has not been changed between
'initialize_options()' and 'finalize_options()'. This method is
usually called from 'finalize_options()' for options that depend on
some other command rather than another option of the same command,
typically subcommands.
The 'src_cmd' argument is the other command from which option values
will be taken (a command object will be created for it if necessary);
the remaining positional arguments are strings that give the name of
the option to set. If the name is different on the source and target
command, you can pass a tuple with '(name_on_source, name_on_dest)' so
that 'self.name_on_dest' will be set from 'src_cmd.name_on_source'.
"""
src_cmd_obj = self.distribution.get_command_obj(src_cmd)
src_cmd_obj.ensure_finalized()
for obj in options:
if isinstance(obj, tuple):
src_option, dst_option = obj
else:
src_option, dst_option = obj, obj
if getattr(self, dst_option) is None:
setattr(self, dst_option,
getattr(src_cmd_obj, src_option))
def get_finalized_command(self, command, create=True):
"""Wrapper around Distribution's 'get_command_obj()' method: find
(create if necessary and 'create' is true) the command object for
'command', call its 'ensure_finalized()' method, and return the
finalized command object.
"""
cmd_obj = self.distribution.get_command_obj(command, create)
cmd_obj.ensure_finalized()
return cmd_obj
def get_reinitialized_command(self, command, reinit_subcommands=False):
return self.distribution.get_reinitialized_command(
command, reinit_subcommands)
def run_command(self, command):
"""Run some other command: uses the 'run_command()' method of
Distribution, which creates and finalizes the command object if
necessary and then invokes its 'run()' method.
"""
self.distribution.run_command(command)
def get_sub_commands(self):
"""Determine the sub-commands that are relevant in the current
        distribution (i.e., that need to be run). This is based on the
'sub_commands' class attribute: each tuple in that list may include
a method that we call to determine if the subcommand needs to be
run for the current distribution. Return a list of command names.
"""
commands = []
for sub_command in self.sub_commands:
if len(sub_command) == 2:
cmd_name, method = sub_command
if method is None or method(self):
commands.append(cmd_name)
else:
commands.append(sub_command)
return commands
# -- External world manipulation -----------------------------------
def execute(self, func, args, msg=None, level=1):
util.execute(func, args, msg, dry_run=self.dry_run)
def mkpath(self, name, mode=0o777, dry_run=None, verbose=0):
if dry_run is None:
dry_run = self.dry_run
name = os.path.normpath(name)
if os.path.isdir(name) or name == '':
return
if dry_run:
head = ''
for part in name.split(os.sep):
logger.info("created directory %s%s", head, part)
head += part + os.sep
return
os.makedirs(name, mode)
def copy_file(self, infile, outfile,
preserve_mode=True, preserve_times=True, link=None, level=1):
"""Copy a file respecting verbose, dry-run and force flags. (The
former two default to whatever is in the Distribution object, and
the latter defaults to false for commands that don't define it.)"""
if self.dry_run:
# XXX add a comment
return
if os.path.isdir(outfile):
outfile = os.path.join(outfile, os.path.split(infile)[-1])
copyfile(infile, outfile)
return outfile, None # XXX
def copy_tree(self, infile, outfile, preserve_mode=True,
preserve_times=True, preserve_symlinks=False, level=1):
"""Copy an entire directory tree respecting verbose, dry-run,
and force flags.
"""
if self.dry_run:
return # see if we want to display something
return util.copy_tree(infile, outfile, preserve_mode, preserve_times,
preserve_symlinks, not self.force, dry_run=self.dry_run)
def move_file(self, src, dst, level=1):
"""Move a file respecting the dry-run flag."""
if self.dry_run:
return # XXX log ?
return move(src, dst)
def spawn(self, cmd, search_path=True, level=1):
"""Spawn an external command respecting dry-run flag."""
from packaging.util import spawn
spawn(cmd, search_path, dry_run=self.dry_run)
def make_archive(self, base_name, format, root_dir=None, base_dir=None,
owner=None, group=None):
return make_archive(base_name, format, root_dir,
base_dir, dry_run=self.dry_run,
owner=owner, group=group)
def make_file(self, infiles, outfile, func, args,
exec_msg=None, skip_msg=None, level=1):
"""Special case of 'execute()' for operations that process one or
more input files and generate one output file. Works just like
'execute()', except the operation is skipped and a different
message printed if 'outfile' already exists and is newer than all
files listed in 'infiles'. If the command defined 'self.force',
and it is true, then the command is unconditionally run -- does no
timestamp checks.
"""
if skip_msg is None:
skip_msg = "skipping %s (inputs unchanged)" % outfile
# Allow 'infiles' to be a single string
if isinstance(infiles, str):
infiles = (infiles,)
elif not isinstance(infiles, (list, tuple)):
raise TypeError(
"'infiles' must be a string, or a list or tuple of strings")
if exec_msg is None:
exec_msg = "generating %s from %s" % (outfile, ', '.join(infiles))
# If 'outfile' must be regenerated (either because it doesn't
# exist, is out-of-date, or the 'force' flag is true) then
# perform the action that presumably regenerates it
if self.force or util.newer_group(infiles, outfile):
self.execute(func, args, exec_msg, level)
# Otherwise, print the "skip" message
else:
logger.debug(skip_msg)
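A minimal, hypothetical subclass (command name and options invented for illustration) shows the life cycle described in the class docstring above: options declared in initialize_options(), resolved in finalize_options(), and used in run():

from packaging import logger
from packaging.command.cmd import Command

class print_banner(Command):
    """Hypothetical command: print a banner a configurable number of times."""
    description = "print a project banner"
    user_options = [('banner=', 'b', "text to print"),
                    ('count=', 'c', "number of repetitions")]

    def initialize_options(self):
        # declare every option; real values are decided in finalize_options()
        self.banner = None
        self.count = None

    def finalize_options(self):
        # the place for option dependencies and fallbacks, e.g.
        # self.set_undefined_options('build', 'force')
        if self.banner is None:
            self.banner = self.distribution.metadata['Name']
        self.count = int(self.count or 1)

    def run(self):
        for _ in range(self.count):
            logger.info(self.banner)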

View File

@@ -0,0 +1,35 @@
"""Do X and Y."""
from packaging import logger
from packaging.command.cmd import Command
class x(Command):
# Brief (40-50 characters) description of the command
description = ""
# List of option tuples: long name, short name (None if no short
# name), and help string.
user_options = [
        ('x=', None,  # long option, short option (one letter) or None
         "help text"),  # help text
    ]
    def initialize_options(self):
        self.x = None
        self.y = None
        self.z = None
def finalize_options(self):
if self.x is None:
self.x = ...
def run(self):
...
logger.info(...)
if not self.dry_run:
...
self.execute(..., dry_run=self.dry_run)
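A command written from this template could presumably be made available through the command registry; assuming the module is importable under a made-up dotted path such as 'myproject.commands.x', a sketch would be:

from packaging.command import set_command
# 'myproject.commands.x' is a hypothetical module path; 'x' is the class above
set_command('myproject.commands.x.x')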

View File

@@ -0,0 +1,351 @@
"""Prepare the build.
This module provides config, a (mostly) empty command class
that exists mainly to be sub-classed by specific module distributions and
applications. The idea is that while every "config" command is different,
at least they're all named the same, and users always see "config" in the
list of standard commands. Also, this is a good place to put common
configure-like tasks: "try to compile this C code", or "figure out where
this header file lives".
"""
import os
import re
from packaging.command.cmd import Command
from packaging.errors import PackagingExecError
from packaging.compiler import customize_compiler
from packaging import logger
LANG_EXT = {'c': '.c', 'c++': '.cxx'}
class config(Command):
description = "prepare the build"
user_options = [
('compiler=', None,
"specify the compiler type"),
('cc=', None,
"specify the compiler executable"),
('include-dirs=', 'I',
"list of directories to search for header files"),
('define=', 'D',
"C preprocessor macros to define"),
('undef=', 'U',
"C preprocessor macros to undefine"),
('libraries=', 'l',
"external C libraries to link with"),
('library-dirs=', 'L',
"directories to search for external C libraries"),
('noisy', None,
"show every action (compile, link, run, ...) taken"),
('dump-source', None,
"dump generated source files before attempting to compile them"),
]
# The three standard command methods: since the "config" command
# does nothing by default, these are empty.
def initialize_options(self):
self.compiler = None
self.cc = None
self.include_dirs = None
self.libraries = None
self.library_dirs = None
# maximal output for now
self.noisy = True
self.dump_source = True
# list of temporary files generated along-the-way that we have
# to clean at some point
self.temp_files = []
def finalize_options(self):
if self.include_dirs is None:
self.include_dirs = self.distribution.include_dirs or []
elif isinstance(self.include_dirs, str):
self.include_dirs = self.include_dirs.split(os.pathsep)
if self.libraries is None:
self.libraries = []
elif isinstance(self.libraries, str):
self.libraries = [self.libraries]
if self.library_dirs is None:
self.library_dirs = []
elif isinstance(self.library_dirs, str):
self.library_dirs = self.library_dirs.split(os.pathsep)
def run(self):
pass
# Utility methods for actual "config" commands. The interfaces are
# loosely based on Autoconf macros of similar names. Sub-classes
# may use these freely.
def _check_compiler(self):
"""Check that 'self.compiler' really is a CCompiler object;
if not, make it one.
"""
# We do this late, and only on-demand, because this is an expensive
# import.
from packaging.compiler.ccompiler import CCompiler
from packaging.compiler import new_compiler
if not isinstance(self.compiler, CCompiler):
self.compiler = new_compiler(compiler=self.compiler,
dry_run=self.dry_run, force=True)
customize_compiler(self.compiler)
if self.include_dirs:
self.compiler.set_include_dirs(self.include_dirs)
if self.libraries:
self.compiler.set_libraries(self.libraries)
if self.library_dirs:
self.compiler.set_library_dirs(self.library_dirs)
def _gen_temp_sourcefile(self, body, headers, lang):
filename = "_configtest" + LANG_EXT[lang]
file = open(filename, "w")
if headers:
for header in headers:
file.write("#include <%s>\n" % header)
file.write("\n")
file.write(body)
if body[-1] != "\n":
file.write("\n")
file.close()
return filename
def _preprocess(self, body, headers, include_dirs, lang):
src = self._gen_temp_sourcefile(body, headers, lang)
out = "_configtest.i"
self.temp_files.extend((src, out))
self.compiler.preprocess(src, out, include_dirs=include_dirs)
return src, out
def _compile(self, body, headers, include_dirs, lang):
src = self._gen_temp_sourcefile(body, headers, lang)
if self.dump_source:
dump_file(src, "compiling '%s':" % src)
obj = self.compiler.object_filenames([src])[0]
self.temp_files.extend((src, obj))
self.compiler.compile([src], include_dirs=include_dirs)
return src, obj
def _link(self, body, headers, include_dirs, libraries, library_dirs,
lang):
src, obj = self._compile(body, headers, include_dirs, lang)
prog = os.path.splitext(os.path.basename(src))[0]
self.compiler.link_executable([obj], prog,
libraries=libraries,
library_dirs=library_dirs,
target_lang=lang)
if self.compiler.exe_extension is not None:
prog = prog + self.compiler.exe_extension
self.temp_files.append(prog)
return src, obj, prog
def _clean(self, *filenames):
if not filenames:
filenames = self.temp_files
self.temp_files = []
logger.info("removing: %s", ' '.join(filenames))
for filename in filenames:
try:
os.remove(filename)
except OSError:
pass
# XXX these ignore the dry-run flag: what to do, what to do? even if
# you want a dry-run build, you still need some sort of configuration
# info. My inclination is to make it up to the real config command to
# consult 'dry_run', and assume a default (minimal) configuration if
# true. The problem with trying to do it here is that you'd have to
# return either true or false from all the 'try' methods, neither of
# which is correct.
# XXX need access to the header search path and maybe default macros.
def try_cpp(self, body=None, headers=None, include_dirs=None, lang="c"):
"""Construct a source file from 'body' (a string containing lines
of C/C++ code) and 'headers' (a list of header files to include)
and run it through the preprocessor. Return true if the
preprocessor succeeded, false if there were any errors.
('body' probably isn't of much use, but what the heck.)
"""
from packaging.compiler.ccompiler import CompileError
self._check_compiler()
ok = True
try:
self._preprocess(body, headers, include_dirs, lang)
except CompileError:
ok = False
self._clean()
return ok
def search_cpp(self, pattern, body=None, headers=None, include_dirs=None,
lang="c"):
"""Construct a source file (just like 'try_cpp()'), run it through
the preprocessor, and return true if any line of the output matches
'pattern'. 'pattern' should either be a compiled regex object or a
string containing a regex. If both 'body' and 'headers' are None,
preprocesses an empty file -- which can be useful to determine the
symbols the preprocessor and compiler set by default.
"""
self._check_compiler()
src, out = self._preprocess(body, headers, include_dirs, lang)
if isinstance(pattern, str):
pattern = re.compile(pattern)
file = open(out)
match = False
while True:
line = file.readline()
if line == '':
break
if pattern.search(line):
match = True
break
file.close()
self._clean()
return match
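    # A hypothetical use of search_cpp() (body and pattern invented for
    # illustration): check whether the preprocessor defines a platform macro.
    #
    #     self.search_cpp(r'\byes\b',
    #                     body="#ifdef __linux__\nyes\n#endif\n")
    #
    # returns True when __linux__ is defined, because the preprocessed
    # output then contains a line with the bare token "yes".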
def try_compile(self, body, headers=None, include_dirs=None, lang="c"):
"""Try to compile a source file built from 'body' and 'headers'.
Return true on success, false otherwise.
"""
from packaging.compiler.ccompiler import CompileError
self._check_compiler()
try:
self._compile(body, headers, include_dirs, lang)
ok = True
except CompileError:
ok = False
logger.info(ok and "success!" or "failure.")
self._clean()
return ok
def try_link(self, body, headers=None, include_dirs=None, libraries=None,
library_dirs=None, lang="c"):
"""Try to compile and link a source file, built from 'body' and
'headers', to executable form. Return true on success, false
otherwise.
"""
from packaging.compiler.ccompiler import CompileError, LinkError
self._check_compiler()
try:
self._link(body, headers, include_dirs,
libraries, library_dirs, lang)
ok = True
except (CompileError, LinkError):
ok = False
logger.info(ok and "success!" or "failure.")
self._clean()
return ok
def try_run(self, body, headers=None, include_dirs=None, libraries=None,
library_dirs=None, lang="c"):
"""Try to compile, link to an executable, and run a program
built from 'body' and 'headers'. Return true on success, false
otherwise.
"""
from packaging.compiler.ccompiler import CompileError, LinkError
self._check_compiler()
try:
src, obj, exe = self._link(body, headers, include_dirs,
libraries, library_dirs, lang)
self.spawn([exe])
ok = True
except (CompileError, LinkError, PackagingExecError):
ok = False
logger.info(ok and "success!" or "failure.")
self._clean()
return ok
# -- High-level methods --------------------------------------------
# (these are the ones that are actually likely to be useful
# when implementing a real-world config command!)
def check_func(self, func, headers=None, include_dirs=None,
libraries=None, library_dirs=None, decl=False, call=False):
"""Determine if function 'func' is available by constructing a
source file that refers to 'func', and compiles and links it.
If everything succeeds, returns true; otherwise returns false.
The constructed source file starts out by including the header
files listed in 'headers'. If 'decl' is true, it then declares
'func' (as "int func()"); you probably shouldn't supply 'headers'
and set 'decl' true in the same call, or you might get errors about
        conflicting declarations for 'func'. Finally, the constructed
'main()' function either references 'func' or (if 'call' is true)
calls it. 'libraries' and 'library_dirs' are used when
linking.
"""
self._check_compiler()
body = []
if decl:
body.append("int %s ();" % func)
body.append("int main () {")
if call:
body.append(" %s();" % func)
else:
body.append(" %s;" % func)
body.append("}")
body = "\n".join(body) + "\n"
return self.try_link(body, headers, include_dirs,
libraries, library_dirs)
def check_lib(self, library, library_dirs=None, headers=None,
include_dirs=None, other_libraries=[]):
"""Determine if 'library' is available to be linked against,
without actually checking that any particular symbols are provided
by it. 'headers' will be used in constructing the source file to
be compiled, but the only effect of this is to check if all the
header files listed are available. Any libraries listed in
'other_libraries' will be included in the link, in case 'library'
has symbols that depend on other libraries.
"""
self._check_compiler()
return self.try_link("int main (void) { }",
headers, include_dirs,
[library]+other_libraries, library_dirs)
def check_header(self, header, include_dirs=None, library_dirs=None,
lang="c"):
"""Determine if the system header file named by 'header_file'
exists and can be found by the preprocessor; return true if so,
false otherwise.
"""
return self.try_cpp(body="/* No body */", headers=[header],
include_dirs=include_dirs)
def dump_file(filename, head=None):
"""Dumps a file content into log.info.
If head is not None, will be dumped before the file content.
"""
if head is None:
logger.info(filename)
else:
logger.info(head)
with open(filename) as file:
logger.info(file.read())
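As an illustration of the high-level helpers, a project-specific subclass might probe the build environment like this (the library and function names are examples chosen for the sketch, not requirements of this module):

from packaging.command.config import config

class check_deps(config):
    """Hypothetical config command checking for a usable zlib."""

    def run(self):
        have_zlib = (self.check_header('zlib.h') and
                     self.check_func('inflate', libraries=['z'], call=True))
        if not have_zlib:
            # a real command would record this result somewhere, e.g. write
            # a generated header or adjust the distribution's extensions
            pass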

View File

@@ -0,0 +1,79 @@
"""Install platform-independent data files."""
# Contributed by Bastian Kleineidam
import os
from shutil import Error
from sysconfig import get_paths, format_value
from packaging import logger
from packaging.util import convert_path
from packaging.command.cmd import Command
class install_data(Command):
description = "install platform-independent data files"
user_options = [
('install-dir=', 'd',
"base directory for installing data files "
"(default: installation base dir)"),
('root=', None,
"install everything relative to this alternate root directory"),
('force', 'f', "force installation (overwrite existing files)"),
]
boolean_options = ['force']
def initialize_options(self):
self.install_dir = None
self.outfiles = []
self.data_files_out = []
self.root = None
self.force = False
self.data_files = self.distribution.data_files
self.warn_dir = True
def finalize_options(self):
self.set_undefined_options('install_dist',
('install_data', 'install_dir'),
'root', 'force')
def run(self):
self.mkpath(self.install_dir)
for _file in self.data_files.items():
destination = convert_path(self.expand_categories(_file[1]))
dir_dest = os.path.abspath(os.path.dirname(destination))
self.mkpath(dir_dest)
try:
out = self.copy_file(_file[0], dir_dest)[0]
except Error as e:
logger.warning('%s: %s', self.get_command_name(), e)
out = destination
self.outfiles.append(out)
self.data_files_out.append((_file[0], destination))
def expand_categories(self, path_with_categories):
local_vars = get_paths()
local_vars['distribution.name'] = self.distribution.metadata['Name']
expanded_path = format_value(path_with_categories, local_vars)
expanded_path = format_value(expanded_path, local_vars)
if '{' in expanded_path and '}' in expanded_path:
logger.warning(
'%s: unable to expand %s, some categories may be missing',
self.get_command_name(), path_with_categories)
return expanded_path
def get_source_files(self):
return list(self.data_files)
def get_inputs(self):
return list(self.data_files)
def get_outputs(self):
return self.outfiles
def get_resources_out(self):
return self.data_files_out
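For illustration, a data_files mapping that this command could consume might look like the following (the source paths are hypothetical); the braced names are sysconfig path keys plus the '{distribution.name}' variable added in expand_categories():

data_files = {
    'images/logo.png': '{purelib}/{distribution.name}/logo.png',
    'doc/manual.txt': '{data}/share/doc/{distribution.name}/manual.txt',
}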

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,175 @@
"""Create the PEP 376-compliant .dist-info directory."""
# Forked from the former install_egg_info command by Josip Djolonga
import csv
import os
import re
import hashlib
from packaging.command.cmd import Command
from packaging import logger
from shutil import rmtree
class install_distinfo(Command):
description = 'create a .dist-info directory for the distribution'
user_options = [
('distinfo-dir=', None,
"directory where the the .dist-info directory will be installed"),
('installer=', None,
"the name of the installer"),
('requested', None,
"generate a REQUESTED file"),
('no-requested', None,
"do not generate a REQUESTED file"),
('no-record', None,
"do not generate a RECORD file"),
('no-resources', None,
"do not generate a RESSOURCES list installed file")
]
boolean_options = ['requested', 'no-record', 'no-resources']
negative_opt = {'no-requested': 'requested'}
def initialize_options(self):
self.distinfo_dir = None
self.installer = None
self.requested = None
self.no_record = None
self.no_resources = None
def finalize_options(self):
self.set_undefined_options('install_dist',
'installer', 'requested', 'no_record')
self.set_undefined_options('install_lib',
('install_dir', 'distinfo_dir'))
if self.installer is None:
# FIXME distutils or packaging?
# + document default in the option help text above and in install
self.installer = 'distutils'
if self.requested is None:
self.requested = True
if self.no_record is None:
self.no_record = False
if self.no_resources is None:
self.no_resources = False
metadata = self.distribution.metadata
basename = "%s-%s.dist-info" % (
to_filename(safe_name(metadata['Name'])),
to_filename(safe_version(metadata['Version'])))
self.distinfo_dir = os.path.join(self.distinfo_dir, basename)
self.outputs = []
def run(self):
# FIXME dry-run should be used at a finer level, so that people get
# useful logging output and can have an idea of what the command would
# have done
if not self.dry_run:
target = self.distinfo_dir
if os.path.isdir(target) and not os.path.islink(target):
rmtree(target)
elif os.path.exists(target):
self.execute(os.unlink, (self.distinfo_dir,),
"removing " + target)
self.execute(os.makedirs, (target,), "creating " + target)
metadata_path = os.path.join(self.distinfo_dir, 'METADATA')
logger.info('creating %s', metadata_path)
self.distribution.metadata.write(metadata_path)
self.outputs.append(metadata_path)
installer_path = os.path.join(self.distinfo_dir, 'INSTALLER')
logger.info('creating %s', installer_path)
with open(installer_path, 'w') as f:
f.write(self.installer)
self.outputs.append(installer_path)
if self.requested:
requested_path = os.path.join(self.distinfo_dir, 'REQUESTED')
logger.info('creating %s', requested_path)
open(requested_path, 'w').close()
self.outputs.append(requested_path)
if not self.no_resources:
install_data = self.get_finalized_command('install_data')
if install_data.get_resources_out() != []:
resources_path = os.path.join(self.distinfo_dir,
'RESOURCES')
logger.info('creating %s', resources_path)
                    with open(resources_path, 'w',
                              encoding='utf-8') as f:  # csv.writer needs text mode
writer = csv.writer(f, delimiter=',',
lineterminator=os.linesep,
quotechar='"')
for tuple in install_data.get_resources_out():
writer.writerow(tuple)
self.outputs.append(resources_path)
if not self.no_record:
record_path = os.path.join(self.distinfo_dir, 'RECORD')
logger.info('creating %s', record_path)
with open(record_path, 'w', encoding='utf-8') as f:
writer = csv.writer(f, delimiter=',',
lineterminator=os.linesep,
quotechar='"')
install = self.get_finalized_command('install_dist')
for fpath in install.get_outputs():
if fpath.endswith('.pyc') or fpath.endswith('.pyo'):
# do not put size and md5 hash, as in PEP-376
writer.writerow((fpath, '', ''))
else:
size = os.path.getsize(fpath)
                            with open(fpath, 'rb') as fp:
                                hash = hashlib.md5()
                                hash.update(fp.read())
md5sum = hash.hexdigest()
writer.writerow((fpath, md5sum, size))
# add the RECORD file itself
writer.writerow((record_path, '', ''))
self.outputs.append(record_path)
def get_outputs(self):
return self.outputs
# The following functions are taken from setuptools' pkg_resources module.
def safe_name(name):
"""Convert an arbitrary string to a standard distribution name
Any runs of non-alphanumeric/. characters are replaced with a single '-'.
"""
return re.sub('[^A-Za-z0-9.]+', '-', name)
def safe_version(version):
"""Convert an arbitrary string to a standard version string
Spaces become dots, and all other non-alphanumeric characters become
dashes, with runs of multiple dashes condensed to a single dash.
"""
version = version.replace(' ', '.')
return re.sub('[^A-Za-z0-9.]+', '-', version)
def to_filename(name):
"""Convert a project or version name to its filename-escaped form
Any '-' characters are currently replaced with '_'.
"""
return name.replace('-', '_')
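For example (name and version invented for illustration), the three helpers combine as in finalize_options() above to build the .dist-info directory name:

>>> "%s-%s.dist-info" % (to_filename(safe_name("My Project")),
...                      to_filename(safe_version("1.0 beta")))
'My_Project-1.0.beta.dist-info'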

Some files were not shown because too many files have changed in this diff