patchupdate.py: Add proper commandline parser and '--skip-checks' option.
commit 3265df0984
parent f8acf446ea

debian/tools/patchupdate.py (vendored): 204 changed lines
@@ -19,6 +19,7 @@
 # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
 #
 
+import argparse
 import binascii
 import cPickle as pickle
 import contextlib
@@ -470,7 +471,7 @@ def resolve_dependencies(all_patches, index = None, depends = None, auto_deps =
     _resolve(depends)
     return resolved
 
-def generate_ifdefined(all_patches):
+def generate_ifdefined(all_patches, skip_checks=False):
     """Update autogenerated ifdefined patches, which can be used to selectively disable features at compile time."""
     enabled_patches = dict([(i, patch) for i, patch in all_patches.iteritems() if not patch.disabled])
 
@@ -479,9 +480,18 @@ def generate_ifdefined(all_patches):
             continue
 
         filename = os.path.join(patch.directory, config.path_IfDefined)
+        headers = { 'author': "Wine Staging Team",
+                    'email': "webmaster@fds-team.de",
+                    'subject': "Autogenerated #ifdef patch for %s." % patch.name }
+
+        if skip_checks:
+            patch.files.append(os.path.basename(filename))
+            patch.patches.append(patchutils.PatchObject(filename, headers))
+            continue
+
         with open(filename, "wb") as fp:
-            fp.write("From: Wine Staging Team <webmaster@fds-team.de>\n")
-            fp.write("Subject: Autogenerated #ifdef patch for %s.\n" % patch.name)
+            fp.write("From: %s <%s>\n" % (headers['author'], headers['email']))
+            fp.write("Subject: %s\n" % headers['subject'])
             fp.write("\n")
 
             depends = resolve_dependencies(enabled_patches, i)
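The headers dict introduced here serves both paths: the new '--skip-checks' branch only registers the autogenerated patch, while the regular branch still writes it out. A minimal sketch of how those fields map onto the written header, using a hypothetical helper name and a made-up subject:

# Hypothetical helper, not part of patchupdate.py; shows how the 'headers'
# fields above become the header lines generate_ifdefined() writes.
def format_patch_header(headers):
    return ("From: %s <%s>\n"
            "Subject: %s\n"
            "\n") % (headers['author'], headers['email'], headers['subject'])

print(format_patch_header({'author': "Wine Staging Team",
                           'email': "webmaster@fds-team.de",
                           'subject': "Autogenerated #ifdef patch for example.patch."}))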
@@ -534,110 +544,112 @@ def generate_ifdefined(all_patches):
         assert p.modified_file in patch.modified_files
         patch.patches.append(p)
 
-def generate_script(all_patches):
+def generate_script(all_patches, skip_checks=False):
     """Resolve dependencies, and afterwards check if everything applies properly."""
     depends = sorted([i for i, patch in all_patches.iteritems() if not patch.disabled])
     resolved = resolve_dependencies(all_patches, depends=depends)
     max_patches = max(resolved) + 1
 
-    # Generate timestamps based on dependencies, still required for binary patches
-    # Find out which files are modified by multiple patches
-    modified_files = {}
-    for i, patch in [(i, all_patches[i]) for i in resolved]:
-        patch.verify_time = [0]*max_patches
-        patch.verify_time[i] += 1
-        for j in patch.depends:
-            patch.verify_time = causal_time_combine(patch.verify_time, all_patches[j].verify_time)
-
-        for f in patch.modified_files:
-            if f not in modified_files:
-                modified_files[f] = []
-            modified_files[f].append(i)
-
-    # Check dependencies
-    dependency_cache = _load_dict(config.path_cache)
-    pool = multiprocessing.pool.ThreadPool(processes=4)
-    try:
-        for filename, indices in modified_files.iteritems():
-
-            # If one of patches is a binary patch, then we cannot / won't verify it - require dependencies in this case
-            if contains_binary_patch(all_patches, indices, filename):
-                if not causal_time_relation_any(all_patches, indices):
-                    raise PatchUpdaterError("Because of binary patch modifying file %s the following patches need explicit dependencies: %s" %
-                                            (filename, ", ".join([all_patches[i].name for i in indices])))
-                continue
-
-            original_content = get_wine_file(filename)
-            original_hash = _sha256(original_content)
-            selected_patches = select_patches(all_patches, indices, filename)
-
-            # Generate a unique id based on the original content, the selected patches
-            # and the dependency information. Since this information only has to be compared
-            # we can throw it into a single hash.
-            m = hashlib.sha256()
-            m.update(original_hash)
-            for i in indices:
-                m.update("P%s" % selected_patches[i][0])
-                for j in indices:
-                    if causal_time_smaller(all_patches[j].verify_time, all_patches[i].verify_time):
-                        m.update("D%s" % selected_patches[j][0])
-            unique_hash = m.digest()
-
-            # Skip checks if it matches the information from the cache
-            try:
-                if dependency_cache[filename] == unique_hash:
-                    continue
-            except KeyError:
-                pass
-
-            # Show a progress bar while applying the patches - this task might take some time
-            chunk_size = 20
-            with progressbar.ProgressBar(desc=filename, total=2 ** len(indices) / chunk_size) as progress:
-
-                def test_apply(current):
-                    set_apply = [(i, all_patches[i]) for i in current]
-                    set_skip = [(i, all_patches[i]) for i in indices if i not in current]
-
-                    # Check if there is any patch2 which depends directly or indirectly on patch1.
-                    # If this is the case we found an impossible situation, we can be skipped in this test.
-                    for i, patch1 in set_apply:
-                        for j, patch2 in set_skip:
-                            if causal_time_smaller(patch2.verify_time, patch1.verify_time):
-                                return None # we can skip this test
-
-                    try:
-                        original = original_content
-                        for i, _ in set_apply:
-                            original = patchutils.apply_patch(original, selected_patches[i][1], fuzz=0)
-                    except patchutils.PatchApplyError:
-                        return current
-
-                    return None # everything is fine
-
-                def test_apply_seq(current_list):
-                    for current in current_list:
-                        failed = test_apply(current)
-                        if failed is not None:
-                            return failed
-                    return None
-
-                iterables = []
-                for i in xrange(0, len(indices) + 1):
-                    iterables.append(itertools.combinations(indices, i))
-                it = _split_seq(itertools.chain(*iterables), chunk_size)
-                for k, failed in enumerate(pool.imap_unordered(test_apply_seq, it)):
-                    if failed is not None:
-                        progress.finish("<failed to apply>")
-                        raise PatchUpdaterError("Changes to file %s don't apply: %s" %
-                                                (filename, ", ".join([all_patches[i].name for i in failed])))
-                    progress.update(k)
-
-            # Update the dependency cache
-            dependency_cache[filename] = unique_hash
-
-    finally:
-        pool.close()
-        _save_dict(config.path_cache, dependency_cache)
+    if not skip_checks:
+
+        # Generate timestamps based on dependencies, still required for binary patches
+        # Find out which files are modified by multiple patches
+        modified_files = {}
+        for i, patch in [(i, all_patches[i]) for i in resolved]:
+            patch.verify_time = [0]*max_patches
+            patch.verify_time[i] += 1
+            for j in patch.depends:
+                patch.verify_time = causal_time_combine(patch.verify_time, all_patches[j].verify_time)
+
+            for f in patch.modified_files:
+                if f not in modified_files:
+                    modified_files[f] = []
+                modified_files[f].append(i)
+
+        # Check dependencies
+        dependency_cache = _load_dict(config.path_cache)
+        pool = multiprocessing.pool.ThreadPool(processes=4)
+        try:
+            for filename, indices in modified_files.iteritems():
+
+                # If one of patches is a binary patch, then we cannot / won't verify it - require dependencies in this case
+                if contains_binary_patch(all_patches, indices, filename):
+                    if not causal_time_relation_any(all_patches, indices):
+                        raise PatchUpdaterError("Because of binary patch modifying file %s the following patches need explicit dependencies: %s" %
+                                                (filename, ", ".join([all_patches[i].name for i in indices])))
+                    continue
+
+                original_content = get_wine_file(filename)
+                original_hash = _sha256(original_content)
+                selected_patches = select_patches(all_patches, indices, filename)
+
+                # Generate a unique id based on the original content, the selected patches
+                # and the dependency information. Since this information only has to be compared
+                # we can throw it into a single hash.
+                m = hashlib.sha256()
+                m.update(original_hash)
+                for i in indices:
+                    m.update("P%s" % selected_patches[i][0])
+                    for j in indices:
+                        if causal_time_smaller(all_patches[j].verify_time, all_patches[i].verify_time):
+                            m.update("D%s" % selected_patches[j][0])
+                unique_hash = m.digest()
+
+                # Skip checks if it matches the information from the cache
+                try:
+                    if dependency_cache[filename] == unique_hash:
+                        continue
+                except KeyError:
+                    pass
+
+                # Show a progress bar while applying the patches - this task might take some time
+                chunk_size = 20
+                with progressbar.ProgressBar(desc=filename, total=2 ** len(indices) / chunk_size) as progress:
+
+                    def test_apply(current):
+                        set_apply = [(i, all_patches[i]) for i in current]
+                        set_skip = [(i, all_patches[i]) for i in indices if i not in current]
+
+                        # Check if there is any patch2 which depends directly or indirectly on patch1.
+                        # If this is the case we found an impossible situation, we can be skipped in this test.
+                        for i, patch1 in set_apply:
+                            for j, patch2 in set_skip:
+                                if causal_time_smaller(patch2.verify_time, patch1.verify_time):
+                                    return None # we can skip this test
+
+                        try:
+                            original = original_content
+                            for i, _ in set_apply:
+                                original = patchutils.apply_patch(original, selected_patches[i][1], fuzz=0)
+                        except patchutils.PatchApplyError:
+                            return current
+
+                        return None # everything is fine
+
+                    def test_apply_seq(current_list):
+                        for current in current_list:
+                            failed = test_apply(current)
+                            if failed is not None:
+                                return failed
+                        return None
+
+                    iterables = []
+                    for i in xrange(0, len(indices) + 1):
+                        iterables.append(itertools.combinations(indices, i))
+                    it = _split_seq(itertools.chain(*iterables), chunk_size)
+                    for k, failed in enumerate(pool.imap_unordered(test_apply_seq, it)):
+                        if failed is not None:
+                            progress.finish("<failed to apply>")
+                            raise PatchUpdaterError("Changes to file %s don't apply: %s" %
+                                                    (filename, ", ".join([all_patches[i].name for i in failed])))
+                        progress.update(k)
+
+                # Update the dependency cache
+                dependency_cache[filename] = unique_hash
+
+        finally:
+            pool.close()
+            _save_dict(config.path_cache, dependency_cache)
 
     # Generate code for helper functions
     lines = []
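The verify_time vectors in this hunk behave like per-patch vector clocks: each patch ticks its own slot and merges in the clocks of its dependencies, and causal_time_smaller() then answers whether one patch comes strictly before another. The real helpers live elsewhere in patchupdate.py; the following is only a plausible reading of the call sites above, not the actual implementation:

# Illustrative vector-clock helpers, inferred from how they are called above;
# the real definitions in patchupdate.py may differ.
def causal_time_combine(a, b):
    # Merge two clocks by taking the element-wise maximum.
    return [max(x, y) for x, y in zip(a, b)]

def causal_time_smaller(a, b):
    # a is causally before b: <= in every component and < in at least one.
    return all(x <= y for x, y in zip(a, b)) and any(x < y for x, y in zip(a, b))

# Example with two patches, where patch 1 depends on patch 0.
t0 = [1, 0]
t1 = causal_time_combine([0, 1], t0)   # -> [1, 1]
print(causal_time_smaller(t0, t1))     # True: patch 0 happened before patch 1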
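The check itself is exhaustive: a file touched by n patches has 2 ** n apply/skip subsets, which test_apply() tries one by one (skipping causally impossible combinations), split into chunks of chunk_size for the thread pool; the sha256 unique_hash lets unchanged files bypass the whole enumeration on later runs. A small illustration of the subset enumeration, with made-up patch indices:

# How the verification loop enumerates subsets; indices are hypothetical.
import itertools

indices = [3, 7, 9]
iterables = [itertools.combinations(indices, n) for n in range(len(indices) + 1)]
for subset in itertools.chain(*iterables):
    print(subset)
# (), (3,), (7,), (9,), (3, 7), (3, 9), (7, 9), (3, 7, 9) -- 2 ** 3 subsets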
@@ -850,21 +862,31 @@ if __name__ == "__main__":
         raise RuntimeError("CTRL+C pressed")
     signal.signal(signal.SIGINT, _sig_int)
 
+    def _check_commit_hash(commit):
+        if len(commit) != 40 or commit != commit.lower():
+            raise argparse.ArgumentTypeError("not a valid commit hash")
+        return commit
+
+    parser = argparse.ArgumentParser(description="Automatic patch dependency checker and apply script/README.md generator.")
+    parser.add_argument('--skip-checks', action='store_true', help="Skip dependency checks")
+    parser.add_argument('--commit', type=_check_commit_hash, help="Use given commit hash instead of HEAD")
+    args = parser.parse_args()
+
     tools_directory = os.path.dirname(os.path.realpath(__file__))
     os.chdir(os.path.join(tools_directory, "./../.."))
 
     try:
 
         # Get information about Wine and Staging version
-        latest_wine_commit = _latest_wine_commit(sys.argv[1] if len(sys.argv) >= 2 else None)
+        latest_wine_commit = _latest_wine_commit(args.commit)
        latest_staging_version = _latest_staging_version(only_stable=True)
 
         # Read current and stable patches
         all_patches = read_patchset()
         stable_patches = read_patchset(revision="v%s" % latest_staging_version)
 
-        generate_ifdefined(all_patches)
-        generate_script(all_patches)
+        generate_ifdefined(all_patches, skip_checks=args.skip_checks)
+        generate_script(all_patches, skip_checks=args.skip_checks)
         generate_markdown(all_patches, stable_patches)
         wrap_changelog()
 
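Replacing the old positional sys.argv[1] handling, the parser now rejects anything that is not a 40-character lowercase commit hash. A standalone sketch of the resulting behavior (the argument definitions are copied from the hunk above; the parse_args calls are illustrative):

import argparse

def _check_commit_hash(commit):
    if len(commit) != 40 or commit != commit.lower():
        raise argparse.ArgumentTypeError("not a valid commit hash")
    return commit

parser = argparse.ArgumentParser(description="Automatic patch dependency checker and apply script/README.md generator.")
parser.add_argument('--skip-checks', action='store_true', help="Skip dependency checks")
parser.add_argument('--commit', type=_check_commit_hash, help="Use given commit hash instead of HEAD")

print(parser.parse_args(['--skip-checks']))
# -> Namespace(commit=None, skip_checks=True)
print(parser.parse_args(['--commit', '0' * 40]))
# -> commit is the 40-zero string; skip_checks defaults to False
parser.parse_args(['--commit', 'HEAD'])
# -> exits with: error: argument --commit: not a valid commit hash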