2014-09-01 12:32:23 -07:00
|
|
|
#!/usr/bin/python2
|
2014-07-25 12:34:03 -07:00
|
|
|
#
|
|
|
|
# Automatic patch dependency checker and Makefile/README.md generator.
|
|
|
|
#
|
|
|
|
# Copyright (C) 2014 Sebastian Lackner
|
|
|
|
#
|
|
|
|
# This library is free software; you can redistribute it and/or
|
|
|
|
# modify it under the terms of the GNU Lesser General Public
|
|
|
|
# License as published by the Free Software Foundation; either
|
|
|
|
# version 2.1 of the License, or (at your option) any later version.
|
|
|
|
#
|
|
|
|
# This library is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
|
|
# Lesser General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU Lesser General Public
|
|
|
|
# License along with this library; if not, write to the Free Software
|
|
|
|
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
|
|
|
|
#
|
|
|
|
|
2014-11-28 23:17:16 -08:00
|
|
|
import binascii
|
|
|
|
import cPickle as pickle
|
2014-07-24 18:32:01 -07:00
|
|
|
import hashlib
|
|
|
|
import itertools
|
2014-11-28 23:17:16 -08:00
|
|
|
import math
|
|
|
|
import multiprocessing.pool
|
2014-11-14 21:35:26 -08:00
|
|
|
import operator
|
2014-07-25 12:34:03 -07:00
|
|
|
import os
|
2014-07-24 18:32:01 -07:00
|
|
|
import patchutils
|
2014-11-28 23:17:16 -08:00
|
|
|
import progressbar
|
2014-07-11 12:25:18 -07:00
|
|
|
import re
|
2014-11-28 23:17:16 -08:00
|
|
|
import signal
|
2014-07-25 12:34:03 -07:00
|
|
|
import subprocess
|
2014-11-28 23:17:16 -08:00
|
|
|
import sys
|
|
|
|
import tempfile
|
2014-07-25 12:34:03 -07:00
|
|
|
import textwrap
|
2014-07-11 09:51:03 -07:00
|
|
|
|
2014-11-28 23:17:16 -08:00
|
|
|
# Shared sink used to suppress stderr output of git subprocesses.
_devnull = open(os.devnull, 'wb')

# Cached information to speed up patch dependency checks
latest_wine_commit = None   # commit hash of origin/master; assigned in __main__
cached_patch_result = {}    # maps input hash -> output hash of patch applications
|
|
|
|
|
2014-07-27 17:41:50 -07:00
|
|
|
class config(object):
    """Central collection of all file and directory paths used by this script."""

    # Cache file storing results of previous patch dependency checks
    path_depcache = ".patchupdate.cache"

    # Patch directories and repositories
    path_patches = "patches"
    path_changelog = "debian/changelog"
    path_wine = "debian/tools/wine"

    # Makefile template and generated output
    path_template_Makefile = "debian/tools/Makefile.in"
    path_Makefile = "patches/Makefile"

    # README.md generated output and its template
    path_README_md = "README.md"
    path_template_README_md = "debian/tools/README.md.in"
|
2014-07-27 17:41:50 -07:00
|
|
|
|
2014-07-25 12:54:58 -07:00
|
|
|
class PatchUpdaterError(RuntimeError):
    """Failed to update patches."""
|
|
|
|
|
2014-07-11 09:51:03 -07:00
|
|
|
class PatchSet(object):
    """In-memory representation of a single patchset directory."""

    def __init__(self, name):
        # Metadata (mostly filled in from the definition file later)
        self.name = name
        self.fixes = []
        self.changes = []
        self.disabled = False

        # Patch contents and the set of files they touch
        self.files = []
        self.patches = []
        self.modified_files = set()
        self.depends = set()

        # Scratch state used by the dependency verification algorithm
        self.verify_depends = set()
        self.verify_time = None
|
|
|
|
|
2014-07-27 17:32:13 -07:00
|
|
|
def _pairs(a):
|
|
|
|
"""Iterate over all pairs of elements contained in the list a."""
|
|
|
|
for i, j in enumerate(a):
|
|
|
|
for k in a[i+1:]:
|
|
|
|
yield (j, k)
|
|
|
|
|
2014-11-14 21:35:26 -08:00
|
|
|
def _unique(iterable, key=None):
    "List unique elements, preserving order. Remember only the element just seen."
    # _unique('AAAABBBCCDAABBB') --> A B C D A B
    # _unique('ABBCcAD', str.lower) --> A B C A D
    # groupby() collapses runs of adjacent equal elements (per key); picking
    # next() from each group yields the first element of every run.  The
    # Python 2 imap keeps the result lazy (an iterator, not a list).
    return itertools.imap(next, itertools.imap(operator.itemgetter(1), itertools.groupby(iterable, key)))
|
|
|
|
|
2014-11-28 23:17:16 -08:00
|
|
|
def _split_seq(iterable, size):
|
|
|
|
"""Split an iterator into chunks of a given size."""
|
|
|
|
it = iter(iterable)
|
|
|
|
items = list(itertools.islice(it, size))
|
|
|
|
while items:
|
|
|
|
yield items
|
|
|
|
items = list(itertools.islice(it, size))
|
|
|
|
|
|
|
|
def _merge_seq(iterable, callback=None):
|
|
|
|
"""Merge lists/iterators into a new one. Call callback after each chunk"""
|
|
|
|
for i, items in enumerate(iterable):
|
|
|
|
if callback is not None:
|
|
|
|
callback(i)
|
|
|
|
for obj in items:
|
|
|
|
yield obj
|
|
|
|
|
2014-11-14 21:35:26 -08:00
|
|
|
def _escape(s):
|
|
|
|
"""Escape string inside of '...' quotes."""
|
2014-11-23 10:45:14 -08:00
|
|
|
return s.replace("\\", "\\\\\\\\").replace("\"", "\\\"").replace("'", "'\\''")
|
2014-11-14 21:35:26 -08:00
|
|
|
|
2014-07-27 17:32:13 -07:00
|
|
|
def _load_dict(filename):
|
|
|
|
"""Load a Python dictionary object from a file."""
|
|
|
|
try:
|
|
|
|
with open(filename) as fp:
|
|
|
|
return pickle.load(fp)
|
|
|
|
except IOError:
|
|
|
|
return {}
|
|
|
|
|
|
|
|
def _save_dict(filename, value):
|
|
|
|
"""Save a Python dictionary object to a file."""
|
|
|
|
with open(filename, "wb") as fp:
|
|
|
|
pickle.dump(value, fp, pickle.HIGHEST_PROTOCOL)
|
|
|
|
|
2014-11-28 23:17:16 -08:00
|
|
|
def _sha256(fp):
|
|
|
|
"""Calculate sha256sum from a file descriptor."""
|
|
|
|
m = hashlib.sha256()
|
|
|
|
fp.seek(0)
|
|
|
|
while True:
|
|
|
|
buf = fp.read(16384)
|
|
|
|
if buf == "": break
|
|
|
|
m.update(buf)
|
|
|
|
return m.digest()
|
|
|
|
|
|
|
|
def _parse_int(val, default=0):
|
2014-11-03 11:12:34 -08:00
|
|
|
"""Parse an integer or boolean value."""
|
|
|
|
r = re.match("^[0-9]+$", val)
|
|
|
|
if r:
|
|
|
|
return int(val)
|
|
|
|
try:
|
|
|
|
return {'true': 1, 'yes': 1, 'false': 0, 'no': 0}[val.lower()]
|
|
|
|
except AttributeError:
|
|
|
|
return default
|
|
|
|
|
2014-08-12 16:59:01 -07:00
|
|
|
def _read_changelog():
    """Read information from changelog.

    Yields one (package, version, distribution) tuple for every entry
    header line found in the Debian changelog.
    """
    header = re.compile("^([a-zA-Z0-9][^(]*)\((.*)\) ([^;]*)")
    with open(config.path_changelog) as fp:
        for line in fp:
            r = header.match(line)
            if r:
                yield (r.group(1).strip(), r.group(2).strip(), r.group(3).strip())
|
|
|
|
|
|
|
|
def _stable_compholio_version():
    """Get version number of the latest stable release.

    The newest changelog entry whose distribution is not UNRELEASED is
    considered the latest stable release.
    """
    for package, version, distro in _read_changelog():
        if distro.lower() == "unreleased":
            continue
        return version
|
|
|
|
|
|
|
|
def _latest_wine_commit():
    """Get latest wine commit."""
    wine_dir = config.path_wine
    if not os.path.isdir(wine_dir):
        raise PatchUpdaterError("Please create a symlink to the wine repository in %s" % wine_dir)
    # Resolve origin/master to a full commit hash.
    git_cmd = ["git", "rev-parse", "origin/master"]
    commit = subprocess.check_output(git_cmd, cwd=wine_dir).strip()
    assert len(commit) == 40  # full SHA-1 hash expected
    return commit
|
|
|
|
|
|
|
|
def enum_directories(revision, path):
    """Enumerate all subdirectories of 'path' at a specific revision.

    Returns a list of (name, relative path) tuples.  When revision is
    None the working tree is enumerated directly, otherwise the tree
    object of the given git revision is inspected.
    """
    # Normalize the path - git expects repository-relative paths.
    if path[0:2] == "./":
        path = path[2:]
    elif path[0] == "/":
        raise RuntimeError("Expected relative path, not an absolute path")

    if revision is None:
        # Working-tree enumeration via the filesystem.
        return [(name, os.path.join(path, name)) for name in os.listdir(path)
                if name not in [".", ".."] and os.path.isdir(os.path.join(path, name))]

    # Ask git for the tree listing at the given revision.
    filename = "%s:%s" % (revision, path)
    try:
        content = subprocess.check_output(["git", "show", filename], stderr=_devnull)
    except subprocess.CalledProcessError as e:
        # Exit code 128 means the path doesn't exist at this revision.
        if e.returncode != 128: raise
        return [] # ignore error

    lines = content.split("\n")
    if not lines[0].startswith("tree ") or lines[1] != "":
        raise RuntimeError("Unexpected output from 'git show %s'" % filename)

    # Subdirectories are listed with a trailing slash.
    return [(name[:-1], os.path.join(path, name[:-1]))
            for name in lines[2:] if name != "" and name[-1] == "/"]
|
|
|
|
|
|
|
|
def read_definition(revision, filename, name_to_id):
    """Read a definition file and return information as tuple (depends, fixes).

    filename is the patchset directory; the 'definition' file inside it is
    read from the working tree (revision=None) or from the given git
    revision.  name_to_id maps patchset names to numeric ids; when it is
    None, 'Depends' entries are not resolved or validated.  Returns the
    tuple (depends, fixes, disabled).  Raises IOError if the definition
    file cannot be loaded from the given revision.
    """
    filename = os.path.join(filename, "definition")
    if revision is None:
        with open(filename) as fp:
            content = fp.read()
    else:
        filename = "%s:%s" % (revision, filename)
        try:
            content = subprocess.check_output(["git", "show", filename], stderr=_devnull)
        except subprocess.CalledProcessError:
            raise IOError("Failed to load %s" % filename)

    depends = set()
    fixes = []
    disabled = False

    for line in content.split("\n"):
        # Skip comments; all other recognized lines have a "Key: value" form.
        if line.startswith("#"):
            continue
        tmp = line.split(":", 1)
        if len(tmp) != 2:
            continue
        key, val = tmp[0].lower(), tmp[1].strip()
        if key == "depends":
            # Resolve the dependency name to its numeric patchset id.
            if name_to_id is not None:
                if not name_to_id.has_key(val):
                    raise PatchUpdaterError("Definition file %s references unknown dependency %s" % (filename, val))
                depends.add(name_to_id[val])
        elif key == "fixes":
            # Three accepted forms: a bare bug number, "[number] description",
            # or a free-form description without a bug number.
            r = re.match("^[0-9]+$", val)
            if r:
                fixes.append((int(val), None))
                continue
            r = re.match("^\\[ *([0-9]+) *\\](.*)$", val)
            if r:
                fixes.append((int(r.group(1)), r.group(2).strip()))
                continue
            fixes.append((None, val))
        elif key == "disabled":
            disabled = _parse_int(val)
        elif revision is None:
            # Only warn for the working tree; historic revisions may use
            # commands this version of the script doesn't know about.
            print "WARNING: Ignoring unknown command in definition file %s: %s" % (filename, line)

    return depends, fixes, disabled
|
2014-08-12 15:13:36 -07:00
|
|
|
|
2014-08-12 16:59:01 -07:00
|
|
|
def read_patchset(revision = None):
    """Read information about all patchsets for a specific revision.

    Returns a dictionary mapping a unique numeric id to a PatchSet object.
    For the working tree (revision=None) the individual .patch files are
    parsed as well; for historic revisions only directory names and
    definition files are read.
    """
    unique_id   = itertools.count()
    all_patches = {}
    name_to_id  = {}

    # Read in sorted order (to ensure created Makefile doesn't change too much)
    for name, subdirectory in sorted(enum_directories(revision, config.path_patches)):
        patch = PatchSet(name)

        if revision is None:

            # If its the latest revision, then request additional information
            if not os.path.isdir(subdirectory):
                raise RuntimeError("Unable to open directory %s" % subdirectory)

            # Enumerate .patch files in the given directory, enumerate individual patches and affected files
            for f in sorted(os.listdir(subdirectory)):
                if not f.endswith(".patch") or not os.path.isfile(os.path.join(subdirectory, f)):
                    continue
                patch.files.append(f)
                for p in patchutils.read_patch(os.path.join(subdirectory, f)):
                    patch.modified_files.add(p.modified_file)
                    patch.patches.append(p)

            # No single patch within this directory, ignore it
            if len(patch.patches) == 0:
                del patch
                continue

        i = next(unique_id)
        all_patches[i] = patch
        name_to_id[name] = i

    # Now read the definition files in a second step
    # (a second pass is required because 'Depends' entries reference other
    # patchsets by name, so name_to_id must be complete first)
    for i, patch in all_patches.iteritems():
        try:
            patch.depends, patch.fixes, patch.disabled = \
                read_definition(revision, os.path.join(config.path_patches, patch.name), name_to_id)
        except IOError:
            # Missing definition file - fall back to empty metadata.
            patch.depends, patch.fixes, patch.disabled = set(), [], False

    return all_patches
|
2014-07-11 09:51:03 -07:00
|
|
|
|
|
|
|
def causal_time_combine(a, b):
    """Combines two timestamps into a new one."""
    # Element-wise maximum of both vector clocks.
    result = []
    for left, right in zip(a, b):
        result.append(max(left, right))
    return result
|
2014-07-11 09:51:03 -07:00
|
|
|
|
|
|
|
def causal_time_smaller(a, b):
    """Checks if timestamp a is smaller than timestamp b."""
    # For vector clocks, a < b holds iff no component of a exceeds the
    # corresponding component of b and at least one is strictly smaller.
    strictly_smaller = False
    for left, right in zip(a, b):
        if left > right:
            return False
        if left < right:
            strictly_smaller = True
    return strictly_smaller
|
|
|
|
|
2014-07-26 14:19:39 -07:00
|
|
|
def causal_time_relation(all_patches, indices):
    """Checks if the dependencies of patches are compatible with a specific apply order."""
    # The order is admissible iff no later patch has a vector clock strictly
    # smaller than that of an earlier patch.
    return not any(causal_time_smaller(all_patches[j].verify_time, all_patches[i].verify_time)
                   for i, j in _pairs(indices))
|
|
|
|
|
2014-07-26 14:19:39 -07:00
|
|
|
def causal_time_relation_any(all_patches, indices):
    """Similar to causal_time_relation(), but also check all possible permutations of indices."""
    for i, j in _pairs(indices):
        earlier = causal_time_smaller(all_patches[i].verify_time, all_patches[j].verify_time)
        later   = causal_time_smaller(all_patches[j].verify_time, all_patches[i].verify_time)
        # Each pair must be ordered in at least one direction, otherwise no
        # permutation can satisfy the dependencies.
        if not earlier and not later:
            return False
    return True
|
|
|
|
|
2014-11-28 23:17:16 -08:00
|
|
|
def causal_time_permutations(all_patches, indices):
    """Iterate over all possible permutations of patches affecting
    a specific file, which are compatible with dependencies."""
    # Filter the full permutation space down to dependency-compatible orders.
    return (permutation for permutation in itertools.permutations(indices)
            if causal_time_relation(all_patches, permutation))
|
2014-07-25 07:39:08 -07:00
|
|
|
|
|
|
|
def contains_binary_patch(all_patches, indices, filename):
    """Checks if any patch with given indices affecting filename is a binary patch."""
    return any(patch.modified_file == filename and patch.is_binary()
               for i in indices for patch in all_patches[i].patches)
|
|
|
|
|
2014-11-28 23:17:16 -08:00
|
|
|
def get_wine_file(filename):
    """Return the content of a file at the latest wine commit.

    The content is written into a temporary file, which is returned; the
    caller is responsible for closing it.  If the file does not exist at
    that commit (git exits with code 128), the temporary file is empty.
    """
    entry = "%s:%s" % (latest_wine_commit, filename)
    result = tempfile.NamedTemporaryFile()
    try:
        # 'git show' writes the file content directly to the temporary file
        # via stdout.  check_call() only returns the exit status, so its
        # return value is deliberately not captured (the previous code
        # misleadingly assigned it to a variable named 'content').
        subprocess.check_call(["git", "show", entry], cwd=config.path_wine, \
                              stdout=result, stderr=_devnull)
    except subprocess.CalledProcessError as e:
        # Exit code 128 means the file doesn't exist at this revision -
        # treat that as an empty file; anything else is a real error.
        if e.returncode != 128: raise
    result.flush() # shouldn't be necessary because the subprocess writes directly to the fd
    return result
|
|
|
|
|
|
|
|
def select_patches(all_patches, indices, filename):
    """Create a temporary patch file for each patchset and calculate the checksum."""
    selected_patches = {}

    for i in indices:
        patchfile = tempfile.NamedTemporaryFile()
        digest = hashlib.sha256()

        # Concatenate all chunks of this patchset which affect the file,
        # hashing the data while writing it out.  A newline separates the
        # individual patches.
        for patch in all_patches[i].patches:
            if patch.modified_file != filename:
                continue
            for chunk in patch.read_chunks():
                patchfile.write(chunk)
                digest.update(chunk)
            patchfile.write("\n")
            digest.update("\n")

        patchfile.flush()
        selected_patches[i] = (digest.digest(), patchfile)

    return selected_patches
|
|
|
|
|
|
|
|
def verify_patch_order(all_patches, indices, filename, pool):
    """Checks if the dependencies are defined correctly by applying
    the patches on a (temporary) copy from the git tree.

    all_patches maps ids to PatchSet objects, indices are the ids of the
    patchsets touching 'filename', and pool is a thread pool used to test
    permutations in parallel.  Raises PatchUpdaterError when the patches
    don't apply or produce order-dependent results.
    """

    # If one of patches is a binary patch, then we cannot / won't verify it - require dependencies in this case
    if contains_binary_patch(all_patches, indices, filename):
        if not causal_time_relation_any(all_patches, indices):
            raise PatchUpdaterError("Because of binary patch modifying file %s the following patches need explicit dependencies: %s" %
                                    (filename, ", ".join([all_patches[i].name for i in indices])))
        return

    # Baseline: the unpatched file from the wine tree, plus per-patchset
    # temporary patch files and their checksums.
    original_content = get_wine_file(filename)
    original_content_hash = _sha256(original_content)
    selected_patches = select_patches(all_patches, indices, filename)
    try:

        def _test_apply(permutations):
            """Tests if specific permutations of patches apply on the wine source tree.

            Generator executed on a worker thread: yields the sha256 of the
            patched result for each permutation, or None if it fails to apply.
            A stack of intermediate results is kept so that consecutive
            permutations sharing a prefix don't re-apply those patches.
            """
            patch_stack_indices = []
            patch_stack_patches = []
            try:

                for permutation in permutations:

                    # Calculate hash (identifies base content + ordered patch list)
                    m = hashlib.sha256()
                    m.update(original_content_hash)
                    for i in permutation:
                        m.update(selected_patches[i][0])
                    input_hash = m.digest()

                    # Fast path -> we know that it applies properly
                    try:
                        yield cached_patch_result[input_hash]
                        continue

                    except KeyError:
                        pass

                    # Remove unneeded patches from patch stack
                    # (pop until the stack is a prefix of the current permutation)
                    while list(permutation[:len(patch_stack_indices)]) != patch_stack_indices:
                        patch_stack_indices.pop()
                        patch_stack_patches.pop().close()

                    # Apply the patches (without fuzz)
                    try:
                        while len(patch_stack_indices) < len(permutation):
                            i = permutation[len(patch_stack_indices)]
                            original = patch_stack_patches[-1] if len(patch_stack_indices) else original_content
                            patchfile = selected_patches[i][1]
                            patch_stack_patches.append(patchutils.apply_patch(original, patchfile, fuzz=0))
                            patch_stack_indices.append(i)
                        output_hash = _sha256(patch_stack_patches[-1])

                    except patchutils.PatchApplyError:
                        # None marks "does not apply" in the cache as well.
                        output_hash = None

                    cached_patch_result[input_hash] = output_hash
                    yield output_hash

            finally:
                # Ensure temporary files are cleaned up properly
                while len(patch_stack_patches):
                    patch_stack_patches.pop().close()

        # Show a progress bar while applying the patches - this task might take some time
        chunk_size = 20
        total_tasks = (math.factorial(len(indices)) + chunk_size - 1) / chunk_size
        with progressbar.ProgressBar(desc=filename, total=total_tasks) as progress:

            failed_to_apply = False
            last_result_hash = None

            # Check for possible ways to apply the patch
            # (permutations are chunked and distributed over the thread pool;
            # results are merged back while updating the progress bar)
            it = _split_seq(causal_time_permutations(all_patches, indices), chunk_size)
            for output_hash in _merge_seq(pool.imap_unordered(lambda seq: list(_test_apply(seq)), it), \
                                          callback=progress.update):

                # Failed to apply patch, continue checking the rest.
                if output_hash is None:
                    failed_to_apply = True
                    if last_result_hash is None:
                        continue
                    break

                # No known hash yet, remember the result. If we failed applying before, we can stop now.
                elif last_result_hash is None:
                    last_result_hash = output_hash
                    if failed_to_apply: break

                # Applied successful, but result has a different hash - also treat as failure.
                elif last_result_hash != output_hash:
                    failed_to_apply = True
                    break

            if failed_to_apply:
                progress.finish("<failed to apply>")
            elif verbose:
                # NOTE(review): 'verbose' is a module-level global assigned in
                # the __main__ block - this function assumes it has been set.
                progress.finish(binascii.hexlify(last_result_hash))

    finally:
        # Always release the temporary files, even on error.
        original_content.close()
        for _, (_, p) in selected_patches.iteritems():
            p.close()

    # If something failed, then show the appropriate error message.
    if failed_to_apply and last_result_hash is None:
        raise PatchUpdaterError("Changes to file %s don't apply on git source tree: %s" %
                                (filename, ", ".join([all_patches[i].name for i in indices])))

    elif failed_to_apply:
        raise PatchUpdaterError("Depending on the order some changes to file %s don't apply / lead to different results: %s" %
                                (filename, ", ".join([all_patches[i].name for i in indices])))

    else:
        # Success: all admissible orders produced the same 32-byte sha256.
        assert len(last_result_hash) == 32
|
2014-07-11 09:51:03 -07:00
|
|
|
|
|
|
|
def verify_dependencies(all_patches):
    """Resolve dependencies, and afterwards run verify_patch_order() to check if everything applies properly.

    Assigns a vector clock (verify_time) to every enabled patchset based on
    its dependency graph, then verifies each file touched by multiple
    patchsets.  Raises PatchUpdaterError on circular/disabled dependencies
    or when patches fail to apply.
    """

    def _load_patch_cache():
        """Load dictionary for cached patch dependency tests."""
        global cached_patch_result
        cached_patch_result = _load_dict(config.path_depcache)

    def _save_patch_cache():
        """Save dictionary for cached patch dependency tests."""
        # Copy first: worker threads might still mutate the dict.
        _save_dict(config.path_depcache, cached_patch_result.copy())

    # Disabled patchsets are excluded from all further processing.
    enabled_patches = dict([(i, patch) for i, patch in all_patches.iteritems() if not patch.disabled])
    max_patches = max(enabled_patches.keys()) + 1

    # Initialize per-patch state: remaining unresolved dependencies and a
    # zeroed vector clock with one component per possible patch id.
    for i, patch in enabled_patches.iteritems():
        patch.verify_depends = set(patch.depends)
        patch.verify_time = [0]*max_patches

    # Check for circular dependencies and perform modified vector clock algorithm
    patches = dict(enabled_patches)
    while len(patches):

        # Patches with no unresolved dependencies can be "applied" now.
        to_delete = []
        for i, patch in patches.iteritems():
            if len(patch.verify_depends) == 0:
                patch.verify_time[i] += 1
                to_delete.append(i)

        # No progress possible -> the remaining patches form a cycle (or
        # depend on a disabled patchset).
        if len(to_delete) == 0:
            raise PatchUpdaterError("Circular dependency (or disabled dependency) in set of patches: %s" %
                                    ", ".join([patch.name for i, patch in patches.iteritems()]))

        # Propagate the clocks of resolved patches to their dependents.
        for j in to_delete:
            for i, patch in patches.iteritems():
                if i != j and j in patch.verify_depends:
                    patch.verify_time = causal_time_combine(patch.verify_time, patches[j].verify_time)
                    patch.verify_depends.remove(j)
            del patches[j]

    # Find out which files are modified by multiple patches
    modified_files = {}
    for i, patch in enabled_patches.iteritems():
        for f in patch.modified_files:
            if f not in modified_files:
                modified_files[f] = []
            modified_files[f].append(i)

    # Check if patches always apply correctly
    _load_patch_cache()
    pool = multiprocessing.pool.ThreadPool(processes=8)
    try:
        for f, indices in modified_files.iteritems():
            verify_patch_order(enabled_patches, indices, f, pool)
    finally:
        # Persist the cache even when verification raised an error.
        _save_patch_cache()
        pool.close()
|
2014-07-11 09:51:03 -07:00
|
|
|
|
2014-08-12 14:11:15 -07:00
|
|
|
def generate_makefile(all_patches):
    """Generate Makefile for a specific set of patches.

    Renders config.path_template_Makefile into config.path_Makefile.  The
    template's {patchlist} placeholder receives the *.ok targets of all
    enabled patchsets; a commented rule block is appended for every
    patchset (including disabled ones).
    """

    with open(config.path_template_Makefile) as template_fp:
        template = template_fp.read()

    with open(config.path_Makefile, "w") as fp:
        # Only enabled patchsets take part in the default patchlist.
        fp.write(template.format(patchlist="\t" + " \\\n\t".join(
            ["%s.ok" % patch.name for _, patch in all_patches.iteritems() if not patch.disabled])))

        for _, patch in all_patches.iteritems():
            fp.write("# Patchset %s\n" % patch.name)
            fp.write("# |\n")

            # List all bugs fixed by this patchset
            if any([bugid is not None for bugid, bugname in patch.fixes]):
                fp.write("# | This patchset fixes the following Wine bugs:\n")
                for bugid, bugname in patch.fixes:
                    if bugid is not None:
                        fp.write("# | *\t%s\n" % "\n# | \t".join(textwrap.wrap("[#%d] %s" % (bugid, bugname), 120)))
                fp.write("# |\n")

            # List all modified files
            fp.write("# | Modified files:\n")
            fp.write("# | *\t%s\n" % "\n# | \t".join(textwrap.wrap(", ".join(sorted(patch.modified_files)), 120)))
            fp.write("# |\n")

            # Generate dependencies and code to apply patches
            fp.write(".INTERMEDIATE: %s.ok\n" % patch.name)
            depends = " ".join([""] + sorted(["%s.ok" % all_patches[d].name for d in patch.depends])) if len(patch.depends) else ""
            fp.write("%s.ok:%s\n" % (patch.name, depends))
            for f in patch.files:
                fp.write("\t$(call APPLY_FILE,%s)\n" % os.path.join(patch.name, f))

            # Create *.ok file (used to generate patchlist)
            # Each *.ok file records the author/subject/revision of the
            # contained patches; duplicates of adjacent entries are dropped.
            if len(patch.patches):
                fp.write("\t@( \\\n")
                for p in _unique(patch.patches, key=lambda p: (p.patch_author, p.patch_subject, p.patch_revision)):
                    fp.write("\t\techo '+ { \"%s\", \"%s\", %d },'; \\\n" % \
                             (_escape(p.patch_author), _escape(p.patch_subject), p.patch_revision))
                fp.write("\t) > %s.ok\n" % patch.name)
            else:
                fp.write("\ttouch %s.ok\n" % patch.name)
            fp.write("\n");
|
|
|
|
|
2014-08-12 16:59:01 -07:00
|
|
|
def generate_markdown(all_patches, stable_patches, stable_compholio_version):
    """Generate README.md including information about specific patches and bugfixes.

    Compares the fixes of the current tree (all_patches) with those of the
    latest stable release (stable_patches) and renders the
    config.path_template_README_md template into config.path_README_md.
    """

    def _format_bug(mode, bugid, bugname):
        # mode < 0 marks fixes removed since the stable release (struck out).
        if mode < 0: bugname = "~~%s~~" % bugname
        if bugid is None: return "* %s" % bugname
        return "* %s ([Wine Bug #%d](https://bugs.winehq.org/show_bug.cgi?id=%d))" % \
               (bugname, bugid, bugid) #, short_desc.replace("\\", "\\\\").replace("\"", "\\\""))

    # Maps bug id (or name, for fixes without an id) to [mode, bugid, bugname]
    # where mode is 1 = new fix, 0 = also in stable, -1 = only in stable.
    all_fixes = {}

    # Get fixes for current version
    for _, patch in all_patches.iteritems():
        for bugid, bugname in patch.fixes:
            key = bugid if bugid is not None else bugname
            all_fixes[key] = [1, bugid, bugname]

    # Compare with fixes for latest stable version
    for _, patch in stable_patches.iteritems():
        for bugid, bugname in patch.fixes:
            key = bugid if bugid is not None else bugname
            if all_fixes.has_key(key):
                all_fixes[key][0] = 0
            else:
                all_fixes[key] = [-1, bugid, bugname]

    # Generate lists for all new and old fixes
    new_fixes = [(mode, bugid, bugname) for dummy, (mode, bugid, bugname) in
                 all_fixes.iteritems() if mode > 0]
    old_fixes = [(mode, bugid, bugname) for dummy, (mode, bugid, bugname) in
                 all_fixes.iteritems() if mode <= 0]

    # List of old fixes is not available when releasing a new version
    if len(old_fixes) == 0:
        old_fixes = new_fixes
        new_fixes = []

    # Generate information for current version
    lines = []
    if len(new_fixes):
        lines.append("**Bugfixes and features included in the next upcoming release [%d]:**" % len(new_fixes))
        lines.append("")
        # Sort by bug name for a stable, readable listing.
        for mode, bugid, bugname in sorted(new_fixes, key=lambda x: x[2]):
            lines.append(_format_bug(mode, bugid, bugname))
        lines.append("")
        lines.append("")
    lines.append("**Bugs fixed in Wine Staging %s [%d]:**" % (stable_compholio_version, len(old_fixes)))
    lines.append("")
    for mode, bugid, bugname in sorted(old_fixes, key=lambda x: x[2]):
        lines.append(_format_bug(mode, bugid, bugname))

    # Update README.md
    with open(config.path_template_README_md) as template_fp:
        template = template_fp.read()
    with open(config.path_README_md, "w") as fp:
        fp.write(template.format(fixes="\n".join(lines)))
|
2014-07-28 17:33:30 -07:00
|
|
|
|
2014-07-11 09:51:03 -07:00
|
|
|
if __name__ == "__main__":
    # -v enables printing of the final result hash after verification.
    verbose = "-v" in sys.argv[1:]

    # Hack to avoid KeyboardInterrupts on worker threads
    # (raise a RuntimeError on the main thread instead, and ignore any
    # further SIGINT while shutting down)
    def _sig_int(signum=None, frame=None):
        signal.signal(signal.SIGINT, signal.SIG_IGN)
        raise RuntimeError("CTRL+C pressed")
    signal.signal(signal.SIGINT, _sig_int)

    try:

        # Get information about Wine and Compholio version
        latest_wine_commit = _latest_wine_commit()
        stable_compholio_version = _stable_compholio_version()

        # Read current and stable patches
        all_patches = read_patchset()
        stable_patches = read_patchset(revision="v%s" % stable_compholio_version)

        # Check dependencies
        verify_dependencies(all_patches)

        # Update Makefile and README.md
        generate_makefile(all_patches)
        generate_markdown(all_patches, stable_patches, stable_compholio_version)

    except PatchUpdaterError as e:
        print ""
        print "ERROR: %s" % e
        print ""
        exit(1)
|