#!/usr/bin/env python2
# -*- coding: utf-8 -*-
#
# Automatic patch dependency checker and apply script generator.
#
# Copyright (C) 2014-2017 Sebastian Lackner
# Copyright (C) 2015 Michael Müller
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
#
from patchutils import escape_sh, escape_c
import argparse
import binascii
import cPickle as pickle
import contextlib
import fnmatch
import hashlib
import itertools
import math
import multiprocessing.pool
import operator
import os
import patchutils
import progressbar
import re
import signal
import subprocess
import sys
import tempfile
import textwrap
import xmlrpclib
import ConfigParser

_devnull = open(os.devnull, 'wb')

# Cached information to speed up patch dependency checks
upstream_commit = None

class config(object):
    path_cache = ".patchupdate.cache"
    path_config = os.path.expanduser("~/.config/patchupdate.conf")

    path_patches = "patches"
    path_version = "staging/VERSION"
    path_wine = "staging/wine"

    path_template_script = "staging/patchinstall.sh.in"
    path_script = "patches/patchinstall.sh"

    path_IfDefined = "9999-IfDefined.patch"

    bugtracker_url = "https://bugs.winehq.org/xmlrpc.cgi"
    bugtracker_defaultcc = ["michael@fds-team.de", "sebastian@fds-team.de",
                            "erich.e.hoover@wine-staging.com", "dmitry@baikal.ru"]
    bugtracker_user = None
    bugtracker_pass = None

    github_url = "https://github.com/wine-staging/wine-staging"

class PatchUpdaterError(RuntimeError):
    """Failed to update patches."""
    pass

class PatchSet(object):
    def __init__(self, name, directory):
        self.name = name
        self.variable = None
        self.directory = directory
        self.config = []
        self.fixes = []
        self.changes = []
        self.disabled = False
        self.ifdefined = None

        self.files = []
        self.patches = []
        self.modified_files = set()
        self.depends = set()
        self.auto_depends = set()

        self.verify_time = None

def _pairs(a):
    """Iterate over all pairs of elements contained in the list a."""
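    # For example (a small illustration of the iteration order):
    # _pairs([1, 2, 3]) --> (1, 2) (1, 3) (2, 3)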
    for i, j in enumerate(a):
        for k in a[i+1:]:
            yield (j, k)

|
2014-11-14 21:35:26 -08:00
|
|
|
def _unique(iterable, key=None):
|
|
|
|
"List unique elements, preserving order. Remember only the element just seen."
|
2014-11-23 10:45:14 -08:00
|
|
|
# _unique('AAAABBBCCDAABBB') --> A B C D A B
|
|
|
|
# _unique('ABBCcAD', str.lower) --> A B C A D
|
2014-11-14 21:35:26 -08:00
|
|
|
return itertools.imap(next, itertools.imap(operator.itemgetter(1), itertools.groupby(iterable, key)))
|
|
|
|
|
def _binomial(n, k):
    "Compute binomial coefficient."
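    # For example: _binomial(5, 2) --> 10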
    num = 1
    div = 1
    for t in xrange(1, min(k, n - k) + 1):
        num *= n
        div *= t
        n -= 1
    return num // div

|
2014-11-28 23:17:16 -08:00
|
|
|
def _split_seq(iterable, size):
|
|
|
|
"""Split an iterator into chunks of a given size."""
|
|
|
|
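    # For example: _split_seq("ABCDE", 2) --> ['A', 'B'] ['C', 'D'] ['E']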
    it = iter(iterable)
    items = list(itertools.islice(it, size))
    while items:
        yield items
        items = list(itertools.islice(it, size))

|
2014-07-27 17:32:13 -07:00
|
|
|
def _load_dict(filename):
|
|
|
|
"""Load a Python dictionary object from a file."""
|
|
|
|
try:
|
|
|
|
with open(filename) as fp:
|
|
|
|
return pickle.load(fp)
|
|
|
|
except IOError:
|
|
|
|
return {}
|
|
|
|
|
|
|
|
def _save_dict(filename, value):
|
|
|
|
"""Save a Python dictionary object to a file."""
|
2015-04-12 15:23:44 -07:00
|
|
|
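    # Write to a sibling temporary file first and rename it into place, so an
    # interrupted run cannot leave a truncated cache behind (rename is atomic
    # on POSIX filesystems).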
    with open("%s.new" % filename, "wb") as fp:
        pickle.dump(value, fp, pickle.HIGHEST_PROTOCOL)
    os.rename("%s.new" % filename, filename)

def _sha256(fp):
    """Calculate sha256sum from a file descriptor."""
    m = hashlib.sha256()
    fp.seek(0)
    while True:
        buf = fp.read(16384)
        if buf == "": break
        m.update(buf)
    return m.digest()

def _parse_int(val, default=0):
    """Parse an integer or boolean value."""
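    # For example: _parse_int("42") --> 42, _parse_int("yes") --> 1,
    # _parse_int("unknown") --> 0 (the default)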
    if re.match("^[0-9]+$", val):
        return int(val)
    try:
        return {'true': 1, 'yes': 1, 'false': 0, 'no': 0}[val.lower()]
    except KeyError:
        return default

def _staging_version():
    """Get the current version number of Wine Staging."""
    with open(config.path_version) as fp:
        return fp.read().strip()

def _upstream_commit(commit=None):
    """Get the latest wine commit."""
    if not os.path.isdir(config.path_wine):
        raise PatchUpdaterError("Please create a symlink to the wine repository in %s" % config.path_wine)
    if commit is None:
        commit = subprocess.check_output(["git", "rev-parse", "origin/master"], cwd=config.path_wine).strip()
    assert len(commit) == 40 and commit == commit.lower()
    return commit

def enum_patchsets(path):
    """Return a sorted list of all subdirectories of path."""
    dirs = []
    for name in os.listdir(path):
        directory = os.path.join(path, name)
        if not os.path.isdir(directory):
            continue
        dirs.append((name, directory))
    return sorted(dirs)

def load_patchsets():
    """Read information about all patchsets."""
    unique_id = itertools.count()
    all_patches = {}
    name_to_id = {}

    for name, directory in enum_patchsets(config.path_patches):
        patch = PatchSet(name, directory)

        # Load the definition file
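        # (Each non-comment line has the form "key: value"; hypothetical
        # examples: "Fixes: [12345] Some bug" or "Depends: other-patchset".)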
        try:
            with open(os.path.join(directory, "definition")) as fp:
                for line in fp:
                    if line.startswith("#"):
                        continue
                    tmp = line.split(":", 1)
                    if len(tmp) != 2:
                        continue
                    patch.config.append((tmp[0].lower(), tmp[1].strip()))
        except IOError:
            pass

        # Enumerate .patch files in the given directory, enumerate individual patches and affected files
        for f in sorted(os.listdir(directory)):
            if not re.match("^[0-9]{4}-.*\\.patch$", f):
                continue
            if f.startswith(config.path_IfDefined):
                continue
            if ("exclude", f) in patch.config:
                continue
            if not os.path.isfile(os.path.join(directory, f)):
                continue
            patch.files.append(f)
            for p in patchutils.read_patch(os.path.join(directory, f)):
                patch.modified_files.add(p.modified_file)
                patch.patches.append(p)

        # The directory doesn't contain any patches, so ignore it
        if len(patch.patches) == 0:
            print "WARNING: No patches found in directory %s" % directory
            del patch
            continue

        i = next(unique_id)
        all_patches[i] = patch
        name_to_id[name] = i

    # Now read the definition files in a second step
    for i, patch in all_patches.iteritems():
        for key, val in patch.config:
            if key == "depends":
                if not name_to_id.has_key(val):
                    raise PatchUpdaterError("Definition file for %s references unknown dependency %s" % (patch.name, val))
                patch.depends.add(name_to_id[val])

            elif key == "apply-after":
                for j, other_patch in all_patches.iteritems():
                    if i != j and any([fnmatch.fnmatch(f, val) for f in other_patch.modified_files]):
                        patch.auto_depends.add(j)

            elif key == "apply-before":
                for j, other_patch in all_patches.iteritems():
                    if i != j and any([fnmatch.fnmatch(f, val) for f in other_patch.modified_files]):
                        other_patch.auto_depends.add(i)

            elif key == "fixes":
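                # Either "[12345] Description" (a bug reference that is kept in
                # sync with bugzilla), "[!12345] Description" (a bug reference
                # without syncing), or a plain description.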
                r = re.match("^\\[ *(!)? *([0-9]+) *\\](.*)$", val)
                if r:
                    sync = (r.group(1) != "!")
                    bugid = int(r.group(2))
                    patch.fixes.append((sync, bugid, r.group(3).strip()))
                    continue
                patch.fixes.append((False, None, val))

            elif key == "disabled":
                patch.disabled = _parse_int(val)

            elif key == "exclude":
                pass # Already processed above

            elif key == "ifdefined":
                patch.ifdefined = val

            else:
                print "WARNING: Ignoring unknown command in definition file for %s: %s" % (patch.name, key)

    # Filter out auto-dependencies on disabled patchsets
    for i, patch in all_patches.iteritems():
        patch.auto_depends = set([j for j in patch.auto_depends if not all_patches[j].disabled])

    return all_patches

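# The verify_time values used below act like vector clocks: one counter per
# patchset index. Each patchset's timestamp dominates the timestamps of all
# patchsets it (explicitly) depends on, which lets causal_time_smaller()
# detect dependency order.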
def causal_time_combine(a, b):
    """Combines two timestamps into a new one."""
    return [max(x, y) for x, y in zip(a, b)]

def causal_time_smaller(a, b):
    """Checks if timestamp a is smaller than timestamp b."""
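    # For example:
    # causal_time_smaller([1, 0, 1], [1, 1, 1]) --> True
    # causal_time_smaller([1, 0, 0], [0, 1, 1]) --> False (concurrent timestamps)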
    return all([i <= j for i, j in zip(a,b)]) and any([i < j for i, j in zip(a,b)])

def causal_time_relation(all_patches, indices):
    """Checks if the dependencies of patches are compatible with a specific apply order."""
    for i, j in _pairs(indices):
        if causal_time_smaller(all_patches[j].verify_time, all_patches[i].verify_time):
            return False
    return True

def causal_time_relation_any(all_patches, indices):
    """Similar to causal_time_relation(), but also check all possible permutations of indices."""
    for i, j in _pairs(indices):
        if not (causal_time_smaller(all_patches[i].verify_time, all_patches[j].verify_time) or \
                causal_time_smaller(all_patches[j].verify_time, all_patches[i].verify_time)):
            return False
    return True

def contains_binary_patch(all_patches, indices, filename):
    """Checks if any patch among the given indices affecting filename is a binary patch."""
    for i in indices:
        for patch in all_patches[i].patches:
            if patch.modified_file == filename and patch.is_binary:
                return True
    return False

def get_wine_file(filename):
    """Return the content of a file at the upstream commit."""
    entry = "%s:%s" % (upstream_commit, filename)
    result = tempfile.NamedTemporaryFile()
    try:
        subprocess.check_call(["git", "show", entry], cwd=config.path_wine,
                              stdout=result, stderr=_devnull)
    except subprocess.CalledProcessError as e:
        if e.returncode != 128: raise
    result.flush() # shouldn't be necessary because the subprocess writes directly to the fd
    return result

|
2014-12-13 19:57:16 -08:00
|
|
|
def extract_patch(patchset, filename):
|
|
|
|
"""Extract all changes to a specific file from a patchset."""
|
|
|
|
p = tempfile.NamedTemporaryFile()
|
|
|
|
m = hashlib.sha256()
|
|
|
|
|
|
|
|
for patch in patchset.patches:
|
|
|
|
if patch.modified_file != filename:
|
|
|
|
continue
|
2016-07-22 08:29:14 -07:00
|
|
|
assert not patch.is_binary
|
2014-12-13 19:57:16 -08:00
|
|
|
for chunk in patch.read_chunks():
|
|
|
|
p.write(chunk)
|
|
|
|
m.update(chunk)
|
|
|
|
p.write("\n")
|
|
|
|
m.update("\n")
|
|
|
|
|
|
|
|
p.flush()
|
|
|
|
return (m.digest(), p)
|
|
|
|
|
def select_patches(all_patches, indices, filename):
    """Create a temporary patch file for each patchset and calculate the checksum."""
    selected_patches = {}
    for i in indices:
        selected_patches[i] = extract_patch(all_patches[i], filename)
    return selected_patches

def resolve_dependencies(all_patches, index=None, depends=None, auto_deps=True):
    """Returns a sorted list with all dependencies for a given patch."""
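    # verify_resolved acts as depth-first search state: 0 = not visited yet,
    # -1 = currently being resolved (still on the stack), 1 = fully resolved.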

    def _resolve(depends):
        for i in sorted(depends):
            if all_patches[i].disabled: # Check for disabled patch
                raise PatchUpdaterError("Encountered dependency on disabled patchset %s" % all_patches[i].name)
            if all_patches[i].verify_resolved > 0: # Dependencies already resolved
                continue
            if all_patches[i].verify_resolved < 0: # Detect circular dependency
                raise PatchUpdaterError("Circular dependency while trying to resolve %s" % all_patches[i].name)

            # Recursively resolve dependencies
            all_patches[i].verify_resolved = -1
            _resolve(all_patches[i].depends)
            if auto_deps: _resolve(all_patches[i].auto_depends)
            all_patches[i].verify_resolved = 1
            resolved.append(i)

    for _, patch in all_patches.iteritems():
        patch.verify_resolved = 0

    resolved = []
    if depends is None:
        _resolve(all_patches[index].depends)
        if auto_deps: _resolve(all_patches[index].auto_depends)
    else:
        _resolve(depends)
    return resolved

|
2015-09-28 17:20:49 -07:00
|
|
|
def sync_bug_status(bugtracker, bug, url):
|
|
|
|
"""Automatically updates the STAGED information of a referenced bug."""
|
|
|
|
|
|
|
|
# We don't want to reopen bugs
|
|
|
|
if bug['status'] not in ["UNCONFIRMED", "NEW", "ASSIGNED", "REOPENED", "STAGED"]:
|
|
|
|
return
|
|
|
|
|
|
|
|
if config.bugtracker_user is None or config.bugtracker_pass is None:
|
|
|
|
raise PatchUpdaterError("Can't update bug without username/password set")
|
|
|
|
|
|
|
|
changes = { 'ids' : bug['id'],
|
|
|
|
'Bugzilla_login' : config.bugtracker_user,
|
|
|
|
'Bugzilla_password' : config.bugtracker_pass }
|
|
|
|
|
|
|
|
# Update bug status
|
|
|
|
if bug['status'] != "STAGED":
|
|
|
|
changes['status'] = "STAGED"
|
|
|
|
|
|
|
|
# Update patchset URL
|
|
|
|
if bug['cf_staged_patchset'] != url:
|
|
|
|
changes['cf_staged_patchset'] = url
|
|
|
|
|
|
|
|
# Add missing CC contacts
|
|
|
|
missing_cc = []
|
|
|
|
for cc in config.bugtracker_defaultcc:
|
|
|
|
if cc not in bug['cc']:
|
|
|
|
missing_cc.append(cc)
|
|
|
|
if len(missing_cc):
|
|
|
|
changes["cc"] = {"add" : missing_cc}
|
|
|
|
|
|
|
|
bugtracker.Bug.update(changes)
|
|
|
|
|
|
|
|
def check_bug_status(all_patches, sync_bugs=False):
    """Checks the information in the referenced bugs and corrects them if sync_bugs is set."""

    all_bugids = set()
    url_map = {}

    for _, patch in all_patches.iteritems():
        url = "%s/tree/master/%s" % (config.github_url, patch.directory)
        for sync, bugid, bugname in patch.fixes:
            if sync and bugid is not None:
                url_map[bugid] = url
                all_bugids.add(bugid)

    bugtracker = xmlrpclib.ServerProxy(config.bugtracker_url)
    bug_list = bugtracker.Bug.get(dict(ids=list(all_bugids)))
    staged_bugs = bugtracker.Bug.search(dict(status="STAGED"))

    once = True
    for bug in bug_list['bugs']:
        if bug['status'] != "STAGED":
            if once:
                print ""
                print "WARNING: The following bugs might require attention:"
                print ""
                once = False
            print " #%d - \"%s\" - %s %s - %s" % (bug['id'], bug['summary'], bug['status'],
                                                  bug['resolution'], bug['cf_staged_patchset'])
            if sync_bugs:
                sync_bug_status(bugtracker, bug, url_map[bug['id']])
        patchset = bug['cf_staged_patchset']
        if '.patch' in patchset: patchset = patchset[0:patchset.rindex('/')].replace('/blob/','/tree/')
        if bug['status'] == 'STAGED' and patchset != url_map[bug['id']]:
            print 'Invalid staged patchset: #%d - "%s" - %s' % (bug['id'], bug['summary'], bug['cf_staged_patchset'])

    once = True
    for bug in staged_bugs['bugs']:
        if bug['id'] not in all_bugids:
            if once:
                print ""
                print "WARNING: The following bugs are incorrectly marked as STAGED:"
                print ""
                once = False
            print " #%d - \"%s\" - %s %s" % (bug['id'], bug['summary'], bug['status'],
                                             bug['resolution'])

    print ""

def generate_ifdefined(all_patches, skip_checks=False):
    """Update autogenerated ifdefined patches, which can be used to selectively disable features at compile time."""
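    # The approach: reconstruct each modified file with and without the main
    # patchset applied, then store the difference wrapped in #ifdef blocks
    # (using the name from patch.ifdefined), so the feature can be toggled at
    # compile time.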
    for i, patch in all_patches.iteritems():
        if patch.ifdefined is None:
            continue
        if patch.disabled:
            continue

        filename = os.path.join(patch.directory, config.path_IfDefined)
        headers = { 'author': "Wine Staging Team",
                    'email': "webmaster@fds-team.de",
                    'subject': "Autogenerated #ifdef patch for %s." % patch.name }

        if skip_checks:
            patch.files = [os.path.basename(filename)]
            continue

        with open(filename, "wb") as fp:
            fp.write("From: %s <%s>\n" % (headers['author'], headers['email']))
            fp.write("Subject: %s\n" % headers['subject'])
            fp.write("\n")
            fp.write("Based on patches by:\n")
            for author, email in sorted(set([(p.patch_author, p.patch_email) for p in patch.patches])):
                fp.write("  %s <%s>\n" % (author, email))
            fp.write("\n")

            depends = resolve_dependencies(all_patches, i)
            for f in sorted(patch.modified_files):

                # Reconstruct the state after applying the dependencies
                original = get_wine_file(f)
                selected_patches = select_patches(all_patches, depends, f)
                failed = []

                try:
                    for j in depends:
                        failed.append(j)
                        original = patchutils.apply_patch(original, selected_patches[j][1], fuzz=0)
                except patchutils.PatchApplyError:
                    raise PatchUpdaterError("Changes to file %s don't apply: %s" %
                                            (f, ", ".join([all_patches[j].name for j in failed])))

                # Now apply the main patch
                p = extract_patch(patch, f)[1]

                try:
                    failed.append(i)
                    patched = patchutils.apply_patch(original, p, fuzz=0)
                except patchutils.PatchApplyError:
                    raise PatchUpdaterError("Changes to file %s don't apply: %s" %
                                            (f, ", ".join([all_patches[j].name for j in failed])))

                # Now get the diff between both
                diff = patchutils.generate_ifdef_patch(original, patched, ifdef=patch.ifdefined)
                if diff is not None:
                    fp.write("diff --git a/%s b/%s\n" % (f, f))
                    fp.write("--- a/%s\n" % f)
                    fp.write("+++ b/%s\n" % f)
                    while True:
                        buf = diff.read(16384)
                        if buf == "": break
                        fp.write(buf)
                    diff.close()

            # Close the file
            fp.close()

        # Add changes to git
        subprocess.call(["git", "add", filename])

        # Add the autogenerated file as a last patch
        patch.files = [os.path.basename(filename)]
        for p in patch.patches:
            p.filename = None
            p.modified_file = None
        for p in patchutils.read_patch(filename):
            assert p.modified_file in patch.modified_files
            p.patch_author = None
            patch.patches.append(p)

def generate_apply_order(all_patches, skip_checks=False):
    """Resolve dependencies, and afterwards check if everything applies properly."""
    depends = sorted([i for i, patch in all_patches.iteritems() if not patch.disabled])
    resolved = resolve_dependencies(all_patches, depends=depends)
    max_patches = max(resolved) + 1

    if skip_checks:
        return resolved

    # Generate timestamps based on dependencies, still required for binary patches
    # Find out which files are modified by multiple patches
    modified_files = {}
    for i, patch in [(i, all_patches[i]) for i in resolved]:
        patch.verify_time = [0]*max_patches
        patch.verify_time[i] += 1
        for j in patch.depends:
            patch.verify_time = causal_time_combine(patch.verify_time, all_patches[j].verify_time)

        for f in patch.modified_files:
            if f not in modified_files:
                modified_files[f] = []
            modified_files[f].append(i)

    # Check dependencies
    dependency_cache = _load_dict(config.path_cache)
    pool = multiprocessing.pool.ThreadPool(processes=4)
    try:
        for filename, indices in modified_files.iteritems():

            # If one of the patches is a binary patch, then we cannot / won't verify it - require dependencies in this case
            if contains_binary_patch(all_patches, indices, filename):
                if not causal_time_relation_any(all_patches, indices):
                    raise PatchUpdaterError("Because of binary patch modifying file %s the following patches need explicit dependencies: %s" %
                                            (filename, ", ".join([all_patches[i].name for i in indices])))
                continue

            original_content = get_wine_file(filename)
            original_hash = _sha256(original_content)
            selected_patches = select_patches(all_patches, indices, filename)

            # Generate a unique id based on the original content, the selected patches
            # and the dependency information. Since this information only has to be compared
            # we can throw it into a single hash.
            m = hashlib.sha256()
            m.update(original_hash)
            for i in indices:
                m.update("P%s" % selected_patches[i][0])
                for j in indices:
                    if causal_time_smaller(all_patches[j].verify_time, all_patches[i].verify_time):
                        m.update("D%s" % selected_patches[j][0])
            unique_hash = m.digest()

            # Skip checks if it matches the information from the cache
            # For backwards compatibility, convert string entries to list
            if dependency_cache.has_key(filename):
                if not isinstance(dependency_cache[filename], list):
                    dependency_cache[filename] = [dependency_cache[filename]]
                if unique_hash in dependency_cache[filename]:
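                    # Move the hash to the end of the list (most recently used)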
                    dependency_cache[filename].append(unique_hash)
                    dependency_cache[filename].remove(unique_hash)
                    continue

            chunk_size = 20
            iterables = []
            total = 0
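            # Build all non-empty subsets of the patches touching this file;
            # each subset is expected to apply cleanly on top of the upstream
            # file (modulo the configure.ac exception below).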
            for i in xrange(1, len(indices) + 1):
                # HACK: It is no longer feasible to check all combinations for configure.ac.
                # Only check corner cases (applying individual patches and applying all patches).
                if filename == "configure.ac" and i > 4 and i <= len(indices) - 4: continue
                iterables.append(itertools.combinations(indices, i))
                total += _binomial(len(indices), i)

            # Show a progress bar while applying the patches - this task might take some time
            with progressbar.ProgressBar(desc=filename, total=total / chunk_size) as progress:

                def test_apply(current):
                    set_apply = [(i, all_patches[i]) for i in current]
                    set_skip = [(i, all_patches[i]) for i in indices if i not in current]

                    # Check if there is any patch2 which depends directly or indirectly on patch1.
                    # If that is the case we have found an impossible situation, which can be skipped in this test.
                    for i, patch1 in set_apply:
                        for j, patch2 in set_skip:
                            if causal_time_smaller(patch2.verify_time, patch1.verify_time):
                                return True # we can skip this test

                    try:
                        original = original_content
                        for i, _ in set_apply:
                            original = patchutils.apply_patch(original, selected_patches[i][1], fuzz=0)
                    except patchutils.PatchApplyError:
                        return False

                    return True # everything is fine

                def test_apply_seq(current_list):
                    for current in current_list:
                        if not test_apply(current):
                            return current
                    return None

                it = _split_seq(itertools.chain(*iterables), chunk_size)
                for k, failed in enumerate(pool.imap_unordered(test_apply_seq, it)):
                    if failed is not None:
                        progress.finish("<failed to apply>")
                        raise PatchUpdaterError("Changes to file %s don't apply: %s" %
                                                (filename, ", ".join([all_patches[i].name for i in failed])))
                    progress.update(k)

            # Update the dependency cache, store max 10 entries per file
            if not dependency_cache.has_key(filename):
                dependency_cache[filename] = []
            dependency_cache[filename].append(unique_hash)
            dependency_cache[filename] = dependency_cache[filename][-10:]

        # Delete outdated cache information
        for filename in dependency_cache.keys():
            if not modified_files.has_key(filename):
                del dependency_cache[filename]
    finally:
        pool.close()
        _save_dict(config.path_cache, dependency_cache)

    return resolved

def generate_script(all_patches, resolved):
    """Generate script to apply patches."""

    # Generate code for helper functions
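    # The emitted shell code looks roughly like this for a (hypothetical)
    # patchset named "example-patchset":
    #   patch_enable_all () { enable_example_patchset="$1"; ... }
    #   patch_enable () { case "$1" in example-patchset) enable_example_patchset="$2" ;; esac; }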
    lines = []
    lines.append("# Enable or disable all patchsets\n")
    lines.append("patch_enable_all ()\n")
    lines.append("{\n")
    for i, patch in sorted([(i, all_patches[i]) for i in resolved], key=lambda x: x[1].name):
        patch.variable = "enable_%s" % patch.name.replace("-","_").replace(".","_")
        lines.append("\t%s=\"$1\"\n" % patch.variable)
    lines.append("}\n")
    lines.append("\n")

    lines.append("# Enable or disable a specific patchset\n")
    lines.append("patch_enable ()\n")
    lines.append("{\n")
    lines.append("\tcase \"$1\" in\n")
    for i, patch in sorted([(i, all_patches[i]) for i in resolved], key=lambda x: x[1].name):
        lines.append("\t\t%s)\n" % patch.name)
        lines.append("\t\t\t%s=\"$2\"\n" % patch.variable)
        lines.append("\t\t\t;;\n")
    lines.append("\t\t*)\n")
    lines.append("\t\t\treturn 1\n")
    lines.append("\t\t\t;;\n")
    lines.append("\tesac\n")
    lines.append("\treturn 0\n")
    lines.append("}\n")
    lines_helpers = lines

    # Generate code for dependency resolver
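    # Patchsets are visited in reverse apply order, so enabling a patchset also
    # transitively enables its dependencies in a single pass. Roughly emitted
    # shell code (hypothetical names):
    #   if test "$enable_child" -eq 1; then
    #       if test "$enable_parent" -gt 1; then
    #           abort "Patchset parent disabled, but child depends on that."
    #       fi
    #       enable_parent=1
    #   fi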
    lines = []
    for i, patch in [(i, all_patches[i]) for i in reversed(resolved)]:
        if len(patch.depends):
            lines.append("if test \"$%s\" -eq 1; then\n" % patch.variable)
            for j in sorted(patch.depends):
                lines.append("\tif test \"$%s\" -gt 1; then\n" % all_patches[j].variable)
                lines.append("\t\tabort \"Patchset %s disabled, but %s depends on that.\"\n" %
                             (all_patches[j].name, patch.name))
                lines.append("\tfi\n")
            for j in sorted(patch.depends):
                lines.append("\t%s=1\n" % all_patches[j].variable)
            lines.append("fi\n\n")
    lines_resolver = lines

    # Generate code for applying all patchsets
    lines = []
    for i, patch in [(i, all_patches[i]) for i in resolved]:
        lines.append("# Patchset %s\n" % patch.name)
        lines.append("# |\n")

        # List dependencies (if any)
        if len(patch.depends):
            depends = resolve_dependencies(all_patches, i, auto_deps=False)
            lines.append("# | This patchset has the following (direct or indirect) dependencies:\n")
            lines.append("# | *\t%s\n" % "\n# | \t".join(textwrap.wrap(
                         ", ".join([all_patches[j].name for j in depends]), 120)))
            lines.append("# |\n")

        # List all bugs fixed by this patchset
        if any([bugid is not None for sync, bugid, bugname in patch.fixes]):
            lines.append("# | This patchset fixes the following Wine bugs:\n")
            for sync, bugid, bugname in patch.fixes:
                if bugid is not None:
                    lines.append("# | *\t%s\n" % "\n# | \t".join(textwrap.wrap("[#%d] %s" % (bugid, bugname), 120)))
            lines.append("# |\n")

        # List all modified files
        lines.append("# | Modified files:\n")
        lines.append("# | *\t%s\n" % "\n# | \t".join(textwrap.wrap(", ".join(sorted(patch.modified_files)), 120)))
        lines.append("# |\n")
        lines.append("if test \"$%s\" -eq 1; then\n" % patch.variable)
        for f in patch.files:
            lines.append("\tpatch_apply %s\n" % os.path.join(patch.name, f))
        if len(patch.patches):
            lines.append("\t(\n")
            for p in _unique(patch.patches, key=lambda p: (p.patch_author, p.patch_subject, p.patch_revision)):
                if p.patch_author is None: continue
                lines.append("\t\tprintf '%%s\\n' '+ { \"%s\", \"%s\", %d },';\n" %
                             (escape_sh(escape_c(p.patch_author)), escape_sh(escape_c(p.patch_subject)), p.patch_revision))
            lines.append("\t) >> \"$patchlist\"\n")
        lines.append("fi\n\n")
    lines_apply = lines

    with open(config.path_template_script) as template_fp:
        template = template_fp.read()
    with open(config.path_script, "w") as fp:
        fp.write(template.format(staging_version=_staging_version(),
                                 upstream_commit=upstream_commit,
                                 patch_helpers="".join(lines_helpers).rstrip("\n"),
                                 patch_resolver="".join(lines_resolver).rstrip("\n"),
                                 patch_apply="".join(lines_apply).rstrip("\n")))

    # Add changes to git
    subprocess.call(["git", "add", config.path_script])

if __name__ == "__main__":

    # Hack to avoid KeyboardInterrupts on different threads
    def _sig_int(signum=None, frame=None):
        signal.signal(signal.SIGINT, signal.SIG_IGN)
        raise RuntimeError("CTRL+C pressed")
    signal.signal(signal.SIGINT, _sig_int)

    def _check_commit_hash(commit):
        if len(commit) != 40 or commit != commit.lower():
            raise argparse.ArgumentTypeError("not a valid commit hash")
        return commit

    parser = argparse.ArgumentParser(description="Automatic patch dependency checker and apply script generator.")
    parser.add_argument('--skip-checks', action='store_true', help="Skip dependency checks")
    parser.add_argument('--commit', type=_check_commit_hash, help="Use given commit hash instead of HEAD")
    parser.add_argument('--sync-bugs', action='store_true', help="Update bugs in bugtracker (requires admin rights)")
    parser.add_argument('--skip-bugs', action='store_true', help="Skip bugtracker checks")
    args = parser.parse_args()

    tools_directory = os.path.dirname(os.path.realpath(__file__))
    os.chdir(os.path.join(tools_directory, "./.."))

    config_parser = ConfigParser.ConfigParser()
    config_parser.read(config.path_config)
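    # The config file is expected to contain a "bugtracker" section, roughly
    # (hypothetical values):
    #   [bugtracker]
    #   username = user@example.com
    #   password = secret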

    try:
        config.bugtracker_user = config_parser.get('bugtracker', 'username')
        config.bugtracker_pass = config_parser.get('bugtracker', 'password')
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
        config.bugtracker_user = None
        config.bugtracker_pass = None

    try:
        upstream_commit = _upstream_commit(args.commit)
        all_patches = load_patchsets()

        # Check bugzilla
        if not args.skip_bugs:
            check_bug_status(all_patches, sync_bugs=args.sync_bugs)

        # Update autogenerated files
        generate_ifdefined(all_patches, skip_checks=args.skip_checks)
        resolved = generate_apply_order(all_patches, skip_checks=args.skip_checks)
        generate_script(all_patches, resolved)

    except PatchUpdaterError as e:
        print ""
        print "ERROR: %s" % e
        print ""
        sys.exit(1)