gitupdate.py: Add license header and some code cleanup.

This commit is contained in:
Sebastian Lackner 2014-07-25 21:34:03 +02:00
parent 6db9cf0e65
commit aa54f206eb

View File

@ -1,17 +1,37 @@
#!/usr/bin/python
#
# Automatic patch dependency checker and Makefile/README.md generator.
#
# Copyright (C) 2014 Sebastian Lackner
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
#
from multiprocessing import Pool
from xml.dom import minidom
import contextlib
import textwrap
import urllib
import sys
import hashlib
import itertools
import patchutils
import subprocess
import pickle
import os
import patchutils
import pickle
import re
import subprocess
import sys
import textwrap
import urllib
# Cached information to speed up patch dependency checks
cached_patch_result = {}
@ -42,25 +62,152 @@ def abort(m):
print m
exit(1)
def pairs(a):
    """Yield every unordered pair of distinct elements of a, preserving order."""
    for idx in range(len(a)):
        first = a[idx]
        for second in a[idx + 1:]:
            yield (first, second)
def download(url):
    """Open a specific URL and return the content."""
    handle = urllib.urlopen(url)
    try:
        return handle.read()
    finally:
        # Mirror contextlib.closing(): always close the connection.
        handle.close()
def read_patchsets(directory):
"""Read information about all patchsets in a given directory."""
def _iter_kv_from_file(filename):
with open(filename) as fp:
for line in fp:
if line.startswith("#"):
continue
tmp = line.split(":", 1)
if len(tmp) != 2:
yield None, None
else:
yield tmp[0].lower(), tmp[1].strip()
unique_id = itertools.count()
all_patches = {}
name_to_id = {}
all_bugs = []
# Read in sorted order (to ensure created Makefile doesn't change too much)
for name in sorted(os.listdir(directory)):
if name in [".", ".."]: continue
subdirectory = os.path.join(directory, name)
if not os.path.isdir(subdirectory): continue
patch = PatchSet(name)
# Enumerate .patch files in the given directory, enumerate individual patches and affected files
for f in sorted(os.listdir(subdirectory)):
if not f.endswith(".patch") or not os.path.isfile(os.path.join(subdirectory, f)):
continue
patch.files.append(f)
for p in patchutils.read_patch(os.path.join(subdirectory, f)):
patch.patches.append(p)
patch.modified_files.add(p.modified_file)
# No single patch within this directory, ignore it
if len(patch.patches) == 0:
del patch
continue
i = next(unique_id)
all_patches[i] = patch
name_to_id[name] = i
# Now read the definition files in a second step
for i, patch in all_patches.iteritems():
deffile = os.path.join(os.path.join(directory, patch.name), "definition")
if not os.path.isfile(deffile):
abort("Missing definition file %s" % deffile)
info = AuthorInfo()
for key, val in _iter_kv_from_file(deffile):
if key is None:
if len(info.author) and len(info.subject) and len(info.revision):
patch.authors.append(info)
info = AuthorInfo()
continue
if key == "author":
if len(info.author): info.author += ", "
info.author += val
elif key == "subject" or key == "title":
if len(info.subject): info.subject += " "
info.subject += val
elif key == "revision":
if len(info.revision): info.revision += ", "
info.revision += val
elif key == "fixes":
r = re.match("^[0-9]+$", val)
if r:
bugid = int(val)
patch.fixes.append((bugid, None, None))
all_bugs.append(bugid)
continue
r = re.match("^\\[ *([0-9]+) *\\](.*)$", val)
if r:
bugid, description = int(r.group(1)), r.group(2).strip()
patch.fixes.append((bugid, None, description))
all_bugs.append(bugid)
continue
patch.fixes.append((None, None, val))
elif key == "depends":
if not name_to_id.has_key(val):
abort("Definition file %s references unknown dependency %s" % (deffile, val))
patch.depends.add(name_to_id[val])
else:
print "** Ignoring unknown command in definition file %s: %s" % (deffile, line)
if len(info.author) and len(info.subject) and len(info.revision):
patch.authors.append(info)
# In a third step query information for the patches from Wine bugzilla
pool = Pool(8)
bug_short_desc = {None:None}
for bugid, data in zip(all_bugs, pool.map(download, ["http://bugs.winehq.org/show_bug.cgi?id=%d&ctype=xml&field=short_desc" % bugid for bugid in all_bugs])):
bug_short_desc[bugid] = minidom.parseString(data).getElementsByTagName('short_desc')[0].firstChild.data
pool.close()
for i, patch in all_patches.iteritems():
patch.fixes = [(bugid, bug_short_desc[bugid], description) for bugid, dummy, description in patch.fixes]
return all_patches
def causal_time_combine(a, b):
    """Combines two timestamps into a new one (element-wise maximum)."""
    return [max(pair) for pair in zip(a, b)]
def causal_time_smaller(a, b):
    """Checks if timestamp a is smaller than timestamp b.

    True when every component of a is <= the matching component of b and at
    least one component is strictly smaller.
    """
    no_component_greater = all(x <= y for x, y in zip(a, b))
    some_component_less = any(x < y for x, y in zip(a, b))
    return no_component_greater and some_component_less
def causal_time_relation(all_patches, indices):
    """Checks if the patches with given indices are applied in a very specific order.

    Returns True when every pair of patches has comparable verify_time
    timestamps (one strictly smaller than the other), i.e. their relative
    order is well-defined.
    """
    # NOTE(review): a stray leftover line "for i, j in pairs(indices):" from the
    # old implementation appeared here and made the function malformed; removed.
    def _pairs(a):
        # All unordered pairs of elements of a, preserving order.
        for i, j in enumerate(a):
            for k in a[i+1:]:
                yield (j, k)
    for i, j in _pairs(indices):
        if not (causal_time_smaller(all_patches[i].verify_time, all_patches[j].verify_time) or \
                causal_time_smaller(all_patches[j].verify_time, all_patches[i].verify_time)):
            return False
    return True
def causal_time_permutations(all_patches, indices, filename):
"""Iterate over all possible permutations of patches affecting
a specific file, which are compatible with dependencies."""
for perm in itertools.permutations(indices):
for i, j in zip(perm[:-1], perm[1:]):
if causal_time_smaller(all_patches[j].verify_time, all_patches[i].verify_time):
@ -72,6 +219,7 @@ def causal_time_permutations(all_patches, indices, filename):
yield selected_patches
def contains_binary_patch(all_patches, indices, filename):
"""Checks if any patch with given indices affecting filename is a binary patch."""
for i in indices:
for patch in all_patches[i].patches:
if patch.modified_file == filename and patch.is_binary():
@ -79,6 +227,7 @@ def contains_binary_patch(all_patches, indices, filename):
return False
def load_patch_cache():
"""Load dictionary for cached patch dependency tests into cached_patch_result."""
global cached_patch_result
try:
with open("./.depcache") as fp:
@ -87,10 +236,12 @@ def load_patch_cache():
cached_patch_result = {}
def save_patch_cache():
    """Save dictionary for cached patch dependency tests."""
    # Serializes the module-level cached_patch_result dict; binary mode plus
    # HIGHEST_PROTOCOL for a compact, fast pickle.
    with open("./.depcache", "wb") as fp:
        pickle.dump(cached_patch_result, fp, pickle.HIGHEST_PROTOCOL)
def verify_patch_order(all_patches, indices, filename):
"""Checks if the dependencies are defined correctly by applying on the patches on a copy from the git tree."""
global cached_patch_result
# If one of patches is a binary patch, then we cannot / won't verify it - require dependencies in this case
@ -159,6 +310,7 @@ def verify_patch_order(all_patches, indices, filename):
assert len(last_result_hash) == 32
def verify_dependencies(all_patches):
"""Resolve dependencies, and afterwards run verify_patch_order() to check if everything applies properly."""
max_patches = max(all_patches.keys()) + 1
for i, patch in all_patches.iteritems():
@ -201,112 +353,9 @@ def verify_dependencies(all_patches):
finally:
save_patch_cache()
def download(url):
    """Open a specific URL and return the content."""
    # NOTE(review): duplicate of the download() helper defined earlier in this
    # file — this appears to be diff residue of the removed old version.
    with contextlib.closing(urllib.urlopen(url)) as fp:
        return fp.read()
def generate_makefile(all_patches, fp):
"""Generate Makefile for a specific set of patches."""
def read_patchsets(directory):
    """Read information about all patchsets in a given directory.

    Returns a dictionary mapping sequential integer ids to PatchSet objects.
    NOTE(review): this is the removed old version of read_patchsets (diff
    residue); the refactored version appears earlier in the file.
    """
    next_patch = 0
    patches = {}
    name_to_id = {}
    all_bugs = []

    for name in sorted(os.listdir(directory)): # Read in sorted order to ensure created Makefile doesn't change too much
        if name in [".", ".."]: continue
        subdirectory = os.path.join(directory, name)
        if not os.path.isdir(subdirectory): continue
        patch = PatchSet(name)
        # Collect individual patches and the set of files they modify.
        for f in sorted(os.listdir(subdirectory)):
            if not f.endswith(".patch") or not os.path.isfile(os.path.join(subdirectory, f)):
                continue
            patch.files.append(f)
            for p in patchutils.read_patch(os.path.join(subdirectory, f)):
                patch.patches.append(p)
                patch.modified_files.add(p.modified_file)
        # No single patch within this directory, ignore it
        if len(patch.patches) == 0:
            del patch
            continue
        patches[next_patch] = patch
        name_to_id[name] = next_patch
        next_patch += 1

    # Now read the definition files in a second step
    for i, patch in patches.iteritems():
        deffile = os.path.join(os.path.join(directory, patch.name), "definition")
        if not os.path.isfile(deffile):
            abort("Missing definition file %s" % deffile)
        info = AuthorInfo()
        with open(deffile) as fp:
            for line in fp:
                if line.startswith("#"): continue
                tmp = line.split(":", 1)
                if len(tmp) < 2:
                    # Separator line: flush the current author block if complete.
                    if len(info.author) and len(info.subject) and len(info.revision):
                        patch.authors.append(info)
                        info = AuthorInfo()
                    continue
                cmd = tmp[0].lower()
                val = tmp[1].strip()
                if cmd == "author":
                    if len(info.author): info.author += ", "
                    info.author += val
                elif cmd == "subject" or cmd == "title":
                    if len(info.subject): info.subject += " "
                    info.subject += val
                elif cmd == "revision":
                    if len(info.revision): info.revision += ", "
                    info.revision += val
                elif cmd == "fixes":
                    # Accepted formats: plain bug id, "[12345] description",
                    # or free-text description without a bug reference.
                    r = re.match("^[0-9]+$", val)
                    if r:
                        bugid = int(val)
                        patch.fixes.append((bugid, None, None))
                        all_bugs.append(bugid)
                        continue
                    r = re.match("^\\[ *([0-9]+) *\\](.*)$", val)
                    if r:
                        bugid, description = int(r.group(1)), r.group(2).strip()
                        patch.fixes.append((bugid, None, description))
                        all_bugs.append(bugid)
                        continue
                    patch.fixes.append((None, None, val))
                elif cmd == "depends":
                    if not name_to_id.has_key(val):
                        abort("Definition file %s references unknown dependency %s" % (deffile, val))
                    patch.depends.add(name_to_id[val])
                else:
                    print "** Ignoring unknown command in definition file %s: %s" % (deffile, line)
        if len(info.author) and len(info.subject) and len(info.revision):
            patch.authors.append(info)

    # In a third step query information for the patches from Wine bugzilla
    pool = Pool(8)
    bug_short_desc = {None:None}
    for bugid, data in zip(all_bugs, pool.map(download, ["http://bugs.winehq.org/show_bug.cgi?id=%d&ctype=xml&field=short_desc" % bugid for bugid in all_bugs])):
        bug_short_desc[bugid] = minidom.parseString(data).getElementsByTagName('short_desc')[0].firstChild.data
    pool.close()
    for i, patch in patches.iteritems():
        patch.fixes = [(bugid, bug_short_desc[bugid], description) for bugid, dummy, description in patch.fixes]
    return patches
def read_changelog():
    """Yield (package, version, distro) tuples parsed from debian/changelog."""
    entry = re.compile("^([a-zA-Z0-9][^(]*)\((.*)\) ([^;]*)")
    with open("debian/changelog") as fp:
        for line in fp:
            m = entry.match(line)
            if m:
                yield (m.group(1).strip(), m.group(2).strip(), m.group(3).strip())
def generate_makefile(patches, fp):
fp.write("#\n")
fp.write("# This file is automatically generated, DO NOT EDIT!\n")
fp.write("#\n")
@ -314,7 +363,7 @@ def generate_makefile(patches, fp):
fp.write("CURDIR ?= ${.CURDIR}\n")
fp.write("PATCH := $(CURDIR)/../debian/tools/gitapply.sh -d $(DESTDIR)\n")
fp.write("\n")
fp.write("PATCHLIST :=\t%s\n" % " \\\n\t\t".join(["%s.ok" % patch.name for i, patch in patches.iteritems()]))
fp.write("PATCHLIST :=\t%s\n" % " \\\n\t\t".join(["%s.ok" % patch.name for i, patch in all_patches.iteritems()]))
fp.write("\n")
fp.write(".PHONY: install\n")
fp.write("install:\n")
@ -336,10 +385,12 @@ def generate_makefile(patches, fp):
fp.write(".NOTPARALLEL:\n")
fp.write("\n")
for i, patch in patches.iteritems():
for i, patch in all_patches.iteritems():
fp.write("# Patchset %s\n" % patch.name)
fp.write("# |\n")
fp.write("# | Included patches:\n")
# List all patches and their corresponding authors
for info in patch.authors:
if not info.subject: continue
s = []
@ -349,6 +400,7 @@ def generate_makefile(patches, fp):
fp.write("# | *\t%s\n" % "\n# | \t".join(textwrap.wrap(info.subject + s, 120)))
fp.write("# |\n")
# List all bugs fixed by this patchset
if any([bugid is not None for bugid, bugname, description in patch.fixes]):
fp.write("# | This patchset fixes the following Wine bugs:\n")
for bugid, bugname, description in patch.fixes:
@ -356,15 +408,18 @@ def generate_makefile(patches, fp):
fp.write("# | *\t%s\n" % "\n# | \t".join(textwrap.wrap("[#%d] %s" % (bugid, bugname), 120)))
fp.write("# |\n")
# List all modified files
fp.write("# | Modified files: \n")
fp.write("# | *\t%s\n" % "\n# | \t".join(textwrap.wrap(", ".join(sorted(patch.modified_files)), 120)))
fp.write("# |\n")
depends = " ".join([""] + ["%s.ok" % patches[d].name for d in patch.depends]) if len(patch.depends) else ""
# Generate dependencies and code to apply patches
depends = " ".join([""] + ["%s.ok" % all_patches[d].name for d in patch.depends]) if len(patch.depends) else ""
fp.write("%s.ok:%s\n" % (patch.name, depends))
for f in patch.files:
fp.write("\t$(PATCH) < %s\n" % os.path.join(patch.name, f))
# Create *.ok file (used to generate patchlist)
if len(patch.authors):
fp.write("\t( \\\n")
for info in patch.authors:
@ -446,30 +501,42 @@ make -C ./patches DESTDIR=$(pwd) install -W DIRNAME.ok
```
"""
def generate_readme(patches, fp):
def generate_readme(all_patches, fp):
"""Generate README.md including information about specific patches and bugfixes."""
# Get list of all bugs
def _all_bugs():
all_bugs = []
for i, patch in patches.iteritems():
for i, patch in all_patches.iteritems():
for (bugid, bugname, description) in patch.fixes:
if bugid is not None: all_bugs.append((bugid, bugname, description))
for (bugid, bugname, description) in sorted(all_bugs):
if description is None: description = bugname
yield "%s ([Wine Bug #%d](http://bugs.winehq.org/show_bug.cgi?id=%d \"%s\"))" % (description, bugid, bugid, bugname)
# Get list of all fixes
def _all_fixes():
all_fixes = []
for i, patch in patches.iteritems():
for i, patch in all_patches.iteritems():
for (bugid, bugname, description) in patch.fixes:
if bugid is None: all_fixes.append(description)
for description in sorted(all_fixes):
yield description
# Create enumeration from list
def _enum(x):
return "* " + "\n* ".join(x)
# Read information from changelog
def _read_changelog():
with open("debian/changelog") as fp:
for line in fp:
r = re.match("^([a-zA-Z0-9][^(]*)\((.*)\) ([^;]*)", line)
if r: yield (r.group(1).strip(), r.group(2).strip(), r.group(3).strip())
# Get version number of the latest stable release
def _latest_stable_version():
for package, version, distro in read_changelog():
for package, version, distro in _read_changelog():
if distro.lower() == "unreleased": continue
return version
@ -479,11 +546,11 @@ if __name__ == "__main__":
if not os.path.isdir("./debian/tools/wine"):
raise RuntimeError("Please create a symlink to the wine repository in ./debian/tools/wine")
patches = read_patchsets("./patches")
verify_dependencies(patches)
all_patches = read_patchsets("./patches")
verify_dependencies(all_patches)
with open("./patches/Makefile", "w") as fp:
generate_makefile(patches, fp)
generate_makefile(all_patches, fp)
with open("./README.md", "w") as fp:
generate_readme(patches, fp)
generate_readme(all_patches, fp)