mirror of
https://gitlab.winehq.org/wine/wine-gecko.git
synced 2024-09-13 09:24:08 -07:00
Bug 1203085 - Support fetching installer and test url from TaskCluster. r=jlund
If a Buildbot test job is scheduled through TaskCluster (the Buildbot Bridge supports this), the generated Buildbot Change associated with the test job does not carry the installer and test URLs that Mozharness needs to run the test job. Since we can't modify how a test job is invoked on Buildbot (we can't switch from --read-buildbot-config to --installer-url and --test-url), we detect that a 'taskId' is defined for the test job (which indicates that it was scheduled through the BBB) and, based on that 'taskId', determine the parent task and the artifacts it uploaded.

Changes to ScriptMixin:
* Refactor _retry_download_file() into _retry_download()
* If no file is specified when calling _retry_download(), call _urlopen() instead of _download_file()
* Add a load_json_url() method to fetch the contents of a JSON file without writing to disk

Changes to TestingMixin:
* If the job is triggered through Buildbot, look at the Changes object; otherwise, look for the artifacts of the parent task
* Add find_artifacts_from_buildbot_changes() (original behaviour) and find_artifacts_from_taskcluster() (functionality via TaskClusterArtifactFinderMixin)
* Call self.exception() instead of raising exceptions, plus minor fixes

New TaskClusterArtifactFinderMixin:
* Allows any inheriting class to find the artifacts of the build job that triggered this test job
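For orientation, a minimal standalone sketch of that lookup, assuming only the public queue/scheduler endpoints referenced in this patch; the task id is a placeholder, and the retry/logging machinery of the real mixin is omitted:

# Hedged sketch: resolve a test job's installer/test URLs from its parent
# build task, mirroring what the new mixin does (placeholder task id, no retries).
import json
import urllib2
from urlparse import urljoin

QUEUE_URL = 'https://queue.taskcluster.net/v1/task/'
SCHEDULER_URL = 'https://scheduler.taskcluster.net/v1/task-graph/'

def load_json_url(url):
    # Bare-bones stand-in for ScriptMixin.load_json_url()
    return json.loads(urllib2.urlopen(url).read())

def find_parent_task_id(task_id):
    # The test task's entry in the task graph lists the build task it requires
    task_group_id = load_json_url(urljoin(QUEUE_URL, task_id))['taskGroupId']
    for task in load_json_url(urljoin(SCHEDULER_URL, '{}/inspect'.format(task_group_id)))['tasks']:
        if task['taskId'] == task_id:
            return task['requires'][0]

def find_artifact_urls(test_task_id):
    # Buildbot-via-BBB build tasks upload a single properties.json artifact
    parent = find_parent_task_id(test_task_id)
    p = load_json_url(urljoin(QUEUE_URL, '{}/artifacts/public/properties.json'.format(parent)))
    return (p['packageUrl'][0] if p.get('packageUrl') else None,
            p['testPackagesUrl'][0] if p.get('testPackagesUrl') else None)

# Example (placeholder id):
#   installer_url, test_packages_url = find_artifact_urls('SOME_TEST_TASK_ID')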
This commit is contained in:
parent
2325bddf48
commit
356adbeb06
@@ -390,12 +390,14 @@ class ScriptMixin(PlatformMixin):
             self.warning("Socket error when accessing %s: %s" % (url, str(e)))
             raise
 
-    def _retry_download_file(self, url, file_name, error_level, retry_config=None):
-        """ Helper method to retry _download_file().
+    def _retry_download(self, url, error_level, file_name=None, retry_config=None):
+        """ Helper method to retry download methods
 
         Split out so we can alter the retry logic in mozharness.mozilla.testing.gaia_test.
 
         This method calls `self.retry` on `self._download_file` using the passed
-        parameters.
+        parameters if a file_name is specified. If no file is specified, we will
+        instead call `self._urlopen`, which grabs the contents of a url but does
+        not create a file on disk.
 
         Args:
             url (str): URL path where the file is located.
@@ -421,12 +423,25 @@ class ScriptMixin(PlatformMixin):
         if retry_config:
             retry_args.update(retry_config)
 
+        download_func = self._urlopen
+        kwargs = {"url": url}
+        if file_name:
+            download_func = self._download_file
+            kwargs = {"url": url, "file_name": file_name}
+
         return self.retry(
-            self._download_file,
-            args=(url, file_name),
+            download_func,
+            kwargs=kwargs,
             **retry_args
         )
 
+    def load_json_url(self, url, error_level=None, *args, **kwargs):
+        """ Returns a json object from a url (it retries). """
+        contents = self._retry_download(
+            url=url, error_level=error_level, *args, **kwargs
+        )
+        return json.loads(contents.read())
+
     # http://www.techniqal.com/blog/2008/07/31/python-file-read-write-with-urllib2/
     # TODO thinking about creating a transfer object.
     def download_file(self, url, file_name=None, parent_dir=None,
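For illustration, a hedged sketch of how a script inheriting ScriptMixin might exercise both paths of the reworked helper; ExampleScript and the URLs are placeholders, not part of the patch:

# Hypothetical call sites on a BaseScript subclass; URLs are placeholders.
from mozharness.base.script import BaseScript

class ExampleScript(BaseScript):
    def fetch_bits(self):
        # file_name given: retries around self._download_file(), writes to disk
        self.download_file("https://example.com/builds/target.zip",
                           file_name="target.zip")
        # no file_name: retries around self._urlopen(), parses JSON in memory
        props = self.load_json_url("https://example.com/builds/properties.json")
        self.info("packageUrl: %s" % props.get("packageUrl"))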
@@ -467,7 +482,12 @@ class ScriptMixin(PlatformMixin):
         if create_parent_dir:
             self.mkdir_p(parent_dir, error_level=error_level)
         self.info("Downloading %s to %s" % (url, file_name))
-        status = self._retry_download_file(url, file_name, error_level, retry_config=retry_config)
+        status = self._retry_download(
+            url=url,
+            error_level=error_level,
+            file_name=file_name,
+            retry_config=retry_config
+        )
         if status == file_name:
             self.info("Downloaded %d bytes." % os.path.getsize(file_name))
         return status
@@ -3,6 +3,8 @@
 """
 import os
 from datetime import datetime, timedelta
+from urlparse import urljoin
+
 from mozharness.base.log import LogMixin
 
 
@@ -112,3 +114,65 @@ class Taskcluster(LogMixin):
             self.task_id,
             os.path.basename(filename)
         )
+
+
+# TaskClusterArtifactFinderMixin {{{1
+class TaskClusterArtifactFinderMixin(object):
+    # This class assumes that you have extended from the base script
+    QUEUE_URL = 'https://queue.taskcluster.net/v1/task/'
+    SCHEDULER_URL = 'https://scheduler.taskcluster.net/v1/task-graph/'
+
+    def get_task(self, task_id):
+        """ Get Task Definition """
+        # Signature: task(taskId) : result
+        return self.load_json_url(urljoin(self.QUEUE_URL, task_id))
+
+    def get_list_latest_artifacts(self, task_id):
+        """ Get Artifacts from Latest Run """
+        # Signature: listLatestArtifacts(taskId) : result
+
+        # Notice that this grabs the most recent run of a task since we don't
+        # know the run_id. This is slightly slower, however, it is more convenient
+        return self.load_json_url(urljoin(self.QUEUE_URL, '{}/artifacts'.format(task_id)))
+
+    def url_to_artifact(self, task_id, full_path):
+        """ Return a URL for an artifact. """
+        return urljoin(self.QUEUE_URL, '{}/artifacts/{}'.format(task_id, full_path))
+
+    def get_inspect_graph(self, task_group_id):
+        """ Inspect Task Graph """
+        # Signature: inspect(taskGraphId) : result
+        return self.load_json_url(urljoin(self.SCHEDULER_URL, '{}/inspect'.format(task_group_id)))
+
+    def find_parent_task_id(self, task_id):
+        """ Returns the task_id of the parent task associated with the given task_id. """
+        # Find the group id associated with all related tasks
+        task_group_id = self.get_task(task_id)['taskGroupId']
+
+        # Find the child task and determine which task it depends on
+        for task in self.get_inspect_graph(task_group_id)['tasks']:
+            if task['taskId'] == task_id:
+                parent_task_id = task['requires'][0]
+
+        return parent_task_id
+
+    def set_artifacts(self, task_id):
+        """ Sets installer, test and symbols URLs from the artifacts of a task.
+
+        In this case we set:
+        self.installer_url
+        self.test_url (points to test_packages.json)
+        self.symbols_url
+        """
+        # The tasks which represent a buildbot job only upload one artifact:
+        # the properties.json file
+        p = self.load_json_url(
+            self.url_to_artifact(task_id, 'public/properties.json'))
+
+        # Set important artifacts for test jobs
+        self.installer_url = p['packageUrl'][0] if p.get('packageUrl') else None
+        self.test_url = p['testPackagesUrl'][0] if p.get('testPackagesUrl') else None
+        self.symbols_url = p['symbolsUrl'][0] if p.get('symbolsUrl') else None
+
+    def set_parent_artifacts(self, child_task_id):
+        self.set_artifacts(self.find_parent_task_id(child_task_id))
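For illustration, a hedged sketch of how a consuming script is expected to drive the new mixin (TestingMixin below is the real consumer); ExampleTestScript and the task id are placeholders:

# Hedged usage sketch; 'SOME_TEST_TASK_ID' would be the taskId set in the
# Buildbot properties when the job is scheduled through the Buildbot Bridge.
from mozharness.base.script import BaseScript
from mozharness.mozilla.taskcluster_helper import TaskClusterArtifactFinderMixin

class ExampleTestScript(TaskClusterArtifactFinderMixin, BaseScript):
    def locate_artifacts(self, task_id):
        # taskId -> taskGroupId -> graph inspection -> first 'requires' entry
        parent = self.find_parent_task_id(task_id)
        self.info("Parent (build) task: %s" % parent)
        # Reads the parent's public/properties.json and sets
        # self.installer_url, self.test_url and self.symbols_url
        self.set_parent_artifacts(task_id)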
@@ -174,7 +174,7 @@ class GaiaTest(TestingMixin, MercurialScript, TransferMixin, GaiaMixin, BlobUplo
         self.proxxy = proxxy
         return self.proxxy
 
-    def _retry_download_file(self, url, file_name, error_level=FATAL, retry_config=None):
+    def _retry_download(self, url, file_name, error_level=FATAL, retry_config=None):
        if self.config.get("bypass_download_cache"):
            n = 0
            # ignore retry_config in this case
@@ -196,7 +196,9 @@ class GaiaTest(TestingMixin, MercurialScript, TransferMixin, GaiaMixin, BlobUplo
                     self.info("Sleeping %s before retrying..." % sleeptime)
                     time.sleep(sleeptime)
         else:
-            return super(GaiaTest, self)._retry_download_file(
+            # Since we're overriding _retry_download() we can't call download_file()
+            # directly
+            return super(GaiaTest, self)._retry_download(
                 url, file_name, error_level, retry_config=retry_config,
             )
 
@@ -24,6 +24,7 @@ from mozharness.base.python import (
 from mozharness.mozilla.buildbot import BuildbotMixin, TBPL_WARNING
 from mozharness.mozilla.proxxy import Proxxy
 from mozharness.mozilla.structuredlog import StructuredOutputParser
+from mozharness.mozilla.taskcluster_helper import TaskClusterArtifactFinderMixin
 from mozharness.mozilla.testing.unittest import DesktopUnittestOutputParser
 from mozharness.mozilla.testing.try_tools import TryToolsMixin
 from mozharness.mozilla.tooltool import TooltoolMixin
@@ -86,8 +87,8 @@ testing_config_options = [
 
 
 # TestingMixin {{{1
-class TestingMixin(VirtualenvMixin, BuildbotMixin, ResourceMonitoringMixin, TooltoolMixin,
-                   TryToolsMixin):
+class TestingMixin(VirtualenvMixin, BuildbotMixin, ResourceMonitoringMixin,
+                   TaskClusterArtifactFinderMixin, TooltoolMixin, TryToolsMixin):
     """
     The steps to identify + download the proper bits for [browser] unit
     tests and Talos.
@@ -196,10 +197,11 @@ class TestingMixin(VirtualenvMixin, BuildbotMixin, ResourceMonitoringMixin, Tool
                     return new_url
             return url
 
-        assert c["installer_url"], "You must use --installer-url with developer_config.py"
+        if c.get("installer_url") is None:
+            self.exception("You must use --installer-url with developer_config.py")
         if c.get("require_test_zip"):
             if not c.get('test_url') and not c.get('test_packages_url'):
-                raise AssertionError("You must use --test-url or --test-packages-url with developer_config.py")
+                self.exception("You must use --test-url or --test-packages-url with developer_config.py")
 
         c["installer_url"] = _replace_url(c["installer_url"], c["replace_urls"])
         if c.get("test_url"):
@@ -246,6 +248,48 @@ class TestingMixin(VirtualenvMixin, BuildbotMixin, ResourceMonitoringMixin, Tool
 
     # read_buildbot_config is in BuildbotMixin.
 
+    def find_artifacts_from_buildbot_changes(self):
+        c = self.config
+        try:
+            files = self.buildbot_config['sourcestamp']['changes'][-1]['files']
+            buildbot_prop_branch = self.buildbot_config['properties']['branch']
+
+            # Bug 868490 - Only require exactly two files if require_test_zip;
+            # otherwise accept either 1 or 2, since we'll be getting a
+            # test_zip url that we don't need.
+            expected_length = [1, 2, 3]
+            if c.get("require_test_zip") and not self.test_url:
+                expected_length = [2, 3]
+            if buildbot_prop_branch.startswith('gaia-try'):
+                expected_length = range(1, 1000)
+            actual_length = len(files)
+            if actual_length not in expected_length:
+                self.fatal("Unexpected number of files in buildbot config %s.\nExpected these number(s) of files: %s, but got: %d" %
+                           (c['buildbot_json_path'], str(expected_length), actual_length))
+            for f in files:
+                if f['name'].endswith('tests.zip'): # yuk
+                    if not self.test_url:
+                        # str() because of unicode issues on mac
+                        self.test_url = str(f['name'])
+                        self.info("Found test url %s." % self.test_url)
+                elif f['name'].endswith('crashreporter-symbols.zip'): # yuk
+                    self.symbols_url = str(f['name'])
+                    self.info("Found symbols url %s." % self.symbols_url)
+                elif f['name'].endswith('test_packages.json'):
+                    self.test_packages_url = str(f['name'])
+                    self.info("Found a test packages url %s." % self.test_packages_url)
+                elif not any(f['name'].endswith(s) for s in ('code-coverage-gcno.zip',)):
+                    if not self.installer_url:
+                        self.installer_url = str(f['name'])
+                        self.info("Found installer url %s." % self.installer_url)
+        except IndexError, e:
+            self.error(str(e))
+
+    def find_artifacts_from_taskcluster(self):
+        self.info("Finding installer, test and symbols from parent task.")
+        task_id = self.buildbot_config['properties']['taskId']
+        self.set_parent_artifacts(task_id)
+
     def postflight_read_buildbot_config(self):
         """
         Determine which files to download from the buildprops.json file
@@ -260,40 +304,17 @@ class TestingMixin(VirtualenvMixin, BuildbotMixin, ResourceMonitoringMixin, Tool
             self.test_url = c['test_url']
         if c.get("test_packages_url"):
             self.test_packages_url = c['test_packages_url']
-        try:
-            files = self.buildbot_config['sourcestamp']['changes'][-1]['files']
-            buildbot_prop_branch = self.buildbot_config['properties']['branch']
-
-            # Bug 868490 - Only require exactly two files if require_test_zip;
-            # otherwise accept either 1 or 2, since we'll be getting a
-            # test_zip url that we don't need.
-            expected_length = [1, 2, 3]
-            if c.get("require_test_zip") and not self.test_url:
-                expected_length = [2, 3]
-            if buildbot_prop_branch.startswith('gaia-try'):
-                expected_length = range(1, 1000)
-            actual_length = len(files)
-            if actual_length not in expected_length:
-                self.fatal("Unexpected number of files in buildbot config %s.\nExpected these number(s) of files: %s, but got: %d" %
-                           (c['buildbot_json_path'], str(expected_length), actual_length))
-            for f in files:
-                if f['name'].endswith('tests.zip'): # yuk
-                    if not self.test_url:
-                        # str() because of unicode issues on mac
-                        self.test_url = str(f['name'])
-                        self.info("Found test url %s." % self.test_url)
-                elif f['name'].endswith('crashreporter-symbols.zip'): # yuk
-                    self.symbols_url = str(f['name'])
-                    self.info("Found symbols url %s." % self.symbols_url)
-                elif f['name'].endswith('test_packages.json'):
-                    self.test_packages_url = str(f['name'])
-                    self.info("Found a test packages url %s." % self.test_packages_url)
-                elif not any(f['name'].endswith(s) for s in ('code-coverage-gcno.zip',)):
-                    if not self.installer_url:
-                        self.installer_url = str(f['name'])
-                        self.info("Found installer url %s." % self.installer_url)
-        except IndexError, e:
-            self.error(str(e))
+        if self.buildbot_config['sourcestamp']['changes']:
+            self.find_artifacts_from_buildbot_changes()
+        elif 'taskId' in self.buildbot_config['properties']:
+            self.find_artifacts_from_taskcluster()
+        else:
+            self.exception(
+                "We have not been able to determine which artifacts "
+                "to use in order to run the tests."
+            )
 
         missing = []
         if not self.installer_url:
             missing.append("installer_url")
@@ -697,7 +697,8 @@ class DesktopSingleLocale(LocalesMixin, ReleaseMixin, MockMixin, BuildbotMixin,
             # has a different version number from the one in the current
             # checkout.
             self.bootstrap_env['ZIP_IN'] = dst_filename
-            return self._retry_download_file(binary_file, dst_filename, error_level=FATAL)
+            return self.download_file(url=binary_file, file_name=dst_filename,
+                                      error_level=FATAL)
 
         # binary url is not an installer, use make wget-en-US to download it
         return self._make(target=["wget-en-US"], cwd=cwd, env=env)