Bug 1245170 - Upload rust builds to tooltool. r=dustin

Update the gecko-rust-build docker container to upload the
constructed toolchain to tooltool when running on taskcluster,
so it can be used in normal gecko builds (buildbot as well as tc)

Also include a mac build script (which currently can't run in
taskcluster) and a Python script for launching the build task from
a local checkout. The latter is based on Ted's script for building
breakpad's minidump_stackwalk utility.

Matches contents of https://github.com/rillian/rust-build
commit 407c6854e91541341508e96e6a2781434535b7a5
tag v0.2.0
This commit is contained in:
Ralph Giles 2016-02-15 12:30:01 -08:00
parent 02c1f966d9
commit 4274261777
7 changed files with 305 additions and 3 deletions

View File

@ -15,7 +15,7 @@ ADD https://raw.githubusercontent.com/mozilla/build-tooltool/master/tooltool.py
RUN chmod +rx /build/tooltool.py
# Add build scripts.
ADD fetch_rust.sh build_rust.sh /build/
ADD fetch_rust.sh build_rust.sh upload_rust.sh /build/
RUN chmod +x /build/*
# Create user for doing the build.
@ -29,4 +29,4 @@ WORKDIR ${HOME}
# Invoke our build scripts by default, but allow other commands.
USER ${USER}
ENTRYPOINT /build/fetch_rust.sh && /build/build_rust.sh
ENTRYPOINT /build/fetch_rust.sh && /build/build_rust.sh && /build/upload_rust.sh

View File

@ -1 +1 @@
0.1.0
0.2.0

View File

@ -0,0 +1,30 @@
#!/bin/bash -vex

# Build a rust cross-toolchain targeting mac (x86_64 and i686) and
# package the result for upload to tooltool.

# Set the options explicitly: shebang flags are lost when the script is
# invoked as 'bash build_rust_mac.sh'.
set -x -e

# Default the workspace to the current directory.
: WORKSPACE ${WORKSPACE:=$PWD}

# Number of parallel build jobs; fall back through Linux and mac probes.
CORES=$(nproc || grep -c ^processor /proc/cpuinfo || sysctl -n hw.ncpu)
echo "Building on ${CORES} cpus..."

OPTIONS="--enable-rpath --disable-elf-tls --disable-docs"
TARGETS="x86_64-apple-darwin,i686-apple-darwin"

PREFIX="${WORKSPACE}/rustc"

# Build out-of-tree in a dedicated directory.
mkdir -p "${WORKSPACE}/gecko-rust-mac"
pushd "${WORKSPACE}/gecko-rust-mac"

export MACOSX_DEPLOYMENT_TARGET=10.7
# OPTIONS is deliberately unquoted: it must word-split into separate flags.
"${WORKSPACE}/rust/configure" --prefix="${PREFIX}" --target="${TARGETS}" ${OPTIONS}
make -j "${CORES}"

# Install into a clean prefix so stale files never end up in the package.
rm -rf "${PREFIX}"
mkdir "${PREFIX}"
make dist
make install
popd

# Package the toolchain for upload.
pushd "${WORKSPACE}"
tar cvjf rustc.tar.bz2 rustc/*
python tooltool.py add --visibility=public --unpack rustc.tar.bz2
popd

View File

@ -13,3 +13,8 @@ set -v
# Check out rust sources
git clone $RUST_REPOSITORY -b $RUST_BRANCH ${WORKSPACE}/rust
# Report version
VERSION=$(git -C ${WORKSPACE}/rust describe --tags --dirty)
COMMIT=$(git -C ${WORKSPACE}/rust rev-parse HEAD)
echo "rust ${VERSION} (commit ${COMMIT})" | tee rust-version

View File

@ -0,0 +1,37 @@
{
"provisionerId": "aws-provisioner-v1",
"workerType": "rustbuild",
"created": "{task_created}",
"deadline": "{task_deadline}",
"payload": {
"image": "quay.io/rust/gecko-rust-build",
"env": {
"RUST_BRANCH": "{rust_branch}"
},
"artifacts": {
"public/rustc.tar.xz": {
"path": "/home/worker/rustc.tar.xz",
"expires": "{artifacts_expires}",
"type": "file"
},
"public/manifest.tt": {
"path": "/home/worker/manifest.tt",
"expires": "{artifacts_expires}",
"type": "file"
}
},
"features": {
"relengAPIProxy": true
},
"maxRunTime": 6000
},
"scopes": [
"docker-worker:relengapi-proxy:tooltool.upload.public"
],
"metadata": {
"name": "Rust toolchain build",
"description": "Builds the rust toolchain for use in gecko builders.",
"owner": "giles@mozilla.com",
"source": "https://github.com/rillian/rust-build/"
}
}

View File

@ -0,0 +1,206 @@
#!/bin/env python
'''
This script triggers a taskcluster task, waits for it to finish,
fetches the artifacts, uploads them to tooltool, and updates
the in-tree tooltool manifests.
'''
from __future__ import print_function
import requests.packages.urllib3
requests.packages.urllib3.disable_warnings()
import argparse
import datetime
import json
import os
import shutil
import sys
import taskcluster
import tempfile
import time
import tooltool
def local_file(filename):
    '''
    Return the path of filename located next to this script.
    '''
    script_dir = os.path.dirname(__file__)
    return os.path.join(script_dir, filename)
def read_tc_auth(tc_auth_file):
    '''
    Read taskcluster credentials from tc_auth_file and return them as a dict.

    The file must contain a JSON object, e.g.
    {"clientId": "...", "accessToken": "..."}.
    '''
    # Use a context manager so the file handle is closed promptly;
    # the original left it open until garbage collection.
    with open(tc_auth_file, 'rb') as f:
        return json.load(f)
def fill_template_dict(d, keys):
    '''
    Recursively interpolate str.format-style placeholders in the values
    of dict d, in place, using the mapping keys.

    Only string values containing '{' are formatted; nested dicts are
    processed recursively; all other values are left untouched.
    '''
    # BUG FIX: 'basestring' only exists on Python 2; alias it so the
    # script also runs (unchanged behavior) on Python 3.
    try:
        string_types = basestring  # noqa: F821 (Python 2)
    except NameError:
        string_types = str  # Python 3
    for key, val in d.items():
        if isinstance(val, string_types) and '{' in val:
            d[key] = val.format(**keys)
        elif isinstance(val, dict):
            fill_template_dict(val, keys)
def fill_template(template_file, keys):
    '''
    Parse the file object template_file as JSON and interpolate
    str.format-style placeholders in its values using the mapping keys.
    Return the filled-in template as a dict.
    '''
    parsed = json.load(template_file)
    fill_template_dict(parsed, keys)
    return parsed
def spawn_task(queue, args):
    '''
    Spawn a Taskcluster task in queue using args.

    Fills in the task.json template next to this script with a fresh
    slug id, creation/deadline/expiry timestamps, and the command-line
    arguments, submits it to the queue, and returns the new task id.
    '''
    task_id = taskcluster.utils.slugId()
    with open(local_file('task.json'), 'rb') as template:
        # The argparse namespace supplies template keys such as rust_branch.
        keys = vars(args)
        now = datetime.datetime.utcnow()
        # Taskcluster expects ISO 8601 timestamps with a 'Z' (UTC) suffix.
        keys['task_created'] = now.isoformat() + 'Z'
        keys['task_deadline'] = (now + datetime.timedelta(hours=2)).isoformat() + 'Z'
        keys['artifacts_expires'] = (now + datetime.timedelta(days=1)).isoformat() + 'Z'
        payload = fill_template(template, keys)
    queue.createTask(task_id, payload)
    print('--- %s task %s submitted ---' % (now, task_id))
    return task_id
def wait_for_task(queue, task_id, initial_wait=5):
    '''
    Wait until queue reports that task task_id is completed, and return
    its run id.

    Sleep for initial_wait seconds before checking status the first time.
    Then poll periodically and print a running log of the task status.

    Raises Exception if the task ends in the 'failed' or 'exception' state.
    '''
    time.sleep(initial_wait)
    previous_state = None
    # Tracks whether progress dots were printed, so a newline can be
    # emitted before the next state-change banner.
    have_ticks = False
    while True:
        res = queue.status(task_id)
        state = res['status']['state']
        if state != previous_state:
            now = datetime.datetime.utcnow()
            if have_ticks:
                sys.stdout.write('\n')
                have_ticks = False
            print('--- %s task %s %s ---' % (now, task_id, state))
            previous_state = state
        if state == 'completed':
            # The run id is the index of the latest run.
            return len(res['status']['runs']) - 1
        if state in ('failed', 'exception'):
            raise Exception('Task failed')
        # One dot per poll as a lightweight progress indicator.
        sys.stdout.write('.')
        sys.stdout.flush()
        have_ticks = True
        time.sleep(10)
def fetch_artifact(queue, task_id, run_id, name, dest_dir):
    '''
    Fetch the artifact with name from task_id and run_id in queue,
    write it to a file in dest_dir, and return the path to the written
    file.

    Returns None (after printing the status code) if the download fails
    with an HTTP error.
    '''
    url = queue.buildUrl('getArtifact', task_id, run_id, name)
    # Artifact names are paths like 'public/rustc.tar.xz'; keep only the
    # final component for the local filename.
    fn = os.path.join(dest_dir, os.path.basename(name))
    print('Fetching %s...' % name)
    try:
        r = requests.get(url, stream=True)
        r.raise_for_status()
        # Stream to disk in 1 KiB chunks to avoid holding large
        # artifacts in memory.
        with open(fn, 'wb') as f:
            for chunk in r.iter_content(1024):
                f.write(chunk)
    except requests.exceptions.HTTPError:
        print('HTTP Error %d fetching %s' % (r.status_code, name))
        return None
    return fn
def make_artifact_dir(task_id, run_id):
    '''
    Create and return a temporary directory whose name records the
    task and run it holds artifacts for.
    '''
    dir_prefix = 'tc-artifacts.%s.%d.' % (task_id, run_id)
    print('making artifact dir %s' % dir_prefix)
    return tempfile.mkdtemp(prefix=dir_prefix)
def fetch_artifacts(queue, task_id, run_id):
    '''
    Fetch all artifacts from task_id and run_id in queue, write them to
    temporary files, and yield the path to each.

    Log and docker-worker bookkeeping artifacts are skipped.  Yields
    None for any artifact whose download fails (see fetch_artifact).
    '''
    try:
        tempdir = make_artifact_dir(task_id, run_id)
        res = queue.listArtifacts(task_id, run_id)
        for a in res['artifacts']:
            # Skip logs
            if a['name'].startswith('public/logs'):
                continue
            # Skip interfaces
            if a['name'].startswith('private/docker-worker'):
                continue
            yield fetch_artifact(queue, task_id, run_id, a['name'], tempdir)
    finally:
        # NOTE(review): cleanup is deliberately disabled so the files
        # remain available for upload/inspection; the directory is left
        # behind on disk and its location printed instead.
        if os.path.isdir(tempdir):
            #shutil.rmtree(tempdir)
            print('Artifacts downloaded to %s' % tempdir)
            pass
def upload_to_tooltool(tooltool_auth, task_id, artifact):
    '''
    Upload artifact to tooltool using tooltool_auth as the path to the
    authentication token file.  Return the path to the generated
    tooltool manifest.
    '''
    try:
        # tooltool 'add' records paths relative to the current working
        # directory, so run from the artifact's own directory.
        oldcwd = os.getcwd()
        os.chdir(os.path.dirname(artifact))
        manifest = artifact + '.manifest'
        # tooltool.main expects a full argv, program name included.
        tooltool.main([
            'tooltool.py',
            'add',
            '--visibility=public',
            '-m', manifest,
            artifact
        ])
        tooltool.main([
            'tooltool.py',
            'upload',
            '-m', manifest,
            '--authentication-file', tooltool_auth,
            '--message', 'Built from taskcluster task {}'.format(task_id),
        ])
        return manifest
    finally:
        # Restore the original working directory even if the upload fails.
        os.chdir(oldcwd)
def update_manifest(artifact, manifest, local_gecko_clone):
    '''
    Copy the tooltool manifest generated for artifact over the in-tree
    releng.manifest of the matching platform in local_gecko_clone.
    '''
    # BUG FIX: the original read "platform = linux" (no quotes), which
    # raised NameError on an undefined name.
    platform = 'linux'
    manifest_dir = os.path.join(local_gecko_clone,
                                'testing', 'config', 'tooltool-manifests')
    # Pick the first manifest directory for this platform, e.g. 'linux64'.
    platform_dir = [p for p in os.listdir(manifest_dir)
                    if p.startswith(platform)][0]
    tree_manifest = os.path.join(manifest_dir, platform_dir, 'releng.manifest')
    print('%s -> %s' % (manifest, tree_manifest))
    shutil.copyfile(manifest, tree_manifest)
def main():
    '''
    Command-line entry point: spawn (or reuse) the build task, wait for
    it to finish, fetch its artifacts, and optionally upload them to
    tooltool and update a local gecko clone's manifests.
    '''
    parser = argparse.ArgumentParser(description='Build and upload binaries')
    parser.add_argument('taskcluster_auth', help='Path to a file containing Taskcluster client ID and authentication token as a JSON file in the form {"clientId": "...", "accessToken": "..."}')
    parser.add_argument('--tooltool-auth', help='Path to a file containing a tooltool authentication token valid for uploading files')
    parser.add_argument('--local-gecko-clone', help='Path to a local Gecko clone whose tooltool manifests will be updated with the newly-built binaries')
    parser.add_argument('--rust-branch', default='stable',
                        help='Revision of the rust repository to use')
    parser.add_argument('--task', help='Use an existing task')
    args = parser.parse_args()
    tc_auth = read_tc_auth(args.taskcluster_auth)
    queue = taskcluster.Queue({'credentials': tc_auth})
    # Reusing an existing task skips the initial wait, since the task
    # may already be running or complete.
    if args.task:
        task_id, initial_wait = args.task, 0
    else:
        task_id, initial_wait = spawn_task(queue, args), 25
    run_id = wait_for_task(queue, task_id, initial_wait)
    for artifact in fetch_artifacts(queue, task_id, run_id):
        if args.tooltool_auth:
            manifest = upload_to_tooltool(args.tooltool_auth, task_id, artifact)
        if args.local_gecko_clone:
            update_manifest(artifact, manifest, args.local_gecko_clone)


if __name__ == '__main__':
    main()

View File

@ -0,0 +1,24 @@
#!/bin/bash -vex

# Upload the rust toolchain packaged by the build script to tooltool.

set -x -e

# Default the workspace to the docker worker's home directory.
: WORKSPACE ${WORKSPACE:=/home/worker}

set -v

# Upload artifacts packaged by the build script.
pushd "${WORKSPACE}"
if test -n "$TASK_ID"; then
  # If we're running on taskcluster, use the upload-capable tunnel.
  TOOLTOOL_OPTS="--url=http://relengapi/tooltool/"
  MESSAGE="Taskcluster upload ${TASK_ID}/${RUN_ID} $0"
else
  MESSAGE="Rust toolchain build for gecko"
fi
# Append the toolchain version recorded by fetch_rust.sh, if present.
if test -r rust-version; then
  MESSAGE="$MESSAGE $(cat rust-version)"
fi
# TOOLTOOL_OPTS is deliberately unquoted so an empty value expands to nothing.
/build/tooltool.py upload ${TOOLTOOL_OPTS} --message="${MESSAGE}"
popd