diff --git a/testing/mozharness/configs/b2g_bumper/master.py b/testing/mozharness/configs/b2g_bumper/master.py deleted file mode 100644 index 4e59060aafb..00000000000 --- a/testing/mozharness/configs/b2g_bumper/master.py +++ /dev/null @@ -1,100 +0,0 @@ -#!/usr/bin/env python -config = { - "exes": { - # Get around the https warnings - "hg": ['/usr/local/bin/hg', "--config", "web.cacerts=/etc/pki/tls/certs/ca-bundle.crt"], - "hgtool.py": ["/usr/local/bin/hgtool.py"], - "gittool.py": ["/usr/local/bin/gittool.py"], - }, - 'gecko_pull_url': 'https://hg.mozilla.org/integration/b2g-inbound', - 'gecko_push_url': 'ssh://hg.mozilla.org/integration/b2g-inbound', - 'gecko_local_dir': 'b2g-inbound', - 'git_ref_cache': '/builds/b2g_bumper/git_ref_cache.json', - - 'manifests_repo': 'https://git.mozilla.org/b2g/b2g-manifest.git', - 'manifests_revision': 'origin/master', - - 'hg_user': 'B2G Bumper Bot ', - "ssh_key": "~/.ssh/ffxbld_rsa", - "ssh_user": "ffxbld", - - 'hgtool_base_bundle_urls': ['https://ftp-ssl.mozilla.org/pub/mozilla.org/firefox/bundles'], - - 'gaia_repo_url': 'https://hg.mozilla.org/integration/gaia-central', - 'gaia_revision_file': 'b2g/config/gaia.json', - 'gaia_max_revisions': 1, - # Which git branch this hg repo corresponds to - 'gaia_git_branch': 'master', - 'gaia_git_repo': 'https://git.mozilla.org/releases/gaia.git', - 'gaia_mapper_project': 'gaia', - 'mapper_url': 'http://cruncher.build.mozilla.org/mapper/{project}/{vcs}/{rev}', - - 'devices': { - 'dolphin': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - }, - 'emulator-l': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - }, - 'emulator-kk': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - }, - 'emulator-jb': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - }, - 'emulator-ics': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - 'manifest_file': 'emulator.xml', - }, - # Equivalent to emulator-ics - see bug 916134 - # 
Remove once the above bug resolved - 'emulator': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - 'manifest_file': 'emulator.xml', - }, - 'flame-kk': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - }, - 'nexus-4': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - }, - 'nexus-4-kk': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - }, - 'nexus-5-l': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - }, - 'aries': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - 'manifest_file': 'shinano.xml', - }, - }, - 'repo_remote_mappings': { - 'https://android.googlesource.com/': 'https://git.mozilla.org/external/aosp', - 'git://codeaurora.org/': 'https://git.mozilla.org/external/caf', - 'git://github.com/mozilla-b2g/': 'https://git.mozilla.org/b2g', - 'git://github.com/mozilla/': 'https://git.mozilla.org/b2g', - 'https://git.mozilla.org/releases': 'https://git.mozilla.org/releases', - 'http://android.git.linaro.org/git-ro/': 'https://git.mozilla.org/external/linaro', - 'http://sprdsource.spreadtrum.com:8085/b2g/android': 'https://git.mozilla.org/external/sprd-aosp', - 'git://github.com/apitrace/': 'https://git.mozilla.org/external/apitrace', - 'git://github.com/t2m-foxfone/': 'https://git.mozilla.org/external/t2m-foxfone', - # Some mappings to ourself, we want to leave these as-is! 
- 'https://git.mozilla.org/external/aosp': 'https://git.mozilla.org/external/aosp', - 'https://git.mozilla.org/external/caf': 'https://git.mozilla.org/external/caf', - 'https://git.mozilla.org/b2g': 'https://git.mozilla.org/b2g', - 'https://git.mozilla.org/external/apitrace': 'https://git.mozilla.org/external/apitrace', - 'https://git.mozilla.org/external/t2m-foxfone': 'https://git.mozilla.org/external/t2m-foxfone', - }, -} diff --git a/testing/mozharness/configs/b2g_bumper/v2.1s.py b/testing/mozharness/configs/b2g_bumper/v2.1s.py deleted file mode 100644 index 1a143ae88c7..00000000000 --- a/testing/mozharness/configs/b2g_bumper/v2.1s.py +++ /dev/null @@ -1,87 +0,0 @@ -#!/usr/bin/env python -config = { - "exes": { - # Get around the https warnings - "hg": ['/usr/local/bin/hg', "--config", "web.cacerts=/etc/pki/tls/certs/ca-bundle.crt"], - "hgtool.py": ["/usr/local/bin/hgtool.py"], - "gittool.py": ["/usr/local/bin/gittool.py"], - }, - 'gecko_pull_url': 'https://hg.mozilla.org/releases/mozilla-b2g34_v2_1s/', - 'gecko_push_url': 'ssh://hg.mozilla.org/releases/mozilla-b2g34_v2_1s/', - 'gecko_local_dir': 'mozilla-b2g34_v2_1s', - 'git_ref_cache': '/builds/b2g_bumper/git_ref_cache.json', - - 'manifests_repo': 'https://git.mozilla.org/b2g/b2g-manifest.git', - 'manifests_revision': 'origin/v2.1s', - - 'hg_user': 'B2G Bumper Bot ', - "ssh_key": "~/.ssh/ffxbld_rsa", - "ssh_user": "ffxbld", - - 'hgtool_base_bundle_urls': ['https://ftp-ssl.mozilla.org/pub/mozilla.org/firefox/bundles'], - - 'gaia_repo_url': 'https://hg.mozilla.org/integration/gaia-2_1s', - 'gaia_revision_file': 'b2g/config/gaia.json', - 'gaia_max_revisions': 5, - # Which git branch this hg repo corresponds to - 'gaia_git_branch': 'v2.1s', - 'gaia_git_repo': 'https://git.mozilla.org/releases/gaia.git', - 'gaia_mapper_project': 'gaia', - 'mapper_url': 'http://cruncher.build.mozilla.org/mapper/{project}/{vcs}/{rev}', - - 'devices': { - 'dolphin': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - }, 
- 'dolphin-512': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - }, - 'emulator-kk': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - }, - 'emulator-jb': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - }, - 'emulator-ics': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - 'manifest_file': 'emulator.xml', - }, - # Equivalent to emulator-ics - see bug 916134 - # Remove once the above bug resolved - 'emulator': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - 'manifest_file': 'emulator.xml', - }, - 'flame-kk': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - }, - 'nexus-4': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - }, - }, - 'repo_remote_mappings': { - 'https://android.googlesource.com/': 'https://git.mozilla.org/external/aosp', - 'git://codeaurora.org/': 'https://git.mozilla.org/external/caf', - 'git://github.com/mozilla-b2g/': 'https://git.mozilla.org/b2g', - 'git://github.com/mozilla/': 'https://git.mozilla.org/b2g', - 'https://git.mozilla.org/releases': 'https://git.mozilla.org/releases', - 'http://android.git.linaro.org/git-ro/': 'https://git.mozilla.org/external/linaro', - 'http://sprdsource.spreadtrum.com:8085/b2g/android': 'https://git.mozilla.org/external/sprd-aosp', - 'git://github.com/apitrace/': 'https://git.mozilla.org/external/apitrace', - 'git://github.com/t2m-foxfone/': 'https://git.mozilla.org/external/t2m-foxfone', - # Some mappings to ourself, we want to leave these as-is! 
- 'https://git.mozilla.org/external/aosp': 'https://git.mozilla.org/external/aosp', - 'https://git.mozilla.org/external/caf': 'https://git.mozilla.org/external/caf', - 'https://git.mozilla.org/b2g': 'https://git.mozilla.org/b2g', - 'https://git.mozilla.org/external/apitrace': 'https://git.mozilla.org/external/apitrace', - 'https://git.mozilla.org/external/t2m-foxfone': 'https://git.mozilla.org/external/t2m-foxfone', - }, -} diff --git a/testing/mozharness/configs/b2g_bumper/v2.2.py b/testing/mozharness/configs/b2g_bumper/v2.2.py deleted file mode 100644 index 0fe12c70988..00000000000 --- a/testing/mozharness/configs/b2g_bumper/v2.2.py +++ /dev/null @@ -1,91 +0,0 @@ -#!/usr/bin/env python -config = { - "exes": { - # Get around the https warnings - "hg": ['/usr/local/bin/hg', "--config", "web.cacerts=/etc/pki/tls/certs/ca-bundle.crt"], - "hgtool.py": ["/usr/local/bin/hgtool.py"], - "gittool.py": ["/usr/local/bin/gittool.py"], - }, - 'gecko_pull_url': 'https://hg.mozilla.org/releases/mozilla-b2g37_v2_2/', - 'gecko_push_url': 'ssh://hg.mozilla.org/releases/mozilla-b2g37_v2_2/', - 'gecko_local_dir': 'mozilla-b2g37_v2_2', - 'git_ref_cache': '/builds/b2g_bumper/git_ref_cache.json', - - 'manifests_repo': 'https://git.mozilla.org/b2g/b2g-manifest.git', - 'manifests_revision': 'origin/v2.2', - - 'hg_user': 'B2G Bumper Bot ', - "ssh_key": "~/.ssh/ffxbld_rsa", - "ssh_user": "ffxbld", - - 'hgtool_base_bundle_urls': ['https://ftp-ssl.mozilla.org/pub/mozilla.org/firefox/bundles'], - - 'gaia_repo_url': 'https://hg.mozilla.org/integration/gaia-2_2', - 'gaia_revision_file': 'b2g/config/gaia.json', - 'gaia_max_revisions': 5, - # Which git branch this hg repo corresponds to - 'gaia_git_branch': 'v2.2', - 'gaia_git_repo': 'https://git.mozilla.org/releases/gaia.git', - 'gaia_mapper_project': 'gaia', - 'mapper_url': 'http://cruncher.build.mozilla.org/mapper/{project}/{vcs}/{rev}', - - 'devices': { - 'dolphin': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - }, - 
'emulator-l': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - }, - 'emulator-kk': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - }, - 'emulator-jb': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - }, - 'emulator-ics': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - 'manifest_file': 'emulator.xml', - }, - # Equivalent to emulator-ics - see bug 916134 - # Remove once the above bug resolved - 'emulator': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - 'manifest_file': 'emulator.xml', - }, - 'flame-kk': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - }, - 'nexus-4': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - }, - 'nexus-5-l': { - 'ignore_projects': ['gecko'], - 'ignore_groups': ['darwin'], - }, - }, - 'repo_remote_mappings': { - 'https://android.googlesource.com/': 'https://git.mozilla.org/external/aosp', - 'git://codeaurora.org/': 'https://git.mozilla.org/external/caf', - 'git://github.com/mozilla-b2g/': 'https://git.mozilla.org/b2g', - 'git://github.com/mozilla/': 'https://git.mozilla.org/b2g', - 'https://git.mozilla.org/releases': 'https://git.mozilla.org/releases', - 'http://android.git.linaro.org/git-ro/': 'https://git.mozilla.org/external/linaro', - 'http://sprdsource.spreadtrum.com:8085/b2g/android': 'https://git.mozilla.org/external/sprd-aosp', - 'git://github.com/apitrace/': 'https://git.mozilla.org/external/apitrace', - 'git://github.com/t2m-foxfone/': 'https://git.mozilla.org/external/t2m-foxfone', - # Some mappings to ourself, we want to leave these as-is! 
- 'https://git.mozilla.org/external/aosp': 'https://git.mozilla.org/external/aosp', - 'https://git.mozilla.org/external/caf': 'https://git.mozilla.org/external/caf', - 'https://git.mozilla.org/b2g': 'https://git.mozilla.org/b2g', - 'https://git.mozilla.org/external/apitrace': 'https://git.mozilla.org/external/apitrace', - 'https://git.mozilla.org/external/t2m-foxfone': 'https://git.mozilla.org/external/t2m-foxfone', - }, -} diff --git a/testing/mozharness/configs/vcs_sync/beagle.py b/testing/mozharness/configs/vcs_sync/beagle.py deleted file mode 100644 index b1eeb7ac052..00000000000 --- a/testing/mozharness/configs/vcs_sync/beagle.py +++ /dev/null @@ -1,789 +0,0 @@ -# This is for gecko-dev, which is a developer-oriented repo with -# release-train and inbound branches. - -import os -import socket -hostname = socket.gethostname() - -CVS_MANIFEST = """[{ -"size": 1301484692, -"digest": "89df462d8d20f54402caaaa4e3c10aa54902a1d7196cdf86b7790b76e62d302ade3102dc3f7da4145dd832e6938b0472370ce6a321e0b3bcf0ad050937bd0e9a", -"algorithm": "sha512", -"filename": "mozilla-cvs-history.tar.bz2" -}] -""" - -config = { - "log_name": "beagle", - "log_max_rotate": 99, - "repos": [{ - "repo": "https://hg.mozilla.org/users/hwine_mozilla.com/repo-sync-tools", - "vcs": "hg", - }], - "job_name": "beagle", - "conversion_dir": "beagle", - "initial_repo": { - "repo": "https://hg.mozilla.org/mozilla-central", - "revision": "default", - "repo_name": "mozilla-central", - "targets": [{ - "target_dest": "beagle/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "gitmo-beagle", - }, { - "target_dest": "github-beagle", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "master", - }, - }, - }, - "backup_dir": "/mnt/netapp/github_sync/aki/%s" % hostname, - "cvs_manifest": CVS_MANIFEST, - "cvs_history_tarball": "/home/pmoore/mozilla-cvs-history.tar.bz2", - "env": { - "PATH": "%(PATH)s:/usr/libexec/git-core", - }, - "conversion_repos": [{ - "repo": 
"https://hg.mozilla.org/releases/mozilla-b2g18", - "revision": "default", - "repo_name": "mozilla-b2g18", - "targets": [{ - "target_dest": "beagle/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "gitmo-beagle", - }, { - "target_dest": "github-beagle", - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "b2g18", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - "mapper": { - "url": "https://api.pub.build.mozilla.org/mapper", - "project": "gecko-dev" - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-b2g26_v1_2", - "revision": "default", - "repo_name": "mozilla-b2g26_v1_2", - "targets": [{ - "target_dest": "beagle/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "gitmo-beagle", - }, { - "target_dest": "github-beagle", - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "b2g26_v1_2", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - "mapper": { - "url": "https://api.pub.build.mozilla.org/mapper", - "project": "gecko-dev" - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-b2g26_v1_2f", - "revision": "default", - "repo_name": "mozilla-b2g26_v1_2f", - "targets": [{ - "target_dest": "beagle/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "gitmo-beagle", - }, { - "target_dest": "github-beagle", - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "b2g26_v1_2f", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - "mapper": { - "url": "https://api.pub.build.mozilla.org/mapper", - "project": "gecko-dev" - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-b2g28_v1_3", - "revision": "default", - "repo_name": "mozilla-b2g28_v1_3", - "targets": [{ - "target_dest": "beagle/.git", - "vcs": "git", - 
"test_push": True, - }, { - "target_dest": "gitmo-beagle", - }, { - "target_dest": "github-beagle", - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "b2g28_v1_3", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - "mapper": { - "url": "https://api.pub.build.mozilla.org/mapper", - "project": "gecko-dev" - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-b2g28_v1_3t", - "revision": "default", - "repo_name": "mozilla-b2g28_v1_3t", - "targets": [{ - "target_dest": "beagle/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "gitmo-beagle", - }, { - "target_dest": "github-beagle", - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "b2g28_v1_3t", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - "mapper": { - "url": "https://api.pub.build.mozilla.org/mapper", - "project": "gecko-dev" - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-b2g30_v1_4", - "revision": "default", - "repo_name": "mozilla-b2g30_v1_4", - "targets": [{ - "target_dest": "beagle/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "gitmo-beagle", - }, { - "target_dest": "github-beagle", - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "b2g30_v1_4", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - "mapper": { - "url": "https://api.pub.build.mozilla.org/mapper", - "project": "gecko-dev" - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-b2g34_v2_1", - "revision": "default", - "repo_name": "mozilla-b2g34_v2_1", - "targets": [{ - "target_dest": "beagle/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "gitmo-beagle", - }, { - "target_dest": "github-beagle", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": 
"b2g34_v2_1", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - "mapper": { - "url": "https://api.pub.build.mozilla.org/mapper", - "project": "gecko-dev" - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-b2g37_v2_2", - "revision": "default", - "repo_name": "mozilla-b2g37_v2_2", - "targets": [{ - "target_dest": "beagle/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "gitmo-beagle", - }, { - "target_dest": "github-beagle", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "b2g37_v2_2", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - "mapper": { - "url": "https://api.pub.build.mozilla.org/mapper", - "project": "gecko-dev" - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-b2g37_v2_2r", - "revision": "default", - "repo_name": "mozilla-b2g37_v2_2r", - "targets": [{ - "target_dest": "beagle/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "gitmo-beagle", - }, { - "target_dest": "github-beagle", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "b2g37_v2_2r", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - "mapper": { - "url": "https://api.pub.build.mozilla.org/mapper", - "project": "gecko-dev" - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-b2g32_v2_0", - "revision": "default", - "repo_name": "mozilla-b2g32_v2_0", - "targets": [{ - "target_dest": "beagle/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "gitmo-beagle", - }, { - "target_dest": "github-beagle", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "b2g32_v2_0", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - "mapper": { - "url": "https://api.pub.build.mozilla.org/mapper", - "project": "gecko-dev" - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-b2g32_v2_0m", - "revision": "default", - "repo_name": "mozilla-b2g32_v2_0m", - "targets": [{ - "target_dest": 
"beagle/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "gitmo-beagle", - }, { - "target_dest": "github-beagle", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "b2g32_v2_0m", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - "mapper": { - "url": "https://api.pub.build.mozilla.org/mapper", - "project": "gecko-dev" - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-b2g18_v1_1_0_hd", - "revision": "default", - "repo_name": "mozilla-b2g18_v1_1_0_hd", - "targets": [{ - "target_dest": "beagle/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "gitmo-beagle", - }, { - "target_dest": "github-beagle", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "b2g18_v1_1_0_hd", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - "mapper": { - "url": "https://api.pub.build.mozilla.org/mapper", - "project": "gecko-dev" - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-b2g18_v1_0_1", - "revision": "default", - "repo_name": "mozilla-b2g18_v1_0_1", - "targets": [{ - "target_dest": "beagle/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "gitmo-beagle", - }, { - "target_dest": "github-beagle", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "b2g18_v1_0_1", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - "mapper": { - "url": "https://api.pub.build.mozilla.org/mapper", - "project": "gecko-dev" - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-b2g18_v1_0_0", - "revision": "default", - "repo_name": "mozilla-b2g18_v1_0_0", - "targets": [{ - "target_dest": "beagle/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "gitmo-beagle", - }, { - "target_dest": "github-beagle", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "b2g18_v1_0_0", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - "mapper": { - "url": 
"https://api.pub.build.mozilla.org/mapper", - "project": "gecko-dev" - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-aurora", - "revision": "default", - "repo_name": "mozilla-aurora", - "targets": [{ - "target_dest": "beagle/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "gitmo-beagle", - }, { - "target_dest": "github-beagle", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "aurora", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - "mapper": { - "url": "https://api.pub.build.mozilla.org/mapper", - "project": "gecko-dev" - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-beta", - "revision": "default", - "repo_name": "mozilla-beta", - "targets": [{ - "target_dest": "beagle/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "gitmo-beagle", - }, { - "target_dest": "github-beagle", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "beta", - }, - "branch_regexes": [ - "^GECKO[0-9_]*RELBRANCH$", - "^MOBILE[0-9_]*RELBRANCH$", - ], - }, - "tag_config": { - "tag_regexes": [ - "^(B2G|RELEASE_BASE)_", - ], - }, - "mapper": { - "url": "https://api.pub.build.mozilla.org/mapper", - "project": "gecko-dev" - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-release", - "revision": "default", - "repo_name": "mozilla-release", - "targets": [{ - "target_dest": "beagle/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "gitmo-beagle", - }, { - "target_dest": "github-beagle", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "release", - }, - "branch_regexes": [ - "^GECKO[0-9_]*RELBRANCH$", - "^MOBILE[0-9_]*RELBRANCH$", - ], - }, - "tag_config": { - "tag_regexes": [ - "^(B2G|RELEASE_BASE)_", - ], - }, - "mapper": { - "url": "https://api.pub.build.mozilla.org/mapper", - "project": "gecko-dev" - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-esr17", - "revision": "default", - "repo_name": 
"mozilla-esr17", - "targets": [{ - "target_dest": "beagle/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "gitmo-beagle", - }, { - "target_dest": "github-beagle", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "esr17", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - "mapper": { - "url": "https://api.pub.build.mozilla.org/mapper", - "project": "gecko-dev" - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-esr31", - "revision": "default", - "repo_name": "mozilla-esr31", - "targets": [{ - "target_dest": "beagle/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "gitmo-beagle", - }, { - "target_dest": "github-beagle", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "esr31", - }, - "branch_regexes": [ - "^GECKO[0-9]+esr_[0-9]+_RELBRANCH$", - ], - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - "mapper": { - "url": "https://api.pub.build.mozilla.org/mapper", - "project": "gecko-dev" - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-esr38", - "revision": "default", - "repo_name": "mozilla-esr38", - "targets": [{ - "target_dest": "beagle/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "gitmo-beagle", - }, { - "target_dest": "github-beagle", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "esr38", - }, - "branch_regexes": [ - "^GECKO[0-9]+esr_[0-9]+_RELBRANCH$", - ], - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - "mapper": { - "url": "https://api.pub.build.mozilla.org/mapper", - "project": "gecko-dev" - }, - }, { - "repo": "https://hg.mozilla.org/integration/mozilla-inbound", - "revision": "default", - "repo_name": "mozilla-inbound", - "targets": [{ - "target_dest": "beagle/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "gitmo-beagle", - }, { - "target_dest": "github-beagle", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": 
"inbound", - }, - }, - "tag_config": {}, - "mapper": { - "url": "https://api.pub.build.mozilla.org/mapper", - "project": "gecko-dev" - }, - }, { - "repo": "https://hg.mozilla.org/integration/b2g-inbound", - "revision": "default", - "repo_name": "b2g-inbound", - "targets": [{ - "target_dest": "beagle/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "gitmo-beagle", - }, { - "target_dest": "github-beagle", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "b2g-inbound", - }, - }, - "tag_config": {}, - "mapper": { - "url": "https://api.pub.build.mozilla.org/mapper", - "project": "gecko-dev" - }, - }, { - "repo": "https://hg.mozilla.org/integration/fx-team", - "revision": "default", - "repo_name": "fx-team", - "targets": [{ - "target_dest": "beagle/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "gitmo-beagle", - }, { - "target_dest": "github-beagle", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "fx-team", - }, - }, - "tag_config": {}, - "mapper": { - "url": "https://api.pub.build.mozilla.org/mapper", - "project": "gecko-dev" - }, - }], - "remote_targets": { - "github-beagle": { - "repo": "git@github.com:mozilla/gecko-dev.git", - "ssh_key": "~/.ssh/releng-github-id_rsa", - "vcs": "git", - }, - "gitmo-beagle": { - "repo": "gitolite3@git.mozilla.org:integration/gecko-dev.git", - "ssh_key": "~/.ssh/vcs-sync_rsa", - "vcs": "git", - }, - }, - - "exes": { - # bug 828140 - shut https warnings up. 
- # http://kiln.stackexchange.com/questions/2816/mercurial-certificate-warning-certificate-not-verified-web-cacerts - "hg": [os.path.join(os.getcwd(), "build", "venv", "bin", "hg"), "--config", "web.cacerts=/etc/pki/tls/certs/ca-bundle.crt"], - "tooltool.py": [ - os.path.join(os.getcwd(), "build", "venv", "bin", "python"), - os.path.join(os.getcwd(), "mozharness", "external_tools", "tooltool.py"), - ], - }, - - "virtualenv_modules": [ - "bottle==0.11.6", - "dulwich==0.9.0", - "ordereddict==1.1", - "hg-git==0.4.0-moz2", - "mapper==0.1", - "mercurial==2.6.3", - "mozfile==0.9", - "mozinfo==0.5", - "mozprocess==0.11", - "requests==2.8.1", - ], - "find_links": [ - "http://pypi.pvt.build.mozilla.org/pub", - "http://pypi.pub.build.mozilla.org/pub", - ], - "pip_index": False, - - "default_notify_from": "developer-services+%s@mozilla.org" % hostname, - "notify_config": [{ - "to": "releng-ops-trial@mozilla.com", - "failure_only": False, - "skip_empty_messages": True, - }], - - # Disallow sharing, since we want pristine .hg and .git directories. 
- "vcs_share_base": None, - "hg_share_base": None, - "default_actions": [ - 'list-repos', - 'create-virtualenv', - 'update-stage-mirror', - 'update-work-mirror', - 'publish-to-mapper', - 'push', - 'combine-mapfiles', - 'notify', - ], -} diff --git a/testing/mozharness/configs/vcs_sync/build-repos.py b/testing/mozharness/configs/vcs_sync/build-repos.py deleted file mode 100644 index 14fee3955e7..00000000000 --- a/testing/mozharness/configs/vcs_sync/build-repos.py +++ /dev/null @@ -1,113 +0,0 @@ -import os -import socket -hostname = socket.gethostname() - -build_repos = ( - 'autoland', - 'buildapi', - 'buildbot-configs', - 'buildbotcustom', - 'mozharness', - 'opsi-package-sources', - 'partner-repacks', - 'preproduction', - 'puppet', - 'puppet-manifests', - 'rpm-sources', - 'talos', - 'tools', -) - -conversion_repos = [] -remote_targets = {} - -for repo in build_repos: - conversion_repos.append({ - "repo": "https://hg.mozilla.org/build/%s" % repo, - "repo_name": "build-%s" % repo, - "conversion_dir": "build-%s" % repo, - "targets": [{ - "target_dest": "build-%s-github" % repo, - "force_push": True - }], - "vcs": "hg", - "mapper": { - "url": "https://api.pub.build.mozilla.org/mapper", - "project": "build-%s" % repo, - }, - "branch_config": { - "branches": { - "default": "master", - }, - "branch_regexes": [ - "^.*$" - ] - }, -# Bug 1036819 - build/* repos currently not able to push tags to github -# temporarily disable tags in conversion. -# When bug 1020613 is resolved, this tag_config below can be enabled again. 
-# "tag_config": { -# "tag_regexes": [ -# "^.*$" -# ] -# }, - }) - remote_targets["build-%s-github" % repo] = { - "repo": "git@github.com:mozilla/build-%s.git" % repo, - "ssh_key": "~/.ssh/releng-github-id_rsa", - "vcs": "git", - } - -config = { - "log_name": "build-repos", - "log_max_rotate": 99, - "job_name": "build-repos", - "env": { - "PATH": "%(PATH)s:/usr/libexec/git-core", - }, - "conversion_repos": conversion_repos, - "remote_targets": remote_targets, - "virtualenv_modules": [ - "dulwich==0.9.0", - "ordereddict==1.1", - "hg-git==0.4.0-moz2", - "mapper==0.1", - "mercurial==2.6.3", - "mozfile==0.9", - "mozinfo==0.5", - "mozprocess==0.11", - "requests==2.8.1", - ], - "find_links": [ - "http://pypi.pub.build.mozilla.org/pub" - ], - "pip_index": False, - - "default_notify_from": "developer-services+%s@mozilla.org" % hostname, - "notify_config": [{ - "to": "releng-ops-trial@mozilla.com", - "failure_only": False, - "skip_empty_messages": True, - }], - - # Disallow sharing, since we want pristine .hg and .git directories. - "vcs_share_base": None, - "hg_share_base": None, - - # any hg command line options - "hg_options": ( - "--config", - "web.cacerts=/etc/pki/tls/certs/ca-bundle.crt" - ), - - "default_actions": [ - 'list-repos', - 'create-virtualenv', - 'update-stage-mirror', - 'update-work-mirror', - 'publish-to-mapper', - 'push', - 'combine-mapfiles', - 'notify', - ], -} diff --git a/testing/mozharness/configs/vcs_sync/gecko-git.py b/testing/mozharness/configs/vcs_sync/gecko-git.py deleted file mode 100644 index 552c9ac9a7b..00000000000 --- a/testing/mozharness/configs/vcs_sync/gecko-git.py +++ /dev/null @@ -1,437 +0,0 @@ -# This is for gecko.git, which is a partner-oriented repo with -# B2G release branches + tags. 
- -import os -import socket -hostname = socket.gethostname() - -CVS_MANIFEST = """[{ -"size": 1301484692, -"digest": "89df462d8d20f54402caaaa4e3c10aa54902a1d7196cdf86b7790b76e62d302ade3102dc3f7da4145dd832e6938b0472370ce6a321e0b3bcf0ad050937bd0e9a", -"algorithm": "sha512", -"filename": "mozilla-cvs-history.tar.bz2" -}] -""" - -config = { - "log_name": "gecko-git", - "log_max_rotate": 99, - "repos": [{ - "repo": "https://hg.mozilla.org/users/hwine_mozilla.com/repo-sync-tools", - "vcs": "hg", - }], - "job_name": "gecko-git", - "conversion_dir": "gecko-git", - "initial_repo": { - "repo": "https://hg.mozilla.org/mozilla-central", - "revision": "default", - "repo_name": "mozilla-central", - "targets": [{ - "target_dest": "gecko-git/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "github-gecko-git", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "master", - }, - }, - }, - "backup_dir": "/mnt/netapp/github_sync/aki/%s" % hostname, - "cvs_manifest": CVS_MANIFEST, - "cvs_history_tarball": "/home/pmoore/mozilla-cvs-history.tar.bz2", - "env": { - "PATH": "%(PATH)s:/usr/libexec/git-core", - }, - "conversion_repos": [{ - "repo": "https://hg.mozilla.org/releases/mozilla-b2g37_v2_2", - "revision": "default", - "repo_name": "mozilla-b2g37_v2_2", - "targets": [{ - "target_dest": "gecko-git/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "github-gecko-git", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "v2.2", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-b2g37_v2_2r", - "revision": "default", - "repo_name": "mozilla-b2g37_v2_2r", - "targets": [{ - "target_dest": "gecko-git/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "github-gecko-git", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "v2.2r", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - }, { - 
"repo": "https://hg.mozilla.org/releases/mozilla-b2g34_v2_1", - "revision": "default", - "repo_name": "mozilla-b2g34_v2_1", - "targets": [{ - "target_dest": "gecko-git/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "github-gecko-git", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "v2.1", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-b2g34_v2_1s", - "revision": "default", - "repo_name": "mozilla-b2g34_v2_1s", - "targets": [{ - "target_dest": "gecko-git/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "github-gecko-git", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "v2.1s", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-b2g32_v2_0", - "revision": "default", - "repo_name": "mozilla-b2g32_v2_0", - "targets": [{ - "target_dest": "gecko-git/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "github-gecko-git", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "v2.0", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-b2g32_v2_0m", - "revision": "default", - "repo_name": "mozilla-b2g32_v2_0m", - "targets": [{ - "target_dest": "gecko-git/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "github-gecko-git", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "v2.0m", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-b2g30_v1_4", - "revision": "default", - "repo_name": "mozilla-b2g30_v1_4", - "targets": [{ - "target_dest": "gecko-git/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "github-gecko-git", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "v1.4", - }, - }, 
- "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-b2g28_v1_3", - "revision": "default", - "repo_name": "mozilla-b2g28_v1_3", - "targets": [{ - "target_dest": "gecko-git/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "github-gecko-git", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "v1.3", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-b2g28_v1_3t", - "revision": "default", - "repo_name": "mozilla-b2g28_v1_3t", - "targets": [{ - "target_dest": "gecko-git/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "github-gecko-git", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "v1.3t", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-b2g26_v1_2", - "revision": "default", - "repo_name": "mozilla-b2g26_v1_2", - "targets": [{ - "target_dest": "gecko-git/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "github-gecko-git", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "v1.2", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-b2g26_v1_2f", - "revision": "default", - "repo_name": "mozilla-b2g26_v1_2f", - "targets": [{ - "target_dest": "gecko-git/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "github-gecko-git", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "v1.2f", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-b2g18", - "revision": "default", - "repo_name": "mozilla-b2g18", - "targets": [{ - "target_dest": "gecko-git/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "github-gecko-git", - "tag_config": { - 
"tag_regexes": [ - "^B2G_", - ], - }, - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "gecko-18", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-b2g18_v1_1_0_hd", - "revision": "default", - "repo_name": "mozilla-b2g18_v1_1_0_hd", - "targets": [{ - "target_dest": "gecko-git/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "github-gecko-git", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "v1.1.0hd", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-b2g18_v1_0_1", - "revision": "default", - "repo_name": "mozilla-b2g18_v1_0_1", - "targets": [{ - "target_dest": "gecko-git/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "github-gecko-git", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "v1.0.1", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - }, { - "repo": "https://hg.mozilla.org/releases/mozilla-b2g18_v1_0_0", - "revision": "default", - "repo_name": "mozilla-b2g18_v1_0_0", - "targets": [{ - "target_dest": "gecko-git/.git", - "vcs": "git", - "test_push": True, - }, { - "target_dest": "github-gecko-git", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "v1.0.0", - }, - }, - "tag_config": { - "tag_regexes": [ - "^B2G_", - ], - }, - }], - "remote_targets": { - "github-gecko-git": { - "repo": "git@github.com:escapewindow/test-gecko-git.git", - "ssh_key": "~/.ssh/escapewindow_github_rsa", - "vcs": "git", - }, - }, - - "exes": { - # bug 828140 - shut https warnings up. 
- # http://kiln.stackexchange.com/questions/2816/mercurial-certificate-warning-certificate-not-verified-web-cacerts - "hg": [os.path.join(os.getcwd(), "build", "venv", "bin", "hg"), "--config", "web.cacerts=/etc/pki/tls/certs/ca-bundle.crt"], - "tooltool.py": [ - os.path.join(os.getcwd(), "build", "venv", "bin", "python"), - os.path.join(os.getcwd(), "mozharness", "external_tools", "tooltool.py"), - ], - }, - - "virtualenv_modules": [ - "bottle==0.11.6", - "dulwich==0.9.0", - "ordereddict==1.1", - "hg-git==0.4.0-moz2", - "mapper==0.1", - "mercurial==2.6.3", - "mozfile==0.9", - "mozinfo==0.5", - "mozprocess==0.11", - ], - "find_links": [ - "http://pypi.pvt.build.mozilla.org/pub", - "http://pypi.pub.build.mozilla.org/pub", - ], - "pip_index": False, - - "upload_config": [{ - "ssh_key": "~/.ssh/id_rsa", - "ssh_user": "pmoore", - "remote_host": "github-sync2", - "remote_path": "/home/pmoore/upload/gecko-git-upload", - }], - - "default_notify_from": "developer-services+%s@mozilla.org" % hostname, - "notify_config": [{ - "to": "releng-ops-trial@mozilla.com", - "failure_only": False, - "skip_empty_messages": True, - }], - - # Disallow sharing, since we want pristine .hg and .git directories. 
- "vcs_share_base": None, - "hg_share_base": None, -} diff --git a/testing/mozharness/configs/vcs_sync/l10n.py b/testing/mozharness/configs/vcs_sync/l10n.py deleted file mode 100644 index 938189d1890..00000000000 --- a/testing/mozharness/configs/vcs_sync/l10n.py +++ /dev/null @@ -1,311 +0,0 @@ -from copy import deepcopy -import socket -hostname = socket.gethostname() - -GECKO_BRANCHES = { - 'v2.1': 'mozilla-beta', - 'v2.2': 'mozilla-central', -} - -GECKO_CONFIG_TEMPLATE = { - - 'mozilla-release': { - 'generate_git_notes': False, # we can change this when bug 1034725 is resolved - 'mapper': { - 'project': 'gitmo-gecko-l10n', - 'url': 'https://api.pub.build.mozilla.org/mapper' - }, - 'locales_file_url': 'https://hg.mozilla.org/releases/mozilla-release/raw-file/default/b2g/locales/all-locales', - 'hg_url': 'https://hg.mozilla.org/releases/l10n/mozilla-release/%(locale)s', - 'targets': [{ - 'target_dest': 'releases-l10n-%(locale)s-gecko/.git', - 'test_push': True, - 'vcs': 'git' - }, { - 'target_dest': 'gitmo-gecko-l10n-%(locale)s', - }], - 'tag_config': { - 'tag_regexes': [ - '^B2G_', - ], - }, - }, - - 'mozilla-beta': { - 'generate_git_notes': False, # we can change this when bug 1034725 is resolved - 'mapper': { - 'project': 'gitmo-gecko-l10n', - 'url': 'https://api.pub.build.mozilla.org/mapper' - }, - 'locales_file_url': 'https://hg.mozilla.org/releases/mozilla-beta/raw-file/default/b2g/locales/all-locales', - 'hg_url': 'https://hg.mozilla.org/releases/l10n/mozilla-beta/%(locale)s', - 'targets': [{ - 'target_dest': 'releases-l10n-%(locale)s-gecko/.git', - 'test_push': True, - 'vcs': 'git' - }, { - 'target_dest': 'gitmo-gecko-l10n-%(locale)s', - }], - 'tag_config': { - 'tag_regexes': [ - '^B2G_', - ], - }, - }, - - 'mozilla-aurora': { - 'generate_git_notes': False, # we can change this when bug 1034725 is resolved - 'mapper': { - 'project': 'gitmo-gecko-l10n', - 'url': 'https://api.pub.build.mozilla.org/mapper' - }, - 'locales_file_url': 
'https://hg.mozilla.org/releases/mozilla-aurora/raw-file/default/b2g/locales/all-locales', - 'hg_url': 'https://hg.mozilla.org/releases/l10n/mozilla-aurora/%(locale)s', - 'targets': [{ - 'target_dest': 'releases-l10n-%(locale)s-gecko/.git', - 'test_push': True, - 'vcs': 'git' - }, { - 'target_dest': 'gitmo-gecko-l10n-%(locale)s', - }], - 'tag_config': { - 'tag_regexes': [ - '^B2G_', - ], - }, - }, - - 'mozilla-central': { - 'generate_git_notes': False, # we can change this when bug 1034725 is resolved - 'mapper': { - 'project': 'gitmo-gecko-l10n', - 'url': 'https://api.pub.build.mozilla.org/mapper' - }, - 'locales_file_url': 'https://hg.mozilla.org/mozilla-central/raw-file/default/b2g/locales/all-locales', - 'hg_url': 'https://hg.mozilla.org/l10n-central/%(locale)s', - 'targets': [{ - 'target_dest': 'releases-l10n-%(locale)s-gecko/.git', - 'test_push': True, - 'vcs': 'git' - }, { - 'target_dest': 'gitmo-gecko-l10n-%(locale)s', - }], - 'tag_config': { - 'tag_regexes': [ - '^B2G_', - ], - }, - }, -} - -# Build gecko_config -GECKO_CONFIG = {} -for version, branch in GECKO_BRANCHES.items(): - GECKO_CONFIG[branch] = deepcopy(GECKO_CONFIG_TEMPLATE[branch]) - GECKO_CONFIG[branch]['git_branch_name'] = version - -config = { - "log_name": "l10n", - "log_max_rotate": 99, - "job_name": "l10n", - "env": { - "PATH": "%(PATH)s:/usr/libexec/git-core", - }, - "conversion_type": "b2g-l10n", - "combined_mapfile": "l10n-mapfile", - "l10n_config": { - "gecko_config": GECKO_CONFIG, - "gaia_config": { - 'v2_1': { - 'generate_git_notes': False, # we can change this when bug 1034725 is resolved - 'mapper': { - 'project': 'gitmo-gaia-l10n', - 'url': 'https://api.pub.build.mozilla.org/mapper' - }, - 'locales_file_url': 'https://raw.github.com/mozilla-b2g/gaia/v2.1/locales/languages_all.json', - 'hg_url': 'https://hg.mozilla.org/releases/gaia-l10n/v2_1/%(locale)s', - 'git_branch_name': 'v2.1', - 'targets': [{ - 'target_dest': 'releases-l10n-%(locale)s-gaia/.git', - 'test_push': True, - 'vcs': 
'git' - }, { - 'target_dest': 'gitmo-gaia-l10n-%(locale)s', - }], - 'tag_config': { - 'tag_regexes': [ - '^B2G_', - ], - }, - }, - 'v2_0': { - 'generate_git_notes': False, # we can change this when bug 1034725 is resolved - 'mapper': { - 'project': 'gitmo-gaia-l10n', - 'url': 'https://api.pub.build.mozilla.org/mapper' - }, - 'locales_file_url': 'https://raw.github.com/mozilla-b2g/gaia/v2.0/locales/languages_all.json', - 'hg_url': 'https://hg.mozilla.org/releases/gaia-l10n/v2_0/%(locale)s', - 'git_branch_name': 'v2.0', - 'targets': [{ - 'target_dest': 'releases-l10n-%(locale)s-gaia/.git', - 'test_push': True, - 'vcs': 'git' - }, { - 'target_dest': 'gitmo-gaia-l10n-%(locale)s', - }], - 'tag_config': { - 'tag_regexes': [ - '^B2G_', - ], - }, - }, - 'v1_4': { - 'generate_git_notes': False, # we can change this when bug 1034725 is resolved - 'mapper': { - 'project': 'gitmo-gaia-l10n', - 'url': 'https://api.pub.build.mozilla.org/mapper' - }, - 'locales_file_url': 'https://raw.github.com/mozilla-b2g/gaia/v1.4/locales/languages_all.json', - 'hg_url': 'https://hg.mozilla.org/releases/gaia-l10n/v1_4/%(locale)s', - 'git_branch_name': 'v1.4', - 'targets': [{ - 'target_dest': 'releases-l10n-%(locale)s-gaia/.git', - 'test_push': True, - 'vcs': 'git' - }, { - 'target_dest': 'gitmo-gaia-l10n-%(locale)s', - }], - 'tag_config': { - 'tag_regexes': [ - '^B2G_', - ], - }, - }, - 'v1_3': { - 'generate_git_notes': False, # we can change this when bug 1034725 is resolved - 'mapper': { - 'project': 'gitmo-gaia-l10n', - 'url': 'https://api.pub.build.mozilla.org/mapper' - }, - 'locales_file_url': 'https://raw.github.com/mozilla-b2g/gaia/v1.3/locales/languages_dev.json', - 'hg_url': 'https://hg.mozilla.org/releases/gaia-l10n/v1_3/%(locale)s', - 'git_branch_name': 'v1.3', - 'targets': [{ - 'target_dest': 'releases-l10n-%(locale)s-gaia/.git', - 'test_push': True, - 'vcs': 'git' - }, { - 'target_dest': 'gitmo-gaia-l10n-%(locale)s', - }], - 'tag_config': { - 'tag_regexes': [ - '^B2G_', - ], - }, 
- }, - 'v1_2': { - 'generate_git_notes': False, # we can change this when bug 1034725 is resolved - 'mapper': { - 'project': 'gitmo-gaia-l10n', - 'url': 'https://api.pub.build.mozilla.org/mapper' - }, - 'locales_file_url': 'https://raw.github.com/mozilla-b2g/gaia/v1.2/locales/languages_all.json', - 'hg_url': 'https://hg.mozilla.org/releases/gaia-l10n/v1_2/%(locale)s', - 'git_branch_name': 'v1.2', - 'targets': [{ - 'target_dest': 'releases-l10n-%(locale)s-gaia/.git', - 'test_push': True, - 'vcs': 'git' - }, { - 'target_dest': 'gitmo-gaia-l10n-%(locale)s', - }], - 'tag_config': { - 'tag_regexes': [ - '^B2G_', - ], - }, - }, - 'master': { - 'generate_git_notes': False, # we can change this when bug 1034725 is resolved - 'mapper': { - 'project': 'gitmo-gaia-l10n', - 'url': 'https://api.pub.build.mozilla.org/mapper' - }, - 'locales_file_url': 'https://raw.github.com/mozilla-b2g/gaia/master/locales/languages_all.json', - 'hg_url': 'https://hg.mozilla.org/gaia-l10n/%(locale)s', - 'git_branch_name': 'master', - 'targets': [{ - 'target_dest': 'releases-l10n-%(locale)s-gaia/.git', - 'test_push': True, - 'vcs': 'git' - }, { - 'target_dest': 'gitmo-gaia-l10n-%(locale)s', - }], - 'tag_config': { - 'tag_regexes': [ - '^B2G_', - ], - }, - }, - }, - }, - - "remote_targets": { - "gitmo-gecko-l10n-%(locale)s": { - "repo": 'gitolite3@git.mozilla.org:releases/l10n/%(locale)s/gecko.git', - "ssh_key": "~/.ssh/vcs-sync_rsa", - "vcs": "git", - }, - "gitmo-gaia-l10n-%(locale)s": { - "repo": 'gitolite3@git.mozilla.org:releases/l10n/%(locale)s/gaia.git', - "ssh_key": "~/.ssh/vcs-sync_rsa", - "vcs": "git", - }, - }, - - "virtualenv_modules": [ - "bottle==0.11.6", - "dulwich==0.9.0", - "ordereddict==1.1", - "hg-git==0.4.0-moz2", - "mapper==0.1", - "mercurial==2.6.3", - "mozfile==0.9", - "mozinfo==0.5", - "mozprocess==0.11", - "requests==2.8.1", - ], - "find_links": [ - "http://pypi.pub.build.mozilla.org/pub", - ], - "pip_index": False, - - "default_notify_from": 
"developer-services+%s@mozilla.org" % hostname, - "notify_config": [{ - "to": "releng-ops-trial@mozilla.com", - "failure_only": False, - "skip_empty_messages": True, - }], - - # Disallow sharing, since we want pristine .hg and .git directories. - "vcs_share_base": None, - "hg_share_base": None, - - # any hg command line options - "hg_options": ( - "--config", - "web.cacerts=/etc/pki/tls/certs/ca-bundle.crt" - ), - - "default_actions": [ - 'list-repos', - 'create-virtualenv', - 'update-stage-mirror', - 'update-work-mirror', - 'publish-to-mapper', - 'push', - 'combine-mapfiles', - 'notify', - ], -} diff --git a/testing/mozharness/configs/vcs_sync/project-branches.py b/testing/mozharness/configs/vcs_sync/project-branches.py deleted file mode 100644 index c4199199c66..00000000000 --- a/testing/mozharness/configs/vcs_sync/project-branches.py +++ /dev/null @@ -1,115 +0,0 @@ -# This is for gecko-projects, which is a low-SLA developer-oriented repo -# with mozilla-central based project branches. - -import os -import socket -hostname = socket.gethostname() - -# These all need to be under hg.m.o/projects. -# If you need to add a different repo, add it to CONVERSION_REPOS. -PROJECT_BRANCHES = [ - # twig projects - "alder", - "ash", - "birch", - "cedar", - "cypress", - "date", - "elm", - "fig", - "gum", - "holly", - "jamun", - "larch", - "maple", - "oak", - "pine", - # Non-twig projects - "build-system", - "graphics", - "ux", -] - -# Non-hg.m.o/projects/ repos. 
-CONVERSION_REPOS = [{ - "repo": "https://hg.mozilla.org/services/services-central", - "revision": "default", - "repo_name": "services-central", - "targets": [{ - "target_dest": "github-project-branches", - }], - "vcs": "hg", - "branch_config": { - "branches": { - "default": "services", - }, - }, - "tag_config": {}, -}] - -config = { - "log_name": "project-branches", - "log_max_rotate": 99, - "repos": [{ - "repo": "https://hg.mozilla.org/users/hwine_mozilla.com/repo-sync-tools", - "vcs": "hg", - }], - "job_name": "project-branches", - "conversion_dir": "project-branches", - "mapfile_name": "project-branches-mapfile", - "env": { - "PATH": "%(PATH)s:/usr/libexec/git-core", - }, - "conversion_type": "project-branches", - "project_branches": PROJECT_BRANCHES, - "project_branch_repo_url": "https://hg.mozilla.org/projects/%(project)s", - "conversion_repos": CONVERSION_REPOS, - "remote_targets": { - "github-project-branches": { - "repo": "git@github.com:mozilla/gecko-projects.git", - "ssh_key": "~/.ssh/releng-github-id_rsa", - "vcs": "git", - "force_push": True, - }, - }, - - "exes": { - # bug 828140 - shut https warnings up. 
- # http://kiln.stackexchange.com/questions/2816/mercurial-certificate-warning-certificate-not-verified-web-cacerts - "hg": [os.path.join(os.getcwd(), "build", "venv", "bin", "hg"), "--config", "web.cacerts=/etc/pki/tls/certs/ca-bundle.crt"], - "tooltool.py": [ - os.path.join(os.getcwd(), "build", "venv", "bin", "python"), - os.path.join(os.getcwd(), "mozharness", "external_tools", "tooltool.py"), - ], - }, - - "virtualenv_modules": [ - "bottle==0.11.6", - "dulwich==0.9.0", - "ordereddict==1.1", - "hg-git==0.4.0-moz2", - "mapper==0.1", - "mercurial==2.6.3", - "mozfile==0.9", - "mozinfo==0.5", - "mozprocess==0.11", - ], - "find_links": [ - "http://pypi.pvt.build.mozilla.org/pub", - "http://pypi.pub.build.mozilla.org/pub", - ], - "pip_index": False, - - "combined_mapfile": "combined_gecko_mapfile", - - "default_notify_from": "developer-services+%s@mozilla.org" % hostname, - "notify_config": [{ - "to": "releng-ops-trial@mozilla.com", - "failure_only": False, - "skip_empty_messages": True, - }], - - # Disallow sharing, since we want pristine .hg and .git directories. - "vcs_share_base": None, - "hg_share_base": None, -} diff --git a/testing/mozharness/scripts/b2g_bumper.py b/testing/mozharness/scripts/b2g_bumper.py deleted file mode 100755 index d34a5ac3afb..00000000000 --- a/testing/mozharness/scripts/b2g_bumper.py +++ /dev/null @@ -1,652 +0,0 @@ -#!/usr/bin/env python -# ***** BEGIN LICENSE BLOCK ***** -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this file, -# You can obtain one at http://mozilla.org/MPL/2.0/. -# ***** END LICENSE BLOCK ***** -""" b2g_bumper.py - - Updates a gecko repo with up to date information from B2G repositories. - - In particular, it updates gaia.json which is used by B2G desktop builds, - and updates the XML manifests used by device builds. 
- - This is to tie the external repository revisions to a visible gecko commit - which appears on TBPL, so sheriffs can blame the appropriate changes. -""" - -import os -import sys -from multiprocessing.pool import ThreadPool -import subprocess -import time -from urlparse import urlparse -try: - import simplejson as json - assert json -except ImportError: - import json - -sys.path.insert(1, os.path.dirname(sys.path[0])) - -from mozharness.base.errors import HgErrorList -from mozharness.base.vcs.vcsbase import VCSScript -from mozharness.mozilla import repo_manifest -from mozharness.base.log import ERROR -from mozharness.mozilla.mapper import MapperMixin - - -class B2GBumper(VCSScript, MapperMixin): - config_options = [ - [['--no-write'], { - 'dest': 'do_write', - 'action': 'store_const', - 'const': False, - 'help': 'disable writing in-tree manifests', - }], - [['--device'], { - 'dest': 'device_override', - 'help': 'specific device to process', - }], - ] - - def __init__(self, require_config_file=True): - super(B2GBumper, self).__init__( - config_options=self.config_options, - all_actions=[ - 'delete-git-ref-cache', - 'import-git-ref-cache', - 'clobber', - 'check-treestatus', - 'checkout-gecko', - 'bump-gaia', - 'checkout-manifests', - 'massage-manifests', - 'commit-manifests', - 'push', - 'push-loop', - 'export-git-ref-cache', - ], - default_actions=[ - 'push-loop', - ], - require_config_file=require_config_file, - # Default config options - config={ - 'treestatus_base_url': 'https://treestatus.mozilla.org', - 'log_max_rotate': 99, - 'do_write': True, - } - ) - - # Mapping of device name to manifest - self.device_manifests = {} - - # Cache of "%s:%s" % (remote url, refname) to revision hashes - self._git_ref_cache = {} - - # File location for persisting _git_ref_cache dictionary above as a json file - self.git_ref_cache_file = self.config.get('git_ref_cache', os.path.join(self.query_abs_dirs()['abs_work_dir'], 'git_ref_cache.json')) - - # Cache of new remotes to 
original upstreams - self._remote_mappings = {} - - # What's the latest gaia revsion we have for hg - self.gaia_hg_revision = None - self.gaia_git_rev = None - - # Helper methods {{{1 - def query_abs_dirs(self): - if self.abs_dirs: - return self.abs_dirs - - abs_dirs = super(B2GBumper, self).query_abs_dirs() - - abs_dirs.update({ - 'manifests_dir': - os.path.join(abs_dirs['abs_work_dir'], 'manifests'), - 'gecko_local_dir': - os.path.join(abs_dirs['abs_work_dir'], - self.config['gecko_local_dir']), - }) - self.abs_dirs = abs_dirs - return self.abs_dirs - - def query_manifest(self, device_name): - if device_name in self.device_manifests: - return self.device_manifests[device_name] - dirs = self.query_abs_dirs() - c = self.config - manifest_file = c['devices'][device_name].get('manifest_file', - '%s.xml' % device_name) - manifest_file = os.path.join(dirs['manifests_dir'], manifest_file) - self.info("Loading %s" % manifest_file) - manifest = repo_manifest.load_manifest(manifest_file) - self.device_manifests[device_name] = manifest - return manifest - - def filter_projects(self, device_config, manifest): - for p in device_config['ignore_projects']: - removed = repo_manifest.remove_project(manifest, path=p) - if removed: - self.info("Removed %s" % removed.toxml()) - - def filter_groups(self, device_config, manifest): - for g in device_config.get('ignore_groups', []): - removed = repo_manifest.remove_group(manifest, g) - for r in removed: - self.info("Removed %s" % r.toxml()) - - def map_remotes(self, manifest): - def mapping_func(r): - orig_url = r.getAttribute('fetch') - m = repo_manifest.map_remote(r, self.config['repo_remote_mappings']) - self._remote_mappings[m.getAttribute('fetch')] = orig_url - return m - repo_manifest.rewrite_remotes(manifest, mapping_func) - - def resolve_git_ref(self, remote_url, revision): - cache_key = "%s:%s" % (remote_url, revision) - cmd = ['git', 'ls-remote', remote_url, revision] - self.debug("Running %s" % cmd) - # Retry this a few 
times, in case there are network errors or somesuch - max_retries = 5 - for _ in range(max_retries): - proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, - stderr=subprocess.STDOUT) - if proc.wait() != 0: - self.warning("Returned %i - sleeping and retrying" % - proc.returncode) - self.warning("%s - got output: %s" % (cache_key, proc.stdout.read())) - time.sleep(30) - continue - output = proc.stdout.read() - self.info("%s - got output: %s" % (cache_key, output)) - try: - abs_revision = output.split()[0].strip() - self._git_ref_cache[cache_key] = abs_revision - return abs_revision - except IndexError: - # Couldn't split the output properly - self.warning("no output from: git ls-remote %s %s" % (remote_url, revision)) - return None - return None - - def resolve_refs(self, manifest): - worker_pool = ThreadPool(20) - lookup_threads_by_project = {} - lookup_threads_by_parameters = {} - - # Resolve refnames - for p in manifest.getElementsByTagName('project'): - name = p.getAttribute('name') - remote_url = repo_manifest.get_project_remote_url(manifest, p) - revision = repo_manifest.get_project_revision(manifest, p) - - # commit ids are already done - if repo_manifest.is_commitid(revision): - self.debug("%s is already locked to %s; skipping" % - (name, revision)) - continue - - # gaia is special - make sure we're using the same revision we used - # for gaia.json - if self.gaia_hg_revision and p.getAttribute('path') == 'gaia' and revision == self.config['gaia_git_branch']: - git_rev = self.query_gaia_git_rev() - self.info("Using %s for gaia to match %s in gaia.json" % (git_rev, self.gaia_hg_revision)) - p.setAttribute('revision', git_rev) - continue - - # If there's no '/' in the revision, assume it's a head - if '/' not in revision: - revision = 'refs/heads/%s' % revision - - cache_key = "%s:%s" % (remote_url, revision) - - # Check to see if we've looked up this revision on this remote - # before. If we have, reuse the previous value rather than looking - # it up again. 
This will make sure revisions for the same ref name - # are consistent between devices, as long as they use the same - # remote/refname. - if cache_key in self._git_ref_cache: - abs_revision = self._git_ref_cache[cache_key] - self.debug( - "Reusing previous lookup %s -> %s" % - (cache_key, abs_revision)) - p.setAttribute('revision', abs_revision) - continue - - # Maybe a thread already exists for this lookup, even if the result has not - # yet been retrieved and placed in _git_ref_cache... - # Please note result.get() can be called multiple times without problems; - # the git command will only be executed once. Therefore we can associate many - # projects to the same thread result, without problems later when we call - # get() multiple times against the same thread result. - if cache_key in lookup_threads_by_parameters: - self.debug("Reusing currently running thread to look up %s" % cache_key) - lookup_threads_by_project[p] = lookup_threads_by_parameters.get(cache_key) - else: - async_result = worker_pool.apply_async(self.resolve_git_ref, - (remote_url, revision)) - lookup_threads_by_parameters[cache_key] = async_result - lookup_threads_by_project[p] = async_result - - # TODO: alert/notify on missing repositories - abort = False - failed = [] - for p, result in lookup_threads_by_project.iteritems(): - abs_revision = result.get(timeout=300) - remote_url = repo_manifest.get_project_remote_url(manifest, p) - revision = repo_manifest.get_project_revision(manifest, p) - if not abs_revision: - abort = True - self.error("Couldn't resolve reference %s %s" % (remote_url, revision)) - failed.append(p) - p.setAttribute('revision', abs_revision) - if abort: - # Write message about how to set up syncing - default = repo_manifest.get_default(manifest) - for p in failed: - if p.hasAttribute('remote'): - remote = repo_manifest.get_remote(manifest, p.getAttribute('remote')) - else: - remote = repo_manifest.get_remote(manifest, default.getAttribute('remote')) - - new_fetch_url = 
remote.getAttribute('fetch') - orig_fetch_url = self._remote_mappings[new_fetch_url] - name = p.getAttribute('name') - self.info("needs sync? %s/%s -> %s/%s" % (orig_fetch_url, name, new_fetch_url, name)) - - self.fatal("couldn't resolve some refs; exiting") - - def query_manifest_path(self, device): - dirs = self.query_abs_dirs() - device_config = self.config['devices'][device] - manifest_file = os.path.join( - dirs['gecko_local_dir'], - 'b2g', 'config', - device_config.get('gecko_device_dir', device), - 'sources.xml') - return manifest_file - - def hg_add(self, repo_path, path): - """ - Runs 'hg add' on path - """ - hg = self.query_exe('hg', return_type='list') - cmd = hg + ['add', path] - self.run_command(cmd, cwd=repo_path) - - def hg_commit(self, repo_path, message): - """ - Commits changes in repo_path, with specified user and commit message - """ - user = self.config['hg_user'] - hg = self.query_exe('hg', return_type='list') - cmd = hg + ['commit', '-u', user, '-m', message] - env = self.query_env(partial_env={'LANG': 'en_US.UTF-8'}) - status = self.run_command(cmd, cwd=repo_path, env=env) - return status == 0 - - def hg_push(self, repo_path): - hg = self.query_exe('hg', return_type='list') - command = hg + ["push", "-e", - "ssh -oIdentityFile=%s -l %s" % ( - self.config["ssh_key"], self.config["ssh_user"], - ), - self.config["gecko_push_url"]] - status = self.run_command(command, cwd=repo_path, - error_list=HgErrorList) - if status != 0: - # We failed; get back to a known state so we can either retry - # or fail out and continue later. - self.run_command(hg + ["--config", "extensions.mq=", - "strip", "--no-backup", "outgoing()"], - cwd=repo_path) - self.run_command(hg + ["up", "-C"], - cwd=repo_path) - self.run_command(hg + ["--config", "extensions.purge=", - "purge", "--all"], - cwd=repo_path) - return False - return True - - def _read_json(self, path): - if not os.path.exists(path): - self.error("%s doesn't exist!" 
% path) - return - contents = self.read_from_file(path) - try: - json_contents = json.loads(contents) - return json_contents - except ValueError: - self.error("%s is invalid json!" % path) - - def get_revision_list(self, repo_config, prev_revision=None): - revision_list = [] - url = repo_config['polling_url'] - branch = repo_config.get('branch', 'default') - max_revisions = self.config['gaia_max_revisions'] - dirs = self.query_abs_dirs() - if prev_revision: - # hgweb json-pushes hardcode - url += '&fromchange=%s' % prev_revision - file_name = os.path.join(dirs['abs_work_dir'], - '%s.json' % repo_config['repo_name']) - # might be nice to have a load-from-url option; til then, - # download then read - if self.retry( - self.download_file, - args=(url, ), - kwargs={'file_name': file_name}, - error_level=ERROR, - sleeptime=0, - ) != file_name: - return None - contents = self.read_from_file(file_name) - revision_dict = json.loads(contents) - if not revision_dict: - return [] - # Discard any revisions not on the branch we care about. - for k in sorted(revision_dict, key=int): # numeric sort - v = revision_dict[k] - if v['changesets'][-1]['branch'] == branch: - revision_list.append(v) - # Limit the list to max_revisions. - return revision_list[:max_revisions] - - def update_gaia_json(self, path, - hg_revision, hg_repo_path, - git_revision, git_repo, - ): - """ Update path with repo_path + revision. - - If the revision hasn't changed, don't do anything. - If the repo_path changes or the current json is invalid, error but don't fail. - """ - if not os.path.exists(path): - self.add_summary( - "%s doesn't exist; can't update with repo_path %s revision %s!" % - (path, hg_repo_path, hg_revision), - level=ERROR, - ) - return -1 - contents = self._read_json(path) - if contents: - if contents.get("repo_path") != hg_repo_path: - self.error("Current repo_path %s differs from %s!" 
% (str(contents.get("repo_path")), hg_repo_path)) - if contents.get("revision") == hg_revision: - self.info("Revision %s is the same. No action needed." % hg_revision) - self.add_summary("Revision %s is unchanged for repo_path %s." % (hg_revision, hg_repo_path)) - return 0 - contents = { - "repo_path": hg_repo_path, - "revision": hg_revision, - "git": { - "remote": git_repo, - "branch": "", - "git_revision": git_revision, - } - } - if self.write_to_file(path, json.dumps(contents, indent=4) + "\n") != path: - self.add_summary( - "Unable to update %s with new revision %s!" % (path, hg_revision), - level=ERROR, - ) - return -2 - - def build_commit_message(self, revision_list, repo_name, repo_url): - revisions = [] - comments = '' - for revision_config in reversed(revision_list): - for changeset_config in reversed(revision_config['changesets']): - revisions.append(changeset_config['node']) - comments += "\n========\n" - comments += u'\n%s/rev/%s\nAuthor: %s\nDesc: %s\n' % ( - repo_url, - changeset_config['node'][:12], - changeset_config['author'], - changeset_config['desc'], - ) - message = 'Bumping gaia.json for %d %s revision(s) a=gaia-bump\n' % ( - len(revisions), - repo_name - ) - message += comments - message = message.encode("utf-8") - return message - - def query_treestatus(self): - "Return True if we can land based on treestatus" - c = self.config - dirs = self.query_abs_dirs() - tree = c.get('treestatus_tree', os.path.basename(c['gecko_pull_url'].rstrip("/"))) - treestatus_url = "%s/%s?format=json" % (c['treestatus_base_url'], tree) - treestatus_json = os.path.join(dirs['abs_work_dir'], 'treestatus.json') - if not os.path.exists(dirs['abs_work_dir']): - self.mkdir_p(dirs['abs_work_dir']) - - if self.download_file(treestatus_url, file_name=treestatus_json) != treestatus_json: - # Failed to check tree status...assume we can land - self.info("failed to check tree status - assuming we can land") - return True - - treestatus = self._read_json(treestatus_json) - if 
treestatus['status'] != 'closed': - self.info("treestatus is %s - assuming we can land" % repr(treestatus['status'])) - return True - - return False - - def query_devices(self): - c = self.config - override = c.get('device_override') - if override: - return {override: c['devices'][override]} - else: - return c['devices'] - - - def query_gaia_git_rev(self): - """Returns (and caches) the git revision for gaia corresponding to the - latest hg revision on our branch.""" - if not self.gaia_hg_revision: - return None - - if not self.gaia_git_rev: - self.gaia_git_rev = self.query_mapper_git_revision( - self.config['mapper_url'], - self.config['gaia_mapper_project'], - self.gaia_hg_revision, - ) - return self.gaia_git_rev - - # Actions {{{1 - def check_treestatus(self): - if not self.query_treestatus(): - self.info("breaking early since treestatus is closed") - sys.exit(0) - - def checkout_gecko(self): - c = self.config - dirs = self.query_abs_dirs() - dest = dirs['gecko_local_dir'] - repos = [{ - 'repo': c['gecko_pull_url'], - 'tag': c.get('gecko_tag', 'default'), - 'dest': dest, - 'vcs': 'hgtool', - 'hgtool_base_bundle_urls': c.get('hgtool_base_bundle_urls'), - }] - self.vcs_checkout_repos(repos) - - def checkout_manifests(self): - c = self.config - dirs = self.query_abs_dirs() - repos = [ - {'vcs': 'gittool', - 'repo': c['manifests_repo'], - 'revision': c['manifests_revision'], - 'dest': dirs['manifests_dir']}, - ] - self.vcs_checkout_repos(repos) - - def massage_manifests(self): - """ - For each device in config['devices'], we'll strip projects mentioned in - 'ignore_projects', or that have group attribute mentioned in - 'filter_groups'. - We'll also map remote urls - Finally, we'll resolve absolute refs for projects that aren't fully - specified. 
- """ - for device, device_config in self.query_devices().items(): - self.info("Massaging manifests for %s" % device) - manifest = self.query_manifest(device) - self.filter_projects(device_config, manifest) - self.filter_groups(device_config, manifest) - self.map_remotes(manifest) - self.resolve_refs(manifest) - repo_manifest.cleanup(manifest) - self.device_manifests[device] = manifest - - manifest_path = self.query_manifest_path(device) - manifest_xml = manifest.toxml() - if not manifest_xml.endswith("\n"): - manifest_xml += "\n" - - if self.config['do_write']: - self.mkdir_p(os.path.dirname(manifest_path)) - self.write_to_file(manifest_path, manifest_xml) - - def commit_manifests(self): - dirs = self.query_abs_dirs() - repo_path = dirs['gecko_local_dir'] - for device, device_config in self.query_devices().items(): - manifest_path = self.query_manifest_path(device) - self.hg_add(repo_path, manifest_path) - - message = "Bumping manifests a=b2g-bump" - return self.hg_commit(repo_path, message) - - def bump_gaia(self): - dirs = self.query_abs_dirs() - repo_path = dirs['gecko_local_dir'] - gaia_json_path = os.path.join(repo_path, - self.config['gaia_revision_file']) - contents = self._read_json(gaia_json_path) - - # Get the list of changes - if contents: - prev_revision = contents.get('revision') - self.gaia_hg_revision = prev_revision - else: - prev_revision = None - - polling_url = "%s/json-pushes?full=1" % self.config['gaia_repo_url'] - repo_config = { - 'polling_url': polling_url, - 'branch': self.config.get('gaia_branch', 'default'), - 'repo_name': 'gaia', - } - revision_list = self.get_revision_list(repo_config=repo_config, - prev_revision=prev_revision) - if not revision_list: - # No changes - return False - - # Update the gaia.json with the list of changes - hg_gaia_repo_path = urlparse(self.config['gaia_repo_url']).path.lstrip('/') - hg_revision = revision_list[-1]['changesets'][-1]['node'] - self.gaia_hg_revision = hg_revision - - git_revision = 
self.query_gaia_git_rev() - git_gaia_repo = self.config['gaia_git_repo'] - - self.update_gaia_json(gaia_json_path, - hg_revision, hg_gaia_repo_path, - git_revision, git_gaia_repo, - ) - - # Commit - message = self.build_commit_message(revision_list, 'gaia', - self.config['gaia_repo_url']) - self.hg_commit(repo_path, message) - return True - - def push(self): - dirs = self.query_abs_dirs() - repo_path = dirs['gecko_local_dir'] - return self.hg_push(repo_path) - - def push_loop(self): - max_retries = 5 - for _ in range(max_retries): - changed = False - if not self.query_treestatus(): - # Tree is closed; exit early to avoid a bunch of wasted time - self.info("breaking early since treestatus is closed") - break - - self.checkout_gecko() - if not self.config.get('skip_gaia_json') and self.bump_gaia(): - changed = True - self.checkout_manifests() - self.massage_manifests() - if self.commit_manifests(): - changed = True - - if not changed: - # Nothing changed, we're all done - self.info("No changes - all done") - break - - if self.push(): - # We did it! Hurray! - self.info("Great success!") - break - # If we're here, then the push failed. It also stripped any - # outgoing commits, so we should be in a pristine state again - # Empty our local cache of manifests so they get loaded again next - # time through this loop. This makes sure we get fresh upstream - # manifests, and avoids problems like bug 979080 - self.device_manifests = {} - - # Sleep before trying again - self.info("Sleeping 60 before trying again") - time.sleep(60) - else: - self.fatal("Didn't complete successfully (hit max_retries)") - - def import_git_ref_cache(self): - """ This action imports the git ref cache created during a previous run. This is - useful for sharing the cache across multiple branches (for example). 
- """ - if os.path.exists(self.git_ref_cache_file): - self._git_ref_cache = self._read_json(self.git_ref_cache_file) - - def export_git_ref_cache(self): - """ This action exports the git ref cache created during this run. This is useful - for sharing the cache across multiple branches (for example). - """ - if self.write_to_file(self.git_ref_cache_file, json.dumps(self._git_ref_cache, sort_keys=True, indent=4) + "\n") != self.git_ref_cache_file: - self.add_summary( - "Unable to update %s with git ref cache" % self.git_ref_cache_file, - level=ERROR, - ) - return -2 - - def delete_git_ref_cache(self): - """ Used to delete the git ref cache from the file system. The cache can be used - to persist git ls-remote lookup results, for example to reuse them between b2g bumper - runs. Since the results are stale and do not get updated, the cache should be - periodically deleted, so that the new refs can be fetched. The cache can also be used - across branches/devices. - """ - self.log("Deleting git ls-remote look-up cache file ('%s')...") - os.remove(self.git_ref_cache_file) - -# __main__ {{{1 -if __name__ == '__main__': - bumper = B2GBumper() - bumper.run_and_exit() diff --git a/testing/mozharness/scripts/vcs-sync/initial_beagle.py b/testing/mozharness/scripts/vcs-sync/initial_beagle.py deleted file mode 100644 index 7374599f255..00000000000 --- a/testing/mozharness/scripts/vcs-sync/initial_beagle.py +++ /dev/null @@ -1,744 +0,0 @@ -#!/usr/bin/env python -# ***** BEGIN LICENSE BLOCK ***** -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this file, -# You can obtain one at http://mozilla.org/MPL/2.0/. -# ***** END LICENSE BLOCK ***** -"""initial_beagle.py - -Multi-repo m-c hg->git initial conversion with cvs prepending, specifically for -gecko.git and beagle support. 
- -Separated from hg_git.py for a) simplifying hg_git.py for its main purpose, -and b) somewhat protecting the initial conversion steps from future edits. -""" - -from copy import deepcopy -import mmap -import os -import re -import sys - -sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0]))) - -import mozharness -external_tools_path = os.path.join( - os.path.abspath(os.path.dirname(os.path.dirname(mozharness.__file__))), - 'external_tools', -) - -from mozharness.base.errors import HgErrorList, GitErrorList, TarErrorList -from mozharness.base.log import INFO, FATAL -from mozharness.base.python import VirtualenvMixin, virtualenv_config_options -from mozharness.base.transfer import TransferMixin -from mozharness.base.vcs.vcsbase import VCSScript -from mozharness.mozilla.tooltool import TooltoolMixin - - -# HgGitScript {{{1 -class HgGitScript(VirtualenvMixin, TooltoolMixin, TransferMixin, VCSScript): - """ Beagle-oriented hg->git script (lots of mozilla-central hardcodes; - assumption that we're going to be importing lots of branches). - - Beagle is a git repo of mozilla-central, with full cvs history, - and a number of developer-oriented repositories and branches added. - - The partner-oriented gecko.git could also be incorporated into this - script with some changes. - """ - - mapfile_binary_search = None - successful_repos = [] # Unused; for notify() capability with vcs_sync.py - - def __init__(self, require_config_file=True): - super(HgGitScript, self).__init__( - config_options=virtualenv_config_options, - all_actions=[ - 'clobber', - 'create-virtualenv', - 'pull', - 'create-stage-mirror', - 'create-work-mirror', - 'initial-conversion', - 'prepend-cvs', - 'fix-tags', - 'notify', - ], - # These default actions are the update loop that we run after the - # initial steps to create the work mirror with all the branches + - # cvs history have been run. 
- require_config_file=require_config_file - ) - - # Helper methods {{{1 - def query_abs_dirs(self): - """ Define paths. - """ - if self.abs_dirs: - return self.abs_dirs - abs_dirs = super(HgGitScript, self).query_abs_dirs() - abs_dirs['abs_cvs_history_dir'] = os.path.join( - abs_dirs['abs_work_dir'], 'mozilla-cvs-history') - abs_dirs['abs_conversion_dir'] = os.path.join( - abs_dirs['abs_work_dir'], 'conversion', - self.config['conversion_dir'] - ) - abs_dirs['abs_source_dir'] = os.path.join( - abs_dirs['abs_work_dir'], 'stage_source') - abs_dirs['abs_repo_sync_tools_dir'] = os.path.join( - abs_dirs['abs_work_dir'], 'repo-sync-tools') - abs_dirs['abs_git_rewrite_dir'] = os.path.join( - abs_dirs['abs_work_dir'], 'mc-git-rewrite') - abs_dirs['abs_target_dir'] = os.path.join(abs_dirs['abs_work_dir'], - 'target') - self.abs_dirs = abs_dirs - return self.abs_dirs - - def query_all_repos(self): - """ Very simple method, but we need this concatenated list many times - throughout the script. - """ - return [self.config['initial_repo']] - - def _update_stage_repo(self, repo_config, retry=True, clobber=False): - """ Update a stage repo. - The stage mirror is a buffer clean clone of repositories. - The logic behind this is that we get occasional corruption from - |hg pull|. It's much less time-consuming to detect this in - a clean clone, and reclone, than to detect this in a working - conversion directory, and try to repair or reclone+reconvert. - - We pull the stage mirror into the work mirror, where the - conversion - is done. 
- """ - hg = self.query_exe('hg', return_type='list') - dirs = self.query_abs_dirs() - source_dest = os.path.join(dirs['abs_source_dir'], - repo_config['repo_name']) - if clobber: - self.rmtree(source_dest) - if not os.path.exists(source_dest): - if self.retry( - self.run_command, - args=(hg + ['clone', '--noupdate', repo_config['repo'], - source_dest], ), - kwargs={ - 'output_timeout': 15 * 60, - 'cwd': dirs['abs_work_dir'], - 'error_list': HgErrorList, - }, - ): - if retry: - return self._update_stage_repo( - repo_config, retry=False, clobber=True) - else: - self.fatal("Can't clone %s!" % repo_config['repo']) - cmd = hg + ['pull'] - if self.retry( - self.run_command, - args=(cmd, ), - kwargs={ - 'output_timeout': 15 * 60, - 'cwd': source_dest, - }, - ): - if retry: - return self._update_stage_repo( - repo_config, retry=False, clobber=True) - else: - self.fatal("Can't pull %s!" % repo_config['repo']) - # commenting out hg verify since it takes ~5min per repo; hopefully - # exit codes will save us -# if self.run_command(hg + ["verify"], cwd=source_dest): -# if retry: -# return self._update_stage_repo(repo_config, retry=False, clobber=True) -# else: -# self.fatal("Can't verify %s!" % source_dest) - - def _check_initial_git_revisions(self, repo_path, expected_sha1, - expected_sha2): - """ Verify that specific git revisions match expected shas. - - This involves some hardcodes, which is unfortunate, but they save - time, especially since we're hardcoding mozilla-central behavior - anyway. 
- """ - git = self.query_exe('git', return_type='list') - output = self.get_output_from_command( - git + ['log', '--oneline', '--grep', '374866'], - cwd=repo_path - ) - # hardcode test - if not output: - self.fatal("No output from git log!") - rev = output.split(' ')[0] - if not rev.startswith(expected_sha1): - self.fatal("Output doesn't match expected sha %s for initial hg commit: %s" % (expected_sha1, str(output))) - output = self.get_output_from_command( - git + ['log', '-n', '1', '%s^' % rev], - cwd=repo_path - ) - if not output: - self.fatal("No output from git log!") - rev = output.splitlines()[0].split(' ')[1] - if rev != expected_sha2: - self.fatal("Output rev %s doesn't show expected rev %s:\n\n%s" % (rev, expected_sha2, output)) - - def munge_mapfile(self): - """ From https://github.com/ehsan/mozilla-history-tools/blob/master/initial_conversion/translate_git-mapfile.py - """ - self.info("Updating pre-cvs mapfile...") - dirs = self.query_abs_dirs() - orig_mapfile = os.path.join(dirs['abs_upload_dir'], 'pre-cvs-mapfile') - conversion_dir = dirs['abs_conversion_dir'] - mapfile = os.path.join(dirs['abs_work_dir'], 'post-cvs-mapfile') - mapdir = os.path.join(dirs['abs_git_rewrite_dir'], 'map') - orig_mapfile_fh = open(orig_mapfile, "r") - mapfile_fh = open(mapfile, "w") - for line in orig_mapfile_fh: - tokens = line.split(" ") - if len(tokens) == 2: - git_sha = tokens[0].strip() - hg_sha = tokens[1].strip() - new_path = os.path.join(mapdir, git_sha) - if os.path.exists(new_path): - translated_git_sha = open(new_path).read().strip() - print >>mapfile_fh, "%s %s" % (translated_git_sha, hg_sha) - else: - print >>mapfile_fh, "%s %s" % (git_sha, hg_sha) - orig_mapfile_fh.close() - mapfile_fh.close() - self.copyfile( - mapfile, - os.path.join(conversion_dir, '.hg', 'git-mapfile'), - error_level=FATAL, - ) - self.copy_to_upload_dir(mapfile, dest="post-cvs-mapfile", - log_level=INFO) - - def make_repo_bare(self, path, tmpdir=None): - """ Since we do a |git checkout| 
in prepend_cvs(), and later want - a bare repo. - """ - self.info("Making %s/.git a bare repo..." % path) - for p in (path, os.path.join(path, ".git")): - if not os.path.exists(p): - self.error("%s doesn't exist! Skipping..." % p) - if tmpdir is None: - tmpdir = os.path.dirname(os.path.abspath(path)) - git = self.query_exe("git", return_type="list") - for dirname in (".git", ".hg"): - if os.path.exists(os.path.join(path, dirname)): - self.move( - os.path.join(path, dirname), - os.path.join(tmpdir, dirname), - error_level=FATAL, - ) - self.rmtree(path, error_level=FATAL) - self.mkdir_p(path) - for dirname in (".git", ".hg"): - if os.path.exists(os.path.join(tmpdir, dirname)): - self.move( - os.path.join(tmpdir, dirname), - os.path.join(path, dirname), - error_level=FATAL, - ) - self.run_command( - git + ['--git-dir', os.path.join(path, ".git"), - 'config', '--bool', 'core.bare', 'true'], - halt_on_failure=True, - ) - - def _fix_tags(self, conversion_dir, git_rewrite_dir): - """ Ehsan's git tag fixer, ported from bash. - - `` Git's history rewriting is not smart about preserving the tags in - your repository, so you would end up with tags which point to - commits in the old history line. If you push your repository to - some other repository for example, all of the tags in the target - repository would be invalid, since they would be pointing to - commits that don't exist in that repository. 
'' - - https://github.com/ehsan/mozilla-history-tools/blob/master/initial_conversion/translate_git_tags.sh - """ - self.info("Fixing tags...") - git = self.query_exe('git', return_type='list') - output = self.get_output_from_command( - git + ['for-each-ref'], - cwd=conversion_dir, - halt_on_failure=True, - ) - for line in output.splitlines(): - old_sha1, the_rest = line.split(' ') - git_type, name = the_rest.split(' ') - if git_type == 'commit' and name.startswith('refs/tags'): - path = os.path.join(git_rewrite_dir, 'map', old_sha1) - if os.path.exists(path): - new_sha1 = self.read_from_file(path).rstrip() - self.run_command( - git + ['update-ref', name, - new_sha1, old_sha1], - cwd=conversion_dir, - error_list=GitErrorList, - halt_on_failure=True, - ) - - def _do_push_repo(self, base_command, refs_list=None, kwargs=None): - """ Helper method for _push_repo() since it has to be able to break - out of the target_repo list loop, and the commands loop borks that. - """ - commands = [] - if refs_list: - while len(refs_list) > 10: - commands.append(base_command + refs_list[0:10]) - refs_list = refs_list[10:] - commands.append(base_command + refs_list) - else: - commands = [base_command] - if kwargs is None: - kwargs = {} - for command in commands: - # Do the push, with retry! - if self.retry( - self.run_command, - args=(command, ), - kwargs=kwargs, - ): - return -1 - - def _push_repo(self, repo_config): - """ Push a repo to a path ("test_push") or remote server. - - This was meant to be a cross-vcs method, but currently only - covers git pushes. 
- """ - dirs = self.query_abs_dirs() - conversion_dir = dirs['abs_conversion_dir'] - source_dir = os.path.join(dirs['abs_source_dir'], repo_config['repo_name']) - git = self.query_exe('git', return_type='list') - hg = self.query_exe('hg', return_type='list') - return_status = '' - for target_config in repo_config['targets']: - if target_config.get("vcs", "git") == "git": - base_command = git + ['push'] - env = {} - if target_config.get("force_push"): - base_command.append("-f") - if target_config.get("test_push"): - target_name = os.path.join( - dirs['abs_target_dir'], target_config['target_dest']) - base_command.append(target_name) - else: - target_name = target_config['target_dest'] - remote_config = self.config.get('remote_targets', {}).get(target_name) - if not remote_config: - self.fatal("Can't find %s in remote_targets!" % target_name) - base_command.append(remote_config['repo']) - # Allow for using a custom git ssh key. - env['GIT_SSH_KEY'] = remote_config['ssh_key'] - env['GIT_SSH'] = os.path.join(external_tools_path, 'git-ssh-wrapper.sh') - # Allow for pushing a subset of repo branches to the target. - # If we specify that subset, we can also specify different - # names for those branches (e.g. b2g18 -> master for a - # standalone b2g18 repo) - # We query hg for these because the conversion dir will have - # branches from multiple hg repos, and the regexes may match - # too many things. - refs_list = [] - branch_map = self.query_branches( - target_config.get('branch_config', repo_config.get('branch_config', {})), - source_dir, - ) - # If the target_config has a branch_config, the key is the - # local git branch and the value is the target git branch. - if target_config.get("branch_config"): - for (branch, target_branch) in branch_map.items(): - refs_list += ['+refs/heads/%s:refs/heads/%s' % (branch, target_branch)] - # Otherwise the key is the hg branch and the value is the git - # branch; use the git branch for both local and target git - # branch names. 
- else: - for (hg_branch, git_branch) in branch_map.items(): - refs_list += ['+refs/heads/%s:refs/heads/%s' % (git_branch, git_branch)] - # Allow for pushing a subset of tags to the target, via name or - # regex. Again, query hg for this list because the conversion - # dir will contain tags from multiple hg repos, and the regexes - # may match too many things. - tag_config = target_config.get('tag_config', repo_config.get('tag_config', {})) - if tag_config.get('tags'): - for (tag, target_tag) in tag_config['tags'].items(): - refs_list += ['+refs/tags/%s:refs/tags/%s' % (tag, target_tag)] - if tag_config.get('tag_regexes'): - regex_list = [] - for regex in tag_config['tag_regexes']: - regex_list.append(re.compile(regex)) - tag_list = self.get_output_from_command( - hg + ['tags'], - cwd=source_dir, - ) - for tag_line in tag_list.splitlines(): - if not tag_line: - continue - tag_parts = tag_line.split() - if not tag_parts: - self.warning("Bogus tag_line? %s" % str(tag_line)) - continue - tag_name = tag_parts[0] - for regex in regex_list: - if regex.search(tag_name) is not None: - refs_list += ['+refs/tags/%s:refs/tags/%s' % (tag_name, tag_name)] - continue - error_msg = "%s: Can't push %s to %s!\n" % (repo_config['repo_name'], conversion_dir, target_name) - if self._do_push_repo( - base_command, - refs_list=refs_list, - kwargs={ - 'output_timeout': target_config.get("output_timeout", 30 * 60), - 'cwd': os.path.join(conversion_dir, '.git'), - 'error_list': GitErrorList, - 'partial_env': env, - } - ): - if target_config.get("test_push"): - error_msg += "This was a test push that failed; not proceeding any further with %s!\n" % repo_config['repo_name'] - self.error(error_msg) - return_status += error_msg - if target_config.get("test_push"): - break - else: - # TODO write hg - error_msg = "%s: Don't know how to deal with vcs %s!\n" % ( - target_config['target_dest'], target_config['vcs']) - self.error(error_msg) - return_status += error_msg - return return_status - - def 
_query_mapped_revision(self, revision=None, mapfile=None): - """ Use the virtualenv mapper module to search a mapfile for a - revision. - """ - if not callable(self.mapfile_binary_search): - site_packages_path = self.query_python_site_packages_path() - sys.path.append(os.path.join(site_packages_path, 'mapper')) - try: - from bsearch import mapfile_binary_search - global log - log = self.log_obj - self.mapfile_binary_search = mapfile_binary_search - except ImportError, e: - self.fatal("Can't import mapfile_binary_search! %s\nDid you create-virtualenv?" % str(e)) - # I wish mapper did this for me, but ... - fd = open(mapfile, 'rb') - m = mmap.mmap(fd.fileno(), 0, mmap.MAP_PRIVATE, mmap.PROT_READ) - return self.mapfile_binary_search(m, revision) - - def _post_fatal(self, message=None, exit_code=None): - """ After we call fatal(), run this method before exiting. - """ - if 'notify' in self.actions: - self.notify(message=message, fatal=True) - self.copy_logs_to_upload_dir() - - def query_branches(self, branch_config, repo_path, vcs='hg'): - """ Given a branch_config of branches and branch_regexes, return - a dict of existing branch names to target branch names. - """ - branch_map = {} - if "branches" in branch_config: - branch_map = deepcopy(branch_config['branches']) - if "branch_regexes" in branch_config: - regex_list = list(branch_config['branch_regexes']) - full_branch_list = [] - if vcs == 'hg': - hg = self.query_exe("hg", return_type="list") - # This assumes we always want closed branches as well. - # If not we may need more options. 
- output = self.get_output_from_command( - hg + ['branches', '-a'], - cwd=repo_path - ) - if output: - for line in output.splitlines(): - full_branch_list.append(line.split()[0]) - elif vcs == 'git': - git = self.query_exe("git", return_type="list") - output = self.get_output_from_command( - git + ['branch', '-l'], - cwd=repo_path - ) - if output: - for line in output.splitlines(): - full_branch_list.append(line.replace('*', '').split()[0]) - for regex in regex_list: - for branch in full_branch_list: - m = re.search(regex, branch) - if m: - # Don't overwrite branch_map[branch] if it exists - branch_map.setdefault(branch, branch) - return branch_map - - # Actions {{{1 - def create_stage_mirror(self): - """ Rather than duplicate the logic here and in update_stage_mirror(), - just call it. - - We could just create the initial_repo stage mirror here, but - there's no real harm in cloning all repos here either. Putting - the time hit in the one-time-setup, rather than the first update - loop, makes sense. - """ - for repo_config in self.query_all_repos(): - self._update_stage_repo(repo_config) - - def write_hggit_hgrc(self, dest): - # Update .hg/hgrc, if not already updated - hgrc = os.path.join(dest, '.hg', 'hgrc') - contents = '' - if os.path.exists(hgrc): - contents = self.read_from_file(hgrc) - if 'hggit=' not in contents: - hgrc_update = """[extensions] -hggit= -[git] -intree=1 -""" - self.write_to_file(hgrc, hgrc_update, open_mode='a') - - def create_work_mirror(self): - """ Create the work_mirror, initial_repo only, from the stage_mirror. - This is where the conversion will occur. 
- """ - hg = self.query_exe("hg", return_type="list") - git = self.query_exe("git", return_type="list") - dirs = self.query_abs_dirs() - repo_config = deepcopy(self.config['initial_repo']) - work_dest = dirs['abs_conversion_dir'] - source_dest = os.path.join( - dirs['abs_source_dir'], repo_config['repo_name']) - if not os.path.exists(work_dest): - self.run_command(hg + ["init", work_dest], - halt_on_failure=True) - self.run_command(hg + ["pull", source_dest], - cwd=work_dest, - error_list=HgErrorList, - halt_on_failure=True) - # The revision 82e4f1b7bbb6e30a635b49bf2107b41a8c26e3d2 - # reacts poorly to git-filter-branch-keep-rewrites (in - # prepend-cvs), resulting in diverging shas. - # To avoid this, strip back to 317fe0f314ab so the initial conversion - # doesn't include 82e4f1b7bbb6e30a635b49bf2107b41a8c26e3d2, and - # git-filter-branch-keep-rewrites is never run against this - # revision. This takes 3 strips, due to forking/merging. - # See https://bugzilla.mozilla.org/show_bug.cgi?id=847727#c40 through - # https://bugzilla.mozilla.org/show_bug.cgi?id=847727#c60 - # Also, yay hardcodes! - for hg_revision in ("26cb30a532a1", "aad29aa89237", "9f2fa4839e98", "f8d0784186b7"): - self.run_command(hg + ["--config", "extensions.mq=", "strip", - "--no-backup", hg_revision], - cwd=work_dest, - error_list=HgErrorList, - halt_on_failure=True) - # Make sure 317fe0f314ab is the only head! - self.info("Making sure we've stripped m-c down to a single head 317fe0f314ab...") - output = self.get_output_from_command(hg + ["heads"], - cwd=work_dest, - halt_on_failure=True) - for line in output.splitlines(): - if line.startswith("changeset:") and not line.endswith("317fe0f314ab"): - self.fatal("Found a head that is not 317fe0f314ab! hg strip or git conversions. Needs to support both the monolithic beagle/gecko.git -type conversions, as well as many-to-many (l10n, build repos, etc.) 
-""" - -from copy import deepcopy -import mmap -import os -import pprint -import re -import sys -import time - -try: - import simplejson as json - assert json -except ImportError: - import json - -sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0]))) - -import mozharness -external_tools_path = os.path.join( - os.path.abspath(os.path.dirname(os.path.dirname(mozharness.__file__))), - 'external_tools', -) - -from mozharness.base.errors import HgErrorList, GitErrorList -from mozharness.base.log import INFO, ERROR, FATAL -from mozharness.base.python import VirtualenvMixin, virtualenv_config_options -from mozharness.base.transfer import TransferMixin -from mozharness.base.vcs.vcssync import VCSSyncScript -from mozharness.mozilla.tooltool import TooltoolMixin - - -# HgGitScript {{{1 -class HgGitScript(VirtualenvMixin, TooltoolMixin, TransferMixin, VCSSyncScript): - """ Beagle-oriented hg->git script (lots of mozilla-central hardcodes; - assumption that we're going to be importing lots of branches). - - Beagle is a git repo of mozilla-central, with full cvs history, - and a number of developer-oriented repositories and branches added. - - The partner-oriented gecko.git could also be incorporated into this - script with some changes. 
- """ - - mapfile_binary_search = None - all_repos = None - successful_repos = [] - config_options = [ - [["--no-check-incoming", ], { - "action": "store_false", - "dest": "check_incoming", - "default": True, - "help": "Don't check for incoming changesets", - }], - [["--max-log-sample-size", ], { - "action": "store", - "dest": "email_max_log_sample_size", - "type": "int", - "default": 102400, - "help": "Specify the maximum number of characters from a log file to be " - "embedded in the email body, per embedding (not total - note we " - "embed two separate log samples into the email - so maximum " - "email body size can end up a little over 2x this amount).", - }], - ] - - def __init__(self, require_config_file=True): - super(HgGitScript, self).__init__( - config_options=virtualenv_config_options + self.config_options, - all_actions=[ - 'clobber', - 'list-repos', - 'create-virtualenv', - 'update-stage-mirror', - 'update-work-mirror', - 'create-git-notes', - 'publish-to-mapper', - 'push', - 'combine-mapfiles', - 'upload', - 'notify', - ], - # These default actions are the update loop that we run after the - # initial steps to create the work mirror with all the branches + - # cvs history have been run. - default_actions=[ - 'list-repos', - 'create-virtualenv', - 'update-stage-mirror', - 'update-work-mirror', - 'push', - 'combine-mapfiles', - 'upload', - 'notify', - ], - require_config_file=require_config_file - ) - self.remote_targets = None - - # Helper methods {{{1 - def query_abs_dirs(self): - """ Define paths. 
- """ - if self.abs_dirs: - return self.abs_dirs - abs_dirs = super(HgGitScript, self).query_abs_dirs() - abs_dirs['abs_cvs_history_dir'] = os.path.join( - abs_dirs['abs_work_dir'], 'mozilla-cvs-history') - abs_dirs['abs_source_dir'] = os.path.join( - abs_dirs['abs_work_dir'], 'stage_source') - abs_dirs['abs_repo_sync_tools_dir'] = os.path.join( - abs_dirs['abs_work_dir'], 'repo-sync-tools') - abs_dirs['abs_git_rewrite_dir'] = os.path.join( - abs_dirs['abs_work_dir'], 'mc-git-rewrite') - abs_dirs['abs_target_dir'] = os.path.join(abs_dirs['abs_work_dir'], - 'target') - if 'conversion_dir' in self.config: - abs_dirs['abs_conversion_dir'] = os.path.join( - abs_dirs['abs_work_dir'], 'conversion', - self.config['conversion_dir'] - ) - self.abs_dirs = abs_dirs - return self.abs_dirs - - def init_git_repo(self, path, additional_args=None, deny_deletes=False): - """ Create a git repo, with retries. - - We call this with additional_args=['--bare'] to save disk + - make things cleaner. - """ - git = self.query_exe("git", return_type="list") - cmd = git + ['init'] - # generally for --bare - if additional_args: - cmd.extend(additional_args) - cmd.append(path) - status = self.retry( - self.run_command, - args=(cmd, ), - error_level=FATAL, - error_message="Can't set up %s!" 
            % path
        )
        status = self.run_command(
            git + ['config', 'receive.denyNonFastForwards', 'true'],
            cwd=path
        )
        if deny_deletes:
            status = self.run_command(
                git + ['config', 'receive.denyDeletes', 'true'],
                cwd=path
            )
        # Status of the last |git config| call; 0 on success.
        return status

    def write_hggit_hgrc(self, dest):
        """ Update .hg/hgrc, if not already updated.

            Appends the hggit extension (with intree git dirs) to the repo's
            hgrc so that |hg gexport| works in the conversion dir.  Idempotent:
            skipped if 'hggit=' is already present.
        """
        hgrc = os.path.join(dest, '.hg', 'hgrc')
        contents = ''
        if os.path.exists(hgrc):
            contents = self.read_from_file(hgrc)
        if 'hggit=' not in contents:
            hgrc_update = """[extensions]
hggit=
[git]
intree=1
"""
            # Append rather than overwrite so existing hgrc settings survive.
            self.write_to_file(hgrc, hgrc_update, open_mode='a')

    def _process_locale(self, locale, type, config, l10n_remote_targets, name, l10n_repos):
        """ This contains the common processing that we do on both gecko_config
            and gaia_config for a given locale.

            Appends a per-locale repo_dict to l10n_repos (mutated in place) and
            fills in locale-specific entries in l10n_remote_targets as needed.
        """
        replace_dict = {'locale': locale}
        new_targets = deepcopy(config.get('targets', {}))
        for target in new_targets:
            dest = target['target_dest']
            if '%(locale)s' in dest:
                new_dest = dest % replace_dict
                target['target_dest'] = new_dest
                remote_target = l10n_remote_targets.get(new_dest)
                if remote_target is None:  # generate target if not seen before
                    possible_remote_target = l10n_remote_targets.get(dest)
                    # target may be remote, or local - we can tell by seeing
                    # local targets will be none - remote targets will not,
                    # so we can use this to test if it is local or remote
                    if possible_remote_target is not None:
                        remote_target = deepcopy(possible_remote_target)
                        remote_repo = remote_target.get('repo')
                        if '%(locale)s' in remote_repo:
                            remote_target['repo'] = remote_repo % replace_dict
                        l10n_remote_targets[new_dest] = remote_target
        long_name = '%s_%s_%s' % (type, name, locale)
        repo_dict = {
            'repo': config['hg_url'] % replace_dict,
            'revision': 'default',
            'repo_name': long_name,
            'conversion_dir': long_name,
            'mapfile_name': '%s-mapfile' % long_name,
            'targets': new_targets,
            'vcs': 'hg',
            'branch_config': {
                'branches': {
                    'default': config['git_branch_name'],
                },
            },
            'tag_config': config.get('tag_config', {}),
            'mapper': config.get('mapper', {}),
            'generate_git_notes': config.get('generate_git_notes', {}),
        }
        l10n_repos.append(repo_dict)

    def _query_l10n_repos(self):
        """ Since I didn't want to have to build a huge static list of l10n
            repos, and since it would be nicest to read the list of locales
            from their SSoT files.

            Builds the l10n repo list from the gecko/gaia locales files named
            in self.config['l10n_config'], sets self.remote_targets, and
            returns the repo list.
        """
        l10n_repos = []
        l10n_remote_targets = deepcopy(self.config['remote_targets'])
        dirs = self.query_abs_dirs()
        # Gecko locales: one plain-text locales file per config entry,
        # one locale per line.
        gecko_dict = deepcopy(self.config['l10n_config'].get('gecko_config', {}))
        for name, gecko_config in gecko_dict.items():
            file_name = self.download_file(gecko_config['locales_file_url'],
                                           parent_dir=dirs['abs_work_dir'])
            if not os.path.exists(file_name):
                self.error("Can't download locales from %s; skipping!" % gecko_config['locales_file_url'])
                continue
            contents = self.read_from_file(file_name)
            for locale in contents.splitlines():
                self._process_locale(locale, 'gecko', gecko_config, l10n_remote_targets, name, l10n_repos)

        # Gaia locales: a JSON file whose keys are the locales.
        gaia_dict = deepcopy(self.config['l10n_config'].get('gaia_config', {}))
        for name, gaia_config in gaia_dict.items():
            contents = self.retry(
                self.load_json_from_url,
                args=(gaia_config['locales_file_url'],)
            )
            if not contents:
                self.error("Can't download locales from %s; skipping!" % gaia_config['locales_file_url'])
                continue
            for locale in dict(contents).keys():
                self._process_locale(locale, 'gaia', gaia_config, l10n_remote_targets, name, l10n_repos)
        self.info("Built l10n_repos...")
        self.info(pprint.pformat(l10n_repos, indent=4))
        self.info("Remote targets...")
        self.info(pprint.pformat(l10n_remote_targets, indent=4))
        self.remote_targets = l10n_remote_targets
        return l10n_repos

    def _query_project_repos(self):
        """ Since I didn't want to have to build a huge static list of project
            branch repos.
- """ - project_repos = [] - for project in self.config.get("project_branches", []): - repo_dict = { - 'repo': self.config['project_branch_repo_url'] % {'project': project}, - 'revision': 'default', - 'repo_name': project, - 'targets': [{ - 'target_dest': 'github-project-branches', - }], - 'vcs': 'hg', - 'branch_config': { - 'branches': { - 'default': project, - }, - }, - 'tag_config': {}, - } - project_repos.append(repo_dict) - self.info("Built project_repos...") - self.info(pprint.pformat(project_repos, indent=4)) - return project_repos - - def query_all_repos(self): - """ Very simple method, but we need this concatenated list many times - throughout the script. - """ - if self.all_repos: - return self.all_repos - if self.config.get('conversion_type') == 'b2g-l10n': - self.all_repos = self._query_l10n_repos() - elif self.config.get('initial_repo'): - self.all_repos = [self.config['initial_repo']] + list(self.config.get('conversion_repos', [])) - else: - self.all_repos = list(self.config.get('conversion_repos', [])) - if self.config.get('conversion_type') == 'project-branches': - self.all_repos += self._query_project_repos() - return self.all_repos - - def query_all_non_failed_repos(self): - """ Same as query_all_repos(self) but filters out repos that failed in an earlier - action - so use this for downstream actions that require earlier actions did - not fail for a given repo. - """ - all_repos = self.query_all_repos() - return [repo for repo in all_repos if repo.get('repo_name') not in self.failures] - - def _query_repo_previous_status(self, repo_name, repo_map=None): - """ Return False if previous run was unsuccessful. - Return None if no previous run information. 
- """ - if repo_map is None: - repo_map = self._read_repo_update_json() - return repo_map.get('repos', {}).get(repo_name, {}).get('previous_push_successful') - - def _update_repo_previous_status(self, repo_name, successful_flag, repo_map=None, write_update=False): - """ Set the repo_name to successful_flag (False for unsuccessful, True for successful) - """ - if repo_map is None: - repo_map = self._read_repo_update_json() - repo_map.setdefault('repos', {}).setdefault(repo_name, {})['previous_push_successful'] = successful_flag - if write_update: - self._write_repo_update_json(repo_map) - return repo_map - - def _update_stage_repo(self, repo_config, retry=True, clobber=False): - """ Update a stage repo. - See update_stage_mirror() for a description of the stage repos. - """ - hg = self._query_hg_exe() - dirs = self.query_abs_dirs() - repo_name = repo_config['repo_name'] - source_dest = os.path.join(dirs['abs_source_dir'], - repo_name) - if clobber: - self.rmtree(source_dest) - if not os.path.exists(source_dest): - if self.retry( - self.run_command, - args=(hg + ['clone', '--noupdate', repo_config['repo'], - source_dest], ), - kwargs={ - 'output_timeout': 15 * 60, - 'cwd': dirs['abs_work_dir'], - 'error_list': HgErrorList, - }, - ): - if retry: - return self._update_stage_repo( - repo_config, retry=False, clobber=True) - else: - # Don't leave a failed clone behind - self.rmtree(source_dest) - self._update_repo_previous_status(repo_name, successful_flag=False, write_update=True) - self.add_failure( - repo_name, - message="Can't clone %s!" % repo_config['repo'], - level=ERROR, - ) - elif self.config['check_incoming'] and repo_config.get("check_incoming", True): - previous_status = self._query_repo_previous_status(repo_name) - if previous_status is None: - self.info("No previous status for %s; skipping incoming check!" % repo_name) - elif previous_status is False: - self.info("Previously unsuccessful status for %s; skipping incoming check!" 
                          % repo_name)
            else:
                # Run |hg incoming| and skip all subsequent actions if there
                # are no no changes.
                # If you want to bypass this behavior (e.g. to update branches/tags
                # on a repo without requiring a new commit), set
                # repo_config["incoming_check"] = False.
                cmd = hg + ['incoming', '-n', '-l', '1']
                status = self.retry(
                    self.run_command,
                    args=(cmd, ),
                    kwargs={
                        'output_timeout': 5 * 60,
                        'cwd': source_dest,
                        'error_list': HgErrorList,
                        # hg incoming exits 1 when there are no incoming
                        # changesets; 256 presumably covers a raw wait status -
                        # TODO confirm.
                        'success_codes': [0, 1, 256],
                    },
                )
                if status in (1, 256):
                    self.info("No changes for %s; skipping." % repo_name)
                    # Overload self.failures to tell downstream actions to noop on
                    # this repo
                    self.failures.append(repo_name)
                    return
                elif status != 0:
                    self.add_failure(
                        repo_name,
                        message="Error getting changes for %s; skipping!" % repo_config['repo_name'],
                        level=ERROR,
                    )
                    self._update_repo_previous_status(repo_name, successful_flag=False, write_update=True)
                    return
        cmd = hg + ['pull']
        if self.retry(
            self.run_command,
            args=(cmd, ),
            kwargs={
                'output_timeout': 15 * 60,
                'cwd': source_dest,
                'error_list': HgErrorList,
            },
        ):
            if retry:
                return self._update_stage_repo(
                    repo_config, retry=False, clobber=True)
            else:
                self._update_repo_previous_status(repo_name, successful_flag=False, write_update=True)
                self.add_failure(
                    repo_name,
                    message="Can't pull %s!" % repo_config['repo'],
                    level=ERROR,
                )
        # commenting out hg verify since it takes ~5min per repo; hopefully
        # exit codes will save us
#        if self.run_command(hg + ["verify"], cwd=source_dest):
#            if retry:
#                return self._update_stage_repo(repo_config, retry=False, clobber=True)
#            else:
#                self.fatal("Can't verify %s!" % source_dest)

    def _do_push_repo(self, base_command, refs_list=None, kwargs=None):
        """ Helper method for _push_repo() since it has to be able to break
            out of the target_repo list loop, and the commands loop borks that.

            Splits refs_list into batches of 10 refspecs per push command.
            Returns -1 on the first failed push; None if all pushes succeed.
        """
        commands = []
        if refs_list:
            # Push at most 10 refspecs per git invocation.
            while len(refs_list) > 10:
                commands.append(base_command + refs_list[0:10])
                refs_list = refs_list[10:]
            commands.append(base_command + refs_list)
        else:
            commands = [base_command]
        if kwargs is None:
            kwargs = {}
        for command in commands:
            # Do the push, with retry!
            if self.retry(
                self.run_command,
                args=(command, ),
                kwargs=kwargs,
            ):
                return -1

    def _push_repo(self, repo_config):
        """ Push a repo to a path ("test_push") or remote server.

            This was meant to be a cross-vcs method, but currently only
            covers git pushes.

            Returns an empty string on success, or a concatenation of error
            messages (truthy) on failure.
        """
        dirs = self.query_abs_dirs()
        conversion_dir = self.query_abs_conversion_dir(repo_config)
        if not conversion_dir:
            self.fatal("No conversion_dir for %s!" % repo_config['repo_name'])
        source_dir = os.path.join(dirs['abs_source_dir'], repo_config['repo_name'])
        git = self.query_exe('git', return_type='list')
        hg = self._query_hg_exe()
        return_status = ''
        for target_config in repo_config['targets']:
            test_push = False
            remote_config = {}
            if target_config.get("test_push"):
                # Local test target: push to a bare repo on disk.
                test_push = True
                force_push = target_config.get("force_push")
                target_name = os.path.join(
                    dirs['abs_target_dir'], target_config['target_dest'])
                target_vcs = target_config.get("vcs")
            else:
                target_name = target_config['target_dest']
                if self.remote_targets is None:
                    self.remote_targets = self.config.get('remote_targets', {})
                remote_config = self.remote_targets.get(target_name, target_config)
                force_push = remote_config.get("force_push", target_config.get("force_push"))
                target_vcs = remote_config.get("vcs", target_config.get("vcs"))
            if target_vcs == "git":
                base_command = git + ['push']
                env = {}
                if force_push:
                    base_command.append("-f")
                if test_push:
                    target_git_repo = target_name
                else:
                    target_git_repo = remote_config['repo']
                    # Allow for using a custom git ssh key.
- env['GIT_SSH_KEY'] = remote_config['ssh_key'] - env['GIT_SSH'] = os.path.join(external_tools_path, 'git-ssh-wrapper.sh') - base_command.append(target_git_repo) - # Allow for pushing a subset of repo branches to the target. - # If we specify that subset, we can also specify different - # names for those branches (e.g. b2g18 -> master for a - # standalone b2g18 repo) - # We query hg for these because the conversion dir will have - # branches from multiple hg repos, and the regexes may match - # too many things. - refs_list = [] - if repo_config.get('generate_git_notes', False): - refs_list.append('+refs/notes/commits:refs/notes/commits') - branch_map = self.query_branches( - target_config.get('branch_config', repo_config.get('branch_config', {})), - source_dir, - ) - # If the target_config has a branch_config, the key is the - # local git branch and the value is the target git branch. - if target_config.get("branch_config"): - for (branch, target_branch) in branch_map.items(): - refs_list += ['+refs/heads/%s:refs/heads/%s' % (branch, target_branch)] - # Otherwise the key is the hg branch and the value is the git - # branch; use the git branch for both local and target git - # branch names. - else: - for (hg_branch, git_branch) in branch_map.items(): - refs_list += ['+refs/heads/%s:refs/heads/%s' % (git_branch, git_branch)] - # Allow for pushing a subset of tags to the target, via name or - # regex. Again, query hg for this list because the conversion - # dir will contain tags from multiple hg repos, and the regexes - # may match too many things. 
- tag_config = target_config.get('tag_config', repo_config.get('tag_config', {})) - if tag_config.get('tags'): - for (tag, target_tag) in tag_config['tags'].items(): - refs_list += ['+refs/tags/%s:refs/tags/%s' % (tag, target_tag)] - if tag_config.get('tag_regexes'): - regex_list = [] - for regex in tag_config['tag_regexes']: - regex_list.append(re.compile(regex)) - tag_list = self.get_output_from_command( - hg + ['tags'], - cwd=source_dir, - ) - if tag_list is not None: - for tag_line in tag_list.splitlines(): - if not tag_line: - continue - tag_parts = tag_line.split() - if not tag_parts: - self.error("Bogus tag_line? %s" % str(tag_line)) - continue - tag_name = tag_parts[0] - for regex in regex_list: - if tag_name != 'tip' and regex.search(tag_name) is not None: - refs_list += ['+refs/tags/%s:refs/tags/%s' % (tag_name, tag_name)] - continue - error_msg = "%s: Can't push %s to %s!\n" % (repo_config['repo_name'], conversion_dir, target_git_repo) - if self._do_push_repo( - base_command, - refs_list=refs_list, - kwargs={ - 'output_timeout': target_config.get("output_timeout", 30 * 60), - 'cwd': os.path.join(conversion_dir, '.git'), - 'error_list': GitErrorList, - 'partial_env': env, - } - ): - if target_config.get("test_push"): - error_msg += "This was a test push that failed; not proceeding any further with %s!\n" % repo_config['repo_name'] - self.error(error_msg) - return_status += error_msg - if target_config.get("test_push"): - break - else: - # TODO write hg - error_msg = "%s: Don't know how to deal with vcs %s!\n" % ( - target_config['target_dest'], target_vcs) - self.error(error_msg) - return_status += error_msg - return return_status - - def _query_mapped_revision(self, revision=None, mapfile=None): - """ Use the virtualenv mapper module to search a mapfile for a - revision. 
- """ - if not callable(self.mapfile_binary_search): - site_packages_path = self.query_python_site_packages_path() - sys.path.append(os.path.join(site_packages_path, 'mapper')) - try: - from bsearch import mapfile_binary_search - global log - log = self.log_obj - self.mapfile_binary_search = mapfile_binary_search - except ImportError, e: - self.fatal("Can't import mapfile_binary_search! %s\nDid you create-virtualenv?" % str(e)) - # I wish mapper did this for me, but ... - fd = open(mapfile, 'rb') - m = mmap.mmap(fd.fileno(), 0, mmap.MAP_PRIVATE, mmap.PROT_READ) - return self.mapfile_binary_search(m, revision) - - def _post_fatal(self, message=None, exit_code=None): - """ After we call fatal(), run this method before exiting. - """ - if 'notify' in self.actions: - self.notify(message=message, fatal=True) - self.copy_logs_to_upload_dir() - - def _read_repo_update_json(self): - """ repo_update.json is a file we create with information about each - repo we're converting: git/hg branch names, git/hg revisions, - pull datetime/timestamp, and push datetime/timestamp. - - Since we want to be able to incrementally update portions of this - file as we pull/push each branch, we need to be able to read the - json into memory, so we can update the dict and re-write the json - to disk. - """ - repo_map = {} - dirs = self.query_abs_dirs() - path = os.path.join(dirs['abs_upload_dir'], 'repo_update.json') - if os.path.exists(path): - fh = open(path, 'r') - repo_map = json.load(fh) - fh.close() - return repo_map - - def query_abs_conversion_dir(self, repo_config): - dirs = self.query_abs_dirs() - if repo_config.get('conversion_dir'): - dest = os.path.join(dirs['abs_work_dir'], 'conversion', - repo_config['conversion_dir']) - else: - dest = dirs.get('abs_conversion_dir') - return dest - - def _write_repo_update_json(self, repo_map): - """ The write portion of _read_repo_update_json(). 
- """ - dirs = self.query_abs_dirs() - contents = json.dumps(repo_map, sort_keys=True, indent=4) - self.write_to_file( - os.path.join(dirs['abs_upload_dir'], 'repo_update.json'), - contents, - create_parent_dir=True - ) - - def _query_hg_exe(self): - """Returns the hg executable command as a list - """ - # If "hg" is set in "exes" section of config use that. - # If not, get path from self.query_virtualenv_path() method - # (respects --work-dir and --venv-path and --virtualenv-path). - exe_command = self.query_exe('hg', return_type="list", default=[os.path.join(self.query_virtualenv_path(), "bin", "hg")]) - - # possible additional command line options can be specified in "hg_options" of self.config - hg_options = self.config.get("hg_options", ()) - exe_command.extend(hg_options) - return exe_command - - def query_branches(self, branch_config, repo_path, vcs='hg'): - """ Given a branch_config of branches and branch_regexes, return - a dict of existing branch names to target branch names. - """ - branch_map = {} - if "branches" in branch_config: - branch_map = deepcopy(branch_config['branches']) - if "branch_regexes" in branch_config: - regex_list = list(branch_config['branch_regexes']) - full_branch_list = [] - if vcs == 'hg': - hg = self._query_hg_exe() - # This assumes we always want closed branches as well. - # If not we may need more options. 
                output = self.get_output_from_command(
                    hg + ['branches', '-a'],
                    cwd=repo_path
                )
                if output:
                    for line in output.splitlines():
                        # First whitespace-separated field is the branch name.
                        full_branch_list.append(line.split()[0])
            elif vcs == 'git':
                git = self.query_exe("git", return_type="list")
                output = self.get_output_from_command(
                    git + ['branch', '-l'],
                    cwd=repo_path
                )
                if output:
                    for line in output.splitlines():
                        # Strip the '*' marker on the current branch.
                        full_branch_list.append(line.replace('*', '').split()[0])
            for regex in regex_list:
                for branch in full_branch_list:
                    m = re.search(regex, branch)
                    if m:
                        # Don't overwrite branch_map[branch] if it exists
                        branch_map.setdefault(branch, branch)
        return branch_map

    def _combine_mapfiles(self, mapfiles, combined_mapfile, cwd=None):
        """ Adapted from repo-sync-tools/combine_mapfiles

            Consolidate multiple conversion processes' mapfiles into a
            single mapfile.

            Skips the work if no input mapfile is newer than the existing
            combined mapfile.  The old combined file is kept as
            <combined>.old, and a <combined>-latest symlink is refreshed.
        """
        self.info("Determining whether we need to combine mapfiles...")
        if cwd is None:
            cwd = self.query_abs_dirs()['abs_upload_dir']
        existing_mapfiles = []
        for f in mapfiles:
            f_path = os.path.join(cwd, f)
            if os.path.exists(f_path):
                existing_mapfiles.append(f)
            else:
                self.warning("%s doesn't exist!"
                             % f_path)
        combined_mapfile_path = os.path.join(cwd, combined_mapfile)
        if os.path.exists(combined_mapfile_path):
            combined_timestamp = os.path.getmtime(combined_mapfile_path)
            for f in existing_mapfiles:
                f_path = os.path.join(cwd, f)
                if os.path.getmtime(f_path) > combined_timestamp:
                    # Yes, we want to combine mapfiles
                    break
            else:
                # for/else: no input file was newer than the combined one.
                self.info("No new mapfiles to combine.")
                return
            self.move(combined_mapfile_path, "%s.old" % combined_mapfile_path)
        # Mapfile lines are "<git_sha> <hg_sha>"; sort/dedupe on field 2.
        output = self.get_output_from_command(
            ['sort', '--unique', '-t', ' ',
             '--key=2'] + existing_mapfiles,
            silent=True, halt_on_failure=True,
            cwd=cwd,
        )
        self.write_to_file(combined_mapfile_path, output, verbose=False,
                           error_level=FATAL)
        self.run_command(['ln', '-sf', combined_mapfile,
                          '%s-latest' % combined_mapfile],
                         cwd=cwd)

    # Actions {{{1

    def list_repos(self):
        # Action: log the full repo list (debugging aid).
        repos = self.query_all_repos()
        self.info(pprint.pformat(repos))

    def create_test_targets(self):
        """ This action creates local directories to do test pushes to.
        """
        dirs = self.query_abs_dirs()
        for repo_config in self.query_all_non_failed_repos():
            for target_config in repo_config['targets']:
                if not target_config.get('test_push'):
                    continue
                target_dest = os.path.join(dirs['abs_target_dir'], target_config['target_dest'])
                if not os.path.exists(target_dest):
                    self.info("Creating local target repo %s." % target_dest)
                    if target_config.get("vcs", "git") == "git":
                        self.init_git_repo(target_dest, additional_args=['--bare', '--shared=all'],
                                           deny_deletes=True)
                    else:
                        self.fatal("Don't know how to deal with vcs %s!" % target_config['vcs'])
                else:
                    self.debug("%s exists; skipping." % target_dest)

    def update_stage_mirror(self):
        """ The stage mirror is a buffer clean clone of repositories.
            The logic behind this is that we get occasional corruption from
            |hg pull|.
            It's much less time-consuming to detect this in
            a clean clone, and reclone, than to detect this in a working
            conversion directory, and try to repair or reclone+reconvert.

            We pull the stage mirror into the work mirror, where the conversion
            is done.
        """
        for repo_config in self.query_all_non_failed_repos():
            self._update_stage_repo(repo_config)

    def pull_out_new_sha_lookups(self, old_file, new_file):
        """ This method will return an iterator which iterates through lines in file
            new_file that do not exist in old_file. If old_file can't be read, all
            lines in new_file are returned. It does not cause any problems if lines
            exist in old_file that do not exist in new_file. Results are sorted by
            the second field (text after first space in line).

            This is somewhat equivalent to:
            ( [ ! -f "${old_file}" ] && cat "${new_file}" || diff "${old_file}" "${new_file}" | sed -n 's/> //p' ) | sort -k2"""
        with self.opened(old_file) as (old, err):
            if err:
                # Missing old file just means everything in new_file is new.
                self.info('Map file %s not found - probably first time this has run.' % old_file)
                old_set = frozenset()
            else:
                old_set = frozenset(old)
        with self.opened(new_file, 'rt') as (new, err):
            if err:
                self.error('Could not read contents of map file %s:\n%s' % (new_file, err.message))
                new_set = frozenset()
            else:
                new_set = frozenset(new)
        # Lines keep their trailing newlines; sort on the text after the
        # first space (the hg sha in a "<git_sha> <hg_sha>" mapfile line).
        for line in sorted(new_set.difference(old_set), key=lambda line: line.partition(' ')[2]):
            yield line

    def update_work_mirror(self):
        """ Pull the latest changes into the work mirror, update the repo_map
            json, and run |hg gexport| to convert those latest changes into
            the git conversion repo.
        """
        hg = self._query_hg_exe()
        git = self.query_exe("git", return_type="list")
        dirs = self.query_abs_dirs()
        repo_map = self._read_repo_update_json()
        timestamp = int(time.time())
        datetime = time.strftime('%Y-%m-%d %H:%M %Z')
        repo_map['last_pull_timestamp'] = timestamp
        repo_map['last_pull_datetime'] = datetime
        for repo_config in self.query_all_non_failed_repos():
            repo_name = repo_config['repo_name']
            source = os.path.join(dirs['abs_source_dir'], repo_name)
            dest = self.query_abs_conversion_dir(repo_config)
            if not dest:
                self.fatal("No conversion_dir for %s!" % repo_name)
            if not os.path.exists(dest):
                # First conversion for this repo: clone from the stage mirror
                # and set up hggit + a bare git dir with auto-gc disabled.
                self.mkdir_p(os.path.dirname(dest))
                self.run_command(hg + ['clone', '--noupdate', source, dest],
                                 error_list=HgErrorList,
                                 halt_on_failure=False)
                if os.path.exists(dest):
                    self.write_hggit_hgrc(dest)
                    self.init_git_repo('%s/.git' % dest, additional_args=['--bare'])
                    self.run_command(
                        git + ['--git-dir', '%s/.git' % dest, 'config', 'gc.auto', '0'],
                    )
                else:
                    self.add_failure(
                        repo_name,
                        message="Failed to clone %s!" % source,
                        level=ERROR,
                    )
                    continue
            # Build branch map.
            branch_map = self.query_branches(
                repo_config.get('branch_config', {}),
                source,
            )
            for (branch, target_branch) in branch_map.items():
                output = self.get_output_from_command(
                    hg + ['id', '-r', branch],
                    cwd=source
                )
                if output:
                    rev = output.split(' ')[0]
                else:
                    self.add_failure(
                        repo_name,
                        message="Branch %s doesn't exist in %s (%s cloned into staging directory %s)!" % (branch, repo_name, repo_config.get('repo'), source),
                        level=ERROR,
                    )
                    continue
                timestamp = int(time.time())
                datetime = time.strftime('%Y-%m-%d %H:%M %Z')
                if self.run_command(hg + ['pull', '-r', rev, source], cwd=dest,
                                    error_list=HgErrorList):
                    # We shouldn't have an issue pulling!
                    self.add_failure(
                        repo_name,
                        message="Unable to pull %s from stage_source; clobbering and skipping!"
% repo_name, - level=ERROR, - ) - self._update_repo_previous_status(repo_name, successful_flag=False, write_update=True) - # don't leave a dirty checkout behind, and skip remaining branches - self.rmtree(source) - break - self.run_command( - hg + ['bookmark', '-f', '-r', rev, target_branch], - cwd=dest, error_list=HgErrorList, - ) - # This might get a little large. - repo_map.setdefault('repos', {}).setdefault(repo_name, {}).setdefault('branches', {})[branch] = { - 'hg_branch': branch, - 'hg_revision': rev, - 'git_branch': target_branch, - 'pull_timestamp': timestamp, - 'pull_datetime': datetime, - } - if self.query_failure(repo_name): - # We hit an error in the for loop above - continue - self.retry( - self.run_command, - args=(hg + ['-v', 'gexport'], ), - kwargs={ - 'output_timeout': 15 * 60, - 'cwd': dest, - 'error_list': HgErrorList, - }, - error_level=FATAL, - ) - generated_mapfile = os.path.join(dest, '.hg', 'git-mapfile') - self.copy_to_upload_dir( - generated_mapfile, - dest=repo_config.get('mapfile_name', self.config.get('mapfile_name', "gecko-mapfile")), - log_level=INFO - ) - for (branch, target_branch) in branch_map.items(): - git_revision = self._query_mapped_revision( - revision=rev, mapfile=generated_mapfile) - repo_map['repos'][repo_name]['branches'][branch]['git_revision'] = git_revision - self._write_repo_update_json(repo_map) - - - def create_git_notes(self): - git = self.query_exe("git", return_type="list") - for repo_config in self.query_all_non_failed_repos(): - repo = repo_config['repo'] - if repo_config.get('generate_git_notes', False): - dest = self.query_abs_conversion_dir(repo_config) - # 'git-mapfile' is created by hggit plugin, containing all the mappings - complete_mapfile = os.path.join(dest, '.hg', 'git-mapfile') - # 'added-to-git-notes' is the set of mappings known to be recorded in the git notes - # of the project (typically 'git-mapfile' from previous run) - added_to_git_notes = os.path.join(dest, '.hg', 'added-to-git-notes') - # 
                # 'delta-git-notes' is the set of new mappings found on this iteration, that
                # now need to be added to the git notes of the project (the diff between the
                # previous two files described)
                delta_git_notes = os.path.join(dest, '.hg', 'delta-git-notes')
                git_dir = os.path.join(dest, '.git')
                self.rmtree(delta_git_notes)
                git_notes_adding_successful = True
                with self.opened(delta_git_notes, open_mode='w') as (delta_out, err):
                    if err:
                        git_notes_adding_successful = False
                        self.warn("Could not write list of unprocessed git note mappings to file %s - not critical" % delta_git_notes)
                    else:
                        for sha_lookup in self.pull_out_new_sha_lookups(added_to_git_notes, complete_mapfile):
                            # NOTE(review): sha_lookup is also printed below on
                            # success, so successful mappings appear twice in
                            # the delta file - confirm whether intentional.
                            print >>delta_out, sha_lookup,
                            (git_sha, hg_sha) = sha_lookup.split()
                            # only add git note if not already there - note
                            # devs may have added their own notes, so don't
                            # replace any existing notes, just add to them
                            output = self.get_output_from_command(
                                git + ['notes', 'show', git_sha],
                                cwd=git_dir,
                                ignore_errors=True
                            )
                            git_note_text = 'Upstream source: %s/rev/%s' % (repo, hg_sha)
                            git_notes_add_return_code = 1
                            if not output or output.find(git_note_text) < 0:
                                git_notes_add_return_code = self.run_command(
                                    git + ['notes', 'append', '-m', git_note_text, git_sha],
                                    cwd=git_dir
                                )
                            # if note was successfully added, or it was already there, we can
                            # mark it as added, by putting it in the delta file...
                            if git_notes_add_return_code == 0 or output.find(git_note_text) >= 0:
                                print >>delta_out, sha_lookup,
                            else:
                                self.error("Was not able to append required git note for git commit %s ('%s')" % (git_sha, git_note_text))
                                git_notes_adding_successful = False
                # Only advance the "already noted" marker when every note stuck,
                # so failed mappings get retried next run.
                if git_notes_adding_successful:
                    self.copyfile(complete_mapfile, added_to_git_notes)
            else:
                self.info("Not creating git notes for repo %s (generate_git_notes not set to True)" % repo)

    def publish_to_mapper(self):
        """ This method will attempt to create git notes for any new git<->hg mappings
            found in the generated_mapfile file and also push new mappings to mapper service."""
        for repo_config in self.query_all_non_failed_repos():
            dest = self.query_abs_conversion_dir(repo_config)
            # 'git-mapfile' is created by hggit plugin, containing all the mappings
            complete_mapfile = os.path.join(dest, '.hg', 'git-mapfile')
            # 'published-to-mapper' is all the mappings that are known to be published
            # to mapper, for this project (typically the 'git-mapfile' from the previous
            # run)
            published_to_mapper = os.path.join(dest, '.hg', 'published-to-mapper')
            # 'delta-for-mapper' is the set of mappings that need to be published to
            # mapper on this iteration, i.e. the diff between the previous two files
            # described
            delta_for_mapper = os.path.join(dest, '.hg', 'delta-for-mapper')
            self.rmtree(delta_for_mapper)
            # we only replace published_to_mapper if we successfully updated
            # pushed to mapper
            mapper_config = repo_config.get('mapper', {})
            if mapper_config:
                site_packages_path = self.query_python_site_packages_path()
                if site_packages_path not in sys.path:
                    sys.path.append(site_packages_path)
                try:
                    import requests
                except ImportError as e:
                    self.error("Can't import requests: %s\nDid you create-virtualenv?"
                               % str(e))
                mapper_url = mapper_config['url']
                mapper_project = mapper_config['project']
                insert_url = "%s/%s/insert/ignoredups" % (mapper_url, mapper_project)
                headers = {
                    'Content-Type': 'text/plain',
                    'Authentication': 'Bearer %s' % os.environ["RELENGAPI_INSERT_HGGIT_MAPPINGS_AUTH_TOKEN"]
                }
                all_new_mappings = []
                all_new_mappings.extend(self.pull_out_new_sha_lookups(published_to_mapper, complete_mapfile))
                self.write_to_file(delta_for_mapper, "".join(all_new_mappings))
                # due to timeouts on load balancer, we only push 200 lines at a time
                # this means that we should get http response back within 30 seconds
                # including the time it takes to insert the mappings in the database
                publish_successful = True
                for i in range(0, len(all_new_mappings), 200):
                    r = requests.post(insert_url, data="".join(all_new_mappings[i:i+200]), headers=headers)
                    if (r.status_code != 200):
                        self.error("Could not publish mapfile ('%s') line range [%s, %s] to mapper (%s) - received http %s code" % (delta_for_mapper, i, i+200, insert_url, r.status_code))
                        publish_successful = False
                        # we won't break out, since we may be able to publish other mappings
                        # and duplicates are allowed, so we will push the whole lot again next
                        # time anyway
                    else:
                        self.info("Published mapfile ('%s') line range [%s, %s] to mapper (%s)" % (delta_for_mapper, i, i+200, insert_url))
                if publish_successful:
                    # if we get this far, we know we could successfully post to mapper, so now
                    # we can copy the mapfile over "previously generated" version
                    # so that we don't push to mapper for these commits again
                    self.copyfile(complete_mapfile, published_to_mapper)
            else:
                # No mapper configured for this repo: nothing to publish, so
                # just mark the whole mapfile as handled.
                self.copyfile(complete_mapfile, published_to_mapper)

    def combine_mapfiles(self):
        """ This method is for any job (l10n, project-branches) that needs to combine
            mapfiles.
- """ - if not self.config.get("combined_mapfile"): - self.info("No combined_mapfile set in config; skipping!") - return - dirs = self.query_abs_dirs() - mapfiles = [] - if self.config.get('conversion_type') == 'b2g-l10n': - for repo_config in self.query_all_non_failed_repos(): - if repo_config.get("mapfile_name"): - mapfiles.append(repo_config['mapfile_name']) - else: - mapfiles.append(self.config.get('mapfile_name', 'gecko-mapfile')) - if self.config.get('external_mapfile_urls'): - for url in self.config['external_mapfile_urls']: - file_name = self.download_file( - url, - parent_dir=dirs['abs_upload_dir'], - error_level=FATAL, - ) - mapfiles.append(file_name) - if not mapfiles: - self.info("No mapfiles to combine; skipping!") - return - self._combine_mapfiles(mapfiles, self.config['combined_mapfile']) - - def push(self): - """ Push to all targets. test_targets are local directory test repos; - the rest are remote. Updates the repo_map json. - """ - self.create_test_targets() - repo_map = self._read_repo_update_json() - failure_msg = "" - timestamp = int(time.time()) - datetime = time.strftime('%Y-%m-%d %H:%M %Z') - repo_map['last_push_timestamp'] = timestamp - repo_map['last_push_datetime'] = datetime - for repo_config in self.query_all_non_failed_repos(): - timestamp = int(time.time()) - datetime = time.strftime('%Y-%m-%d %H:%M %Z') - status = self._push_repo(repo_config) - repo_name = repo_config['repo_name'] - if not status: # good - if repo_name not in self.successful_repos: - self.successful_repos.append(repo_name) - repo_map.setdefault('repos', {}).setdefault(repo_name, {})['push_timestamp'] = timestamp - repo_map['repos'][repo_name]['push_datetime'] = datetime - previous_status = self._query_repo_previous_status(repo_name, repo_map=repo_map) - if previous_status is None: - self.add_summary("Possibly the first successful push of %s." % repo_name) - elif previous_status is False: - self.add_summary("Previously unsuccessful push of %s is now successful!" 
% repo_name) - self._update_repo_previous_status(repo_name, successful_flag=True, repo_map=repo_map, write_update=True) - else: - self.add_failure( - repo_name, - message="Unable to push %s." % repo_name, - level=ERROR, - ) - failure_msg += status + "\n" - self._update_repo_previous_status(repo_name, successful_flag=False, repo_map=repo_map, write_update=True) - if not failure_msg: - repo_map['last_successful_push_timestamp'] = repo_map['last_push_timestamp'] - repo_map['last_successful_push_datetime'] = repo_map['last_push_datetime'] - self._write_repo_update_json(repo_map) - if failure_msg: - self.fatal("Unable to push these repos:\n%s" % failure_msg) - - def preflight_upload(self): - if not self.config.get("copy_logs_post_run", True): - self.copy_logs_to_upload_dir() - - def upload(self): - """ Upload the upload_dir according to the upload_config. - """ - failure_msg = '' - dirs = self.query_abs_dirs() - for upload_config in self.config.get('upload_config', []): - if self.retry( - self.rsync_upload_directory, - args=( - dirs['abs_upload_dir'], - ), - kwargs=upload_config, - ): - failure_msg += '%s:%s' % (upload_config['remote_host'], - upload_config['remote_path']) - if failure_msg: - self.fatal("Unable to upload to this location:\n%s" % failure_msg) - - -# __main__ {{{1 -if __name__ == '__main__': - conversion = HgGitScript() - conversion.run()